|
| 1 | +package cmd |
| 2 | + |
| 3 | +import ( |
| 4 | + _ "embed" |
| 5 | + "encoding/json" |
| 6 | + "fmt" |
| 7 | + "os" |
| 8 | + "path/filepath" |
| 9 | + "runtime" |
| 10 | + "strings" |
| 11 | + "time" |
| 12 | + |
| 13 | + "gpt_cmd/utils" |
| 14 | + |
| 15 | + dedent "github.com/lithammer/dedent" |
| 16 | +) |
| 17 | + |
// SYSTEM_PROMPT is the system message sent to GPT at the start of every
// conversation; its contents are compiled in from system_prompt.txt.
//go:embed system_prompt.txt
var SYSTEM_PROMPT string

// PROJECT_FILES_DIR is this tool's data directory under the user's home dir.
var PROJECT_FILES_DIR = filepath.Join(utils.GetHomeDir(), ".gpt_cmd")

// CONVOS_DIR is where each conversation transcript is saved as a JSON file.
var CONVOS_DIR = filepath.Join(PROJECT_FILES_DIR, ".convos")

// ansi provides terminal color helpers (Blue/Green/Red/Dim) used for output.
var ansi = utils.Ansi{}
| 24 | + |
// RuntimeOptions holds the settings resolved from environment variables
// that control a single run of the tool.
type RuntimeOptions struct {
	// DangerouslySkipPrompts, when true, executes GPT-proposed commands
	// without asking the user for confirmation first.
	DangerouslySkipPrompts bool
	// Model is the OpenAI model name to use (e.g. "gpt-4o").
	Model string
	// APIToken is the OpenAI API token used for requests.
	APIToken string
}
| 30 | + |
// GPTResponse is the JSON payload the model is instructed (via the system
// prompt) to reply with on each turn.
type GPTResponse struct {
	// Commands is the next batch of shell commands to execute, in order.
	Commands []string `json:"commands"`
	// Context is free-form explanatory text: either commentary on the
	// commands being run, or a final summary when Status is set.
	Context string `json:"context"`
	// ConvoFileName is a model-suggested base name for the saved
	// conversation file; only the first non-empty value is used.
	ConvoFileName string `json:"convo-file-name"`
	// Status is empty while work continues; "success" (or any other
	// non-empty value, treated as failure) ends the run.
	Status string `json:"status"`
}
| 37 | + |
| 38 | +func RunLoop(goal string, opts *RuntimeOptions) { |
| 39 | + systemInfo := fmt.Sprintf("System info:\nOS: %s\nArchitecture: %s", runtime.GOOS, runtime.GOARCH) |
| 40 | + messages := []ChatMessage{ |
| 41 | + { |
| 42 | + Role: "system", |
| 43 | + Content: SYSTEM_PROMPT, |
| 44 | + }, |
| 45 | + { |
| 46 | + Role: "user", |
| 47 | + Content: fmt.Sprintf("%s\n%s", goal, systemInfo), |
| 48 | + }, |
| 49 | + } |
| 50 | + |
| 51 | + convoTimestamp := time.Now().Format("2006-01-02_15-04-05") |
| 52 | + var convoFileName *string |
| 53 | + |
| 54 | + // used to progressively update the local file for this convo |
| 55 | + saveConvo := func() { |
| 56 | + fileName := convoTimestamp |
| 57 | + if convoFileName != nil { |
| 58 | + fileName = fmt.Sprintf("%s_%s", *convoFileName, convoTimestamp) |
| 59 | + } |
| 60 | + fileName += ".json" |
| 61 | + |
| 62 | + filePath := filepath.Join(CONVOS_DIR, fileName) |
| 63 | + utils.EnsureDir(CONVOS_DIR) |
| 64 | + utils.WriteFile(filePath, utils.JsonStringify(messages, true)) |
| 65 | + } |
| 66 | + |
| 67 | + fmt.Printf("%s %s\n", ansi.Blue("Goal:"), goal) |
| 68 | + for { |
| 69 | + fmt.Println("\n----------") |
| 70 | + |
| 71 | + // In each iteration, call GPT with the latest messages thread |
| 72 | + rawResponse := GetGPTResponse(messages, opts.Model, opts.APIToken) |
| 73 | + // Add GPT's response to the messages thread |
| 74 | + messages = append(messages, ChatMessage{ |
| 75 | + Role: "assistant", |
| 76 | + Content: rawResponse, |
| 77 | + }) |
| 78 | + var response GPTResponse |
| 79 | + json.Unmarshal([]byte(rawResponse), &response) |
| 80 | + |
| 81 | + if convoFileName == nil && response.ConvoFileName != "" { |
| 82 | + convoFileName = &response.ConvoFileName |
| 83 | + } |
| 84 | + |
| 85 | + // If `status` prop is provided, it means GPT determined the |
| 86 | + // goal is completed. Report the status and print any context |
| 87 | + // the GPT provided |
| 88 | + if response.Status != "" { |
| 89 | + wasSuccess := response.Status == "success" |
| 90 | + |
| 91 | + if wasSuccess { |
| 92 | + fmt.Println(ansi.Green("✅ Goal successfully achieved.")) |
| 93 | + } else { |
| 94 | + fmt.Println(ansi.Red("❌ Goal failed.")) |
| 95 | + } |
| 96 | + |
| 97 | + if response.Context != "" { |
| 98 | + fmt.Println(response.Context) |
| 99 | + } |
| 100 | + |
| 101 | + saveConvo() |
| 102 | + if wasSuccess { |
| 103 | + os.Exit(0) |
| 104 | + } else { |
| 105 | + os.Exit(1) |
| 106 | + } |
| 107 | + } |
| 108 | + |
| 109 | + if len(response.Commands) > 0 { |
| 110 | + // This use of the `context` prop is for the GPT to provide |
| 111 | + // info about the command(s) it's running |
| 112 | + if response.Context != "" { |
| 113 | + fmt.Printf("%s %s\n", ansi.Blue("Context:"), response.Context) |
| 114 | + } |
| 115 | + |
| 116 | + var cmdResults []map[string]interface{} |
| 117 | + for index, cmd := range response.Commands { |
| 118 | + if index > 0 { |
| 119 | + fmt.Println("") |
| 120 | + } |
| 121 | + |
| 122 | + fmt.Printf("%s %s\n", ansi.Blue("Command:"), ansi.Dim(cmd)) |
| 123 | + if !opts.DangerouslySkipPrompts { |
| 124 | + if utils.PromptUserYN("OK to run command?") { |
| 125 | + utils.ClearPrevLine() |
| 126 | + } else { |
| 127 | + // User didn't want to run command, so save convo and exit |
| 128 | + saveConvo() |
| 129 | + os.Exit(1) |
| 130 | + } |
| 131 | + } |
| 132 | + |
| 133 | + stdout, exitCode := utils.ExecCmd(cmd) |
| 134 | + |
| 135 | + var exitCodeText = "Exit code:" |
| 136 | + if exitCode == 0 { |
| 137 | + exitCodeText = ansi.Green(exitCodeText) |
| 138 | + } else { |
| 139 | + exitCodeText = ansi.Red(exitCodeText) |
| 140 | + } |
| 141 | + fmt.Printf("%s %s\n", exitCodeText, ansi.Dim(fmt.Sprint(exitCode))) |
| 142 | + if len(stdout) > 0 { |
| 143 | + fmt.Println(ansi.Dim(stdout)) |
| 144 | + } |
| 145 | + |
| 146 | + cmdResults = append(cmdResults, map[string]interface{}{ |
| 147 | + "command": cmd, |
| 148 | + "stdout": stdout, |
| 149 | + "exit_code": exitCode, |
| 150 | + }) |
| 151 | + |
| 152 | + if exitCode != 0 { |
| 153 | + break |
| 154 | + } |
| 155 | + } |
| 156 | + |
| 157 | + // Add new message with the result(s) of the command(s) |
| 158 | + messages = append(messages, ChatMessage{ |
| 159 | + Role: "user", |
| 160 | + Content: utils.JsonStringify(cmdResults, false), |
| 161 | + }) |
| 162 | + } else { |
| 163 | + fmt.Println(ansi.Red("ERROR: No further commands provided, and no success/failure status was provided by GPT")) |
| 164 | + saveConvo() |
| 165 | + os.Exit(1) |
| 166 | + } |
| 167 | + } |
| 168 | +} |
| 169 | + |
| 170 | +func Execute() { |
| 171 | + helpText := strings.TrimSpace(dedent.Dedent(` |
| 172 | + Usage: |
| 173 | + gpt_cmd <goal> |
| 174 | + gpt_cmd --get-convos-dir |
| 175 | + gpt_cmd --help, -h |
| 176 | +
|
| 177 | + Environment vars: |
| 178 | + GPT_CMD_DANGEROUSLY_SKIP_PROMPTS [true] |
| 179 | + GPT_CMD_MODEL [string] (Default: gpt-4o) |
| 180 | + GPT_CMD_TOKEN [string] |
| 181 | + GPT_CMD_TOKEN_FILE_PATH [string] (Default: ~/OPENAI_TOKEN) |
| 182 | + `)) |
| 183 | + |
| 184 | + if len(os.Args) != 2 || os.Args[1] == "" { |
| 185 | + fmt.Println(helpText) |
| 186 | + os.Exit(1) |
| 187 | + } |
| 188 | + |
| 189 | + if os.Args[1] == "--help" || os.Args[1] == "-h" { |
| 190 | + fmt.Println(helpText) |
| 191 | + os.Exit(0) |
| 192 | + } |
| 193 | + |
| 194 | + if os.Args[1] == "--get-convos-dir" { |
| 195 | + fmt.Println(CONVOS_DIR) |
| 196 | + os.Exit(0) |
| 197 | + } |
| 198 | + |
| 199 | + // unrecognized arg passed in |
| 200 | + if strings.HasPrefix(os.Args[1], "--") { |
| 201 | + fmt.Println(helpText) |
| 202 | + os.Exit(1) |
| 203 | + } |
| 204 | + |
| 205 | + var options = RuntimeOptions{ |
| 206 | + DangerouslySkipPrompts: utils.GetEnv("GPT_CMD_DANGEROUSLY_SKIP_PROMPTS", "") == "true", |
| 207 | + Model: utils.GetEnv("GPT_CMD_MODEL", "gpt-4o"), |
| 208 | + APIToken: "", |
| 209 | + } |
| 210 | + |
| 211 | + token := utils.GetEnv("GPT_CMD_TOKEN", "") |
| 212 | + if token == "" { |
| 213 | + tokenFilePath := utils.GetEnv( |
| 214 | + "GPT_CMD_TOKEN_FILE_PATH", |
| 215 | + filepath.Join(utils.GetHomeDir(), "OPENAI_TOKEN"), |
| 216 | + ) |
| 217 | + |
| 218 | + if data, err := os.ReadFile(tokenFilePath); err == nil { |
| 219 | + token = strings.TrimSpace(string(data)) |
| 220 | + } |
| 221 | + } |
| 222 | + options.APIToken = token |
| 223 | + |
| 224 | + if options.APIToken == "" { |
| 225 | + fmt.Println(ansi.Red("ERROR: Unable to resolve an OpenAI token\n")) |
| 226 | + fmt.Println(helpText) |
| 227 | + os.Exit(1) |
| 228 | + } |
| 229 | + |
| 230 | + RunLoop(os.Args[1], &options) |
| 231 | +} |
0 commit comments