
Commit 90d6538

Rewrite in Go (#1)
1 parent 6557fc9 commit 90d6538

File tree: 14 files changed, +509 -371 lines

.github/workflows/release.yml

Lines changed: 16 additions & 47 deletions
```diff
@@ -5,60 +5,29 @@ on:
       - 'v*.*.*'
 
 jobs:
-  build:
-    strategy:
-      matrix:
-        include:
-          - os: windows-latest
-            artifact_name: windows
-          - os: ubuntu-latest
-            artifact_name: linux
-          - os: macos-latest
-            artifact_name: macos
-    runs-on: ${{ matrix.os }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-      - name: Install dependencies
-        run: pip install -r requirements.txt
-      - name: Build binary
-        run: pyinstaller --onefile gpt_cmd.py
-      - name: Upload artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ matrix.artifact_name }}
-          path: 'dist/gpt_cmd*'
-          retention-days: 1
-
-  release:
-    runs-on: ubuntu-latest
-    needs: build
+  build_and_release:
     permissions:
       contents: write
+    runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - name: Download artifacts
-        uses: actions/download-artifact@v4
-      - name: Rename artifacts
+      - name: Set up Go
+        uses: actions/setup-go@v4
+        with:
+          go-version: '1.22'
+      - name: Build binaries
         run: |
-          mkdir -p bin
-          for os in windows linux macos; do
-            ext=""
-            if [ "$os" = "windows" ]; then
-              ext=".exe"
-            fi
+          GOOS=linux GOARCH=386 go build -o bin/gpt_cmd-linux-386
+          GOOS=linux GOARCH=amd64 go build -o bin/gpt_cmd-linux
+          GOOS=linux GOARCH=arm go build -o bin/gpt_cmd-linux-arm
+          GOOS=linux GOARCH=arm64 go build -o bin/gpt_cmd-linux-arm64
 
-            src="${os}/gpt_cmd${ext}"
-            dest="bin/gpt_cmd-${os}${ext}"
+          GOOS=darwin GOARCH=amd64 go build -o bin/gpt_cmd-darwin-amd64
+          GOOS=darwin GOARCH=arm64 go build -o bin/gpt_cmd-darwin-arm64
 
-            echo "Moving $src to $dest"
-            mv "$src" "$dest"
-            rm -rf "${os}/"
-          done
+          GOOS=windows GOARCH=386 go build -o bin/gpt_cmd-windows-386.exe
+          GOOS=windows GOARCH=amd64 go build -o bin/gpt_cmd-windows.exe
       - name: Create release
         uses: ncipollo/release-action@v1.14.0
         with:
-          artifacts: 'bin/gpt_cmd*'
+          artifacts: 'bin/*'
```

CONTRIBUTING.md

Lines changed: 3 additions & 12 deletions
````diff
@@ -2,27 +2,18 @@
 
 ## Running locally
 
-First, install the dependencies (**note**: make sure you're using python 3 and pip 3):
+First, install the dependencies (**note**: this was written with go v1.22.x):
 
 ```sh
-# create virtual env
-python -m venv env
-
-# activate env
-source env/bin/activate
-
-# install deps
-pip install -r requirements.txt
+go mod tidy
 ```
 
 Now you can run the tool via:
 
 ```sh
-python -m gpt_cmd [...]
+go run main.go [...]
 ```
 
 ## Cutting a release
 
 Pushing a version tag (e.g. `v1.0.0`) will trigger the [release.yml](.github/workflows/release.yml) GitHub workflow, which will build binaries for supported OSes and publish a release with them.
-
-The binaries are generated using [pyinstaller](https://pyinstaller.org/en/stable/).
````

cmd/gpt.go

Lines changed: 33 additions & 0 deletions
```go
package cmd

import (
	"context"

	openai "github.com/sashabaranov/go-openai"
)

var OPENAI_CLIENT *openai.Client

type ChatMessage = openai.ChatCompletionMessage

func GetGPTResponse(messages []ChatMessage, model string, token string) string {
	if OPENAI_CLIENT == nil {
		OPENAI_CLIENT = openai.NewClient(token)
	}

	resp, err := OPENAI_CLIENT.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model:    model,
			Messages: messages,
			ResponseFormat: &openai.ChatCompletionResponseFormat{
				Type: "json_object",
			},
		},
	)
	if err != nil {
		panic(err)
	}

	return resp.Choices[0].Message.Content
}
```
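
A minimal usage sketch of this wrapper, assuming it is called from within the `gpt_cmd` module with a valid OpenAI API key; the model name, messages, and token placeholder here are illustrative and not part of the commit:

```go
package main

import (
	"fmt"

	"gpt_cmd/cmd"
)

func main() {
	// Hypothetical example: one system + one user message, mirroring how RunLoop builds its thread.
	messages := []cmd.ChatMessage{
		{Role: "system", Content: "Reply in JSON."},
		{Role: "user", Content: "Print the current directory."},
	}

	// GetGPTResponse returns the raw JSON string from the first completion choice.
	raw := cmd.GetGPTResponse(messages, "gpt-4o", "sk-...") // token placeholder
	fmt.Println(raw)
}
```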

cmd/root.go

Lines changed: 231 additions & 0 deletions
```go
package cmd

import (
	_ "embed"
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"time"

	"gpt_cmd/utils"

	dedent "github.com/lithammer/dedent"
)

//go:embed system_prompt.txt
var SYSTEM_PROMPT string

var PROJECT_FILES_DIR = filepath.Join(utils.GetHomeDir(), ".gpt_cmd")
var CONVOS_DIR = filepath.Join(PROJECT_FILES_DIR, ".convos")
var ansi = utils.Ansi{}

type RuntimeOptions struct {
	DangerouslySkipPrompts bool
	Model                  string
	APIToken               string
}

type GPTResponse struct {
	Commands      []string `json:"commands"`
	Context       string   `json:"context"`
	ConvoFileName string   `json:"convo-file-name"`
	Status        string   `json:"status"`
}

func RunLoop(goal string, opts *RuntimeOptions) {
	systemInfo := fmt.Sprintf("System info:\nOS: %s\nArchitecture: %s", runtime.GOOS, runtime.GOARCH)
	messages := []ChatMessage{
		{
			Role:    "system",
			Content: SYSTEM_PROMPT,
		},
		{
			Role:    "user",
			Content: fmt.Sprintf("%s\n%s", goal, systemInfo),
		},
	}

	convoTimestamp := time.Now().Format("2006-01-02_15-04-05")
	var convoFileName *string

	// used to progressively update the local file for this convo
	saveConvo := func() {
		fileName := convoTimestamp
		if convoFileName != nil {
			fileName = fmt.Sprintf("%s_%s", *convoFileName, convoTimestamp)
		}
		fileName += ".json"

		filePath := filepath.Join(CONVOS_DIR, fileName)
		utils.EnsureDir(CONVOS_DIR)
		utils.WriteFile(filePath, utils.JsonStringify(messages, true))
	}

	fmt.Printf("%s %s\n", ansi.Blue("Goal:"), goal)
	for {
		fmt.Println("\n----------")

		// In each iteration, call GPT with the latest messages thread
		rawResponse := GetGPTResponse(messages, opts.Model, opts.APIToken)
		// Add GPT's response to the messages thread
		messages = append(messages, ChatMessage{
			Role:    "assistant",
			Content: rawResponse,
		})
		var response GPTResponse
		json.Unmarshal([]byte(rawResponse), &response)

		if convoFileName == nil && response.ConvoFileName != "" {
			convoFileName = &response.ConvoFileName
		}

		// If `status` prop is provided, it means GPT determined the
		// goal is completed. Report the status and print any context
		// the GPT provided
		if response.Status != "" {
			wasSuccess := response.Status == "success"

			if wasSuccess {
				fmt.Println(ansi.Green("✅ Goal successfully achieved."))
			} else {
				fmt.Println(ansi.Red("❌ Goal failed."))
			}

			if response.Context != "" {
				fmt.Println(response.Context)
			}

			saveConvo()
			if wasSuccess {
				os.Exit(0)
			} else {
				os.Exit(1)
			}
		}

		if len(response.Commands) > 0 {
			// This use of the `context` prop is for the GPT to provide
			// info about the command(s) it's running
			if response.Context != "" {
				fmt.Printf("%s %s\n", ansi.Blue("Context:"), response.Context)
			}

			var cmdResults []map[string]interface{}
			for index, cmd := range response.Commands {
				if index > 0 {
					fmt.Println("")
				}

				fmt.Printf("%s %s\n", ansi.Blue("Command:"), ansi.Dim(cmd))
				if !opts.DangerouslySkipPrompts {
					if utils.PromptUserYN("OK to run command?") {
						utils.ClearPrevLine()
					} else {
						// User didn't want to run command, so save convo and exit
						saveConvo()
						os.Exit(1)
					}
				}

				stdout, exitCode := utils.ExecCmd(cmd)

				var exitCodeText = "Exit code:"
				if exitCode == 0 {
					exitCodeText = ansi.Green(exitCodeText)
				} else {
					exitCodeText = ansi.Red(exitCodeText)
				}
				fmt.Printf("%s %s\n", exitCodeText, ansi.Dim(fmt.Sprint(exitCode)))
				if len(stdout) > 0 {
					fmt.Println(ansi.Dim(stdout))
				}

				cmdResults = append(cmdResults, map[string]interface{}{
					"command":   cmd,
					"stdout":    stdout,
					"exit_code": exitCode,
				})

				if exitCode != 0 {
					break
				}
			}

			// Add new message with the result(s) of the command(s)
			messages = append(messages, ChatMessage{
				Role:    "user",
				Content: utils.JsonStringify(cmdResults, false),
			})
		} else {
			fmt.Println(ansi.Red("ERROR: No further commands provided, and no success/failure status was provided by GPT"))
			saveConvo()
			os.Exit(1)
		}
	}
}

func Execute() {
	helpText := strings.TrimSpace(dedent.Dedent(`
		Usage:
		  gpt_cmd <goal>
		  gpt_cmd --get-convos-dir
		  gpt_cmd --help, -h

		Environment vars:
		  GPT_CMD_DANGEROUSLY_SKIP_PROMPTS [true]
		  GPT_CMD_MODEL [string] (Default: gpt-4o)
		  GPT_CMD_TOKEN [string]
		  GPT_CMD_TOKEN_FILE_PATH [string] (Default: ~/OPENAI_TOKEN)
	`))

	if len(os.Args) != 2 || os.Args[1] == "" {
		fmt.Println(helpText)
		os.Exit(1)
	}

	if os.Args[1] == "--help" || os.Args[1] == "-h" {
		fmt.Println(helpText)
		os.Exit(0)
	}

	if os.Args[1] == "--get-convos-dir" {
		fmt.Println(CONVOS_DIR)
		os.Exit(0)
	}

	// unrecognized arg passed in
	if strings.HasPrefix(os.Args[1], "--") {
		fmt.Println(helpText)
		os.Exit(1)
	}

	var options = RuntimeOptions{
		DangerouslySkipPrompts: utils.GetEnv("GPT_CMD_DANGEROUSLY_SKIP_PROMPTS", "") == "true",
		Model:                  utils.GetEnv("GPT_CMD_MODEL", "gpt-4o"),
		APIToken:               "",
	}

	token := utils.GetEnv("GPT_CMD_TOKEN", "")
	if token == "" {
		tokenFilePath := utils.GetEnv(
			"GPT_CMD_TOKEN_FILE_PATH",
			filepath.Join(utils.GetHomeDir(), "OPENAI_TOKEN"),
		)

		if data, err := os.ReadFile(tokenFilePath); err == nil {
			token = strings.TrimSpace(string(data))
		}
	}
	options.APIToken = token

	if options.APIToken == "" {
		fmt.Println(ansi.Red("ERROR: Unable to resolve an OpenAI token\n"))
		fmt.Println(helpText)
		os.Exit(1)
	}

	RunLoop(os.Args[1], &options)
}
```
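
The `utils` package referenced above (`GetEnv`, `GetHomeDir`, `ExecCmd`, and friends) is part of this commit but not shown in this excerpt. As a rough sketch only, the signatures implied by the call sites might look like this; this is an assumption, not the actual file:

```go
package utils

import (
	"os"
	"os/exec"
)

// GetEnv returns an environment variable's value, or a fallback when it is
// unset or empty (signature inferred from the call sites in cmd/root.go).
func GetEnv(key string, fallback string) string {
	if value := os.Getenv(key); value != "" {
		return value
	}
	return fallback
}

// ExecCmd runs a command string in a subshell and returns its combined output
// and exit code (a hypothetical sketch; the real implementation may differ).
func ExecCmd(command string) (string, int) {
	c := exec.Command("sh", "-c", command)
	out, err := c.CombinedOutput()
	exitCode := 0
	if err != nil {
		if exitErr, ok := err.(*exec.ExitError); ok {
			exitCode = exitErr.ExitCode()
		} else {
			exitCode = 1
		}
	}
	return string(out), exitCode
}
```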

cmd/system_prompt.txt

Lines changed: 11 additions & 0 deletions
```text
Your job is to run commands necessary for achieving a task from a terminal.

You'll be provided with an end goal, and you'll send replies in JSON format containing an array of commands to run in the terminal. Each time you send command(s) to run, you'll then be provided with the resulting stdout and stderr (you're being accessed via the OpenAI API, so when possible, include arguments in your commands to reduce noise in stdout and stderr to limit API usage).

To convey context, you can use a JSON object with `context` (string) and `commands` (array of strings).

When you believe that the end goal is accomplished or unrecoverably failed, send a JSON object containing `status` ("success" or "failed") and `context` (noting things like commands that can be used to use any tools you installed, or why it failed if it did).

IMPORTANT NOTE: each command you provide is being executed in a subshell via a golang script, which means things like `cd` won't persist across commands, so you'll need to account for that.

IMPORTANT NOTE: in your response to the first user prompt, generate a short (5 words max) dash-separated file name to describe their prompt. Provide this in a `convo-file-name` property in your JSON object.
```
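
For illustration, a reply that follows this protocol maps directly onto the `GPTResponse` struct in `cmd/root.go`. A minimal sketch of that round trip; the JSON payload below is a made-up example, not output from the tool:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the GPTResponse struct in cmd/root.go.
type GPTResponse struct {
	Commands      []string `json:"commands"`
	Context       string   `json:"context"`
	ConvoFileName string   `json:"convo-file-name"`
	Status        string   `json:"status"`
}

func main() {
	// Hypothetical assistant reply: commands to run, context, and a convo file name.
	raw := `{
		"convo-file-name": "install-ripgrep",
		"context": "Installing ripgrep via apt.",
		"commands": ["sudo apt-get install -y ripgrep"]
	}`

	var resp GPTResponse
	if err := json.Unmarshal([]byte(raw), &resp); err != nil {
		panic(err)
	}
	fmt.Println(resp.ConvoFileName, resp.Commands)
}
```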

go.mod

Lines changed: 7 additions & 0 deletions
```
module gpt_cmd

go 1.22.3

require github.com/sashabaranov/go-openai v1.24.1

require github.com/lithammer/dedent v1.1.0
```

go.sum

Lines changed: 4 additions & 0 deletions
```
github.com/lithammer/dedent v1.1.0 h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=
github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc=
github.com/sashabaranov/go-openai v1.24.1 h1:DWK95XViNb+agQtuzsn+FyHhn3HQJ7Va8z04DQDJ1MI=
github.com/sashabaranov/go-openai v1.24.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
```
