Skip to content

Commit dbacae8

Browse files
authored
Rename outdated WaveAI types (#1609)
A bunch of the Wave AI types still mentioned OpenAI. Now that most of them are being used for multiple AI backends, we need to update the names to be more generic.
1 parent 5cfbdca commit dbacae8

File tree

13 files changed

+123
-134
lines changed

13 files changed

+123
-134
lines changed

frontend/app/store/services.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ class BlockServiceType {
1515
SaveTerminalState(blockId: string, state: string, stateType: string, ptyOffset: number, termSize: TermSize): Promise<void> {
1616
return WOS.callBackendService("block", "SaveTerminalState", Array.from(arguments))
1717
}
18-
SaveWaveAiData(arg2: string, arg3: OpenAIPromptMessageType[]): Promise<void> {
18+
SaveWaveAiData(arg2: string, arg3: WaveAIPromptMessageType[]): Promise<void> {
1919
return WOS.callBackendService("block", "SaveWaveAiData", Array.from(arguments))
2020
}
2121
}

frontend/app/store/wshclientapi.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -303,7 +303,7 @@ class RpcApiType {
303303
}
304304

305305
// command "streamwaveai" [responsestream]
306-
StreamWaveAiCommand(client: WshClient, data: OpenAiStreamRequest, opts?: RpcOpts): AsyncGenerator<OpenAIPacketType, void, boolean> {
306+
StreamWaveAiCommand(client: WshClient, data: WaveAIStreamRequest, opts?: RpcOpts): AsyncGenerator<WaveAIPacketType, void, boolean> {
307307
return client.wshRpcStream("streamwaveai", data, opts);
308308
}
309309

frontend/app/view/waveai/waveai.tsx

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ interface ChatItemProps {
3535
model: WaveAiModel;
3636
}
3737

38-
function promptToMsg(prompt: OpenAIPromptMessageType): ChatMessageType {
38+
function promptToMsg(prompt: WaveAIPromptMessageType): ChatMessageType {
3939
return {
4040
id: crypto.randomUUID(),
4141
user: prompt.role,
@@ -67,7 +67,7 @@ export class WaveAiModel implements ViewModel {
6767
blockAtom: Atom<Block>;
6868
presetKey: Atom<string>;
6969
presetMap: Atom<{ [k: string]: MetaType }>;
70-
aiOpts: Atom<OpenAIOptsType>;
70+
aiOpts: Atom<WaveAIOptsType>;
7171
viewIcon?: Atom<string | IconButtonDecl>;
7272
viewName?: Atom<string>;
7373
viewText?: Atom<string | HeaderElem[]>;
@@ -167,7 +167,7 @@ export class WaveAiModel implements ViewModel {
167167
...settings,
168168
...meta,
169169
};
170-
const opts: OpenAIOptsType = {
170+
const opts: WaveAIOptsType = {
171171
model: settings["ai:model"] ?? null,
172172
apitype: settings["ai:apitype"] ?? null,
173173
orgid: settings["ai:orgid"] ?? null,
@@ -293,12 +293,12 @@ export class WaveAiModel implements ViewModel {
293293
globalStore.set(this.messagesAtom, history.map(promptToMsg));
294294
}
295295

296-
async fetchAiData(): Promise<Array<OpenAIPromptMessageType>> {
296+
async fetchAiData(): Promise<Array<WaveAIPromptMessageType>> {
297297
const { data } = await fetchWaveFile(this.blockId, "aidata");
298298
if (!data) {
299299
return [];
300300
}
301-
const history: Array<OpenAIPromptMessageType> = JSON.parse(new TextDecoder().decode(data));
301+
const history: Array<WaveAIPromptMessageType> = JSON.parse(new TextDecoder().decode(data));
302302
return history.slice(Math.max(history.length - slidingWindowSize, 0));
303303
}
304304

@@ -333,7 +333,7 @@ export class WaveAiModel implements ViewModel {
333333
globalStore.set(this.addMessageAtom, newMessage);
334334
// send message to backend and get response
335335
const opts = globalStore.get(this.aiOpts);
336-
const newPrompt: OpenAIPromptMessageType = {
336+
const newPrompt: WaveAIPromptMessageType = {
337337
role: "user",
338338
content: text,
339339
};
@@ -368,7 +368,7 @@ export class WaveAiModel implements ViewModel {
368368
// only save the author's prompt
369369
await BlockService.SaveWaveAiData(this.blockId, [...history, newPrompt]);
370370
} else {
371-
const responsePrompt: OpenAIPromptMessageType = {
371+
const responsePrompt: WaveAIPromptMessageType = {
372372
role: "assistant",
373373
content: fullMsg,
374374
};
@@ -383,7 +383,7 @@ export class WaveAiModel implements ViewModel {
383383
globalStore.set(this.removeLastMessageAtom);
384384
} else {
385385
globalStore.set(this.updateLastMessageAtom, "", false);
386-
const responsePrompt: OpenAIPromptMessageType = {
386+
const responsePrompt: WaveAIPromptMessageType = {
387387
role: "assistant",
388388
content: fullMsg,
389389
};
@@ -397,7 +397,7 @@ export class WaveAiModel implements ViewModel {
397397
};
398398
globalStore.set(this.addMessageAtom, errorMessage);
399399
globalStore.set(this.updateLastMessageAtom, "", false);
400-
const errorPrompt: OpenAIPromptMessageType = {
400+
const errorPrompt: WaveAIPromptMessageType = {
401401
role: "error",
402402
content: errMsg,
403403
};

frontend/types/gotypes.d.ts

Lines changed: 46 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -519,52 +519,6 @@ declare global {
519519
// waveobj.ORef
520520
type ORef = string;
521521

522-
// wshrpc.OpenAIOptsType
523-
type OpenAIOptsType = {
524-
model: string;
525-
apitype?: string;
526-
apitoken: string;
527-
orgid?: string;
528-
apiversion?: string;
529-
baseurl?: string;
530-
maxtokens?: number;
531-
maxchoices?: number;
532-
timeoutms?: number;
533-
};
534-
535-
// wshrpc.OpenAIPacketType
536-
type OpenAIPacketType = {
537-
type: string;
538-
model?: string;
539-
created?: number;
540-
finish_reason?: string;
541-
usage?: OpenAIUsageType;
542-
index?: number;
543-
text?: string;
544-
error?: string;
545-
};
546-
547-
// wshrpc.OpenAIPromptMessageType
548-
type OpenAIPromptMessageType = {
549-
role: string;
550-
content: string;
551-
name?: string;
552-
};
553-
554-
// wshrpc.OpenAIUsageType
555-
type OpenAIUsageType = {
556-
prompt_tokens?: number;
557-
completion_tokens?: number;
558-
total_tokens?: number;
559-
};
560-
561-
// wshrpc.OpenAiStreamRequest
562-
type OpenAiStreamRequest = {
563-
clientid?: string;
564-
opts: OpenAIOptsType;
565-
prompt: OpenAIPromptMessageType[];
566-
};
567-
568522
// wshrpc.PathCommandData
569523
type PathCommandData = {
570524
pathtype: string;
@@ -1016,6 +970,52 @@ declare global {
1016970
fullconfig: FullConfigType;
1017971
};
1018972

973+
// wshrpc.WaveAIOptsType
974+
type WaveAIOptsType = {
975+
model: string;
976+
apitype?: string;
977+
apitoken: string;
978+
orgid?: string;
979+
apiversion?: string;
980+
baseurl?: string;
981+
maxtokens?: number;
982+
maxchoices?: number;
983+
timeoutms?: number;
984+
};
985+
986+
// wshrpc.WaveAIPacketType
987+
type WaveAIPacketType = {
988+
type: string;
989+
model?: string;
990+
created?: number;
991+
finish_reason?: string;
992+
usage?: WaveAIUsageType;
993+
index?: number;
994+
text?: string;
995+
error?: string;
996+
};
997+
998+
// wshrpc.WaveAIPromptMessageType
999+
type WaveAIPromptMessageType = {
1000+
role: string;
1001+
content: string;
1002+
name?: string;
1003+
};
1004+
1005+
// wshrpc.WaveAIStreamRequest
1006+
type WaveAIStreamRequest = {
1007+
clientid?: string;
1008+
opts: WaveAIOptsType;
1009+
prompt: WaveAIPromptMessageType[];
1010+
};
1011+
1012+
// wshrpc.WaveAIUsageType
1013+
type WaveAIUsageType = {
1014+
prompt_tokens?: number;
1015+
completion_tokens?: number;
1016+
total_tokens?: number;
1017+
};
1018+
10191019
// wps.WaveEvent
10201020
type WaveEvent = {
10211021
event: string;

pkg/service/blockservice/blockservice.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ func (bs *BlockService) SaveTerminalState(ctx context.Context, blockId string, s
7070
return nil
7171
}
7272

73-
func (bs *BlockService) SaveWaveAiData(ctx context.Context, blockId string, history []wshrpc.OpenAIPromptMessageType) error {
73+
func (bs *BlockService) SaveWaveAiData(ctx context.Context, blockId string, history []wshrpc.WaveAIPromptMessageType) error {
7474
block, err := wstore.DBMustGet[*waveobj.Block](ctx, blockId)
7575
if err != nil {
7676
return err

pkg/waveai/anthropicbackend.go

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -109,8 +109,8 @@ func parseSSE(reader *bufio.Reader) (*sseEvent, error) {
109109
}
110110
}
111111

112-
func (AnthropicBackend) StreamCompletion(ctx context.Context, request wshrpc.OpenAiStreamRequest) chan wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType] {
113-
rtn := make(chan wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType])
112+
func (AnthropicBackend) StreamCompletion(ctx context.Context, request wshrpc.WaveAIStreamRequest) chan wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType] {
113+
rtn := make(chan wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType])
114114

115115
go func() {
116116
defer func() {
@@ -231,23 +231,23 @@ func (AnthropicBackend) StreamCompletion(ctx context.Context, request wshrpc.Ope
231231
switch sse.Event {
232232
case "message_start":
233233
if event.Message != nil {
234-
pk := MakeOpenAIPacket()
234+
pk := MakeWaveAIPacket()
235235
pk.Model = event.Message.Model
236-
rtn <- wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType]{Response: *pk}
236+
rtn <- wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]{Response: *pk}
237237
}
238238

239239
case "content_block_start":
240240
if event.ContentBlock != nil && event.ContentBlock.Text != "" {
241-
pk := MakeOpenAIPacket()
241+
pk := MakeWaveAIPacket()
242242
pk.Text = event.ContentBlock.Text
243-
rtn <- wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType]{Response: *pk}
243+
rtn <- wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]{Response: *pk}
244244
}
245245

246246
case "content_block_delta":
247247
if event.Delta != nil && event.Delta.Text != "" {
248-
pk := MakeOpenAIPacket()
248+
pk := MakeWaveAIPacket()
249249
pk.Text = event.Delta.Text
250-
rtn <- wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType]{Response: *pk}
250+
rtn <- wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]{Response: *pk}
251251
}
252252

253253
case "content_block_stop":
@@ -258,27 +258,27 @@ func (AnthropicBackend) StreamCompletion(ctx context.Context, request wshrpc.Ope
258258
case "message_delta":
259259
// Update message metadata, usage stats
260260
if event.Usage != nil {
261-
pk := MakeOpenAIPacket()
262-
pk.Usage = &wshrpc.OpenAIUsageType{
261+
pk := MakeWaveAIPacket()
262+
pk.Usage = &wshrpc.WaveAIUsageType{
263263
PromptTokens: event.Usage.InputTokens,
264264
CompletionTokens: event.Usage.OutputTokens,
265265
TotalTokens: event.Usage.InputTokens + event.Usage.OutputTokens,
266266
}
267-
rtn <- wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType]{Response: *pk}
267+
rtn <- wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]{Response: *pk}
268268
}
269269

270270
case "message_stop":
271271
if event.Message != nil {
272-
pk := MakeOpenAIPacket()
272+
pk := MakeWaveAIPacket()
273273
pk.FinishReason = event.Message.StopReason
274274
if event.Message.Usage != nil {
275-
pk.Usage = &wshrpc.OpenAIUsageType{
275+
pk.Usage = &wshrpc.WaveAIUsageType{
276276
PromptTokens: event.Message.Usage.InputTokens,
277277
CompletionTokens: event.Message.Usage.OutputTokens,
278278
TotalTokens: event.Message.Usage.InputTokens + event.Message.Usage.OutputTokens,
279279
}
280280
}
281-
rtn <- wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType]{Response: *pk}
281+
rtn <- wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]{Response: *pk}
282282
}
283283

284284
default:

pkg/waveai/cloudbackend.go

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -20,22 +20,22 @@ type WaveAICloudBackend struct{}
2020

2121
var _ AIBackend = WaveAICloudBackend{}
2222

23-
type OpenAICloudReqPacketType struct {
23+
type WaveAICloudReqPacketType struct {
2424
Type string `json:"type"`
2525
ClientId string `json:"clientid"`
26-
Prompt []wshrpc.OpenAIPromptMessageType `json:"prompt"`
26+
Prompt []wshrpc.WaveAIPromptMessageType `json:"prompt"`
2727
MaxTokens int `json:"maxtokens,omitempty"`
2828
MaxChoices int `json:"maxchoices,omitempty"`
2929
}
3030

31-
func MakeOpenAICloudReqPacket() *OpenAICloudReqPacketType {
32-
return &OpenAICloudReqPacketType{
31+
func MakeWaveAICloudReqPacket() *WaveAICloudReqPacketType {
32+
return &WaveAICloudReqPacketType{
3333
Type: OpenAICloudReqStr,
3434
}
3535
}
3636

37-
func (WaveAICloudBackend) StreamCompletion(ctx context.Context, request wshrpc.OpenAiStreamRequest) chan wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType] {
38-
rtn := make(chan wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType])
37+
func (WaveAICloudBackend) StreamCompletion(ctx context.Context, request wshrpc.WaveAIStreamRequest) chan wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType] {
38+
rtn := make(chan wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType])
3939
wsEndpoint := wcloud.GetWSEndpoint()
4040
go func() {
4141
defer func() {
@@ -69,14 +69,14 @@ func (WaveAICloudBackend) StreamCompletion(ctx context.Context, request wshrpc.O
6969
rtn <- makeAIError(fmt.Errorf("unable to close openai channel: %v", err))
7070
}
7171
}()
72-
var sendablePromptMsgs []wshrpc.OpenAIPromptMessageType
72+
var sendablePromptMsgs []wshrpc.WaveAIPromptMessageType
7373
for _, promptMsg := range request.Prompt {
7474
if promptMsg.Role == "error" {
7575
continue
7676
}
7777
sendablePromptMsgs = append(sendablePromptMsgs, promptMsg)
7878
}
79-
reqPk := MakeOpenAICloudReqPacket()
79+
reqPk := MakeWaveAICloudReqPacket()
8080
reqPk.ClientId = request.ClientId
8181
reqPk.Prompt = sendablePromptMsgs
8282
reqPk.MaxTokens = request.Opts.MaxTokens
@@ -101,7 +101,7 @@ func (WaveAICloudBackend) StreamCompletion(ctx context.Context, request wshrpc.O
101101
rtn <- makeAIError(fmt.Errorf("OpenAI request, websocket error reading message: %v", err))
102102
break
103103
}
104-
var streamResp *wshrpc.OpenAIPacketType
104+
var streamResp *wshrpc.WaveAIPacketType
105105
err = json.Unmarshal(socketMessage, &streamResp)
106106
if err != nil {
107107
rtn <- makeAIError(fmt.Errorf("OpenAI request, websocket response json decode error: %v", err))
@@ -115,7 +115,7 @@ func (WaveAICloudBackend) StreamCompletion(ctx context.Context, request wshrpc.O
115115
rtn <- makeAIError(fmt.Errorf("%v", streamResp.Error))
116116
break
117117
}
118-
rtn <- wshrpc.RespOrErrorUnion[wshrpc.OpenAIPacketType]{Response: *streamResp}
118+
rtn <- wshrpc.RespOrErrorUnion[wshrpc.WaveAIPacketType]{Response: *streamResp}
119119
}
120120
}()
121121
return rtn

0 commit comments

Comments (0)