Commit 63e6f19

refactor: Minor code cleanup
1 parent b64a5d4 commit 63e6f19

11 files changed: +64, -127 lines

package-lock.json

Lines changed: 4 additions & 4 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@
   "peerDependencies": {
     "react": ">=16.14.0",
     "react-dom": ">=16.14.0",
-    "react-chatbotify": "^2.0.0-beta.36",
+    "react-chatbotify": "^2.0.0-beta.37",
     "@mlc-ai/web-llm": "^0.2.78",
     "@wllama/wllama": "^2.3.1"
   },

src/App.tsx

Lines changed: 17 additions & 18 deletions
@@ -20,10 +20,7 @@ const App = () => {
 	];
 
 	const onUserMessageCheck = async (message: Message) => {
-		if (
-			typeof message.content === 'string' &&
-			message.content.toUpperCase() === 'RESTART'
-		) {
+		if (typeof message.content === 'string' && message.content.toUpperCase() === 'RESTART') {
 			return 'start';
 		}
 	};
@@ -33,7 +30,7 @@ const App = () => {
 			return 'start';
 		}
 		return null;
-	}
+	};
 
 	// example flow for testing
 	const flow: Flow = {
@@ -52,16 +49,23 @@ const App = () => {
 				await params.simulateStreamMessage(
 					`You selected ${params.userInput}. This model runs in your browser, so responses may be slower and less accurate.`
 				);
-				// if gemini/openai chosen, check for presence of api keys since examples are running in 'direct' mode
+			// if gemini/openai chosen, check for presence of api keys since examples are running in 'direct' mode
 			} else {
-				if ((params.userInput === "Gemini" && !geminiApiKey) || (params.userInput === "OpenAI" && !openaiApiKey)) {
-					await params.simulateStreamMessage(`You selected ${params.userInput} in 'direct' mode but no API key was set!`);
-					return "start";
+				if (
+					(params.userInput === 'Gemini' && !geminiApiKey) ||
+					(params.userInput === 'OpenAI' && !openaiApiKey)
+				) {
+					await params.simulateStreamMessage(
+						`You selected ${params.userInput} in 'direct' mode but no API key was set!`
+					);
+					return 'start';
 				} else {
 					await params.simulateStreamMessage(`You selected ${params.userInput}, ask away!`);
 				}
 			}
-			await params.simulateStreamMessage("You may type 'RESTART' or hit the 'ESC' key to select another model.")
+			await params.simulateStreamMessage(
+				"You may type 'RESTART' or hit the 'ESC' key to select another model."
+			);
 			return params.userInput.toLowerCase();
 		},
 	} as LlmConnectorBlock,
@@ -80,7 +84,8 @@ const App = () => {
 		wllama: {
 			llmConnector: {
 				provider: new WllamaProvider({
-					modelUrl: 'https://huggingface.co/HuggingFaceTB/SmolLM2-360M-Instruct-GGUF/resolve/main/smollm2-360m-instruct-q8_0.gguf',
+					modelUrl:
+						'https://huggingface.co/HuggingFaceTB/SmolLM2-360M-Instruct-GGUF/resolve/main/smollm2-360m-instruct-q8_0.gguf',
 					loadModelConfig: {
 						n_ctx: 8192,
 					},
@@ -124,13 +129,7 @@ const App = () => {
 		} as LlmConnectorBlock,
 	};
 
-	return (
-		<ChatBot
-			id="chatbot-id"
-			plugins={plugins}
-			flow={flow}
-		></ChatBot>
-	);
+	return <ChatBot id="chatbot-id" plugins={plugins} flow={flow}></ChatBot>;
 };
 
 export default App;
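
Taken together, the example wiring in App.tsx after this commit boils down to the sketch below. This is a condensed reassembly for readability rather than code from the commit itself: the model-selection start block, the Gemini/OpenAI branches and API-key checks are left out, and the import paths plus the LlmConnectorPlugin factory name are assumptions based on the package name '@rcb-plugins/llm-connector'.

// Condensed sketch of the example app; only the flow shape mirrors the diff above,
// everything labelled as an assumption may differ from the repo's actual example.
import ChatBot, { Flow } from 'react-chatbotify';
import LlmConnectorPlugin, { LlmConnectorBlock, WllamaProvider } from '@rcb-plugins/llm-connector'; // assumed exports

const plugins = [LlmConnectorPlugin()]; // assumed plugin factory

const App = () => {
	const flow: Flow = {
		// ...start block and other provider blocks omitted...
		wllama: {
			llmConnector: {
				provider: new WllamaProvider({
					modelUrl:
						'https://huggingface.co/HuggingFaceTB/SmolLM2-360M-Instruct-GGUF/resolve/main/smollm2-360m-instruct-q8_0.gguf',
					loadModelConfig: {
						n_ctx: 8192,
					},
				}),
			},
		} as LlmConnectorBlock,
	};

	return <ChatBot id="chatbot-id" plugins={plugins} flow={flow}></ChatBot>;
};

export default App;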

src/core/useRcbPlugin.tsx

Lines changed: 5 additions & 7 deletions
@@ -1,6 +1,5 @@
 import { useEffect, useRef } from 'react';
 import {
-	useBotId,
 	useFlow,
 	Plugin,
 	useAudio,
@@ -29,12 +28,11 @@ const useRcbPlugin = (pluginConfig?: PluginConfig): ReturnType<Plugin> => {
 	const outputTypeRef = useRef<'character' | 'chunk' | 'full'>('chunk');
 	const outputSpeedRef = useRef<number>(30);
 	const historySizeRef = useRef<number>(0);
-	const initialMessageRef = useRef<string>("");
+	const initialMessageRef = useRef<string>('');
 	const errorMessageRef = useRef<string>('Unable to get response, please try again.');
 	const onUserMessageRef = useRef<((msg: Message) => Promise<string | null>) | null>(null);
 	const onKeyDownRef = useRef<((e: KeyboardEvent) => Promise<string | null>) | null>(null);
 
-	const { getBotId } = useBotId();
 	const { getFlow } = useFlow();
 	const { speakAudio } = useAudio();
 	const { messages, injectMessage, simulateStreamMessage, streamMessage, endStreamMessage } = useMessages();
@@ -50,12 +48,12 @@ const useRcbPlugin = (pluginConfig?: PluginConfig): ReturnType<Plugin> => {
 	}, [messages]);
 
 	// handles changing of conversation path (block)
-	useChangePath(getBotId, getFlow, (block) => {
+	useChangePath(getFlow, (block) => {
 		providerRef.current = block.llmConnector?.provider ?? null;
 		outputTypeRef.current = block.llmConnector?.outputType ?? 'chunk';
 		outputSpeedRef.current = block.llmConnector?.outputSpeed ?? 30;
 		historySizeRef.current = block.llmConnector?.historySize ?? 0;
-		initialMessageRef.current = block.llmConnector?.initialMessage ?? "";
+		initialMessageRef.current = block.llmConnector?.initialMessage ?? '';
 		errorMessageRef.current = block.llmConnector?.errorMessage ?? 'Unable to get response, please try again.';
 		onUserMessageRef.current = block.llmConnector?.stopConditions?.onUserMessage ?? null;
 		onKeyDownRef.current = block.llmConnector?.stopConditions?.onKeyDown ?? null;
@@ -86,10 +84,10 @@ const useRcbPlugin = (pluginConfig?: PluginConfig): ReturnType<Plugin> => {
 	};
 
 	// handles pre-processing and post-processing of blocks.
-	useProcessBlock(getBotId, refs, actions);
+	useProcessBlock(refs, actions);
 
 	// handles message events
-	useMessageHandler(getBotId, refs, actions);
+	useMessageHandler(refs, actions);
 
 	// initializes plugin metadata with plugin name
 	const pluginMetaData: ReturnType<Plugin> = { name: '@rcb-plugins/llm-connector' };
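
For reference, the llmConnector fields copied into these refs correspond to a block shaped roughly like the sketch below. Only the field names and fallback values come from the diff; the block itself, the provider instance, and the import paths are placeholders for illustration.

// Illustrative block config; 'myProvider' is a placeholder for any Provider implementation,
// and the relative import paths simply mirror the ones used inside this repo.
import { Message } from 'react-chatbotify';
import { LlmConnectorBlock } from '../types/LlmConnectorBlock';
import { Provider } from '../types/Provider';

declare const myProvider: Provider;

const exampleBlock = {
	llmConnector: {
		provider: myProvider, // presence of llmConnector marks the block as an LLM connector block
		outputType: 'chunk', // 'character' | 'chunk' | 'full' (fallback: 'chunk')
		outputSpeed: 30, // fallback: 30
		historySize: 0, // fallback: 0
		initialMessage: '', // fallback: '' (nothing injected on entering the block)
		errorMessage: 'Unable to get response, please try again.', // fallback shown above
		stopConditions: {
			// returning a path string stops the LLM conversation and redirects there; null keeps it going
			onUserMessage: async (msg: Message) => null,
			onKeyDown: async (event: KeyboardEvent) => null,
		},
	},
} as LlmConnectorBlock;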

src/hooks/useChangePath.ts

Lines changed: 5 additions & 18 deletions
@@ -1,47 +1,34 @@
-import { useEffect, useCallback } from 'react';
-import { Flow, RcbChangePathEvent } from 'react-chatbotify';
+import { useCallback } from 'react';
+import { Flow, RcbChangePathEvent, RcbEvent, useOnRcbEvent } from 'react-chatbotify';
 
 import { LlmConnectorBlock } from '../types/LlmConnectorBlock';
 
 /**
  * Handles changing of conversation path (block).
  *
- * @param getBotId id of the chatbot
  * @param getFlow flow of the chatbot
  * @param setConnectorBlockFields sets all fields required for llm connector block
  */
-const useChangePath = (
-	getBotId: () => string | null,
-	getFlow: () => Flow,
-	setConnectorBlockFields: (block: LlmConnectorBlock) => void
-) => {
+const useChangePath = (getFlow: () => Flow, setConnectorBlockFields: (block: LlmConnectorBlock) => void) => {
 	/**
 	 * Handles setting of provider on change of path.
 	 *
 	 * @param event change path event received
 	 */
 	const handler = useCallback(
 		(event: RcbChangePathEvent) => {
-			// if event is not for chatbot, return
-			if (getBotId() !== event.detail.botId) {
-				return;
-			}
-
 			// update llm connector block fields
 			// if is llm connector block, will populate valid fields
 			// else will reset all to null
 			const flow = getFlow();
			const nextBlock = flow[event.data.nextPath] as LlmConnectorBlock;
 			setConnectorBlockFields(nextBlock);
 		},
-		[getBotId, getFlow, setConnectorBlockFields]
+		[getFlow, setConnectorBlockFields]
 	);
 
 	// adds required events for change path
-	useEffect(() => {
-		window.addEventListener('rcb-change-path', handler);
-		return () => window.removeEventListener('rcb-change-path', handler);
-	}, [handler]);
+	useOnRcbEvent(RcbEvent.CHANGE_PATH, handler);
 };
 
 export { useChangePath };
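
Reassembled from the '+' and context lines above, the whole hook after this commit reads as follows (whitespace approximate). The manual window listener plumbing is gone; the subscription is a single useOnRcbEvent call keyed by RcbEvent.CHANGE_PATH.

import { useCallback } from 'react';
import { Flow, RcbChangePathEvent, RcbEvent, useOnRcbEvent } from 'react-chatbotify';

import { LlmConnectorBlock } from '../types/LlmConnectorBlock';

const useChangePath = (getFlow: () => Flow, setConnectorBlockFields: (block: LlmConnectorBlock) => void) => {
	// handles setting of provider on change of path
	const handler = useCallback(
		(event: RcbChangePathEvent) => {
			// update llm connector block fields: populated if the next block is an llm connector block, reset otherwise
			const flow = getFlow();
			const nextBlock = flow[event.data.nextPath] as LlmConnectorBlock;
			setConnectorBlockFields(nextBlock);
		},
		[getFlow, setConnectorBlockFields]
	);

	// adds required events for change path
	useOnRcbEvent(RcbEvent.CHANGE_PATH, handler);
};

export { useChangePath };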

src/hooks/useMessageHandler.ts

Lines changed: 16 additions & 29 deletions
@@ -4,6 +4,8 @@ import {
 	RcbStopStreamMessageEvent,
 	RcbStopSimulateStreamMessageEvent,
 	Message,
+	useOnRcbEvent,
+	RcbEvent,
 } from 'react-chatbotify';
 import { handlePrompt } from '../utils/promptHandler';
 import { Provider } from '../types/Provider';
@@ -15,12 +17,10 @@ const STREAM_DEBOUNCE_MS = 500;
 /**
  * Handles message events.
  *
- * @param getBotId id of the chatbot
  * @param refs object containing relevant refs
 * @param actions object containing relevant actions
 */
 const useMessageHandler = (
-	getBotId: () => string | null,
 	refs: {
 		providerRef: React.MutableRefObject<Provider | null>;
 		messagesRef: React.MutableRefObject<Message[]>;
@@ -45,17 +45,11 @@ const useMessageHandler = (
 	}
 ) => {
 	const { messagesRef, outputTypeRef, onUserMessageRef, onKeyDownRef, errorMessageRef } = refs;
-	const {
-		injectMessage,
-		simulateStreamMessage,
-		toggleTextAreaDisabled,
-		toggleIsBotTyping,
-		goToPath,
-		focusTextArea
-	} = actions;
+	const { injectMessage, simulateStreamMessage, toggleTextAreaDisabled, toggleIsBotTyping, goToPath, focusTextArea } =
+		actions;
 
 	// controller to abort streaming responses if required
-	const abortControllerRef = useRef<AbortController | null>(null);
+	const abortControllerRef = useRef<AbortController | null>(null);
 
 	/**
 	 * Handles message events to determine whether to prompt LLM.
@@ -64,7 +58,7 @@ const useMessageHandler = (
 	 */
 	const handler = useCallback(
 		(event: RcbPostInjectMessageEvent | RcbStopStreamMessageEvent | RcbStopSimulateStreamMessageEvent) => {
-			if (getBotId() !== event.detail.botId || !refs.providerRef.current) {
+			if (!refs.providerRef.current) {
 				return;
 			}
 			const msg = event.data.message!;
@@ -90,10 +84,10 @@ const useMessageHandler = (
 			const historySize = refs.historySizeRef.current;
 			const past = messagesRef.current;
 			const messagesToSend = historySize ? [...past.slice(-(historySize - 1)), msg] : [msg];
-
+
 			// create & stash a new controller
-			const ctrl = new AbortController();
-			abortControllerRef.current = ctrl;
+			const ctrl = new AbortController();
+			abortControllerRef.current = ctrl;
 
 			handlePrompt(messagesToSend, refs, actions, { signal: ctrl.signal }).catch((err) => {
 				toggleIsBotTyping(false);
@@ -102,28 +96,21 @@ const useMessageHandler = (
 					focusTextArea();
 				});
 				console.error('LLM prompt failed', err);
-				if (outputTypeRef.current === "full") {
+				if (outputTypeRef.current === 'full') {
 					injectMessage(errorMessageRef.current);
 				} else {
 					simulateStreamMessage(errorMessageRef.current);
 				}
 			});
 		}, STREAM_DEBOUNCE_MS);
 		},
-		[getBotId, refs, actions]
+		[refs, actions]
 	);
 
 	// adds required events for message streaming
-	useEffect(() => {
-		window.addEventListener('rcb-post-inject-message', handler);
-		window.addEventListener('rcb-stop-simulate-stream-message', handler);
-		window.addEventListener('rcb-stop-stream-message', handler);
-		return () => {
-			window.removeEventListener('rcb-post-inject-message', handler);
-			window.removeEventListener('rcb-stop-simulate-stream-message', handler);
-			window.removeEventListener('rcb-stop-stream-message', handler);
-		};
-	}, [handler]);
+	useOnRcbEvent(RcbEvent.POST_INJECT_MESSAGE, handler);
+	useOnRcbEvent(RcbEvent.STOP_SIMULATE_STREAM_MESSAGE, handler);
+	useOnRcbEvent(RcbEvent.STOP_STREAM_MESSAGE, handler);
 
 	// handles keydown event for stop condition
 	useEffect(() => {
@@ -133,8 +120,8 @@ const useMessageHandler = (
 			if (path) {
 				abortControllerRef.current?.abort();
 				abortControllerRef.current = null;
-				goToPath(path)
-			};
+				goToPath(path);
+			}
 		}
 	};
 	window.addEventListener('keydown', onKey);
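
The keydown handling above is what drives the demo's "hit ESC to pick another model" behaviour: when a stop condition resolves to a path, the in-flight request is aborted through the stashed AbortController and the chat jumps to that path. A minimal pair of stop conditions a consumer might register under llmConnector.stopConditions could look like the sketch below; the RESTART check is taken from the App.tsx diff, while the Escape check and both function names are assumptions, with only the (input) => Promise<string | null> contract coming from the refs above.

import { Message } from 'react-chatbotify';

// Hypothetical stop conditions: returning a path string aborts the current LLM
// response and redirects there; returning null leaves the conversation running.
const onUserMessageCheck = async (message: Message): Promise<string | null> => {
	if (typeof message.content === 'string' && message.content.toUpperCase() === 'RESTART') {
		return 'start';
	}
	return null;
};

const onKeyDownCheck = async (event: KeyboardEvent): Promise<string | null> => {
	if (event.key === 'Escape') {
		return 'start';
	}
	return null;
};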

src/hooks/useProcessBlock.ts

Lines changed: 6 additions & 20 deletions
@@ -1,17 +1,15 @@
-import { useEffect, useCallback } from 'react';
-import { RcbPreProcessBlockEvent, RcbPostProcessBlockEvent, Message } from 'react-chatbotify';
+import { useCallback } from 'react';
+import { RcbPreProcessBlockEvent, RcbPostProcessBlockEvent, Message, RcbEvent, useOnRcbEvent } from 'react-chatbotify';
 import { LlmConnectorBlock } from '../types/LlmConnectorBlock';
 import { Provider } from '../types/Provider';
 
 /**
  * Handles pre-processing and post-processing of blocks.
  *
- * @param getBotId id of the chatbot
 * @param refs object containing relevant refs
 * @param actions object containing relevant actions
 */
 const useProcessBlock = (
-	getBotId: () => string | null,
 	refs: {
 		providerRef: React.MutableRefObject<Provider | null>;
 		messagesRef: React.MutableRefObject<Message[]>;
@@ -45,11 +43,6 @@ const useProcessBlock = (
 	 */
 	const handler = useCallback(
 		(event: RcbPreProcessBlockEvent | RcbPostProcessBlockEvent) => {
-			// if event is not for chatbot, return
-			if (getBotId() !== event.detail.botId) {
-				return;
-			}
-
 			// if not an llm connector block, return
 			const block = event.data.block as LlmConnectorBlock;
 			if (!block.llmConnector) {
@@ -65,7 +58,7 @@ const useProcessBlock = (
 			// disabling typing indicator, enabling text area and focusing on it again
 			if (event.type === 'rcb-pre-process-block') {
 				if (block.llmConnector?.initialMessage) {
-					if (outputTypeRef.current === "full") {
+					if (outputTypeRef.current === 'full') {
 						injectMessage(refs.initialMessageRef.current);
 					} else {
 						simulateStreamMessage(refs.initialMessageRef.current);
@@ -78,18 +71,11 @@ const useProcessBlock = (
 				});
 			}
 		},
-		[getBotId, toggleIsBotTyping, toggleTextAreaDisabled, focusTextArea]
+		[toggleIsBotTyping, toggleTextAreaDisabled, focusTextArea]
 	);
 
 	// adds required events for block processing
-	useEffect(() => {
-		window.addEventListener('rcb-pre-process-block', handler);
-		window.addEventListener('rcb-post-process-block', handler);
-		return () => {
-			window.removeEventListener('rcb-pre-process-block', handler);
-			window.removeEventListener('rcb-post-process-block', handler);
-		};
-	}, [handler]);
+	useOnRcbEvent(RcbEvent.PRE_PROCESS_BLOCK, handler);
+	useOnRcbEvent(RcbEvent.POST_PROCESS_BLOCK, handler);
 };
-
 export { useProcessBlock };