@@ -16,8 +16,7 @@ stream.push("New prompt")
 stream.on('change', (prompt) => console.log('New prompt:', prompt))
 */
 
-import { useState } from "react";
-import useAsyncEffect from "use-async-effect";
+import { useEffect, useRef, useState } from "react";
 
 import type { DStream } from "@cocalc/conat/sync/dstream";
 import { redux } from "@cocalc/frontend/app-framework";
@@ -76,6 +75,11 @@ const getDStream = reuseInFlight(async () => {
 export function useLLMHistory(type: LLMHistoryType = "general") {
   const [prompts, setPrompts] = useState<string[]>([]);
 
+  // Use ref to store stable listener function
+  const listenerRef = useRef<((newEntry: LLMHistoryEntry) => void) | null>(
+    null,
+  );
+
   // Filter prompts by type and extract just the prompt strings (newest first)
   function filterPromptsByType(entries: LLMHistoryEntry[]): string[] {
     return entries
@@ -85,33 +89,51 @@ export function useLLMHistory(type: LLMHistoryType = "general") {
   }
 
   // Initialize dstream and set up listeners
-  useAsyncEffect(async () => {
-    try {
-      const stream = await getDStream();
-      const allEntries = stream.getAll();
-      setPrompts(filterPromptsByType(allEntries));
+  useEffect(() => {
+    let isMounted = true;
+    let stream: DStream<LLMHistoryEntry> | null = null;
+
+    const initializeStream = async () => {
+      try {
+        stream = await getDStream();
+
+        // Check if component was unmounted while we were waiting
+        if (!isMounted) {
+          return;
+        }
+
+        const allEntries = stream.getAll();
+        setPrompts(filterPromptsByType(allEntries));
+
+        // Create stable listener function
+        listenerRef.current = (newEntry: LLMHistoryEntry) => {
+          // Only update if the new entry matches our type
+          if (newEntry.type !== type) return;
 
-      // Listen for new prompts being added
-      const handleChange = (newEntry: LLMHistoryEntry) => {
-        // Only update if the new entry matches our type
-        if (newEntry.type === type) {
           setPrompts((prev) => {
             // Remove duplicate if exists, then add to front
             const filtered = prev.filter((p) => p !== newEntry.prompt);
             return [newEntry.prompt, ...filtered];
           });
-        }
-      };
+        };
 
-      stream.on("change", handleChange);
+        // Add our listener to the stream
+        stream.on("change", listenerRef.current);
+      } catch (err) {
+        console.warn(`LLM history hook initialization error -- ${err}`);
+      }
+    };
 
-      // Cleanup listener on unmount/type change
-      return () => {
-        stream.off("change", handleChange);
-      };
-    } catch (err) {
-      console.warn(`LLM history hook initialization error -- ${err}`);
-    }
+    initializeStream();
+
+    // Cleanup function for useEffect
+    return () => {
+      isMounted = false;
+      if (stream && listenerRef.current) {
+        stream.off("change", listenerRef.current);
+        listenerRef.current = null;
+      }
+    };
   }, [type]);
 
   async function addPrompt(prompt: string) {
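For readers outside the CoCalc codebase, here is a minimal, self-contained sketch of the effect pattern this change adopts: start an async initializer inside `useEffect`, guard state updates with an `isMounted` flag, keep the listener in a ref, and detach it in the cleanup. The `EventEmitter` stand-in, `connect()`, and `useChangeFeed` below are illustrative only; the real hook wires the same shape up to `getDStream()` and `LLMHistoryEntry` "change" events.

```ts
import { EventEmitter } from "events";
import { useEffect, useRef, useState } from "react";

// Stand-in for getDStream(): resolves to something that emits "change" events.
async function connect(): Promise<EventEmitter> {
  return new EventEmitter();
}

export function useChangeFeed(): string[] {
  const [items, setItems] = useState<string[]>([]);
  // Ref keeps a stable reference to the listener so cleanup can detach it.
  const listenerRef = useRef<((item: string) => void) | null>(null);

  useEffect(() => {
    let isMounted = true;
    let source: EventEmitter | null = null;

    (async () => {
      source = await connect();
      // Bail out if the component unmounted while we were awaiting.
      if (!isMounted) return;

      const listener = (item: string) =>
        setItems((prev) => [item, ...prev.filter((p) => p !== item)]);
      listenerRef.current = listener;
      source.on("change", listener);
    })();

    return () => {
      isMounted = false;
      if (source && listenerRef.current) {
        source.off("change", listenerRef.current);
        listenerRef.current = null;
      }
    };
  }, []);

  return items;
}
```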