From 670692b10e10dda5a5f6b4a7713def35b2266ab8 Mon Sep 17 00:00:00 2001
From: David Rodriguez
Date: Fri, 27 Jun 2025 19:21:31 -0500
Subject: [PATCH 1/2] chore: update dependencies and devDependencies in package.json

- Added @ai-sdk/google and @ai-sdk/groq dependencies.
- Updated @types/node, @types/react, and @types/react-dom to specific patch versions.
---
 .env.example                        |   5 +
 Dockerfile                          |  24 ++
 app/api/fire-cache/search/route.ts  |  28 +-
 app/api/fireplexity/search/route.ts |  18 +-
 app/chat-interface.tsx              |   7 +-
 app/search.tsx                      |  12 +-
 components/ui/button.tsx            |   2 +-
 components/ui/input.tsx             |   2 +-
 components/ui/textarea.tsx          |   2 +-
 lib/llm-provider.ts                 |  25 ++
 package-lock.json                   | 629 +++++++++++++++++++++++++++-
 package.json                        |  10 +-
 12 files changed, 705 insertions(+), 59 deletions(-)
 create mode 100644 .env.example
 create mode 100644 Dockerfile
 create mode 100644 lib/llm-provider.ts

diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..506118a
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,5 @@
+# .env.local
+OPENAI_API_KEY="sk-..."
+GROQ_API_KEY="gsk_..."
+GOOGLE_API_KEY="AIza..."
+FIRECRAWL_API_KEY="..."
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..01ea385
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,24 @@
+# Use a Node.js 20 base image
+FROM node:20-alpine AS base
+
+# Set the working directory
+WORKDIR /app
+
+# Install the dependencies required for the build
+RUN apk add --no-cache libc6-compat
+
+# Copy the package manifest files and install dependencies
+COPY package*.json ./
+RUN npm install
+
+# Copy the rest of the application files
+COPY . .
+
+# Build the application
+RUN npm run build
+
+# Expose the port the application runs on
+EXPOSE 3000
+
+# Define the command to start the application
+CMD ["npm", "start"]
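
The image can be built and run in the usual way, for example docker build -t fireplexity . followed by docker run -p 3000:3000 --env-file .env.local fireplexity, assuming the variables from .env.example have been copied into .env.local (the image tag here is illustrative).

The two API routes changed below import getLLMProvider and Provider from lib/llm-provider.ts, a new 25-line helper whose hunk is not shown in this excerpt. A minimal sketch of what such a factory could look like, assuming it simply wraps the @ai-sdk/openai, @ai-sdk/groq, and @ai-sdk/google factories and reads the keys listed in .env.example; the switch structure and error handling are assumptions, not the file's actual contents:

// Hypothetical sketch of lib/llm-provider.ts; not the actual contents of the new file.
import { createOpenAI } from '@ai-sdk/openai'
import { createGroq } from '@ai-sdk/groq'
import { createGoogleGenerativeAI } from '@ai-sdk/google'

export type Provider = 'openai' | 'groq' | 'google'

// Returns a provider instance; calling it with a model id (e.g. 'llama3-8b-8192')
// yields a model object that streamText/generateText accept.
export function getLLMProvider(provider: Provider) {
  switch (provider) {
    case 'openai':
      return createOpenAI({ apiKey: process.env.OPENAI_API_KEY })
    case 'google':
      return createGoogleGenerativeAI({ apiKey: process.env.GOOGLE_API_KEY })
    case 'groq':
    default:
      return createGroq({ apiKey: process.env.GROQ_API_KEY })
  }
}
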
diff --git a/app/api/fire-cache/search/route.ts b/app/api/fire-cache/search/route.ts
index 3f6fd24..4900565 100644
--- a/app/api/fire-cache/search/route.ts
+++ b/app/api/fire-cache/search/route.ts
@@ -1,5 +1,5 @@
 import { NextResponse } from 'next/server'
-import { createOpenAI } from '@ai-sdk/openai'
+import { getLLMProvider, Provider } from '@/lib/llm-provider'
 import { streamText, generateText, createDataStreamResponse } from 'ai'
 import { detectCompanyTicker } from '@/lib/company-ticker-map'
@@ -8,7 +8,7 @@ export async function POST(request: Request) {
   console.log(`[${requestId}] Fire Cache Search API called`)
   try {
     const body = await request.json()
-    const messages = body.messages || []
+    const { messages = [], provider = 'groq', model = 'llama3-8b-8192' } = body
     const query = messages[messages.length - 1]?.content || body.query
     console.log(`[${requestId}] Query received:`, query)
@@ -17,20 +17,14 @@ export async function POST(request: Request) {
     }
 
     const firecrawlApiKey = process.env.FIRECRAWL_API_KEY
-    const openaiApiKey = process.env.OPENAI_API_KEY
-
+    const llmProvider = getLLMProvider(provider as Provider)
+    const llmModel = llmProvider(model)
+    console.log(`[${requestId}] Using provider: ${provider}, model: ${model}`)
+    console.log(`[${requestId}] LLM Model Object:`, llmModel)
+
     if (!firecrawlApiKey) {
       return NextResponse.json({ error: 'Firecrawl API key not configured' }, { status: 500 })
     }
-
-    if (!openaiApiKey) {
-      return NextResponse.json({ error: 'OpenAI API key not configured' }, { status: 500 })
-    }
-
-    // Configure OpenAI with API key
-    const openai = createOpenAI({
-      apiKey: openaiApiKey
-    })
 
     // Always perform a fresh search for each query to ensure relevant results
     const isFollowUp = messages.length > 2
@@ -200,7 +194,7 @@ export async function POST(request: Request) {
       : `user: ${query}`
 
     const followUpPromise = generateText({
-      model: openai('gpt-4o'),
+      model: llmModel,
      messages: [
        {
          role: 'system',
@@ -213,15 +207,15 @@ export async function POST(request: Request) {
          content: `Query: ${query}\n\nConversation context:\n${conversationPreview}\n\n${sources.length > 0 ? `Available sources about: ${sources.map((s: { title: string }) => s.title).join(', ')}\n\n` : ''}Generate 5 diverse follow-up questions that would help the user learn more about this topic from different angles.`
        }
      ],
-      temperature: 0.7,
+      temperature: 0.5,
       maxTokens: 150,
     })
 
     // Stream the text generation
     const result = streamText({
-      model: openai('gpt-4o'),
+      model: llmModel,
       messages: aiMessages,
-      temperature: 0.7,
+      temperature: 0.5,
       maxTokens: 2000
     })
diff --git a/app/api/fireplexity/search/route.ts b/app/api/fireplexity/search/route.ts
index 43e511c..f1d5e3c 100644
--- a/app/api/fireplexity/search/route.ts
+++ b/app/api/fireplexity/search/route.ts
@@ -1,5 +1,5 @@
 import { NextResponse } from 'next/server'
-import { createOpenAI } from '@ai-sdk/openai'
+import { getLLMProvider, Provider } from '@/lib/llm-provider'
 import { streamText, generateText, createDataStreamResponse } from 'ai'
 import { detectCompanyTicker } from '@/lib/company-ticker-map'
 import { selectRelevantContent } from '@/lib/content-selection'
@@ -10,8 +10,11 @@ export async function POST(request: Request) {
   console.log(`[${requestId}] Fireplexity Search API called`)
   try {
     const body = await request.json()
-    const messages = body.messages || []
+    const { messages = [], provider = 'groq', model = 'llama3-8b-8192' } = body
+    // pick the requested LLM
+    const llmModel = getLLMProvider(provider as Provider)(model)
     const query = messages[messages.length - 1]?.content || body.query
+
     console.log(`[${requestId}] Query received:`, query)
 
     if (!query) {
@@ -30,11 +33,6 @@ export async function POST(request: Request) {
       return NextResponse.json({ error: 'OpenAI API key not configured' }, { status: 500 })
     }
 
-    // Configure OpenAI with API key
-    const openai = createOpenAI({
-      apiKey: openaiApiKey
-    })
-
     // Initialize Firecrawl
     const firecrawl = new FirecrawlApp({ apiKey: firecrawlApiKey })
 
@@ -170,13 +168,13 @@ export async function POST(request: Request) {
       ]
     }
 
-    // Start generating follow-up questions in parallel (before streaming answer)
+    // Start generating follow-up questions in parallel
     const conversationPreview = isFollowUp
       ? messages.map((m: { role: string; content: string }) => `${m.role}: ${m.content}`).join('\n\n')
       : `user: ${query}`
 
     const followUpPromise = generateText({
-      model: openai('gpt-4o-mini'),
+      model: llmModel,
      messages: [
        {
          role: 'system',
@@ -195,7 +193,7 @@ export async function POST(request: Request) {
        }
      ],
       temperature: 0.7,
       maxTokens: 150,
     })
 
     // Stream the text generation
     const result = streamText({
-      model: openai('gpt-4o-mini'),
+      model: llmModel,
       messages: aiMessages,
       temperature: 0.7,
       maxTokens: 2000
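
Both routes now take the provider and model from the request body and fall back to Groq's llama3-8b-8192 when they are absent. A hypothetical client call that exercises this; the helper name is made up, and only the endpoint path and payload shape come from the route handlers above:

// Hypothetical caller; only the URL and body shape are dictated by the routes above.
async function searchWith(provider: 'openai' | 'groq' | 'google', model: string, query: string) {
  const res = await fetch('/api/fireplexity/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      messages: [{ role: 'user', content: query }],
      provider, // defaults to 'groq' on the server when omitted
      model,    // defaults to 'llama3-8b-8192' on the server when omitted
    }),
  })
  return res.body // streamed response produced by streamText on the server
}
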
- className="resize-none border-0 focus:ring-0 focus:outline-none bg-transparent placeholder:text-gray-400 dark:placeholder:text-gray-500 px-4 py-2 pr-2 shadow-none focus-visible:ring-0 focus-visible:border-0" + className="resize-none border-0 focus:ring-0 focus:outline-none bg-transparent placeholder:text-gray-500 dark:placeholder:text-gray-400 text-gray-900 dark:text-gray-100 px-4 py-2 pr-2 shadow-none focus-visible:ring-0 focus-visible:border-0" rows={1} style={{ minHeight: '36px', @@ -626,7 +626,8 @@ export function ChatInterface({ messages, sources, followUpQuestions, searchStat diff --git a/components/ui/button.tsx b/components/ui/button.tsx index 8f9c1c1..9e065ff 100644 --- a/components/ui/button.tsx +++ b/components/ui/button.tsx @@ -19,7 +19,7 @@ const buttonVariants = cva( ghost: "hover:bg-accent hover:text-accent-foreground", link: "text-primary underline-offset-4 hover:underline", code: "h-9 px-4 rounded-[10px] text-sm font-medium items-center transition-all duration-200 disabled:cursor-not-allowed disabled:opacity-50 bg-[#36322F] text-[#fff] hover:bg-[#4a4542] disabled:bg-[#8c8885] disabled:hover:bg-[#8c8885] [box-shadow:inset_0px_-2.108433723449707px_0px_0px_#171310,_0px_1.2048193216323853px_6.325301647186279px_0px_rgba(58,_33,_8,_58%)] hover:translate-y-[1px] hover:scale-[0.98] hover:[box-shadow:inset_0px_-1px_0px_0px_#171310,_0px_1px_3px_0px_rgba(58,_33,_8,_40%)] active:translate-y-[2px] active:scale-[0.97] active:[box-shadow:inset_0px_1px_1px_0px_#171310,_0px_1px_2px_0px_rgba(58,_33,_8,_30%)] disabled:shadow-none disabled:hover:translate-y-0 disabled:hover:scale-100", - orange: "h-9 px-4 rounded-[10px] text-sm font-medium items-center transition-all duration-200 disabled:cursor-not-allowed disabled:opacity-50 bg-orange-500 text-white hover:bg-orange-300 dark:bg-orange-500 dark:hover:bg-orange-300 dark:text-white [box-shadow:inset_0px_-2.108433723449707px_0px_0px_#c2410c,_0px_1.2048193216323853px_6.325301647186279px_0px_rgba(234,_88,_12,_58%)] hover:translate-y-[1px] hover:scale-[0.98] hover:[box-shadow:inset_0px_-1px_0px_0px_#c2410c,_0px_1px_3px_0px_rgba(234,_88,_12,_40%)] active:translate-y-[2px] active:scale-[0.97] active:[box-shadow:inset_0px_1px_1px_0px_#c2410c,_0px_1px_2px_0px_rgba(234,_88,_12,_30%)] disabled:shadow-none disabled:hover:translate-y-0 disabled:hover:scale-100", + orange: "h-9 px-4 rounded-[10px] text-sm font-medium items-center transition-all duration-200 disabled:cursor-not-allowed disabled:opacity-50 bg-orange-500 text-white hover:bg-orange-600 dark:bg-orange-500 dark:hover:bg-orange-400 dark:text-white disabled:bg-gray-300 disabled:text-gray-400 [box-shadow:inset_0px_-2.108433723449707px_0px_0px_#c2410c,_0px_1.2048193216323853px_6.325301647186279px_0px_rgba(234,_88,_12,_58%)] hover:translate-y-[1px] hover:scale-[0.98] hover:[box-shadow:inset_0px_-1px_0px_0px_#c2410c,_0px_1px_3px_0px_rgba(234,_88,_12,_40%)] active:translate-y-[2px] active:scale-[0.97] active:[box-shadow:inset_0px_1px_1px_0px_#c2410c,_0px_1px_2px_0px_rgba(234,_88,_12,_40%)]", }, size: { default: "h-10 px-4 py-2", diff --git a/components/ui/input.tsx b/components/ui/input.tsx index 0c2c6ff..aa28ef6 100644 --- a/components/ui/input.tsx +++ b/components/ui/input.tsx @@ -8,7 +8,7 @@ function Input({ className, type, ...props }: React.ComponentProps<"input">) { type={type} data-slot="input" className={cn( - "file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border 
bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm", + "file:text-foreground placeholder:text-gray-500 dark:placeholder:text-gray-400 selection:bg-primary selection:text-primary-foreground dark:bg-zinc-800 bg-white text-gray-900 dark:text-gray-100", "focus-visible:border-orange-400 focus-visible:ring-orange-400/20 focus-visible:ring-2", "aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive", className diff --git a/components/ui/textarea.tsx b/components/ui/textarea.tsx index 7f21b5e..b0097f3 100644 --- a/components/ui/textarea.tsx +++ b/components/ui/textarea.tsx @@ -7,7 +7,7 @@ function Textarea({ className, ...props }: React.ComponentProps<"textarea">) {