17 changes: 16 additions & 1 deletion .env.example
@@ -1,7 +1,22 @@
# Firecrawl API Key
# Get your API key from: https://www.firecrawl.dev/
# Not required if FIRECRAWL_API_URL points to a self-hosted instance
FIRECRAWL_API_KEY=fc-YOUR_API_KEY_HERE

# For self-hosting firecrawl via "docker compose"
# see: https://github.com/firecrawl/firecrawl/blob/main/SELF_HOST.md
# FIRECRAWL_API_URL=http://localhost:8001
# FIRECRAWL_API_KEY=fc-not-required-for-self-host

# AI Provider: 'groq' or 'ollama' (more providers may be supported in the future)
AI_PROVIDER=groq

# Groq API Key
# Get your API key from: https://console.groq.com/keys
GROQ_API_KEY=gsk_YOUR_GROQ_API_KEY_HERE
GROQ_MODEL=moonshotai/kimi-k2-instruct

# ===== Example for ollama LLM where API is served from http://localhost:11434/api =====
# AI_PROVIDER=ollama
# OLLAMA_HOST=http://localhost:11434
# OLLAMA_MODEL=qwen3:14b
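
Note: the route normalizes these host values before use, so a bare host such as localhost:8001, or an Ollama URL without the /api suffix, still works. A minimal sketch of that normalization follows; the helper name is illustrative and not part of the change:

// Sketch of the host normalization applied in route.ts (normalizeHost is an illustrative name)
function normalizeHost(host: string): string {
  // Prepend a scheme when the value is a bare host like "localhost:8001"
  return host.startsWith('http') ? host : `http://${host}`
}

const firecrawlApiHost = normalizeHost(process.env.FIRECRAWL_API_URL || 'https://api.firecrawl.dev')

const ollamaHost = normalizeHost(process.env.OLLAMA_HOST || 'http://localhost:11434')
// Ollama serves its REST API under /api, so append the suffix when it is missing
const ollamaApiUrl = ollamaHost.endsWith('/api') ? ollamaHost : `${ollamaHost}/api`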
75 changes: 55 additions & 20 deletions app/api/fireplexity/search/route.ts
@@ -1,5 +1,6 @@
import { NextResponse } from 'next/server'
import { createGroq } from '@ai-sdk/groq'
import { createOllama } from 'ollama-ai-provider-v2'
import { streamText, generateText, createUIMessageStream, createUIMessageStreamResponse, convertToModelMessages } from 'ai'
import type { ModelMessage } from 'ai'
import { detectCompanyTicker } from '@/lib/company-ticker-map'
@@ -31,21 +32,41 @@ export async function POST(request: Request) {
}

// Use API key from request body if provided, otherwise fall back to environment variable
const firecrawlApiKey = body.firecrawlApiKey || process.env.FIRECRAWL_API_KEY
const groqApiKey = process.env.GROQ_API_KEY

if (!firecrawlApiKey) {
return NextResponse.json({ error: 'Firecrawl API key not configured' }, { status: 500 })
const firecrawlApiHost = process.env.FIRECRAWL_API_URL || "https://api.firecrawl.dev"
const resolvedFirecrawlApiHost = firecrawlApiHost.startsWith('http') ? firecrawlApiHost : `http://${firecrawlApiHost}`
// Skip the API key requirement for localhost/127.0.0.1 hosts (does not cover self-hosting on another machine)
const isFirecrawlLocalhost = firecrawlApiHost.includes('localhost') || firecrawlApiHost.includes('127.0.0.1')
// Only fall back to the placeholder key for a local self-hosted instance, so the missing-key error below can still fire for remote hosts
const firecrawlApiKey = body.firecrawlApiKey || process.env.FIRECRAWL_API_KEY || (isFirecrawlLocalhost ? 'fc-not-required-for-self-host' : undefined)
if (!firecrawlApiKey) {
return NextResponse.json({ error: `Firecrawl API key required but not configured for ${firecrawlApiHost}` }, { status: 500 })
}

if (!groqApiKey) {
return NextResponse.json({ error: 'Groq API key not configured' }, { status: 500 })

// AI Provider selection
const aiProvider = process.env.AI_PROVIDER || 'groq'
let providerInstance: any
let providerModel: string | undefined

if (aiProvider === 'ollama') {
// https://ai-sdk.dev/providers/community-providers/ollama
const ollamaHost = process.env.OLLAMA_HOST || "http://localhost:11434"
const resolvedOllamaHost = ollamaHost.startsWith('http') ? ollamaHost : `http://${ollamaHost}`
const resolvedOllamaApiUrl = resolvedOllamaHost.endsWith("/api") ? resolvedOllamaHost : `${resolvedOllamaHost}/api`
providerInstance = createOllama({ baseURL: resolvedOllamaApiUrl })
providerModel = process.env.OLLAMA_MODEL || 'qwen3:14b'
console.log(`Ollama API URL: ${resolvedOllamaApiUrl} / Model: ${providerModel}`)
} else {
const groqApiKey = process.env.GROQ_API_KEY
if (!groqApiKey) {
return NextResponse.json({ error: 'Groq API key not configured' }, { status: 500 })
}
providerInstance = createGroq({ apiKey: groqApiKey })
providerModel = process.env.GROQ_MODEL || 'moonshotai/kimi-k2-instruct'
console.log(`Groq Model: ${providerModel}`)
}

// Configure Groq with the OSS 120B model
const groq = createGroq({
apiKey: groqApiKey
})
const llm = providerInstance(providerModel)
console.log(llm)
const followUpLlm = providerInstance(providerModel)

// Always perform a fresh search for each query to ensure relevant results
const isFollowUp = messages.length > 2
@@ -102,7 +123,8 @@ export async function POST(request: Request) {
})

// Make direct API call to Firecrawl v2 search endpoint
const searchResponse = await fetch('https://api.firecrawl.dev/v2/search', {
console.log(`Requesting Firecrawl API at ${resolvedFirecrawlApiHost}`)
const searchResponse = await fetch(`${resolvedFirecrawlApiHost}/v2/search`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${firecrawlApiKey}`,
@@ -134,6 +156,18 @@ export async function POST(request: Request) {
const imagesData = searchData.images || []

// Transform web sources metadata
const cleanWebResults = webResults.filter((item: any) => {
try {
if (item.metadata?.statusCode && item.metadata.statusCode > 400) {
console.warn(`Skipping web search result for URL: ${item.url} due to HTTP status code: ${item.metadata.statusCode}`);
return false; // Skip this item
}
return true; // Keep this item if no status code or status code <= 400
} catch (error) {
console.error(`Error checking status code for web search result URL: ${item.url}. Skipping item.`, error);
return false; // Skip on any error during metadata access
}
});
sources = cleanWebResults.map((item: any) => {
return {
url: item.url,
@@ -169,7 +203,7 @@ export async function POST(request: Request) {
url: item.url,
title: item.title || 'Untitled',
thumbnail: item.imageUrl, // Direct API returns 'imageUrl' field
source: item.url ? new URL(item.url).hostname : undefined,
source: item.url ? new URL(item.url).hostname : undefined, // new URL(item.url) can throw a TypeError if item.url is malformed (seen on 4xx/5xx results)
width: item.imageWidth,
height: item.imageHeight,
position: item.position
@@ -284,7 +318,7 @@ export async function POST(request: Request) {

// Stream the text generation using Groq's Kimi K2 Instruct model
const result = streamText({
model: groq('moonshotai/kimi-k2-instruct'),
model: llm,
messages: aiMessages,
temperature: 0.7,
maxRetries: 2
@@ -308,7 +342,7 @@ export async function POST(request: Request) {

try {
const followUpResponse = await generateText({
model: groq('moonshotai/kimi-k2-instruct'),
model: followUpLlm,
messages: [
{
role: 'system',
@@ -343,7 +377,7 @@ export async function POST(request: Request) {
}

} catch (error) {

console.error("Error in Firecrawl POST request:", error);
// Handle specific error types
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
const statusCode = error && typeof error === 'object' && 'statusCode' in error
@@ -356,11 +390,11 @@ export async function POST(request: Request) {
const errorResponses: Record<number, { error: string; suggestion?: string }> = {
401: {
error: 'Invalid API key',
suggestion: 'Please check your Firecrawl API key is correct.'
suggestion: 'Please check that your Firecrawl API key is correct (not required when using a self-hosted Firecrawl instance).'
},
402: {
error: 'Insufficient credits',
suggestion: 'You\'ve run out of Firecrawl credits. Please upgrade your plan.'
suggestion: 'You\'ve run out of Firecrawl credits. Please upgrade your plan, or self-host the Firecrawl API instead.'
},
429: {
error: 'Rate limit exceeded',
@@ -393,11 +427,12 @@ export async function POST(request: Request) {
return createUIMessageStreamResponse({ stream })

} catch (error) {
console.error("Error in search POST request:", error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
const errorStack = error instanceof Error ? error.stack : ''
return NextResponse.json(
{ error: 'Search failed', message: errorMessage, details: errorStack },
{ status: 500 }
)
}
}
}
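
Taken together, the provider selection added above boils down to the following condensed sketch, under the same environment-variable assumptions; the resolveModel helper is illustrative and not part of the change:

import { createGroq } from '@ai-sdk/groq'
import { createOllama } from 'ollama-ai-provider-v2'

// Illustrative helper: resolve a language model from AI_PROVIDER,
// mirroring the inline logic added to route.ts above.
function resolveModel() {
  if ((process.env.AI_PROVIDER || 'groq') === 'ollama') {
    const host = process.env.OLLAMA_HOST || 'http://localhost:11434'
    const resolvedHost = host.startsWith('http') ? host : `http://${host}`
    const baseURL = resolvedHost.endsWith('/api') ? resolvedHost : `${resolvedHost}/api`
    return createOllama({ baseURL })(process.env.OLLAMA_MODEL || 'qwen3:14b')
  }
  return createGroq({ apiKey: process.env.GROQ_API_KEY })(process.env.GROQ_MODEL || 'moonshotai/kimi-k2-instruct')
}

// The streamed answer and the follow-up generation could then share one code path:
// const result = streamText({ model: resolveModel(), messages: aiMessages, temperature: 0.7, maxRetries: 2 })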
4 changes: 2 additions & 2 deletions app/markdown-renderer.tsx
@@ -1,7 +1,7 @@
'use client'

import React, { useMemo, useCallback } from 'react'
import Streamdown from 'streamdown'
import { Streamdown } from 'streamdown'
import { CitationTooltip } from './citation-tooltip-portal'
import { SearchResult } from './types'

@@ -105,4 +105,4 @@ export function MarkdownRenderer({ content, sources }: MarkdownRendererProps) {
{sources && sources.length > 0 && <CitationTooltip sources={sources} />}
</>
)
}
}
1 change: 1 addition & 0 deletions package.json
@@ -21,6 +21,7 @@
"lucide-react": "^0.511.0",
"next": "15.3.2",
"next-themes": "^0.4.6",
"ollama-ai-provider-v2": "^1.2.1",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-markdown": "^10.1.0",