diff --git a/app/api/chat/engine/chat.ts b/app/api/chat/engine/chat.ts
index 719d786..0f06445 100644
--- a/app/api/chat/engine/chat.ts
+++ b/app/api/chat/engine/chat.ts
@@ -4,25 +4,30 @@ import path from "node:path";
 import { getDataSource } from "./index";
 import { createTools } from "./tools";
 import { createQueryEngineTool } from "./tools/query-engine";
+import { WebScraperTool } from "./tools/web-search"; // 👈 Import your tool
 
 export async function createChatEngine(documentIds?: string[], params?: any) {
   const tools: BaseToolWithCall[] = [];
 
   // Add a query engine tool if we have a data source
-  // Delete this code if you don't have a data source
   const index = await getDataSource(params);
   if (index) {
     tools.push(createQueryEngineTool(index, { documentIds }));
   }
 
+  // Manually add WebScraperTool (no need for tools.json)
+  tools.push(new WebScraperTool());
+
+  // Optionally load tools from config if file exists
   const configFile = path.join("config", "tools.json");
   let toolConfig: any;
   try {
     // add tools from config file if it exists
     toolConfig = JSON.parse(await fs.readFile(configFile, "utf8"));
   } catch (e) {
-    console.info(`Could not read ${configFile} file. Using no tools.`);
+    console.info(`Could not read ${configFile} file. Using no config tools.`);
   }
+
   if (toolConfig) {
     tools.push(...(await createTools(toolConfig)));
   }
diff --git a/app/api/chat/engine/generate.ts b/app/api/chat/engine/generate.ts
index 4647361..2b3af88 100644
--- a/app/api/chat/engine/generate.ts
+++ b/app/api/chat/engine/generate.ts
@@ -1,6 +1,8 @@
 import { VectorStoreIndex } from "llamaindex";
 import { storageContextFromDefaults } from "llamaindex/storage/StorageContext";
-
+import { BlobServiceClient } from "@azure/storage-blob"; // ✅ Azure Blob SDK
+import * as fs from "fs/promises"; // ✅ FileSystem promises
+import * as path from "path"; // ✅ Node.js path helper
 import * as dotenv from "dotenv";
 
 import { getDocuments } from "./loader";
@@ -16,24 +18,36 @@ async function getRuntime(func: any) {
   return end - start;
 }
 
+async function uploadStorageToBlob(persistDir: string) {
+  const containerName = process.env.AZURE_STORAGE_CONTAINER_NAME || "llama-index-data";
+  const blobServiceClient = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING!);
+  const containerClient = blobServiceClient.getContainerClient(containerName);
+
+  await containerClient.createIfNotExists(); // Make sure container exists
+
+  const files = await fs.readdir(persistDir);
+  for (const file of files) {
+    const filePath = path.join(persistDir, file);
+    const blockBlobClient = containerClient.getBlockBlobClient(file);
+    const fileContent = await fs.readFile(filePath);
+    await blockBlobClient.upload(fileContent, fileContent.length);
+    console.log(`Uploaded ${file} to blob storage`);
+  }
+}
+
 async function generateDatasource() {
   console.log(`Generating storage context...`);
-  // Split documents, create embeddings and store them in the storage context
   const persistDir = process.env.STORAGE_CACHE_DIR;
   if (!persistDir) {
     throw new Error("STORAGE_CACHE_DIR environment variable is required!");
   }
   const ms = await getRuntime(async () => {
-    const storageContext = await storageContextFromDefaults({
-      persistDir,
-    });
+    const storageContext = await storageContextFromDefaults({ persistDir });
     const documents = await getDocuments();
-
-    await VectorStoreIndex.fromDocuments(documents, {
-      storageContext,
-    });
+    await VectorStoreIndex.fromDocuments(documents, { storageContext });
+    await uploadStorageToBlob(persistDir);
   });
-  console.log(`Storage context successfully generated in ${ms / 1000}s.`);
+  console.log(`Storage context successfully generated and uploaded in ${ms / 1000}s.`);
 }
 
 (async () => {
diff --git a/app/api/chat/engine/provider.ts b/app/api/chat/engine/provider.ts
index f833ebb..5cab0d9 100644
--- a/app/api/chat/engine/provider.ts
+++ b/app/api/chat/engine/provider.ts
@@ -17,7 +17,7 @@ export function setupProvider() {
 
   const azure = {
     azureADTokenProvider,
-    deployment: process.env.AZURE_DEPLOYMENT_NAME ?? "gpt-35-turbo",
+    deployment: process.env.AZURE_DEPLOYMENT_NAME ?? "gpt-4o-mini",
   };
 
   // configure LLM model
diff --git a/app/api/chat/engine/tools/scraper.ts b/app/api/chat/engine/tools/scraper.ts
new file mode 100644
index 0000000..dce4480
--- /dev/null
+++ b/app/api/chat/engine/tools/scraper.ts
@@ -0,0 +1,28 @@
+import axios from "axios";
+import { Document } from "@llamaindex/core/schema";
+
+const SCRAPER_API_URL = process.env.SCRAPER_API_URL || "http://localhost:5001/scrape";
+
+export async function scrapeWebDocuments(urls: string[]): Promise<Document[]> {
+  try {
+    const response = await axios.post(SCRAPER_API_URL, { urls });
+
+    if (!Array.isArray(response.data)) {
+      console.warn("Unexpected response format from scraper:", response.data);
+      return [];
+    }
+
+    return response.data.map((entry: any) =>
+      new Document({
+        text: entry.text,
+        metadata: {
+          source: entry.url || "web",
+          private: "false",
+        },
+      })
+    );
+  } catch (error) {
+    console.error("Error calling web scraper service:", error);
+    return [];
+  }
+}
diff --git a/app/api/chat/engine/tools/web-search.ts b/app/api/chat/engine/tools/web-search.ts
new file mode 100644
index 0000000..a8dfd74
--- /dev/null
+++ b/app/api/chat/engine/tools/web-search.ts
@@ -0,0 +1,31 @@
+import { BaseTool, ToolMetadata } from "llamaindex";
+import { JSONSchemaType } from "ajv";
+import { Document } from "@llamaindex/core/schema";
+import { scrapeWebDocuments } from "./scraper";
+
+type WebScraperParams = {
+  urls: string[];
+};
+
+export class WebScraperTool implements BaseTool<WebScraperParams> {
+  metadata: ToolMetadata<JSONSchemaType<WebScraperParams>> = {
+    name: "web_scraper",
+    description: "Scrape web page content from a list of URLs",
+    parameters: {
+      type: "object",
+      properties: {
+        urls: {
+          type: "array",
+          items: { type: "string" },
+          description: "List of URLs to scrape",
+        },
+      },
+      required: ["urls"],
+    },
+  };
+
+  async call(input: WebScraperParams): Promise<string> {
+    const docs: Document[] = await scrapeWebDocuments(input.urls);
+    return docs.map((d) => d.text).join("\n---\n");
+  }
+}
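Note that the diff only adds the client side of the scraping flow: scrapeWebDocuments POSTs { urls } to whatever service SCRAPER_API_URL points at (http://localhost:5001/scrape by default) and expects a JSON array of { url, text } entries back. The service itself is not part of this change. As a minimal sketch only — assuming an Express server and deliberately naive tag-stripping, neither of which is implied by the diff beyond the request/response shape — such an endpoint could look like this:

// Hypothetical stand-in for the service behind SCRAPER_API_URL (not part of this diff).
// Contract inferred from scraper.ts above: POST { urls: string[] } -> [{ url, text }, ...].
import express from "express";

const app = express();
app.use(express.json());

app.post("/scrape", async (req, res) => {
  const urls: string[] = Array.isArray(req.body?.urls) ? req.body.urls : [];
  const results = await Promise.all(
    urls.map(async (url) => {
      try {
        const html = await (await fetch(url)).text();
        // Naive extraction: drop scripts, strip tags, collapse whitespace.
        const text = html
          .replace(/<script[\s\S]*?<\/script>/gi, "")
          .replace(/<[^>]+>/g, " ")
          .replace(/\s+/g, " ")
          .trim();
        return { url, text };
      } catch {
        return { url, text: "" };
      }
    }),
  );
  res.json(results);
});

app.listen(5001); // matches the default http://localhost:5001/scrape in scraper.ts

A real service would replace the regexes with a proper HTML parser or a headless browser, but the request/response shape above is all that scraper.ts depends on.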
diff --git a/app/api/chat/llamaindex/streaming/events.ts b/app/api/chat/llamaindex/streaming/events.ts
index 538e001..45235dd 100644
--- a/app/api/chat/llamaindex/streaming/events.ts
+++ b/app/api/chat/llamaindex/streaming/events.ts
@@ -14,29 +14,29 @@ import { downloadFile } from "./file";
 
 const LLAMA_CLOUD_DOWNLOAD_FOLDER = "output/llamacloud";
 
-export function appendSourceData(
-  data: StreamData,
-  sourceNodes?: NodeWithScore[],
-) {
-  if (!sourceNodes?.length) return;
-  try {
-    const nodes = sourceNodes.map((node) => ({
-      metadata: node.node.metadata,
-      id: node.node.id_,
-      score: node.score ?? null,
-      url: getNodeUrl(node.node.metadata),
-      text: node.node.getContent(MetadataMode.NONE),
-    }));
-    data.appendMessageAnnotation({
-      type: "sources",
-      data: {
-        nodes,
-      },
-    });
-  } catch (error) {
-    console.error("Error appending source data:", error);
-  }
-}
+// export function appendSourceData(
+//   data: StreamData,
+//   sourceNodes?: NodeWithScore[],
+// ) {
+//   if (!sourceNodes?.length) return;
+//   try {
+//     const nodes = sourceNodes.map((node) => ({
+//       metadata: node.node.metadata,
+//       id: node.node.id_,
+//       score: node.score ?? null,
+//       url: getNodeUrl(node.node.metadata),
+//       text: node.node.getContent(MetadataMode.NONE),
+//     }));
+//     data.appendMessageAnnotation({
+//       type: "sources",
+//       data: {
+//         nodes,
+//       },
+//     });
+//   } catch (error) {
+//     console.error("Error appending source data:", error);
+//   }
+// }
 
 export function appendEventData(data: StreamData, title?: string) {
   if (!title) return;
@@ -74,7 +74,7 @@ export function createCallbackManager(stream: StreamData) {
 
   callbackManager.on("retrieve-end", (data) => {
     const { nodes, query } = data.detail;
-    appendSourceData(stream, nodes);
+    // appendSourceData(stream, nodes);
     appendEventData(stream, `Retrieving context for query: '${query.query}'`);
     appendEventData(
       stream,
diff --git a/app/components/header.tsx b/app/components/header.tsx
index f02ce73..815e5b1 100644
--- a/app/components/header.tsx
+++ b/app/components/header.tsx
@@ -1,27 +1,38 @@
+'use client';
+
 import Image from "next/image";
+import { Bars3Icon, Cog6ToothIcon } from '@heroicons/react/24/outline';
+import { useRouter } from 'next/navigation';
+import Link from "next/link";
 
 export default function Header() {
+  const router = useRouter();
+
   return (
    [removed JSX, garbled during extraction: the default create-llama banner with "Get started by editing app/page.tsx" and the "Built by LlamaIndex" link with the Llama Logo image]
    [added JSX, garbled during extraction: the header bar is wrapped in a <Link> ("Wrap the entire <div> in a <Link> so the whole bar is clickable") and shows the "NEX4 ICT Solutions" brand text, followed by a Bars3Icon and a Cog6ToothIcon whose onClick handlers call router.push('/menu') and router.push('/settings')]
   );
diff --git a/app/components/ui/contact.tsx b/app/components/ui/contact.tsx
new file mode 100644
index 0000000..cf38437
--- /dev/null
+++ b/app/components/ui/contact.tsx
@@ -0,0 +1,198 @@
+import React, { useState } from 'react';
+import Contact from 'app/components/UI/Contact';
+const ContactUsPage: React.FC = () => {
+  const [formData, setFormData] = useState({
+    name: '',
+    email: '',
+    subject: '',
+    message: '',
+  });
+
+  const [formErrors, setFormErrors] = useState<{ [key: string]: string }>({});
+  const [submitted, setSubmitted] = useState(false);
+
+  const handleChange = (e: React.ChangeEvent) => {
+    setFormData({
+      ...formData,
+      [e.target.name]: e.target.value,
+    });
+    setFormErrors({
+      ...formErrors,
+      [e.target.name]: '',
+    });
+  };
+
+  const validate = () => {
+    const errors: { [key: string]: string } = {};
+    if (!formData.name.trim()) {
+      errors.name = 'Name is required';
+    }
+    if (!formData.email.trim()) {
+      errors.email = 'Email is required';
+    } else if (!/\S+@\S+\.\S+/.test(formData.email)) {
+      errors.email = 'Email is invalid';
+    }
+    if (!formData.subject.trim()) {
+      errors.subject = 'Subject is required';
+    }
+    if (!formData.message.trim()) {
+      errors.message = 'Message is required';
+    }
+    return errors;
+  };
+
+  const handleSubmit = (e: React.FormEvent) => {
+    e.preventDefault();
+    const errors = validate();
+    if (Object.keys(errors).length === 0) {
+      setSubmitted(true);
+      // Here you could handle sending form data to backend or an API
+      // For now, just simulate success
+    } else {
+      setFormErrors(errors);
+      setSubmitted(false);
+    }
+  };
+
+  return (
    [form JSX, garbled during extraction: a "Contact Us" heading, the line "We'd love to hear from you! Please fill out the form below.", and labelled name, email, and subject fields — each rendering its formErrors message when present — followed by the message field, with submission handled by handleSubmit above]