diff --git a/backend/Dockerfile b/backend/Dockerfile
index 4488129d9..beff794db 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -6,7 +6,7 @@ EXPOSE 8000
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
     libmagic1 \
-    libgl1-mesa-glx \
+    libgl1-mesa-dri \
     libreoffice \
     cmake \
     poppler-utils \
diff --git a/backend/score.py b/backend/score.py
index 11209a0b4..5edebbe75 100644
--- a/backend/score.py
+++ b/backend/score.py
@@ -232,7 +232,7 @@ async def extract_knowledge_graph_from_file(
           model: Type of model to use ('Diffbot'or'OpenAI GPT')
 
     Returns:
-          Nodes and Relations created in Neo4j databse for the pdf file
+          Nodes and Relations created in Neo4j database for the pdf file
     """
     try:
         start_time = time.time()
@@ -340,7 +340,7 @@ async def get_source_list(
     database=Form(None),
     email=Form(None)):
     """
-    Calls 'get_source_list_from_graph' which returns list of sources which already exist in databse
+    Calls 'get_source_list_from_graph' which returns list of sources which already exist in database
     """
     try:
         start = time.time()
diff --git a/backend/src/communities.py b/backend/src/communities.py
index 0ecf493cc..7c30ebf2e 100644
--- a/backend/src/communities.py
+++ b/backend/src/communities.py
@@ -501,8 +501,8 @@ def create_communities(uri, username, password, database,model=COMMUNITY_CREATIO
         clear_communities(gds)
 
         graph_project = create_community_graph_projection(gds)
-        write_communities_sucess = write_communities(gds, graph_project)
-        if write_communities_sucess:
+        write_communities_success = write_communities(gds, graph_project)
+        if write_communities_success:
             logging.info("Starting Community properties creation process.")
             create_community_properties(gds,model)
             logging.info("Communities creation process completed successfully.")
diff --git a/data/llm_comparision.json b/data/llm_comparison.json
similarity index 100%
rename from data/llm_comparision.json
rename to data/llm_comparison.json
diff --git a/docker-compose.yml b/docker-compose.yml
index 4b166f490..27be300b6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,5 +1,3 @@
-version: "3"
-
 services:
   backend:
     build:
diff --git a/docs/backend/backend_docs.adoc b/docs/backend/backend_docs.adoc
index aaf524053..837d3579b 100644
--- a/docs/backend/backend_docs.adoc
+++ b/docs/backend/backend_docs.adoc
@@ -242,7 +242,7 @@ This API is responsible for -
 
 ** SentenceTransformer embeddingds are used by default, also embeddings are made configurable to use either OpenAIEmbeddings or VertexAIEmbeddings.
 
-** Vector index is created in databse on embeddingds created for chunks.
+** Vector index is created in database on embeddingds created for chunks.
 
 **API Parameters :**
diff --git a/experiments/PDF_to_KG_using_Rebel.ipynb b/experiments/PDF_to_KG_using_Rebel.ipynb
index 032bddbad..2c4a13d8f 100644
--- a/experiments/PDF_to_KG_using_Rebel.ipynb
+++ b/experiments/PDF_to_KG_using_Rebel.ipynb
@@ -240,7 +240,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 135,
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -266,7 +266,7 @@
    "    \"Nodes\" : distinct_nodes,\n",
    "    \"Relations\" : relations }\n",
    "    \n",
-   "json_file_path = '../data/llm_comparision.json'\n",
+   "json_file_path = '../data/llm_comparison.json'\n",
    "with open(json_file_path, 'r') as json_file :\n",
    "    data = json.load(json_file)\n",
    "    \n",
diff --git a/experiments/PDF_to_kg_using_OpenAI.ipynb b/experiments/PDF_to_kg_using_OpenAI.ipynb
index d478a48b0..ed8bbd385 100644
--- a/experiments/PDF_to_kg_using_OpenAI.ipynb
+++ b/experiments/PDF_to_kg_using_OpenAI.ipynb
@@ -296,7 +296,7 @@
    "    \"Nodes\" : list(distinct_nodes),\n",
    "    \"Relations\" : relations }\n",
    "\n",
-   "json_file_path = '../data/llm_comparision.json'\n",
+   "json_file_path = '../data/llm_comparison.json'\n",
    "with open(json_file_path, 'r') as json_file :\n",
    "    data = json.load(json_file)\n",
    "    \n",
diff --git a/frontend/src/components/Content.tsx b/frontend/src/components/Content.tsx
index d2a5e14ac..45776e841 100644
--- a/frontend/src/components/Content.tsx
+++ b/frontend/src/components/Content.tsx
@@ -638,16 +638,16 @@ const Content: React.FC = ({
         showNormalToast(response.data.message as string);
         retryOnclose();
       } else {
-        const isStartFromBegining = retryoption === RETRY_OPIONS[0] || retryoption === RETRY_OPIONS[1];
+        const isStartFromBeginning = retryoption === RETRY_OPIONS[0] || retryoption === RETRY_OPIONS[1];
         setFilesData((prev) => {
           return prev.map((f) => {
             return f.name === filename
               ? {
                   ...f,
                   status: 'Ready to Reprocess',
-                  processingProgress: isStartFromBegining ? 0 : f.processingProgress,
-                  nodesCount: isStartFromBegining ? 0 : f.nodesCount,
-                  relationshipsCount: isStartFromBegining ? 0 : f.relationshipsCount,
+                  processingProgress: isStartFromBeginning ? 0 : f.processingProgress,
+                  nodesCount: isStartFromBeginning ? 0 : f.nodesCount,
+                  relationshipsCount: isStartFromBeginning ? 0 : f.relationshipsCount,
                 }
               : f;
           });
diff --git a/frontend/src/components/Popups/ConnectionModal/ConnectionModal.tsx b/frontend/src/components/Popups/ConnectionModal/ConnectionModal.tsx
index f4d5c6322..925be4033 100644
--- a/frontend/src/components/Popups/ConnectionModal/ConnectionModal.tsx
+++ b/frontend/src/components/Popups/ConnectionModal/ConnectionModal.tsx
@@ -54,7 +54,7 @@ export default function ConnectionModal({
     errorMessage,
     setIsGCSActive,
     setShowDisconnectButton,
-    // setChunksToBeProces,
+    // setChunksToBeProcess,
   } = useCredentials();
   const [isLoading, setIsLoading] = useState(false);
   const [searchParams, setSearchParams] = useSearchParams();
diff --git a/frontend/src/context/UserCredentials.tsx b/frontend/src/context/UserCredentials.tsx
index 83718bc51..0f6304f8d 100644
--- a/frontend/src/context/UserCredentials.tsx
+++ b/frontend/src/context/UserCredentials.tsx
@@ -23,8 +23,8 @@ export const UserConnection = createContext({
   setShowDisconnectButton: () => null,
   isGCSActive: false,
   setIsGCSActive: () => null,
-  // chunksToBeProces: 50,
-  // setChunksToBeProces: () => null,
+  // chunksToBeProcess: 50,
+  // setChunksToBeProcess: () => null,
 });
 export const useCredentials = () => {
   const userCredentials = useContext(UserConnection);
@@ -39,7 +39,7 @@ const UserCredentialsWrapper: FunctionComponent = (props) => {
   const [errorMessage, setErrorMessage] = useState('');
   const [showDisconnectButton, setShowDisconnectButton] = useState(false);
   const [isGCSActive, setIsGCSActive] = useState(false);
-  // const [chunksToBeProces, setChunksToBeProces] = useState(50);
+  // const [chunksToBeProcess, setChunksToBeProcess] = useState(50);
   const value = {
     userCredentials,
     setUserCredentials,
@@ -57,8 +57,8 @@ const UserCredentialsWrapper: FunctionComponent = (props) => {
     setShowDisconnectButton,
     isGCSActive,
     setIsGCSActive,
-    // chunksToBeProces,
-    // setChunksToBeProces,
+    // chunksToBeProcess,
+    // setChunksToBeProcess,
   };
   const { pathname } = useLocation();
   useEffect(() => {
diff --git a/frontend/src/services/ConnectAPI.ts b/frontend/src/services/ConnectAPI.ts
index 384926733..51f5a8796 100644
--- a/frontend/src/services/ConnectAPI.ts
+++ b/frontend/src/services/ConnectAPI.ts
@@ -17,8 +17,8 @@ const connectAPI = async () => {
 
 const envConnectionAPI = async () => {
   try {
-    const conectionUrl = `/backend_connection_configuration`;
-    const response = await api.post(conectionUrl);
+    const connectionUrl = `/backend_connection_configuration`;
+    const response = await api.post(connectionUrl);
     return response;
   } catch (error) {
     console.log('API Connection error', error);
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index c58c5d39e..a087785e2 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -779,8 +779,8 @@ export interface ContextProps {
   setShowDisconnectButton: Dispatch<SetStateAction<boolean>>;
   isGCSActive: boolean;
   setIsGCSActive: Dispatch<SetStateAction<boolean>>;
-  // chunksToBeProces: number;
-  // setChunksToBeProces: Dispatch<SetStateAction<number>>;
+  // chunksToBeProcess: number;
+  // setChunksToBeProcess: Dispatch<SetStateAction<number>>;
 }
 export interface MessageContextType {
   messages: Messages[] | [];