diff --git a/.claude/commands/implement-feature.md b/.claude/commands/implement-feature.md new file mode 100644 index 00000000..33302a4f --- /dev/null +++ b/.claude/commands/implement-feature.md
+ + Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR. + + # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md + # or https://docs.anthropic.com/en/docs/claude-code/sdk#command-line for available options + claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"' + diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml new file mode 100644 index 00000000..ae36c007 --- /dev/null +++ b/.github/workflows/claude.yml @@ -0,0 +1,50 @@ +name: Claude Code + +on: + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + issues: + types: [opened, assigned] + pull_request_review: + types: [submitted] + +jobs: + claude: + if: | + (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || + (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + issues: read + id-token: write + actions: read # Required for Claude to read CI results on PRs + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Run Claude Code + id: claude + uses: anthropics/claude-code-action@v1 + with: + claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + + # This is an optional setting that allows Claude to read CI results on PRs + additional_permissions: | + actions: read + + # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it. 
+ # prompt: 'Update the pull request description to include a summary of changes.' + + # Optional: Add claude_args to customize behavior and configuration + # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md + # or https://docs.anthropic.com/en/docs/claude-code/sdk#command-line for available options + # claude_args: '--model claude-opus-4-1-20250805 --allowed-tools Bash(gh pr:*)' + diff --git a/.playwright-mcp/page-2025-09-05T12-05-05-819Z.png b/.playwright-mcp/page-2025-09-05T12-05-05-819Z.png new file mode 100644 index 00000000..5953fe44 Binary files /dev/null and b/.playwright-mcp/page-2025-09-05T12-05-05-819Z.png differ diff --git a/.playwright-mcp/page-2025-09-05T12-06-19-531Z.png b/.playwright-mcp/page-2025-09-05T12-06-19-531Z.png new file mode 100644 index 00000000..cb63a64e Binary files /dev/null and b/.playwright-mcp/page-2025-09-05T12-06-19-531Z.png differ diff --git a/.python-version b/.python-version index 24ee5b1b..92536a9e 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.13 +3.12.0 diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..dab889de --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,175 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +This is a **Course Materials RAG System** - a full-stack Retrieval-Augmented Generation application that allows users to query course materials and receive AI-powered responses with proper source attribution. 
+ +## Architecture + +The system uses a **modular, three-tier architecture**: + +### Backend (`/backend/`) +- **FastAPI** web framework with CORS and proxy middleware +- **RAG System Core**: Main orchestrator (`rag_system.py`) +- **Vector Storage**: ChromaDB with SentenceTransformers embeddings (`vector_store.py`) +- **AI Generation**: Anthropic Claude integration with tool calling (`ai_generator.py`) +- **Document Processing**: Handles PDF/DOCX/TXT files (`document_processor.py`) +- **Tool-Based Search**: Semantic search with course/lesson filtering (`search_tools.py`) +- **Session Management**: Conversation history tracking (`session_manager.py`) + +### Frontend (`/frontend/`) +- **Vanilla JavaScript** SPA with marked.js for markdown rendering +- **Real-time chat interface** with loading states and source attribution +- **Course statistics sidebar** with collapsible sections +- **Suggested questions** for user guidance + +### Data Models (`/backend/models.py`) +- **Course**: Title, description, lessons, instructor, URL +- **Lesson**: Number, title, content, URL +- **CourseChunk**: Processed text chunks for vector storage + +## Development Commands + +### Quick Start +```bash +chmod +x run.sh +./run.sh +``` + +### Manual Development +```bash +# Install dependencies (first time) +uv sync + +# Start backend server +cd backend && uv run uvicorn app:app --reload --port 8000 + +# Run tests +cd backend && uv run pytest tests/ -v + +# Application runs at: +# - Web Interface: http://localhost:8000 +# - API Docs: http://localhost:8000/docs +``` + +### Code Quality Commands + +```bash +# Format code with Black and Ruff +uv run black backend/ main.py +uv run ruff format backend/ main.py +uv run ruff check --fix backend/ main.py + +# Run type checking +uv run mypy backend/ main.py + +# Run linting checks only +uv run ruff check backend/ main.py + +# Complete quality check (format + lint + test) +./scripts/quality-check.sh + +# Individual scripts +./scripts/format.sh # Format 
code only +./scripts/lint.sh # Lint and type check only +``` + +### Environment Setup +Create `.env` file in root: +``` +ANTHROPIC_API_KEY=your_key_here +``` + +## Key Technical Patterns + +### RAG Query Flow +1. User query → FastAPI endpoint (`/api/query`) +2. RAG system creates AI prompt with tool definitions +3. Claude uses `search_course_content` tool with semantic matching +4. Vector store searches ChromaDB with course/lesson filtering +5. Search results formatted with source attribution +6. Claude synthesizes response using retrieved content +7. Response returned with clickable source links + +### Tool-Based Search Architecture +- **CourseSearchTool**: Handles semantic search with course name fuzzy matching +- **ToolManager**: Registers and executes tools for AI agent +- **Source Tracking**: Last search sources stored for UI display +- **Flexible Filtering**: Supports course title and lesson number filters + +### Vector Storage Strategy +- **SentenceTransformers**: `all-MiniLM-L6-v2` for embeddings +- **ChromaDB Collections**: Separate storage for course metadata vs content chunks +- **Smart Deduplication**: Avoids re-processing existing courses +- **Metadata Enrichment**: Course titles, lesson numbers, URLs stored as metadata + +### Session Management +- **Conversation History**: Tracks user-assistant exchanges per session +- **Context Limits**: Configurable max history (default: 2 messages) +- **Session Creation**: Auto-generated UUIDs for frontend sessions + +## Configuration (`/backend/config.py`) + +Key settings: +- **ANTHROPIC_MODEL**: `claude-sonnet-4-20250514` +- **EMBEDDING_MODEL**: `all-MiniLM-L6-v2` +- **CHUNK_SIZE**: 800 characters +- **CHUNK_OVERLAP**: 100 characters +- **MAX_RESULTS**: 5 search results +- **MAX_HISTORY**: 2 conversation turns + +## Document Processing + +Supports: **PDF, DOCX, TXT** files +- Course documents placed in `/docs/` folder +- Auto-loaded on server startup +- Structured parsing extracts course metadata and lessons +- 
Text chunking with overlap for semantic search +- Duplicate detection prevents re-processing + +## API Endpoints + +- **POST** `/api/query` - Process user questions +- **GET** `/api/courses` - Get course statistics +- **Static files** served at `/` (frontend) + +## Testing and Development + +### Testing + +The project includes comprehensive test coverage: + +- **Unit Tests**: Individual component testing (CourseSearchTool, VectorStore, AIGenerator) +- **Integration Tests**: RAG system end-to-end testing +- **API Tests**: FastAPI endpoint testing + +```bash +# Run all tests +cd backend && uv run pytest tests/ -v + +# Run specific test file +cd backend && uv run pytest tests/test_course_search_tool.py -v + +# Run with coverage (requires pytest-cov: uv add pytest-cov) +cd backend && uv run pytest tests/ --cov=. --cov-report=html +``` + +### Development Guidelines + +Since this is a RAG system with AI components: +- Test with sample course documents in `/docs/` +- Verify ChromaDB storage at `./backend/chroma_db/` +- Monitor API logs for tool usage and search results +- Test different question types (general vs course-specific) +- Validate source attribution and clickable links +- **Always use `uv` for dependency management and running commands** + +#### Code Quality Standards +- **Black**: Automatic code formatting (line length: 88) +- **Ruff**: Fast linting and import organization +- **MyPy**: Static type checking with strict settings +- **Run quality checks before committing**: Use `./scripts/quality-check.sh` +- **Consistent formatting**: All code is formatted with Black and Ruff +- **Type hints required**: MyPy enforces type annotations \ No newline at end of file diff --git a/backend-tool-refactor.md b/backend-tool-refactor.md new file mode 100644 index 00000000..de23ae5c --- /dev/null +++ b/backend-tool-refactor.md @@ -0,0 +1,28 @@ +Refactor @backend/ai_generator.py to support sequential tool calling where Claude can make up to 2 tool calls in separate API 
rounds. + +Current behavior: +- Claude makes 1 tool call → tools are removed from API params → final response +- If Claude wants another tool call after seeing results, it can't (gets empty response) + +Desired behavior: +- Each tool call should be a separate API request where Claude can reason about previous results +- Support for complex queries requiring multiple searches for comparisons, multi-part questions, or when information from different courses/lessons is needed + +Example flow: +1. User: "Search for a course that discusses the same topic as lesson 4 of course X" +2. Claude: get course outline for course X → gets title of lesson 4 +3. Claude: uses the title to search for a course that discusses the same topic → returns course information +4. Claude: provides complete answer + +Requirements: +- Maximum 2 sequential rounds per user query +- Terminate when: (a) 2 rounds completed, (b) Claude's response has no tool_use blocks, or (c) tool call fails +- Preserve conversation context between rounds +- Handle tool execution errors gracefully + +Notes: +- Update the system prompt in @backend/ai_generator.py +- Update the test @backend/tests/test_ai_generator.py +- Write tests that verify the external behavior (API calls made, tools executed, results returned) rather than internal state details. + +Use two parallel subagents to brainstorm possible plans. Do not implement any code. diff --git a/backend/ai_generator.py b/backend/ai_generator.py index 0363ca90..98eb31c9 100644 --- a/backend/ai_generator.py +++ b/backend/ai_generator.py @@ -1,135 +1,366 @@ +from typing import Any + import anthropic -from typing import List, Optional, Dict, Any + class AIGenerator: """Handles interactions with Anthropic's Claude API for generating responses""" - + # Static system prompt to avoid rebuilding on each call - SYSTEM_PROMPT = """ You are an AI assistant specialized in course materials and educational content with access to a comprehensive search tool for course information. 
+ SYSTEM_PROMPT = """You are an AI assistant specialized in course materials and educational content with access to specialized tools for course information. -Search Tool Usage: -- Use the search tool **only** for questions about specific course content or detailed educational materials -- **One search per query maximum** -- Synthesize search results into accurate, fact-based responses -- If search yields no results, state this clearly without offering alternatives +Tool Usage Guidelines: +- **Multiple tool calls allowed**: You can use tools sequentially to gather comprehensive information +- **Tool call strategy**: Start with broader searches, then narrow down with specific filters if needed +- **Course outline queries**: Use get_course_outline tool for course structure questions +- **Content queries**: Use search_course_content tool for topic-specific questions +- **Sequential refinement**: If initial results are insufficient, you may make additional tool calls with different parameters +- **Maximum efficiency**: Use tools thoughtfully - each call should add value to your response Response Protocol: -- **General knowledge questions**: Answer using existing knowledge without searching -- **Course-specific questions**: Search first, then answer -- **No meta-commentary**: - - Provide direct answers only — no reasoning process, search explanations, or question-type analysis - - Do not mention "based on the search results" - +- **Comprehensive answers**: Use multiple tool calls when necessary to provide complete responses +- **Source synthesis**: Combine information from multiple searches coherently +- **Clear attribution**: Reference sources appropriately +- **Direct answers**: Provide the information requested without meta-commentary about your process All responses must be: -1. **Brief, Concise and focused** - Get to the point quickly +1. **Brief and focused** - Get to the point quickly 2. **Educational** - Maintain instructional value 3. 
**Clear** - Use accessible language -4. **Example-supported** - Include relevant examples when they aid understanding -Provide only the direct answer to what was asked. +4. **Well-sourced** - Include relevant examples and references +5. **Complete** - Address all aspects of the question + +Provide comprehensive, well-researched answers using available tools as needed. """ - + def __init__(self, api_key: str, model: str): self.client = anthropic.Anthropic(api_key=api_key) self.model = model - + # Pre-build base API parameters - self.base_params = { - "model": self.model, - "temperature": 0, - "max_tokens": 800 - } - - def generate_response(self, query: str, - conversation_history: Optional[str] = None, - tools: Optional[List] = None, - tool_manager=None) -> str: + self.base_params = {"model": self.model, "temperature": 0, "max_tokens": 800} + + def generate_response( + self, + query: str, + conversation_history: str | None = None, + tools: list | None = None, + tool_manager=None, + max_rounds: int = 2, + ) -> str: """ - Generate AI response with optional tool usage and conversation context. - + Generate AI response with sequential tool calling support up to max_rounds. 
+ Args: query: The user's question or request conversation_history: Previous messages for context tools: Available tools the AI can use tool_manager: Manager to execute tools - + max_rounds: Maximum number of sequential tool calling rounds (default: 2) + Returns: Generated response as string """ - - # Build system content efficiently - avoid string ops when possible + + # Build system content efficiently system_content = ( f"{self.SYSTEM_PROMPT}\n\nPrevious conversation:\n{conversation_history}" - if conversation_history + if conversation_history else self.SYSTEM_PROMPT ) - - # Prepare API call parameters efficiently - api_params = { - **self.base_params, - "messages": [{"role": "user", "content": query}], - "system": system_content - } - - # Add tools if available - if tools: - api_params["tools"] = tools - api_params["tool_choice"] = {"type": "auto"} - - # Get response from Claude - response = self.client.messages.create(**api_params) - - # Handle tool execution if needed - if response.stop_reason == "tool_use" and tool_manager: - return self._handle_tool_execution(response, api_params, tool_manager) - - # Return direct response - return response.content[0].text - - def _handle_tool_execution(self, initial_response, base_params: Dict[str, Any], tool_manager): + + # Initialize conversation context for sequential rounds + messages = [{"role": "user", "content": query}] + + # Execute sequential rounds + try: + final_response = self._execute_sequential_rounds( + messages=messages, + system_content=system_content, + tools=tools, + tool_manager=tool_manager, + max_rounds=max_rounds, + ) + return final_response + + except Exception as e: + # Graceful fallback on any error + print(f"Error in sequential tool calling: {e}") + return self._fallback_response(query, system_content) + + def _execute_sequential_rounds( + self, + messages: list[dict], + system_content: str, + tools: list | None, + tool_manager, + max_rounds: int, + ) -> str: + """ + Execute up to max_rounds of 
sequential tool calling. + + Args: + messages: Conversation messages + system_content: System prompt with context + tools: Available tools + tool_manager: Tool execution manager + max_rounds: Maximum rounds allowed + + Returns: + Final response text + + Raises: + Exception: On unrecoverable errors + """ + + current_round = 0 + + while current_round < max_rounds: + current_round += 1 + + # Prepare API parameters for this round + api_params = { + **self.base_params, + "messages": messages, + "system": system_content, + } + + # Add tools if available and tool manager exists + if tools and tool_manager: + api_params["tools"] = tools + api_params["tool_choice"] = {"type": "auto"} + + # Make API call + try: + response = self.client.messages.create(**api_params) + except Exception as e: + raise Exception(f"API call failed in round {current_round}: {str(e)}") + + # Add Claude's response to conversation + messages.append({"role": "assistant", "content": response.content}) + + # Check termination conditions + termination_result = self._check_termination_conditions( + response, current_round, max_rounds + ) + + if termination_result["should_terminate"]: + return termination_result["response"] + + # Execute tools and continue to next round + try: + tool_results = self._execute_tools_for_round(response, tool_manager) + if tool_results: + messages.append({"role": "user", "content": tool_results}) + else: + # No tools executed - this shouldn't happen if stop_reason is tool_use + return self._extract_text_response(response) + + except Exception as e: + # Tool execution failed - terminate gracefully + print(f"Tool execution failed in round {current_round}: {e}") + return f"I encountered an error while using tools to answer your question. 
{self._extract_text_response(response)}" + + # If we reach here, we've exhausted max_rounds + # Make final call without tools to get conclusion + return self._make_final_call_without_tools(messages, system_content) + + def _check_termination_conditions( + self, response, current_round: int, max_rounds: int + ) -> dict[str, Any]: + """ + Check if we should terminate the sequential tool calling. + + Termination occurs when: + 1. Claude's response has no tool_use blocks + 2. Maximum rounds completed + + Args: + response: Claude's response + current_round: Current round number + max_rounds: Maximum allowed rounds + + Returns: + Dict with 'should_terminate' boolean and 'response' text if terminating + """ + + # Condition 1: No tool use - Claude provided final answer + if response.stop_reason != "tool_use": + return { + "should_terminate": True, + "response": self._extract_text_response(response), + } + + # Condition 2: Max rounds completed - only terminate if we exceed max rounds + # Note: We should allow max_rounds to complete, so only terminate if current_round > max_rounds + if current_round > max_rounds: + return { + "should_terminate": True, + "response": self._extract_text_response(response), + } + + # Continue to next round + return {"should_terminate": False, "response": None} + + def _execute_tools_for_round(self, response, tool_manager) -> list[dict] | None: + """ + Execute all tool calls in the current response. 
+ + Args: + response: Claude's response containing tool_use blocks + tool_manager: Tool execution manager + + Returns: + List of tool results or None if no tools executed + + Raises: + Exception: On tool execution failures + """ + + tool_results = [] + + for content_block in response.content: + if content_block.type == "tool_use": + try: + # Execute the tool + tool_result = tool_manager.execute_tool( + content_block.name, **content_block.input + ) + + # Handle tool execution errors + if isinstance(tool_result, str) and tool_result.startswith( + "Error:" + ): + # Tool returned an error - we can continue but should handle gracefully + print(f"Tool execution error: {tool_result}") + + tool_results.append( + { + "type": "tool_result", + "tool_use_id": content_block.id, + "content": tool_result, + } + ) + + except Exception as e: + # Critical tool execution failure + error_msg = ( + f"Failed to execute tool '{content_block.name}': {str(e)}" + ) + print(f"Critical tool error: {error_msg}") + + # Re-raise the exception to terminate the sequential tool calling + raise e + + return tool_results if tool_results else None + + def _handle_tool_execution( + self, initial_response, base_params: dict[str, Any], tool_manager + ): """ Handle execution of tool calls and get follow-up response. 
- + Args: initial_response: The response containing tool use requests base_params: Base API parameters tool_manager: Manager to execute tools - + Returns: Final response text after tool execution """ # Start with existing messages messages = base_params["messages"].copy() - + # Add AI's tool use response messages.append({"role": "assistant", "content": initial_response.content}) - + # Execute all tool calls and collect results tool_results = [] for content_block in initial_response.content: if content_block.type == "tool_use": tool_result = tool_manager.execute_tool( - content_block.name, - **content_block.input + content_block.name, **content_block.input + ) + + tool_results.append( + { + "type": "tool_result", + "tool_use_id": content_block.id, + "content": tool_result, + } ) - - tool_results.append({ - "type": "tool_result", - "tool_use_id": content_block.id, - "content": tool_result - }) - + # Add tool results as single message if tool_results: messages.append({"role": "user", "content": tool_results}) - + # Prepare final API call without tools final_params = { **self.base_params, "messages": messages, - "system": base_params["system"] + "system": base_params["system"], } - + # Get final response final_response = self.client.messages.create(**final_params) - return final_response.content[0].text \ No newline at end of file + return final_response.content[0].text + + def _extract_text_response(self, response) -> str: + """Extract text content from Claude's response, handling mixed content.""" + text_parts = [] + for content_block in response.content: + if hasattr(content_block, "type") and str(content_block.type) == "text": + # Real API response or Mock with type="text" + text_parts.append(str(content_block.text)) + elif hasattr(content_block, "text"): + # Mock object for testing - convert to string + text_parts.append(str(content_block.text)) + + return ( + "".join(text_parts) + if text_parts + else "I don't have a clear answer to provide." 
+ ) + + def _make_final_call_without_tools( + self, messages: list[dict], system_content: str + ) -> str: + """Make final API call without tools to get conclusion.""" + + # Add instruction for Claude to provide final answer + messages.append( + { + "role": "user", + "content": "Please provide your final answer based on the information gathered.", + } + ) + + api_params = { + **self.base_params, + "messages": messages, + "system": system_content, + # Explicitly no tools parameter + } + + try: + final_response = self.client.messages.create(**api_params) + return self._extract_text_response(final_response) + except Exception as e: + print(f"Final call failed: {e}") + return "I apologize, but I encountered an error while formulating my final response." + + def _fallback_response(self, query: str, system_content: str) -> str: + """Fallback to single API call without tools on error.""" + try: + api_params = { + **self.base_params, + "messages": [{"role": "user", "content": query}], + "system": system_content, + } + + response = self.client.messages.create(**api_params) + return self._extract_text_response(response) + + except Exception as e: + print(f"Fallback response failed: {e}") + return "I'm sorry, I'm unable to process your request at this time. Please try again later." 
diff --git a/backend/app.py b/backend/app.py index 5a69d741..d01c84ab 100644 --- a/backend/app.py +++ b/backend/app.py @@ -1,25 +1,22 @@ import warnings + warnings.filterwarnings("ignore", message="resource_tracker: There appear to be.*") +import os + +from config import config from fastapi import FastAPI, HTTPException from fastapi.middleware.cors import CORSMiddleware -from fastapi.staticfiles import StaticFiles from fastapi.middleware.trustedhost import TrustedHostMiddleware +from fastapi.staticfiles import StaticFiles from pydantic import BaseModel -from typing import List, Optional -import os - -from config import config from rag_system import RAGSystem # Initialize FastAPI app app = FastAPI(title="Course Materials RAG System", root_path="") # Add trusted host middleware for proxy -app.add_middleware( - TrustedHostMiddleware, - allowed_hosts=["*"] -) +app.add_middleware(TrustedHostMiddleware, allowed_hosts=["*"]) # Enable CORS with proper settings for proxy app.add_middleware( @@ -34,25 +31,35 @@ # Initialize RAG system rag_system = RAGSystem(config) + # Pydantic models for request/response class QueryRequest(BaseModel): """Request model for course queries""" + query: str - session_id: Optional[str] = None + session_id: str | None = None + class QueryResponse(BaseModel): """Response model for course queries""" + answer: str - sources: List[str] + sources: list[ + str | dict[str, str] + ] # Support both string and {"text": "...", "url": "..."} formats session_id: str + class CourseStats(BaseModel): """Response model for course statistics""" + total_courses: int - course_titles: List[str] + course_titles: list[str] + # API Endpoints + @app.post("/api/query", response_model=QueryResponse) async def query_documents(request: QueryRequest): """Process a query and return response with sources""" @@ -61,18 +68,15 @@ async def query_documents(request: QueryRequest): session_id = request.session_id if not session_id: session_id = 
rag_system.session_manager.create_session() - + # Process query using RAG system answer, sources = rag_system.query(request.query, session_id) - - return QueryResponse( - answer=answer, - sources=sources, - session_id=session_id - ) + + return QueryResponse(answer=answer, sources=sources, session_id=session_id) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) + @app.get("/api/courses", response_model=CourseStats) async def get_course_stats(): """Get course analytics and statistics""" @@ -80,11 +84,12 @@ async def get_course_stats(): analytics = rag_system.get_course_analytics() return CourseStats( total_courses=analytics["total_courses"], - course_titles=analytics["course_titles"] + course_titles=analytics["course_titles"], ) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) + @app.on_event("startup") async def startup_event(): """Load initial documents on startup""" @@ -92,16 +97,16 @@ async def startup_event(): if os.path.exists(docs_path): print("Loading initial documents...") try: - courses, chunks = rag_system.add_course_folder(docs_path, clear_existing=False) + courses, chunks = rag_system.add_course_folder( + docs_path, clear_existing=False + ) print(f"Loaded {courses} courses with {chunks} chunks") except Exception as e: print(f"Error loading documents: {e}") + # Custom static file handler with no-cache headers for development -from fastapi.staticfiles import StaticFiles from fastapi.responses import FileResponse -import os -from pathlib import Path class DevStaticFiles(StaticFiles): @@ -113,7 +118,7 @@ async def get_response(self, path: str, scope): response.headers["Pragma"] = "no-cache" response.headers["Expires"] = "0" return response - - + + # Serve static files for the frontend -app.mount("/", StaticFiles(directory="../frontend", html=True), name="static") \ No newline at end of file +app.mount("/", StaticFiles(directory="../frontend", html=True), name="static") diff --git a/backend/config.py 
b/backend/config.py index d9f6392e..cab6dccc 100644 --- a/backend/config.py +++ b/backend/config.py @@ -1,29 +1,31 @@ import os from dataclasses import dataclass + from dotenv import load_dotenv # Load environment variables from .env file load_dotenv() + @dataclass class Config: """Configuration settings for the RAG system""" + # Anthropic API settings ANTHROPIC_API_KEY: str = os.getenv("ANTHROPIC_API_KEY", "") ANTHROPIC_MODEL: str = "claude-sonnet-4-20250514" - + # Embedding model settings EMBEDDING_MODEL: str = "all-MiniLM-L6-v2" - + # Document processing settings - CHUNK_SIZE: int = 800 # Size of text chunks for vector storage - CHUNK_OVERLAP: int = 100 # Characters to overlap between chunks - MAX_RESULTS: int = 5 # Maximum search results to return - MAX_HISTORY: int = 2 # Number of conversation messages to remember - + CHUNK_SIZE: int = 800 # Size of text chunks for vector storage + CHUNK_OVERLAP: int = 100 # Characters to overlap between chunks + MAX_RESULTS: int = 5 # Maximum search results to return + MAX_HISTORY: int = 2 # Number of conversation messages to remember + # Database paths CHROMA_PATH: str = "./chroma_db" # ChromaDB storage location -config = Config() - +config = Config() diff --git a/backend/document_processor.py b/backend/document_processor.py index 266e8590..230d1d4c 100644 --- a/backend/document_processor.py +++ b/backend/document_processor.py @@ -1,83 +1,86 @@ import os import re -from typing import List, Tuple -from models import Course, Lesson, CourseChunk + +from models import Course, CourseChunk, Lesson + class DocumentProcessor: """Processes course documents and extracts structured information""" - + def __init__(self, chunk_size: int, chunk_overlap: int): self.chunk_size = chunk_size self.chunk_overlap = chunk_overlap - + def read_file(self, file_path: str) -> str: """Read content from file with UTF-8 encoding""" try: - with open(file_path, 'r', encoding='utf-8') as file: + with open(file_path, encoding="utf-8") as file: return 
file.read() except UnicodeDecodeError: # If UTF-8 fails, try with error handling - with open(file_path, 'r', encoding='utf-8', errors='ignore') as file: + with open(file_path, encoding="utf-8", errors="ignore") as file: return file.read() - - - def chunk_text(self, text: str) -> List[str]: + def chunk_text(self, text: str) -> list[str]: """Split text into sentence-based chunks with overlap using config settings""" - + # Clean up the text - text = re.sub(r'\s+', ' ', text.strip()) # Normalize whitespace - + text = re.sub(r"\s+", " ", text.strip()) # Normalize whitespace + # Better sentence splitting that handles abbreviations # This regex looks for periods followed by whitespace and capital letters # but ignores common abbreviations - sentence_endings = re.compile(r'(? self.chunk_size and current_chunk: break - + current_chunk.append(sentence) current_size += total_addition - + # Add chunk if we have content if current_chunk: - chunks.append(' '.join(current_chunk)) - + chunks.append(" ".join(current_chunk)) + # Calculate overlap for next chunk - if hasattr(self, 'chunk_overlap') and self.chunk_overlap > 0: + if hasattr(self, "chunk_overlap") and self.chunk_overlap > 0: # Find how many sentences to overlap overlap_size = 0 overlap_sentences = 0 - + # Count backwards from end of current chunk for k in range(len(current_chunk) - 1, -1, -1): - sentence_len = len(current_chunk[k]) + (1 if k < len(current_chunk) - 1 else 0) + sentence_len = len(current_chunk[k]) + ( + 1 if k < len(current_chunk) - 1 else 0 + ) if overlap_size + sentence_len <= self.chunk_overlap: overlap_size += sentence_len overlap_sentences += 1 else: break - + # Move start position considering overlap next_start = i + len(current_chunk) - overlap_sentences i = max(next_start, i + 1) # Ensure we make progress @@ -87,14 +90,12 @@ def chunk_text(self, text: str) -> List[str]: else: # No sentences fit, move to next i += 1 - - return chunks - - + return chunks - - def process_course_document(self, 
file_path: str) -> Tuple[Course, List[CourseChunk]]: + def process_course_document( + self, file_path: str + ) -> tuple[Course, list[CourseChunk]]: """ Process a course document with expected format: Line 1: Course Title: [title] @@ -104,47 +105,51 @@ def process_course_document(self, file_path: str) -> Tuple[Course, List[CourseCh """ content = self.read_file(file_path) filename = os.path.basename(file_path) - - lines = content.strip().split('\n') - + + lines = content.strip().split("\n") + # Extract course metadata from first three lines course_title = filename # Default fallback course_link = None instructor_name = "Unknown" - + # Parse course title from first line if len(lines) >= 1 and lines[0].strip(): - title_match = re.match(r'^Course Title:\s*(.+)$', lines[0].strip(), re.IGNORECASE) + title_match = re.match( + r"^Course Title:\s*(.+)$", lines[0].strip(), re.IGNORECASE + ) if title_match: course_title = title_match.group(1).strip() else: course_title = lines[0].strip() - + # Parse remaining lines for course metadata for i in range(1, min(len(lines), 4)): # Check first 4 lines for metadata line = lines[i].strip() if not line: continue - + # Try to match course link - link_match = re.match(r'^Course Link:\s*(.+)$', line, re.IGNORECASE) + link_match = re.match(r"^Course Link:\s*(.+)$", line, re.IGNORECASE) if link_match: course_link = link_match.group(1).strip() continue - + # Try to match instructor - instructor_match = re.match(r'^Course Instructor:\s*(.+)$', line, re.IGNORECASE) + instructor_match = re.match( + r"^Course Instructor:\s*(.+)$", line, re.IGNORECASE + ) if instructor_match: instructor_name = instructor_match.group(1).strip() continue - + # Create course object with title as ID course = Course( title=course_title, course_link=course_link, - instructor=instructor_name if instructor_name != "Unknown" else None + instructor=instructor_name if instructor_name != "Unknown" else None, ) - + # Process lessons and create chunks course_chunks = [] 
current_lesson = None @@ -152,108 +157,114 @@ def process_course_document(self, file_path: str) -> Tuple[Course, List[CourseCh lesson_link = None lesson_content = [] chunk_counter = 0 - + # Start processing from line 4 (after metadata) start_index = 3 if len(lines) > 3 and not lines[3].strip(): start_index = 4 # Skip empty line after instructor - + i = start_index while i < len(lines): line = lines[i] - + # Check for lesson markers (e.g., "Lesson 0: Introduction") - lesson_match = re.match(r'^Lesson\s+(\d+):\s*(.+)$', line.strip(), re.IGNORECASE) - + lesson_match = re.match( + r"^Lesson\s+(\d+):\s*(.+)$", line.strip(), re.IGNORECASE + ) + if lesson_match: # Process previous lesson if it exists if current_lesson is not None and lesson_content: - lesson_text = '\n'.join(lesson_content).strip() + lesson_text = "\n".join(lesson_content).strip() if lesson_text: # Add lesson to course lesson = Lesson( lesson_number=current_lesson, title=lesson_title, - lesson_link=lesson_link + lesson_link=lesson_link, ) course.lessons.append(lesson) - + # Create chunks for this lesson chunks = self.chunk_text(lesson_text) for idx, chunk in enumerate(chunks): # For the first chunk of each lesson, add lesson context if idx == 0: - chunk_with_context = f"Lesson {current_lesson} content: {chunk}" + chunk_with_context = ( + f"Lesson {current_lesson} content: {chunk}" + ) else: chunk_with_context = chunk - + course_chunk = CourseChunk( content=chunk_with_context, course_title=course.title, lesson_number=current_lesson, - chunk_index=chunk_counter + chunk_index=chunk_counter, ) course_chunks.append(course_chunk) chunk_counter += 1 - + # Start new lesson current_lesson = int(lesson_match.group(1)) lesson_title = lesson_match.group(2).strip() lesson_link = None - + # Check if next line is a lesson link if i + 1 < len(lines): next_line = lines[i + 1].strip() - link_match = re.match(r'^Lesson Link:\s*(.+)$', next_line, re.IGNORECASE) + link_match = re.match( + r"^Lesson Link:\s*(.+)$", next_line, 
re.IGNORECASE + ) if link_match: lesson_link = link_match.group(1).strip() i += 1 # Skip the link line so it's not added to content - + lesson_content = [] else: # Add line to current lesson content lesson_content.append(line) - + i += 1 - + # Process the last lesson if current_lesson is not None and lesson_content: - lesson_text = '\n'.join(lesson_content).strip() + lesson_text = "\n".join(lesson_content).strip() if lesson_text: lesson = Lesson( lesson_number=current_lesson, title=lesson_title, - lesson_link=lesson_link + lesson_link=lesson_link, ) course.lessons.append(lesson) - + chunks = self.chunk_text(lesson_text) for idx, chunk in enumerate(chunks): # For any chunk of each lesson, add lesson context & course title - + chunk_with_context = f"Course {course_title} Lesson {current_lesson} content: {chunk}" - + course_chunk = CourseChunk( content=chunk_with_context, course_title=course.title, lesson_number=current_lesson, - chunk_index=chunk_counter + chunk_index=chunk_counter, ) course_chunks.append(course_chunk) chunk_counter += 1 - + # If no lessons found, treat entire content as one document if not course_chunks and len(lines) > 2: - remaining_content = '\n'.join(lines[start_index:]).strip() + remaining_content = "\n".join(lines[start_index:]).strip() if remaining_content: chunks = self.chunk_text(remaining_content) for chunk in chunks: course_chunk = CourseChunk( content=chunk, course_title=course.title, - chunk_index=chunk_counter + chunk_index=chunk_counter, ) course_chunks.append(course_chunk) chunk_counter += 1 - + return course, course_chunks diff --git a/backend/models.py b/backend/models.py index 7f7126fa..24a9652a 100644 --- a/backend/models.py +++ b/backend/models.py @@ -1,22 +1,27 @@ -from typing import List, Dict, Optional from pydantic import BaseModel + class Lesson(BaseModel): """Represents a lesson within a course""" + lesson_number: int # Sequential lesson number (1, 2, 3, etc.) 
- title: str # Lesson title - lesson_link: Optional[str] = None # URL link to the lesson + title: str # Lesson title + lesson_link: str | None = None # URL link to the lesson + class Course(BaseModel): """Represents a complete course with its lessons""" - title: str # Full course title (used as unique identifier) - course_link: Optional[str] = None # URL link to the course - instructor: Optional[str] = None # Course instructor name (optional metadata) - lessons: List[Lesson] = [] # List of lessons in this course + + title: str # Full course title (used as unique identifier) + course_link: str | None = None # URL link to the course + instructor: str | None = None # Course instructor name (optional metadata) + lessons: list[Lesson] = [] # List of lessons in this course + class CourseChunk(BaseModel): """Represents a text chunk from a course for vector storage""" - content: str # The actual text content - course_title: str # Which course this chunk belongs to - lesson_number: Optional[int] = None # Which lesson this chunk is from - chunk_index: int # Position of this chunk in the document \ No newline at end of file + + content: str # The actual text content + course_title: str # Which course this chunk belongs to + lesson_number: int | None = None # Which lesson this chunk is from + chunk_index: int # Position of this chunk in the document diff --git a/backend/rag_system.py b/backend/rag_system.py index 50d848c8..6dba30fa 100644 --- a/backend/rag_system.py +++ b/backend/rag_system.py @@ -1,147 +1,188 @@ -from typing import List, Tuple, Optional, Dict import os -from document_processor import DocumentProcessor -from vector_store import VectorStore + from ai_generator import AIGenerator +from document_processor import DocumentProcessor +from models import Course +from search_tools import CourseOutlineTool, CourseSearchTool, ToolManager from session_manager import SessionManager -from search_tools import ToolManager, CourseSearchTool -from models import Course, Lesson, 
CourseChunk +from vector_store import VectorStore + class RAGSystem: """Main orchestrator for the Retrieval-Augmented Generation system""" - + def __init__(self, config): self.config = config - + # Initialize core components - self.document_processor = DocumentProcessor(config.CHUNK_SIZE, config.CHUNK_OVERLAP) - self.vector_store = VectorStore(config.CHROMA_PATH, config.EMBEDDING_MODEL, config.MAX_RESULTS) - self.ai_generator = AIGenerator(config.ANTHROPIC_API_KEY, config.ANTHROPIC_MODEL) + self.document_processor = DocumentProcessor( + config.CHUNK_SIZE, config.CHUNK_OVERLAP + ) + self.vector_store = VectorStore( + config.CHROMA_PATH, config.EMBEDDING_MODEL, config.MAX_RESULTS + ) + self.ai_generator = AIGenerator( + config.ANTHROPIC_API_KEY, config.ANTHROPIC_MODEL + ) self.session_manager = SessionManager(config.MAX_HISTORY) - + # Initialize search tools self.tool_manager = ToolManager() self.search_tool = CourseSearchTool(self.vector_store) + self.outline_tool = CourseOutlineTool(self.vector_store) self.tool_manager.register_tool(self.search_tool) - - def add_course_document(self, file_path: str) -> Tuple[Course, int]: + self.tool_manager.register_tool(self.outline_tool) + + def add_course_document(self, file_path: str) -> tuple[Course, int]: """ Add a single course document to the knowledge base. 
- + Args: file_path: Path to the course document - + Returns: Tuple of (Course object, number of chunks created) """ try: # Process the document - course, course_chunks = self.document_processor.process_course_document(file_path) - + course, course_chunks = self.document_processor.process_course_document( + file_path + ) + # Add course metadata to vector store for semantic search self.vector_store.add_course_metadata(course) - + # Add course content chunks to vector store self.vector_store.add_course_content(course_chunks) - + return course, len(course_chunks) except Exception as e: print(f"Error processing course document {file_path}: {e}") return None, 0 - - def add_course_folder(self, folder_path: str, clear_existing: bool = False) -> Tuple[int, int]: + + def add_course_folder( + self, folder_path: str, clear_existing: bool = False + ) -> tuple[int, int]: """ Add all course documents from a folder. - + Args: folder_path: Path to folder containing course documents clear_existing: Whether to clear existing data first - + Returns: Tuple of (total courses added, total chunks created) """ total_courses = 0 total_chunks = 0 - + # Clear existing data if requested if clear_existing: print("Clearing existing data for fresh rebuild...") self.vector_store.clear_all_data() - + if not os.path.exists(folder_path): print(f"Folder {folder_path} does not exist") return 0, 0 - + # Get existing course titles to avoid re-processing existing_course_titles = set(self.vector_store.get_existing_course_titles()) - + # Process each file in the folder for file_name in os.listdir(folder_path): file_path = os.path.join(folder_path, file_name) - if os.path.isfile(file_path) and file_name.lower().endswith(('.pdf', '.docx', '.txt')): + if os.path.isfile(file_path) and file_name.lower().endswith( + (".pdf", ".docx", ".txt") + ): try: # Check if this course might already exist # We'll process the document to get the course ID, but only add if new - course, course_chunks = 
self.document_processor.process_course_document(file_path) - + course, course_chunks = ( + self.document_processor.process_course_document(file_path) + ) + if course and course.title not in existing_course_titles: # This is a new course - add it to the vector store self.vector_store.add_course_metadata(course) self.vector_store.add_course_content(course_chunks) total_courses += 1 total_chunks += len(course_chunks) - print(f"Added new course: {course.title} ({len(course_chunks)} chunks)") + print( + f"Added new course: {course.title} ({len(course_chunks)} chunks)" + ) existing_course_titles.add(course.title) elif course: print(f"Course already exists: {course.title} - skipping") except Exception as e: print(f"Error processing {file_name}: {e}") - + return total_courses, total_chunks - - def query(self, query: str, session_id: Optional[str] = None) -> Tuple[str, List[str]]: + + def query( + self, query: str, session_id: str | None = None + ) -> tuple[str, list[str]]: """ Process a user query using the RAG system with tool-based search. 
- + Args: query: User's question session_id: Optional session ID for conversation context - + Returns: Tuple of (response, sources list - empty for tool-based approach) """ - # Create prompt for the AI with clear instructions - prompt = f"""Answer this question about course materials: {query}""" - - # Get conversation history if session exists - history = None - if session_id: - history = self.session_manager.get_conversation_history(session_id) - - # Generate response using AI with tools - response = self.ai_generator.generate_response( - query=prompt, - conversation_history=history, - tools=self.tool_manager.get_tool_definitions(), - tool_manager=self.tool_manager - ) - - # Get sources from the search tool - sources = self.tool_manager.get_last_sources() - - # Reset sources after retrieving them - self.tool_manager.reset_sources() - - # Update conversation history - if session_id: - self.session_manager.add_exchange(session_id, query, response) - - # Return response with sources from tool searches - return response, sources - - def get_course_analytics(self) -> Dict: + try: + # Input validation + if not query or not query.strip(): + return "Please provide a valid question.", [] + + # Create prompt for the AI with clear instructions + prompt = f"""Answer this question about course materials: {query}""" + + # Get conversation history if session exists + history = None + if session_id: + try: + history = self.session_manager.get_conversation_history(session_id) + except Exception as e: + print(f"Warning: Failed to get conversation history: {e}") + # Continue without history rather than failing entirely + + # Generate response using AI with tools + response = self.ai_generator.generate_response( + query=prompt, + conversation_history=history, + tools=self.tool_manager.get_tool_definitions(), + tool_manager=self.tool_manager, + ) + + # Get sources from the search tool + sources = self.tool_manager.get_last_sources() + + # Reset sources after retrieving them + 
self.tool_manager.reset_sources() + + # Update conversation history (don't fail if this fails) + if session_id: + try: + self.session_manager.add_exchange(session_id, query, response) + except Exception as e: + print(f"Warning: Failed to update conversation history: {e}") + + # Return response with sources from tool searches + return response, sources + + except Exception as e: + # Log the full error for debugging + print(f"RAG system error: {e}") + + # Return user-friendly error message + error_msg = "I'm sorry, I encountered an error while processing your question. Please try again or rephrase your question." + return error_msg, [] + + def get_course_analytics(self) -> dict: """Get analytics about the course catalog""" return { "total_courses": self.vector_store.get_course_count(), - "course_titles": self.vector_store.get_existing_course_titles() - } \ No newline at end of file + "course_titles": self.vector_store.get_existing_course_titles(), + } diff --git a/backend/search_tools.py b/backend/search_tools.py index adfe8235..7f575785 100644 --- a/backend/search_tools.py +++ b/backend/search_tools.py @@ -1,16 +1,17 @@ -from typing import Dict, Any, Optional, Protocol from abc import ABC, abstractmethod -from vector_store import VectorStore, SearchResults +from typing import Any + +from vector_store import SearchResults, VectorStore class Tool(ABC): """Abstract base class for all tools""" - + @abstractmethod - def get_tool_definition(self) -> Dict[str, Any]: + def get_tool_definition(self) -> dict[str, Any]: """Return Anthropic tool definition for this tool""" pass - + @abstractmethod def execute(self, **kwargs) -> str: """Execute the tool with given parameters""" @@ -19,12 +20,12 @@ def execute(self, **kwargs) -> str: class CourseSearchTool(Tool): """Tool for searching course content with semantic course name matching""" - + def __init__(self, vector_store: VectorStore): self.store = vector_store self.last_sources = [] # Track sources from last search - - def 
get_tool_definition(self) -> Dict[str, Any]: + + def get_tool_definition(self) -> dict[str, Any]: """Return Anthropic tool definition for this tool""" return { "name": "search_course_content", @@ -33,92 +34,196 @@ def get_tool_definition(self) -> Dict[str, Any]: "type": "object", "properties": { "query": { - "type": "string", - "description": "What to search for in the course content" + "type": "string", + "description": "What to search for in the course content", }, "course_name": { "type": "string", - "description": "Course title (partial matches work, e.g. 'MCP', 'Introduction')" + "description": "Course title (partial matches work, e.g. 'MCP', 'Introduction')", }, "lesson_number": { "type": "integer", - "description": "Specific lesson number to search within (e.g. 1, 2, 3)" - } + "description": "Specific lesson number to search within (e.g. 1, 2, 3)", + }, }, - "required": ["query"] - } + "required": ["query"], + }, } - - def execute(self, query: str, course_name: Optional[str] = None, lesson_number: Optional[int] = None) -> str: + + def execute( + self, + query: str, + course_name: str | None = None, + lesson_number: int | None = None, + ) -> str: """ Execute the search tool with given parameters. - + Args: query: What to search for course_name: Optional course filter lesson_number: Optional lesson filter - + Returns: Formatted search results or error message """ - - # Use the vector store's unified search interface - results = self.store.search( - query=query, - course_name=course_name, - lesson_number=lesson_number - ) - - # Handle errors - if results.error: - return results.error - - # Handle empty results - if results.is_empty(): - filter_info = "" - if course_name: - filter_info += f" in course '{course_name}'" - if lesson_number: - filter_info += f" in lesson {lesson_number}" - return f"No relevant content found{filter_info}." 
- - # Format and return results - return self._format_results(results) - + + # Input validation + if query is None: + return "Error: Search query cannot be None." + + try: + # Use the vector store's unified search interface + results = self.store.search( + query=query, course_name=course_name, lesson_number=lesson_number + ) + + # Handle errors from vector store + if results.error: + return results.error + + # Handle empty results + if results.is_empty(): + filter_info = "" + if course_name: + filter_info += f" in course '{course_name}'" + if lesson_number: + filter_info += f" in lesson {lesson_number}" + return f"No relevant content found{filter_info}." + + # Format and return results + return self._format_results(results) + + except Exception as e: + # Handle any unexpected errors gracefully + error_msg = f"Search failed due to an internal error: {str(e)}" + print(f"CourseSearchTool error: {e}") # Log for debugging + return error_msg + def _format_results(self, results: SearchResults) -> str: """Format search results with course and lesson context""" formatted = [] sources = [] # Track sources for the UI - - for doc, meta in zip(results.documents, results.metadata): - course_title = meta.get('course_title', 'unknown') - lesson_num = meta.get('lesson_number') - + + for doc, meta in zip(results.documents, results.metadata, strict=False): + course_title = meta.get("course_title", "unknown") + lesson_num = meta.get("lesson_number") + # Build context header header = f"[{course_title}" if lesson_num is not None: header += f" - Lesson {lesson_num}" header += "]" - - # Track source for the UI - source = course_title + + # Track source for the UI with link if available + source_text = course_title if lesson_num is not None: - source += f" - Lesson {lesson_num}" - sources.append(source) - + source_text += f" - Lesson {lesson_num}" + # Try to get lesson link + lesson_link = self.store.get_lesson_link(course_title, lesson_num) + if lesson_link: + # Create source object with 
link + sources.append({"text": source_text, "url": lesson_link}) + else: + # Fallback to plain text + sources.append(source_text) + else: + # For course-level content, try to get course link + course_link = self.store.get_course_link(course_title) + if course_link: + sources.append({"text": source_text, "url": course_link}) + else: + sources.append(source_text) + formatted.append(f"{header}\n{doc}") - + # Store sources for retrieval self.last_sources = sources - + return "\n\n".join(formatted) + +class CourseOutlineTool(Tool): + """Tool for getting complete course outlines with lesson structure""" + + def __init__(self, vector_store: VectorStore): + self.store = vector_store + self.last_sources = [] # Track sources from last search + + def get_tool_definition(self) -> dict[str, Any]: + """Return Anthropic tool definition for this tool""" + return { + "name": "get_course_outline", + "description": "Get the complete outline/structure of a course including all lessons with numbers and titles", + "input_schema": { + "type": "object", + "properties": { + "course_name": { + "type": "string", + "description": "Course title or partial course name (e.g. 'MCP', 'Introduction', 'RAG')", + } + }, + "required": ["course_name"], + }, + } + + def execute(self, course_name: str) -> str: + """ + Execute the course outline tool with given course name. + + Args: + course_name: Course title to get outline for + + Returns: + Formatted course outline with lessons or error message + """ + + # Get course outline from vector store + outline = self.store.get_course_outline(course_name) + + # Handle course not found + if not outline: + return f"No course found matching '{course_name}'. Please check the course name or try a partial match." 
+ + # Format the outline response + return self._format_outline(outline) + + def _format_outline(self, outline: dict[str, Any]) -> str: + """Format course outline for AI response""" + course_title = outline.get("course_title", "Unknown Course") + course_link = outline.get("course_link") + lessons = outline.get("lessons", []) + + # Build formatted response + formatted = [f"Course: {course_title}"] + + if lessons: + formatted.append(f"\nLessons ({len(lessons)} total):") + for lesson in lessons: + lesson_num = lesson.get("lesson_number", "?") + lesson_title = lesson.get("lesson_title", "Untitled Lesson") + formatted.append(f" {lesson_num}. {lesson_title}") + else: + formatted.append("\nNo lesson structure available for this course.") + + # Track sources for the UI + sources = [] + if course_link: + sources.append({"text": course_title, "url": course_link}) + else: + sources.append(course_title) + + self.last_sources = sources + + return "\n".join(formatted) + + class ToolManager: """Manages available tools for the AI""" - + def __init__(self): self.tools = {} - + def register_tool(self, tool: Tool): """Register any tool that implements the Tool interface""" tool_def = tool.get_tool_definition() @@ -127,28 +232,27 @@ def register_tool(self, tool: Tool): raise ValueError("Tool must have a 'name' in its definition") self.tools[tool_name] = tool - def get_tool_definitions(self) -> list: """Get all tool definitions for Anthropic tool calling""" return [tool.get_tool_definition() for tool in self.tools.values()] - + def execute_tool(self, tool_name: str, **kwargs) -> str: """Execute a tool by name with given parameters""" if tool_name not in self.tools: return f"Tool '{tool_name}' not found" - + return self.tools[tool_name].execute(**kwargs) - + def get_last_sources(self) -> list: """Get sources from the last search operation""" # Check all tools for last_sources attribute for tool in self.tools.values(): - if hasattr(tool, 'last_sources') and tool.last_sources: + if 
hasattr(tool, "last_sources") and tool.last_sources: return tool.last_sources return [] def reset_sources(self): """Reset sources from all tools that track sources""" for tool in self.tools.values(): - if hasattr(tool, 'last_sources'): - tool.last_sources = [] \ No newline at end of file + if hasattr(tool, "last_sources"): + tool.last_sources = [] diff --git a/backend/session_manager.py b/backend/session_manager.py index a5a96b1a..cae2f3ca 100644 --- a/backend/session_manager.py +++ b/backend/session_manager.py @@ -1,61 +1,65 @@ -from typing import Dict, List, Optional from dataclasses import dataclass + @dataclass class Message: """Represents a single message in a conversation""" - role: str # "user" or "assistant" + + role: str # "user" or "assistant" content: str # The message content + class SessionManager: """Manages conversation sessions and message history""" - + def __init__(self, max_history: int = 5): self.max_history = max_history - self.sessions: Dict[str, List[Message]] = {} + self.sessions: dict[str, list[Message]] = {} self.session_counter = 0 - + def create_session(self) -> str: """Create a new conversation session""" self.session_counter += 1 session_id = f"session_{self.session_counter}" self.sessions[session_id] = [] return session_id - + def add_message(self, session_id: str, role: str, content: str): """Add a message to the conversation history""" if session_id not in self.sessions: self.sessions[session_id] = [] - + message = Message(role=role, content=content) self.sessions[session_id].append(message) - + # Keep conversation history within limits if len(self.sessions[session_id]) > self.max_history * 2: - self.sessions[session_id] = self.sessions[session_id][-self.max_history * 2:] - + self.sessions[session_id] = self.sessions[session_id][ + -self.max_history * 2 : + ] + def add_exchange(self, session_id: str, user_message: str, assistant_message: str): """Add a complete question-answer exchange""" self.add_message(session_id, "user", 
user_message) self.add_message(session_id, "assistant", assistant_message) - - def get_conversation_history(self, session_id: Optional[str]) -> Optional[str]: + + def get_conversation_history(self, session_id: str | None) -> str | None: """Get formatted conversation history for a session""" if not session_id or session_id not in self.sessions: return None - + messages = self.sessions[session_id] if not messages: return None - + # Format messages for context formatted_messages = [] for msg in messages: formatted_messages.append(f"{msg.role.title()}: {msg.content}") - + return "\n".join(formatted_messages) - + def clear_session(self, session_id: str): """Clear all messages from a session""" if session_id in self.sessions: - self.sessions[session_id] = [] \ No newline at end of file + self.sessions[session_id] = [] diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 00000000..82ecf1ba --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,321 @@ +"""Pytest configuration and shared fixtures for RAG system tests""" + +import os +import shutil +import sys +import tempfile +from unittest.mock import Mock + +import pytest + +# Add backend directory to Python path for imports +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + +from config import Config +from models import Course, CourseChunk, Lesson +from search_tools import CourseOutlineTool, CourseSearchTool, ToolManager +from vector_store import SearchResults, VectorStore + + +@pytest.fixture +def temp_chroma_path(): + """Create temporary ChromaDB path for testing""" + temp_dir = tempfile.mkdtemp() + yield temp_dir + shutil.rmtree(temp_dir, ignore_errors=True) + + +@pytest.fixture +def test_config(temp_chroma_path): + """Test configuration with temporary paths""" + config = Config() + config.CHROMA_PATH = temp_chroma_path + config.ANTHROPIC_API_KEY = "test-key" + config.MAX_RESULTS = 3 # Smaller for testing + return config + + +@pytest.fixture 
+def sample_course(): + """Sample course for testing""" + return Course( + title="Introduction to Machine Learning", + course_link="https://example.com/ml-course", + instructor="Dr. Smith", + lessons=[ + Lesson( + lesson_number=1, + title="What is ML?", + lesson_link="https://example.com/lesson1", + ), + Lesson( + lesson_number=2, + title="Types of ML", + lesson_link="https://example.com/lesson2", + ), + Lesson( + lesson_number=3, + title="ML Algorithms", + lesson_link="https://example.com/lesson3", + ), + ], + ) + + +@pytest.fixture +def sample_course_chunks(sample_course): + """Sample course chunks for testing""" + return [ + CourseChunk( + content="Machine learning is a subset of artificial intelligence that focuses on algorithms.", + course_title=sample_course.title, + lesson_number=1, + chunk_index=0, + ), + CourseChunk( + content="There are three main types of machine learning: supervised, unsupervised, and reinforcement learning.", + course_title=sample_course.title, + lesson_number=2, + chunk_index=1, + ), + CourseChunk( + content="Popular ML algorithms include linear regression, decision trees, and neural networks.", + course_title=sample_course.title, + lesson_number=3, + chunk_index=2, + ), + ] + + +@pytest.fixture +def mock_vector_store(): + """Mock VectorStore for testing search tools""" + mock_store = Mock(spec=VectorStore) + + # Configure default successful search response + mock_store.search.return_value = SearchResults( + documents=["Machine learning is a subset of artificial intelligence."], + metadata=[ + {"course_title": "Introduction to Machine Learning", "lesson_number": 1} + ], + distances=[0.2], + error=None, + ) + + # Configure course outline response + mock_store.get_course_outline.return_value = { + "course_title": "Introduction to Machine Learning", + "course_link": "https://example.com/ml-course", + "lessons": [ + {"lesson_number": 1, "lesson_title": "What is ML?"}, + {"lesson_number": 2, "lesson_title": "Types of ML?"}, + 
{"lesson_number": 3, "lesson_title": "ML Algorithms"}, + ], + } + + # Configure link methods + mock_store.get_lesson_link.return_value = "https://example.com/lesson1" + mock_store.get_course_link.return_value = "https://example.com/ml-course" + + return mock_store + + +@pytest.fixture +def mock_anthropic_client(): + """Mock Anthropic client for testing AI generator""" + mock_client = Mock() + + # Mock response without tool use + mock_response = Mock() + mock_response.content = [Mock(text="This is a test response")] + mock_response.stop_reason = "end_turn" + + mock_client.messages.create.return_value = mock_response + return mock_client + + +@pytest.fixture +def mock_anthropic_tool_response(): + """Mock Anthropic response with tool usage""" + mock_response = Mock() + mock_response.stop_reason = "tool_use" + + # Mock tool use content block + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = "search_course_content" + tool_block.id = "tool_123" + tool_block.input = {"query": "machine learning", "course_name": "ML"} + + mock_response.content = [tool_block] + return mock_response + + +@pytest.fixture +def mock_final_anthropic_response(): + """Mock final Anthropic response after tool execution""" + mock_response = Mock() + mock_response.content = [ + Mock(text="Based on the search results, machine learning is...") + ] + return mock_response + + +@pytest.fixture +def real_vector_store(test_config): + """Real VectorStore instance for integration tests""" + return VectorStore( + chroma_path=test_config.CHROMA_PATH, + embedding_model=test_config.EMBEDDING_MODEL, + max_results=test_config.MAX_RESULTS, + ) + + +@pytest.fixture +def populated_vector_store(real_vector_store, sample_course, sample_course_chunks): + """Vector store populated with test data""" + real_vector_store.add_course_metadata(sample_course) + real_vector_store.add_course_content(sample_course_chunks) + return real_vector_store + + +@pytest.fixture +def 
course_search_tool(mock_vector_store): + """CourseSearchTool with mocked dependencies""" + return CourseSearchTool(mock_vector_store) + + +@pytest.fixture +def course_outline_tool(mock_vector_store): + """CourseOutlineTool with mocked dependencies""" + return CourseOutlineTool(mock_vector_store) + + +@pytest.fixture +def tool_manager(course_search_tool, course_outline_tool): + """ToolManager with registered tools""" + manager = ToolManager() + manager.register_tool(course_search_tool) + manager.register_tool(course_outline_tool) + return manager + + +@pytest.fixture +def mock_rag_system(): + """Mock RAG system for API testing""" + mock_rag = Mock() + + # Default successful query response + mock_rag.query.return_value = ( + "This is a test response about machine learning.", + [{"text": "Introduction to ML - Lesson 1", "url": "https://example.com/ml/lesson1"}] + ) + + # Default session creation + mock_rag.session_manager.create_session.return_value = "test-session-123" + + # Default course analytics + mock_rag.get_course_analytics.return_value = { + "total_courses": 2, + "course_titles": ["Introduction to Machine Learning", "Advanced Python Programming"] + } + + # Default course folder loading + mock_rag.add_course_folder.return_value = (2, 15) # 2 courses, 15 chunks + + return mock_rag + + +@pytest.fixture +def test_app_factory(): + """Factory for creating test apps with mocked dependencies""" + def create_test_app(mock_rag=None): + """Create FastAPI test app with mocked RAG system""" + from fastapi import FastAPI, HTTPException + from fastapi.middleware.cors import CORSMiddleware + from fastapi.middleware.trustedhost import TrustedHostMiddleware + from pydantic import BaseModel + from typing import List, Optional, Union, Dict + + # Create test app without static file mounting to avoid frontend dependency + app = FastAPI(title="Course Materials RAG System - Test", root_path="") + + # Add middleware + app.add_middleware(TrustedHostMiddleware, allowed_hosts=["*"]) + 
app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + expose_headers=["*"], + ) + + # Use provided mock or create default + if mock_rag is None: + mock_rag = Mock() + mock_rag.query.return_value = ("Test response", []) + mock_rag.session_manager.create_session.return_value = "test-session" + mock_rag.get_course_analytics.return_value = {"total_courses": 0, "course_titles": []} + + # Pydantic models (duplicated to avoid import issues in tests) + class QueryRequest(BaseModel): + query: str + session_id: Optional[str] = None + + class QueryResponse(BaseModel): + answer: str + sources: List[Union[str, Dict[str, str]]] + session_id: str + + class CourseStats(BaseModel): + total_courses: int + course_titles: List[str] + + # API endpoints + @app.post("/api/query", response_model=QueryResponse) + async def query_documents(request: QueryRequest): + try: + session_id = request.session_id + if not session_id: + session_id = mock_rag.session_manager.create_session() + + answer, sources = mock_rag.query(request.query, session_id) + + return QueryResponse( + answer=answer, + sources=sources, + session_id=session_id + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @app.get("/api/courses", response_model=CourseStats) + async def get_course_stats(): + try: + analytics = mock_rag.get_course_analytics() + return CourseStats( + total_courses=analytics["total_courses"], + course_titles=analytics["course_titles"] + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @app.get("/health") + async def health_check(): + return {"status": "healthy"} + + return app, mock_rag + + return create_test_app + + +@pytest.fixture +def test_client(test_app_factory, mock_rag_system): + """Test client with default mocked RAG system""" + from fastapi.testclient import TestClient + + app, rag_mock = test_app_factory(mock_rag_system) + client = 
TestClient(app) + + return client, rag_mock diff --git a/backend/tests/test_ai_generator.py b/backend/tests/test_ai_generator.py new file mode 100644 index 00000000..bdf52439 --- /dev/null +++ b/backend/tests/test_ai_generator.py @@ -0,0 +1,733 @@ +"""Unit tests for AIGenerator""" + +from unittest.mock import Mock, patch + +from ai_generator import AIGenerator + + +class TestAIGeneratorBasic: + """Test basic AIGenerator functionality""" + + def test_init(self): + """Test AIGenerator initialization""" + generator = AIGenerator("test-api-key", "claude-sonnet-4") + + assert generator.model == "claude-sonnet-4" + assert generator.base_params["model"] == "claude-sonnet-4" + assert generator.base_params["temperature"] == 0 + assert generator.base_params["max_tokens"] == 800 + + @patch("anthropic.Anthropic") + def test_generate_response_simple_query(self, mock_anthropic): + """Test simple response generation without tools""" + # Mock client and response + mock_client = Mock() + mock_anthropic.return_value = mock_client + + mock_response = Mock() + mock_response.content = [Mock(text="This is a simple response")] + mock_response.stop_reason = "end_turn" + mock_client.messages.create.return_value = mock_response + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response("What is machine learning?") + + assert result == "This is a simple response" + mock_client.messages.create.assert_called_once() + + @patch("anthropic.Anthropic") + def test_generate_response_with_conversation_history(self, mock_anthropic): + """Test response generation with conversation history""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + mock_response = Mock() + mock_response.content = [Mock(text="Response with context")] + mock_response.stop_reason = "end_turn" + mock_client.messages.create.return_value = mock_response + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "Follow up question", 
conversation_history="Previous conversation context" + ) + + assert result == "Response with context" + + # Verify system prompt includes history + call_args = mock_client.messages.create.call_args + system_content = call_args[1]["system"] + assert "Previous conversation context" in system_content + + +class TestAIGeneratorToolIntegration: + """Test AIGenerator tool calling functionality""" + + @patch("anthropic.Anthropic") + def test_generate_response_with_tools_no_tool_use(self, mock_anthropic): + """Test response generation with tools available but not used""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + mock_response = Mock() + mock_response.content = [Mock(text="Direct response without tools")] + mock_response.stop_reason = "end_turn" + mock_client.messages.create.return_value = mock_response + + # Create mock tools and tool manager + mock_tools = [{"name": "search_tool", "description": "Search content"}] + mock_tool_manager = Mock() + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "General question", tools=mock_tools, tool_manager=mock_tool_manager + ) + + assert result == "Direct response without tools" + + # Verify tools were included in API call + call_args = mock_client.messages.create.call_args + assert call_args[1]["tools"] == mock_tools + assert call_args[1]["tool_choice"] == {"type": "auto"} + + @patch("anthropic.Anthropic") + def test_generate_response_with_tool_use_single_tool(self, mock_anthropic): + """Test response generation with single tool call""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Mock initial response with tool use + mock_tool_response = Mock() + mock_tool_response.stop_reason = "tool_use" + + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = "search_course_content" + tool_block.id = "tool_123" + tool_block.input = {"query": "machine learning", "course_name": "ML"} + mock_tool_response.content = 
[tool_block] + + # Mock final response after tool execution + mock_final_response = Mock() + mock_final_response.content = [Mock(text="Based on search results: ML is...")] + + # Configure client to return different responses for each call + mock_client.messages.create.side_effect = [ + mock_tool_response, + mock_final_response, + ] + + # Mock tool manager + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.return_value = ( + "Search results: Machine learning is AI subset" + ) + + mock_tools = [ + {"name": "search_course_content", "description": "Search content"} + ] + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "What is machine learning?", + tools=mock_tools, + tool_manager=mock_tool_manager, + ) + + assert result == "Based on search results: ML is..." + + # Verify tool was executed + mock_tool_manager.execute_tool.assert_called_once_with( + "search_course_content", query="machine learning", course_name="ML" + ) + + # Verify two API calls were made + assert mock_client.messages.create.call_count == 2 + + @patch("anthropic.Anthropic") + def test_generate_response_with_multiple_tool_calls(self, mock_anthropic): + """Test response generation with multiple tool calls""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Mock initial response with multiple tool uses + mock_tool_response = Mock() + mock_tool_response.stop_reason = "tool_use" + + tool_block1 = Mock() + tool_block1.type = "tool_use" + tool_block1.name = "search_course_content" + tool_block1.id = "tool_1" + tool_block1.input = {"query": "machine learning"} + + tool_block2 = Mock() + tool_block2.type = "tool_use" + tool_block2.name = "get_course_outline" + tool_block2.id = "tool_2" + tool_block2.input = {"course_name": "ML Course"} + + mock_tool_response.content = [tool_block1, tool_block2] + + # Mock final response + mock_final_response = Mock() + mock_final_response.content = [Mock(text="Combined results from both 
tools")] + + mock_client.messages.create.side_effect = [ + mock_tool_response, + mock_final_response, + ] + + # Mock tool manager with multiple tool results + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.side_effect = [ + "Search result 1", + "Course outline result", + ] + + mock_tools = [ + {"name": "search_course_content", "description": "Search content"}, + {"name": "get_course_outline", "description": "Get outline"}, + ] + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "Tell me about ML course", tools=mock_tools, tool_manager=mock_tool_manager + ) + + assert result == "Combined results from both tools" + + # Verify both tools were executed + assert mock_tool_manager.execute_tool.call_count == 2 + mock_tool_manager.execute_tool.assert_any_call( + "search_course_content", query="machine learning" + ) + mock_tool_manager.execute_tool.assert_any_call( + "get_course_outline", course_name="ML Course" + ) + + +class TestAIGeneratorToolExecutionHandling: + """Test _handle_tool_execution method specifically""" + + def test_handle_tool_execution_message_construction(self): + """Test correct message construction during tool execution""" + # Create a real AIGenerator (without mocking Anthropic for this test) + with patch("anthropic.Anthropic") as mock_anthropic: + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Mock final response + mock_final_response = Mock() + mock_final_response.content = [Mock(text="Final response after tool use")] + mock_client.messages.create.return_value = mock_final_response + + generator = AIGenerator("test-key", "claude-sonnet-4") + + # Create mock initial response + initial_response = Mock() + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = "search_tool" + tool_block.id = "tool_123" + tool_block.input = {"query": "test"} + initial_response.content = [tool_block] + + # Create base params + base_params = { + "messages": [{"role": 
"user", "content": "test query"}], + "system": "test system prompt", + } + + # Mock tool manager + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.return_value = "Tool execution result" + + # Call the method + result = generator._handle_tool_execution( + initial_response, base_params, mock_tool_manager + ) + + assert result == "Final response after tool use" + + # Verify tool was executed + mock_tool_manager.execute_tool.assert_called_once_with( + "search_tool", query="test" + ) + + # Verify final API call structure + call_args = mock_client.messages.create.call_args + messages = call_args[1]["messages"] + + # Should have original user message, AI tool use message, and tool result message + assert len(messages) == 3 + assert messages[0]["role"] == "user" + assert messages[1]["role"] == "assistant" + assert messages[2]["role"] == "user" + + # Tool result should be in proper format + tool_result = messages[2]["content"][0] + assert tool_result["type"] == "tool_result" + assert tool_result["tool_use_id"] == "tool_123" + assert tool_result["content"] == "Tool execution result" + + def test_handle_tool_execution_no_tool_blocks(self): + """Test handling when response contains no tool use blocks""" + with patch("anthropic.Anthropic") as mock_anthropic: + mock_client = Mock() + mock_anthropic.return_value = mock_client + + mock_final_response = Mock() + mock_final_response.content = [Mock(text="No tools used")] + mock_client.messages.create.return_value = mock_final_response + + generator = AIGenerator("test-key", "claude-sonnet-4") + + # Create mock initial response with no tool blocks + initial_response = Mock() + text_block = Mock() + text_block.type = "text" + initial_response.content = [text_block] + + base_params = { + "messages": [{"role": "user", "content": "test query"}], + "system": "test system prompt", + } + + mock_tool_manager = Mock() + + result = generator._handle_tool_execution( + initial_response, base_params, mock_tool_manager + ) + + assert 
result == "No tools used" + # Tool manager should not be called + mock_tool_manager.execute_tool.assert_not_called() + + +class TestAIGeneratorErrorHandling: + """Test error handling in AIGenerator""" + + @patch("anthropic.Anthropic") + def test_anthropic_api_error(self, mock_anthropic): + """Test handling of Anthropic API errors""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Mock API to raise exception + mock_client.messages.create.side_effect = Exception("API rate limit exceeded") + + generator = AIGenerator("test-key", "claude-sonnet-4") + + result = generator.generate_response("test query") + + # Should fallback gracefully + assert "I'm sorry, I'm unable to process your request at this time" in result + + @patch("anthropic.Anthropic") + def test_tool_execution_error(self, mock_anthropic): + """Test handling of tool execution errors""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Mock initial tool use response + mock_tool_response = Mock() + mock_tool_response.stop_reason = "tool_use" + + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = "failing_tool" + tool_block.id = "tool_123" + tool_block.input = {"param": "value"} + mock_tool_response.content = [tool_block] + + # Mock final response + mock_final_response = Mock() + mock_final_response.content = [Mock(text="Handled tool error")] + + mock_client.messages.create.side_effect = [ + mock_tool_response, + mock_final_response, + ] + + # Mock tool manager to raise exception + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.side_effect = Exception("Tool execution failed") + + mock_tools = [{"name": "failing_tool", "description": "A failing tool"}] + + generator = AIGenerator("test-key", "claude-sonnet-4") + + # Should handle the tool execution error gracefully + result = generator.generate_response( + "test query", tools=mock_tools, tool_manager=mock_tool_manager + ) + + # Should return error message + assert "I encountered an 
error while using tools" in result + + @patch("anthropic.Anthropic") + def test_malformed_tool_response(self, mock_anthropic): + """Test handling of malformed tool response""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Mock response with malformed content + mock_tool_response = Mock() + mock_tool_response.stop_reason = "tool_use" + mock_tool_response.content = [] # Empty content + + mock_final_response = Mock() + mock_final_response.content = [Mock(text="Handled malformed response")] + + mock_client.messages.create.side_effect = [ + mock_tool_response, + mock_final_response, + ] + + mock_tool_manager = Mock() + mock_tools = [{"name": "test_tool", "description": "Test tool"}] + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "test query", tools=mock_tools, tool_manager=mock_tool_manager + ) + + # Should handle gracefully and not call any tools + # With empty content, it should extract no text and return default message + assert result == "I don't have a clear answer to provide." 
+ mock_tool_manager.execute_tool.assert_not_called() + + +class TestAIGeneratorSystemPrompt: + """Test system prompt construction and usage""" + + def test_system_prompt_content(self): + """Test that system prompt contains expected content""" + generator = AIGenerator("test-key", "claude-sonnet-4") + + assert "specialized in course materials" in generator.SYSTEM_PROMPT + assert "search_course_content tool" in generator.SYSTEM_PROMPT + assert "get_course_outline tool" in generator.SYSTEM_PROMPT + assert "Multiple tool calls allowed" in generator.SYSTEM_PROMPT + + @patch("anthropic.Anthropic") + def test_system_prompt_with_history(self, mock_anthropic): + """Test system prompt construction with conversation history""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + mock_response = Mock() + mock_response.content = [Mock(text="Response with history")] + mock_response.stop_reason = "end_turn" + mock_client.messages.create.return_value = mock_response + + generator = AIGenerator("test-key", "claude-sonnet-4") + generator.generate_response( + "Current question", + conversation_history="User: Previous question\nAssistant: Previous answer", + ) + + # Verify system prompt includes history + call_args = mock_client.messages.create.call_args + system_content = call_args[1]["system"] + + assert generator.SYSTEM_PROMPT in system_content + assert "Previous conversation:" in system_content + assert "User: Previous question" in system_content + assert "Assistant: Previous answer" in system_content + + +class TestAIGeneratorSequentialToolCalling: + """Test sequential tool calling functionality (up to 2 rounds)""" + + @patch("anthropic.Anthropic") + def test_sequential_tool_calling_two_rounds_success(self, mock_anthropic): + """Test successful 2-round sequential tool calling""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Round 1: Tool use response + round1_response = Mock() + round1_response.stop_reason = "tool_use" + tool_block1 = 
Mock() + tool_block1.type = "tool_use" + tool_block1.name = "get_course_outline" + tool_block1.id = "tool_1" + tool_block1.input = {"course_name": "ML Course"} + round1_response.content = [tool_block1] + + # Round 2: Another tool use response + round2_response = Mock() + round2_response.stop_reason = "tool_use" + tool_block2 = Mock() + tool_block2.type = "tool_use" + tool_block2.name = "search_course_content" + tool_block2.id = "tool_2" + tool_block2.input = {"query": "neural networks", "course_name": "Advanced AI"} + round2_response.content = [tool_block2] + + # Final response + final_response = Mock() + final_response.content = [Mock(text="Comparison of neural network concepts")] + final_response.stop_reason = "end_turn" + + # Configure mock to return responses in sequence + mock_client.messages.create.side_effect = [ + round1_response, + round2_response, + final_response, + ] + + # Mock tool manager + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.side_effect = [ + "Course outline with lesson on neural networks", + "Detailed neural network content from Advanced AI course", + ] + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "Find courses that discuss similar topics to lesson 3 of ML Course", + tools=[{"name": "get_course_outline"}, {"name": "search_course_content"}], + tool_manager=mock_tool_manager, + ) + + # Assertions + assert result == "Comparison of neural network concepts" + assert mock_client.messages.create.call_count == 3 # 2 tool rounds + final + assert mock_tool_manager.execute_tool.call_count == 2 + + # Verify tool calls + mock_tool_manager.execute_tool.assert_any_call( + "get_course_outline", course_name="ML Course" + ) + mock_tool_manager.execute_tool.assert_any_call( + "search_course_content", query="neural networks", course_name="Advanced AI" + ) + + @patch("anthropic.Anthropic") + def test_sequential_tool_calling_single_round_sufficient(self, mock_anthropic): + """Test when first 
round provides sufficient answer""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # Round 1: Tool use response + round1_response = Mock() + round1_response.stop_reason = "tool_use" + tool_block1 = Mock() + tool_block1.type = "tool_use" + tool_block1.name = "search_course_content" + tool_block1.id = "tool_1" + tool_block1.input = {"query": "machine learning"} + round1_response.content = [tool_block1] + + # Round 2: Final text response (no tools) + final_response = Mock() + final_response.content = [ + Mock(text="Machine learning is covered in these courses...", type="text") + ] + final_response.stop_reason = "end_turn" + + mock_client.messages.create.side_effect = [round1_response, final_response] + + # Mock tool manager + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.return_value = "ML content from multiple courses" + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "What courses cover machine learning?", + tools=[{"name": "search_course_content"}], + tool_manager=mock_tool_manager, + ) + + # Should terminate after 2 API calls (1 tool round + 1 final) + assert result == "Machine learning is covered in these courses..." 
+ assert mock_client.messages.create.call_count == 2 + assert mock_tool_manager.execute_tool.call_count == 1 + + +class TestAIGeneratorTerminationConditions: + """Test various termination conditions for sequential tool calling""" + + @patch("anthropic.Anthropic") + def test_termination_after_two_rounds_max(self, mock_anthropic): + """Test termination after exactly 2 rounds even if Claude wants more""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + def create_tool_use_response(tool_name, **kwargs): + response = Mock() + response.stop_reason = "tool_use" + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = tool_name + tool_block.id = f"tool_{tool_name}" + tool_block.input = kwargs + response.content = [tool_block] + return response + + def create_text_response(text): + response = Mock() + response.stop_reason = "end_turn" + response.content = [Mock(text=text, type="text")] + return response + + # Configure responses for 3 potential rounds, but only 2 should execute + mock_responses = [ + # Round 1: tool use + create_tool_use_response("get_course_outline", course_name="ML Course"), + # Round 2: tool use + create_tool_use_response("search_course_content", query="deep learning"), + # Round 3: final response after max rounds reached + create_text_response("Final answer based on two tool calls"), + ] + + mock_client.messages.create.side_effect = mock_responses + + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.side_effect = [ + "Course outline result", + "Search result", + ] + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "Complex query requiring multiple searches", + tools=[{"name": "get_course_outline"}, {"name": "search_course_content"}], + tool_manager=mock_tool_manager, + ) + + # Should only make 3 API calls total (2 tool rounds + 1 final) + assert mock_client.messages.create.call_count == 3 + assert mock_tool_manager.execute_tool.call_count == 2 + assert 
"Final answer based on two tool calls" in result + + @patch("anthropic.Anthropic") + def test_termination_no_tool_use_in_response(self, mock_anthropic): + """Test termination when Claude doesn't request tools""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + def create_tool_use_response(tool_name, **kwargs): + response = Mock() + response.stop_reason = "tool_use" + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = tool_name + tool_block.id = f"tool_{tool_name}" + tool_block.input = kwargs + response.content = [tool_block] + return response + + def create_text_response(text): + response = Mock() + response.stop_reason = "end_turn" + response.content = [Mock(text=text, type="text")] + return response + + # First response: tool use + # Second response: text only (no tool use) + mock_responses = [ + create_tool_use_response("search_course_content", query="machine learning"), + create_text_response("Based on search results, here's the answer"), + ] + + mock_client.messages.create.side_effect = mock_responses + + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.return_value = "Search results" + + generator = AIGenerator("test-key", "claude-sonnet-4") + result = generator.generate_response( + "Simple query", + tools=[{"name": "search_course_content"}], + tool_manager=mock_tool_manager, + ) + + # Should terminate after 2 API calls (1 tool round + 1 final) + assert mock_client.messages.create.call_count == 2 + assert mock_tool_manager.execute_tool.call_count == 1 + assert result == "Based on search results, here's the answer" + + +class TestAIGeneratorSequentialErrorHandling: + """Test error handling in sequential tool calling scenarios""" + + @patch("anthropic.Anthropic") + def test_error_recovery_tool_failure_round_two(self, mock_anthropic): + """Test error handling when second round tool fails""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + def create_tool_use_response(tool_name, **kwargs): 
+ response = Mock() + response.stop_reason = "tool_use" + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = tool_name + tool_block.id = f"tool_{tool_name}" + tool_block.input = kwargs + response.content = [tool_block] + return response + + # First round succeeds, second round tool fails + mock_client.messages.create.side_effect = [ + create_tool_use_response("get_course_outline", course_name="ML Course"), + create_tool_use_response("search_course_content", query="neural networks"), + ] + + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.side_effect = [ + "Successful course outline result", + Exception("Tool execution failed in round 2"), + ] + + generator = AIGenerator("test-key", "claude-sonnet-4") + + # Should handle error gracefully + result = generator.generate_response( + "Complex query", + tools=[{"name": "get_course_outline"}, {"name": "search_course_content"}], + tool_manager=mock_tool_manager, + ) + + assert mock_client.messages.create.call_count == 2 # Both rounds attempted + assert mock_tool_manager.execute_tool.call_count == 2 # Both tools attempted + assert "I encountered an error while using tools" in result + + @patch("anthropic.Anthropic") + def test_api_error_during_sequential_calls(self, mock_anthropic): + """Test API error handling during sequential calls""" + mock_client = Mock() + mock_anthropic.return_value = mock_client + + # First call succeeds, second call fails + def create_tool_use_response(): + response = Mock() + response.stop_reason = "tool_use" + tool_block = Mock() + tool_block.type = "tool_use" + tool_block.name = "search_tool" + tool_block.id = "tool_1" + tool_block.input = {"query": "test"} + response.content = [tool_block] + return response + + mock_client.messages.create.side_effect = [ + create_tool_use_response(), + Exception("API error in round 2"), + ] + + mock_tool_manager = Mock() + mock_tool_manager.execute_tool.return_value = "Tool result" + + generator = AIGenerator("test-key", 
"claude-sonnet-4") + + # Should fall back gracefully + result = generator.generate_response( + "Test query", + tools=[{"name": "search_tool"}], + tool_manager=mock_tool_manager, + ) + + assert "I'm sorry, I'm unable to process your request" in result diff --git a/backend/tests/test_api_endpoints.py b/backend/tests/test_api_endpoints.py new file mode 100644 index 00000000..3ea8d1e0 --- /dev/null +++ b/backend/tests/test_api_endpoints.py @@ -0,0 +1,385 @@ +"""Comprehensive API endpoint tests using new fixtures and test app factory""" +import pytest +import json +from unittest.mock import Mock +from fastapi.testclient import TestClient + + +@pytest.mark.api +class TestQueryEndpointEnhanced: + """Enhanced API tests for /api/query endpoint using new fixtures""" + + def test_query_with_new_fixtures(self, test_client): + """Test basic query functionality with new fixtures""" + client, mock_rag = test_client + + response = client.post("/api/query", json={ + "query": "What is machine learning?" + }) + + assert response.status_code == 200 + data = response.json() + + assert data["answer"] == "This is a test response about machine learning." 
+ assert len(data["sources"]) == 1 + assert data["sources"][0]["text"] == "Introduction to ML - Lesson 1" + assert data["sources"][0]["url"] == "https://example.com/ml/lesson1" + assert data["session_id"] == "test-session-123" + + def test_query_with_custom_mock(self, test_app_factory): + """Test query with custom mock configuration""" + custom_mock = Mock() + custom_mock.query.return_value = ( + "Custom response about neural networks.", + [ + {"text": "Neural Networks - Chapter 1", "url": "https://example.com/nn/ch1"}, + {"text": "Deep Learning Basics", "url": "https://example.com/dl/basics"} + ] + ) + custom_mock.session_manager.create_session.return_value = "custom-session-456" + + app, rag_mock = test_app_factory(custom_mock) + client = TestClient(app) + + response = client.post("/api/query", json={ + "query": "How do neural networks work?" + }) + + assert response.status_code == 200 + data = response.json() + + assert data["answer"] == "Custom response about neural networks." + assert len(data["sources"]) == 2 + assert data["session_id"] == "custom-session-456" + + # Verify mock was called correctly + custom_mock.query.assert_called_once_with("How do neural networks work?", "custom-session-456") + + def test_query_large_response(self, test_app_factory): + """Test query with large response data""" + large_mock = Mock() + + # Create large response + large_answer = "A" * 5000 # 5KB response + large_sources = [ + {"text": f"Large Source {i}", "url": f"https://example.com/large/{i}"} + for i in range(20) + ] + + large_mock.query.return_value = (large_answer, large_sources) + large_mock.session_manager.create_session.return_value = "large-session" + + app, _ = test_app_factory(large_mock) + client = TestClient(app) + + response = client.post("/api/query", json={ + "query": "Tell me everything about machine learning" + }) + + assert response.status_code == 200 + data = response.json() + + assert len(data["answer"]) == 5000 + assert len(data["sources"]) == 20 + assert 
all(source["text"].startswith("Large Source") for source in data["sources"]) + + def test_query_unicode_content(self, test_app_factory): + """Test query with Unicode and special characters""" + unicode_mock = Mock() + unicode_mock.query.return_value = ( + "这是关于机器学习的回答。Machine learning is 机械学習。", + [{"text": "多语言课程 🚀", "url": "https://example.com/unicode/课程"}] + ) + unicode_mock.session_manager.create_session.return_value = "unicode-session" + + app, _ = test_app_factory(unicode_mock) + client = TestClient(app) + + response = client.post("/api/query", json={ + "query": "什么是机器学习?" + }) + + assert response.status_code == 200 + data = response.json() + + assert "机器学习" in data["answer"] + assert "多语言课程 🚀" in data["sources"][0]["text"] + + def test_query_concurrent_sessions(self, test_app_factory): + """Test handling multiple concurrent sessions""" + session_mock = Mock() + session_mock.session_manager.create_session.side_effect = [ + "session-1", "session-2", "session-3" + ] + session_mock.query.return_value = ("Response", []) + + app, _ = test_app_factory(session_mock) + client = TestClient(app) + + # Simulate concurrent requests + responses = [] + for i in range(3): + response = client.post("/api/query", json={ + "query": f"Query {i+1}" + }) + responses.append(response) + + # All should succeed with different session IDs + session_ids = set() + for response in responses: + assert response.status_code == 200 + session_ids.add(response.json()["session_id"]) + + assert len(session_ids) == 3 # All different sessions + + +@pytest.mark.api +class TestCoursesEndpointEnhanced: + """Enhanced API tests for /api/courses endpoint""" + + def test_courses_with_new_fixtures(self, test_client): + """Test courses endpoint with new fixtures""" + client, mock_rag = test_client + + response = client.get("/api/courses") + + assert response.status_code == 200 + data = response.json() + + assert data["total_courses"] == 2 + assert len(data["course_titles"]) == 2 + assert "Introduction to 
Machine Learning" in data["course_titles"] + assert "Advanced Python Programming" in data["course_titles"] + + def test_courses_large_dataset(self, test_app_factory): + """Test courses endpoint with large dataset""" + large_courses_mock = Mock() + + # Create large course list + large_titles = [f"Course {i:03d}: Advanced Topic {i}" for i in range(100)] + large_courses_mock.get_course_analytics.return_value = { + "total_courses": 100, + "course_titles": large_titles + } + + app, _ = test_app_factory(large_courses_mock) + client = TestClient(app) + + response = client.get("/api/courses") + + assert response.status_code == 200 + data = response.json() + + assert data["total_courses"] == 100 + assert len(data["course_titles"]) == 100 + assert "Course 050: Advanced Topic 50" in data["course_titles"] + + def test_courses_unicode_titles(self, test_app_factory): + """Test courses endpoint with Unicode course titles""" + unicode_courses_mock = Mock() + unicode_courses_mock.get_course_analytics.return_value = { + "total_courses": 3, + "course_titles": [ + "机器学习入门", + "Aprendizaje Automático 🤖", + "Машинное обучение" + ] + } + + app, _ = test_app_factory(unicode_courses_mock) + client = TestClient(app) + + response = client.get("/api/courses") + + assert response.status_code == 200 + data = response.json() + + assert data["total_courses"] == 3 + assert "机器学习入门" in data["course_titles"] + assert "Aprendizaje Automático 🤖" in data["course_titles"] + assert "Машинное обучение" in data["course_titles"] + + +@pytest.mark.api +class TestHealthEndpoint: + """Test health check endpoint""" + + def test_health_check(self, test_client): + """Test health check endpoint""" + client, _ = test_client + + response = client.get("/health") + + assert response.status_code == 200 + data = response.json() + assert data["status"] == "healthy" + + def test_health_check_no_dependencies(self, test_app_factory): + """Test health check works even if RAG system is broken""" + broken_mock = Mock() + 
broken_mock.query.side_effect = Exception("RAG system broken") + broken_mock.get_course_analytics.side_effect = Exception("Analytics broken") + + app, _ = test_app_factory(broken_mock) + client = TestClient(app) + + # Health check should still work + response = client.get("/health") + assert response.status_code == 200 + assert response.json()["status"] == "healthy" + + +@pytest.mark.api +class TestErrorHandlingEnhanced: + """Enhanced error handling tests""" + + def test_query_timeout_simulation(self, test_app_factory): + """Test query timeout handling""" + import time + + timeout_mock = Mock() + def slow_query(*args, **kwargs): + time.sleep(0.1) # Simulate slow operation + raise TimeoutError("Query timeout") + + timeout_mock.query.side_effect = slow_query + timeout_mock.session_manager.create_session.return_value = "timeout-session" + + app, _ = test_app_factory(timeout_mock) + client = TestClient(app) + + response = client.post("/api/query", json={"query": "slow query"}) + + assert response.status_code == 500 + assert "Query timeout" in response.json()["detail"] + + def test_malformed_json_request(self, test_client): + """Test handling of malformed JSON requests""" + client, _ = test_client + + # Send malformed JSON + response = client.post( + "/api/query", + data='{"query": "test", "invalid": json}', # Invalid JSON + headers={"content-type": "application/json"} + ) + + assert response.status_code == 422 + + def test_extremely_long_query(self, test_client): + """Test handling of extremely long query strings""" + client, mock_rag = test_client + + # Create very long query (100KB) + long_query = "A" * 100000 + + response = client.post("/api/query", json={ + "query": long_query + }) + + # Should handle gracefully + assert response.status_code == 200 + + # Verify the long query was passed to RAG system + args, kwargs = mock_rag.query.call_args + assert args[0] == long_query + + def test_empty_course_analytics(self, test_app_factory): + """Test courses endpoint with 
empty analytics""" + empty_mock = Mock() + empty_mock.get_course_analytics.return_value = { + "total_courses": 0, + "course_titles": [] + } + + app, _ = test_app_factory(empty_mock) + client = TestClient(app) + + response = client.get("/api/courses") + + assert response.status_code == 200 + data = response.json() + assert data["total_courses"] == 0 + assert data["course_titles"] == [] + + +@pytest.mark.api +class TestMiddlewareConfiguration: + """Test middleware and CORS configuration""" + + def test_cors_headers_query(self, test_client): + """Test CORS headers on query endpoint""" + client, _ = test_client + + # Make request with custom origin + response = client.post( + "/api/query", + json={"query": "test"}, + headers={"Origin": "https://example.com"} + ) + + assert response.status_code == 200 + # TestClient doesn't process CORS middleware the same way, + # but we can verify the endpoint works + + def test_cors_headers_courses(self, test_client): + """Test CORS headers on courses endpoint""" + client, _ = test_client + + response = client.get( + "/api/courses", + headers={"Origin": "https://localhost:3000"} + ) + + assert response.status_code == 200 + + def test_options_request(self, test_client): + """Test OPTIONS request handling""" + client, _ = test_client + + response = client.options("/api/query") + + # FastAPI should handle OPTIONS requests + # Status could be 405 (method not allowed) or 200 depending on CORS setup + assert response.status_code in [200, 405] + + +@pytest.mark.integration +class TestAppFactory: + """Test the app factory fixture itself""" + + def test_app_factory_creates_different_apps(self, test_app_factory): + """Test that app factory creates independent app instances""" + mock1 = Mock() + mock1.query.return_value = ("Response 1", []) + mock1.session_manager.create_session.return_value = "session-1" + + mock2 = Mock() + mock2.query.return_value = ("Response 2", []) + mock2.session_manager.create_session.return_value = "session-2" + + 
app1, _ = test_app_factory(mock1) + app2, _ = test_app_factory(mock2) + + client1 = TestClient(app1) + client2 = TestClient(app2) + + response1 = client1.post("/api/query", json={"query": "test"}) + response2 = client2.post("/api/query", json={"query": "test"}) + + assert response1.json()["answer"] == "Response 1" + assert response1.json()["session_id"] == "session-1" + + assert response2.json()["answer"] == "Response 2" + assert response2.json()["session_id"] == "session-2" + + def test_app_factory_with_none_mock(self, test_app_factory): + """Test app factory creates default mock when None provided""" + app, rag_mock = test_app_factory(None) + client = TestClient(app) + + response = client.post("/api/query", json={"query": "test"}) + + assert response.status_code == 200 + assert response.json()["answer"] == "Test response" + assert response.json()["session_id"] == "test-session" \ No newline at end of file diff --git a/backend/tests/test_app.py b/backend/tests/test_app.py new file mode 100644 index 00000000..cab1806c --- /dev/null +++ b/backend/tests/test_app.py @@ -0,0 +1,412 @@ +"""API layer tests for FastAPI endpoints""" + +import os +import sys +from unittest.mock import Mock, patch + +import pytest +from fastapi.testclient import TestClient + +# Add backend directory to Python path +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + + +class TestQueryEndpoint: + """Test /api/query endpoint""" + + @pytest.fixture + def mock_app(self): + """Create test client with mocked RAG system""" + with patch("app.RAGSystem") as mock_rag_class: + # Import app after patching + from app import app + + # Configure mock RAG system + mock_rag = Mock() + mock_rag.query.return_value = ( + "Test response", + [{"text": "Test Course", "url": "https://example.com"}], + ) + mock_rag.session_manager.create_session.return_value = "test-session-123" + mock_rag_class.return_value = mock_rag + + client = TestClient(app) + return client, mock_rag + + def 
test_query_without_session(self, mock_app): + """Test query endpoint without session ID""" + client, mock_rag = mock_app + + response = client.post( + "/api/query", json={"query": "What is machine learning?"} + ) + + assert response.status_code == 200 + data = response.json() + + assert data["answer"] == "Test response" + assert len(data["sources"]) == 1 + assert data["sources"][0]["text"] == "Test Course" + assert data["session_id"] == "test-session-123" + + # Verify RAG system was called correctly + mock_rag.query.assert_called_once_with( + "What is machine learning?", "test-session-123" + ) + + def test_query_with_session(self, mock_app): + """Test query endpoint with existing session ID""" + client, mock_rag = mock_app + + response = client.post( + "/api/query", + json={"query": "Follow up question", "session_id": "existing-session-456"}, + ) + + assert response.status_code == 200 + data = response.json() + + assert data["session_id"] == "existing-session-456" + + # Verify RAG system was called with existing session + mock_rag.query.assert_called_once_with( + "Follow up question", "existing-session-456" + ) + # Session creation should not be called + mock_rag.session_manager.create_session.assert_not_called() + + def test_query_with_string_sources(self, mock_app): + """Test query endpoint with string sources (backward compatibility)""" + client, mock_rag = mock_app + + # Configure RAG to return string sources + mock_rag.query.return_value = ("Test response", ["Course 1", "Course 2"]) + + response = client.post("/api/query", json={"query": "Test query"}) + + assert response.status_code == 200 + data = response.json() + + assert data["sources"] == ["Course 1", "Course 2"] + + def test_query_with_mixed_sources(self, mock_app): + """Test query endpoint with mixed source types""" + client, mock_rag = mock_app + + # Configure RAG to return mixed sources + mixed_sources = [ + {"text": "Course with link", "url": "https://example.com/course"}, + "Plain text source", + ] 
+ mock_rag.query.return_value = ("Test response", mixed_sources) + + response = client.post("/api/query", json={"query": "Test query"}) + + assert response.status_code == 200 + data = response.json() + + assert len(data["sources"]) == 2 + assert data["sources"][0]["text"] == "Course with link" + assert data["sources"][0]["url"] == "https://example.com/course" + assert data["sources"][1] == "Plain text source" + + def test_query_empty_query(self, mock_app): + """Test query endpoint with empty query""" + client, mock_rag = mock_app + + response = client.post("/api/query", json={"query": ""}) + + assert response.status_code == 200 + # Should still process empty query + mock_rag.query.assert_called_once() + + def test_query_missing_query_field(self, mock_app): + """Test query endpoint with missing query field""" + client, mock_rag = mock_app + + response = client.post("/api/query", json={"session_id": "test-session"}) + + assert response.status_code == 422 # Validation error + + def test_query_rag_system_exception(self, mock_app): + """Test query endpoint when RAG system raises exception""" + client, mock_rag = mock_app + + # Configure RAG to raise exception + mock_rag.query.side_effect = Exception("RAG system error") + + response = client.post("/api/query", json={"query": "Test query"}) + + assert response.status_code == 500 + data = response.json() + assert "RAG system error" in data["detail"] + + def test_query_session_creation_exception(self, mock_app): + """Test query endpoint when session creation fails""" + client, mock_rag = mock_app + + # Configure session manager to raise exception + mock_rag.session_manager.create_session.side_effect = Exception( + "Session creation failed" + ) + + response = client.post("/api/query", json={"query": "Test query"}) + + assert response.status_code == 500 + data = response.json() + assert "Session creation failed" in data["detail"] + + +class TestCoursesEndpoint: + """Test /api/courses endpoint""" + + @pytest.fixture + def 
mock_app_courses(self): + """Create test client with mocked RAG system for courses endpoint""" + with patch("app.RAGSystem") as mock_rag_class: + from app import app + + mock_rag = Mock() + mock_rag.get_course_analytics.return_value = { + "total_courses": 3, + "course_titles": ["Course 1", "Course 2", "Course 3"], + } + mock_rag_class.return_value = mock_rag + + client = TestClient(app) + return client, mock_rag + + def test_get_course_stats_success(self, mock_app_courses): + """Test successful course statistics retrieval""" + client, mock_rag = mock_app_courses + + response = client.get("/api/courses") + + assert response.status_code == 200 + data = response.json() + + assert data["total_courses"] == 3 + assert len(data["course_titles"]) == 3 + assert "Course 1" in data["course_titles"] + + # Verify RAG system was called + mock_rag.get_course_analytics.assert_called_once() + + def test_get_course_stats_empty(self, mock_app_courses): + """Test course statistics when no courses exist""" + client, mock_rag = mock_app_courses + + # Configure RAG to return empty analytics + mock_rag.get_course_analytics.return_value = { + "total_courses": 0, + "course_titles": [], + } + + response = client.get("/api/courses") + + assert response.status_code == 200 + data = response.json() + + assert data["total_courses"] == 0 + assert data["course_titles"] == [] + + def test_get_course_stats_exception(self, mock_app_courses): + """Test course statistics endpoint when RAG system raises exception""" + client, mock_rag = mock_app_courses + + # Configure RAG to raise exception + mock_rag.get_course_analytics.side_effect = Exception("Analytics error") + + response = client.get("/api/courses") + + assert response.status_code == 500 + data = response.json() + assert "Analytics error" in data["detail"] + + +class TestAppStartup: + """Test application startup behavior""" + + @patch("app.os.path.exists") + @patch("app.RAGSystem") + def test_startup_with_docs_folder(self, mock_rag_class, 
mock_exists): + """Test startup behavior when docs folder exists""" + mock_exists.return_value = True + mock_rag = Mock() + mock_rag.add_course_folder.return_value = (2, 10) # 2 courses, 10 chunks + mock_rag_class.return_value = mock_rag + + from app import app + + with TestClient(app): + # Trigger startup event + pass + + # Verify docs folder was processed + mock_rag.add_course_folder.assert_called_once_with( + "../docs", clear_existing=False + ) + + @patch("app.os.path.exists") + @patch("app.RAGSystem") + def test_startup_without_docs_folder(self, mock_rag_class, mock_exists): + """Test startup behavior when docs folder doesn't exist""" + mock_exists.return_value = False + mock_rag = Mock() + mock_rag_class.return_value = mock_rag + + from app import app + + with TestClient(app): + pass + + # Verify docs processing was not attempted + mock_rag.add_course_folder.assert_not_called() + + @patch("app.os.path.exists") + @patch("app.RAGSystem") + def test_startup_docs_processing_error(self, mock_rag_class, mock_exists): + """Test startup behavior when docs processing fails""" + mock_exists.return_value = True + mock_rag = Mock() + mock_rag.add_course_folder.side_effect = Exception("Processing error") + mock_rag_class.return_value = mock_rag + + from app import app + + # Should not crash despite processing error + with TestClient(app): + pass + + +class TestAppConfiguration: + """Test app configuration and middleware""" + + def test_cors_configuration(self): + """Test CORS middleware configuration""" + from app import app + + client = TestClient(app) + + # Test CORS headers on OPTIONS request + response = client.options("/api/query") + + # Should handle CORS properly + assert ( + response.status_code == 405 + ) # Method not allowed, but CORS headers should be present + + def test_trusted_host_middleware(self): + """Test trusted host middleware allows requests""" + from app import app + + client = TestClient(app) + + # Should accept requests from any host (configured 
with "*") + with patch("app.RAGSystem"): + response = client.get("/api/courses") + # Should not be blocked by trusted host middleware + assert response.status_code != 400 + + +class TestErrorHandling: + """Test error handling across the application""" + + @pytest.fixture + def error_app(self): + """App configured to test error scenarios""" + with patch("app.RAGSystem") as mock_rag_class: + from app import app + + mock_rag = Mock() + mock_rag_class.return_value = mock_rag + + return TestClient(app), mock_rag + + def test_query_validation_error(self, error_app): + """Test validation error handling""" + client, mock_rag = error_app + + # Send valid JSON that is missing the required "query" field + response = client.post("/api/query", json={"invalid_field": "value"}) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + def test_query_unexpected_error(self, error_app): + """Test unexpected error handling in query endpoint""" + client, mock_rag = error_app + + # Configure RAG to raise unexpected error + mock_rag.query.side_effect = RuntimeError("Unexpected system error") + + response = client.post("/api/query", json={"query": "test"}) + + assert response.status_code == 500 + data = response.json() + assert "Unexpected system error" in data["detail"] + + def test_courses_unexpected_error(self, error_app): + """Test unexpected error handling in courses endpoint""" + client, mock_rag = error_app + + # Configure RAG to raise unexpected error + mock_rag.get_course_analytics.side_effect = ValueError( + "Analytics computation error" + ) + + response = client.get("/api/courses") + + assert response.status_code == 500 + data = response.json() + assert "Analytics computation error" in data["detail"] + + +class TestRequestResponseModels: + """Test Pydantic request/response models""" + + def test_query_request_model_validation(self): + """Test QueryRequest model validation""" + from app import QueryRequest + + # Valid request + request = QueryRequest(query="test query", 
session_id="test-session") + assert request.query == "test query" + assert request.session_id == "test-session" + + # Request without session_id + request = QueryRequest(query="test query") + assert request.query == "test query" + assert request.session_id is None + + # Invalid request (missing query) + with pytest.raises(ValueError): + QueryRequest(session_id="test-session") + + def test_query_response_model_validation(self): + """Test QueryResponse model validation""" + from app import QueryResponse + + # Valid response with dict sources + response = QueryResponse( + answer="test answer", + sources=[{"text": "source", "url": "https://example.com"}], + session_id="test-session", + ) + assert response.answer == "test answer" + assert len(response.sources) == 1 + + # Valid response with string sources + response = QueryResponse( + answer="test answer", sources=["string source"], session_id="test-session" + ) + assert response.sources == ["string source"] + + def test_course_stats_model_validation(self): + """Test CourseStats model validation""" + from app import CourseStats + + stats = CourseStats(total_courses=5, course_titles=["Course 1", "Course 2"]) + assert stats.total_courses == 5 + assert len(stats.course_titles) == 2 diff --git a/backend/tests/test_course_search_tool.py b/backend/tests/test_course_search_tool.py new file mode 100644 index 00000000..9626d2d6 --- /dev/null +++ b/backend/tests/test_course_search_tool.py @@ -0,0 +1,294 @@ +"""Unit tests for CourseSearchTool""" + +from unittest.mock import Mock + +from search_tools import CourseSearchTool +from vector_store import SearchResults + + +class TestCourseSearchTool: + """Test cases for CourseSearchTool functionality""" + + def test_get_tool_definition(self, course_search_tool): + """Test that tool definition is correctly structured""" + definition = course_search_tool.get_tool_definition() + + assert definition["name"] == "search_course_content" + assert "description" in definition + assert 
"input_schema" in definition + assert definition["input_schema"]["required"] == ["query"] + + # Check properties structure + properties = definition["input_schema"]["properties"] + assert "query" in properties + assert "course_name" in properties + assert "lesson_number" in properties + + def test_execute_successful_search_basic_query(self, course_search_tool): + """Test successful search with basic query only""" + result = course_search_tool.execute("machine learning") + + # Should call vector store search with correct parameters + course_search_tool.store.search.assert_called_once_with( + query="machine learning", course_name=None, lesson_number=None + ) + + # Should return formatted results + assert "[Introduction to Machine Learning" in result + assert "Machine learning is a subset" in result + + # Should track sources + assert len(course_search_tool.last_sources) > 0 + + def test_execute_successful_search_with_course_filter(self, course_search_tool): + """Test successful search with course name filter""" + result = course_search_tool.execute( + "machine learning", course_name="Introduction to Machine Learning" + ) + + course_search_tool.store.search.assert_called_once_with( + query="machine learning", + course_name="Introduction to Machine Learning", + lesson_number=None, + ) + + assert "[Introduction to Machine Learning" in result + + def test_execute_successful_search_with_lesson_filter(self, course_search_tool): + """Test successful search with lesson number filter""" + result = course_search_tool.execute( + "machine learning", + course_name="Introduction to Machine Learning", + lesson_number=1, + ) + + course_search_tool.store.search.assert_called_once_with( + query="machine learning", + course_name="Introduction to Machine Learning", + lesson_number=1, + ) + + assert "[Introduction to Machine Learning - Lesson 1]" in result + + def test_execute_vector_store_error(self, course_search_tool): + """Test handling of vector store errors""" + # Mock vector store 
to return error + course_search_tool.store.search.return_value = SearchResults.empty( + "Database connection failed" + ) + + result = course_search_tool.execute("machine learning") + + assert result == "Database connection failed" + assert course_search_tool.last_sources == [] + + def test_execute_no_results_found_basic_query(self, course_search_tool): + """Test handling when no results are found""" + # Mock vector store to return empty results + course_search_tool.store.search.return_value = SearchResults( + documents=[], metadata=[], distances=[] + ) + + result = course_search_tool.execute("nonexistent topic") + + assert result == "No relevant content found." + assert course_search_tool.last_sources == [] + + def test_execute_no_results_found_with_filters(self, course_search_tool): + """Test handling when no results are found with filters""" + course_search_tool.store.search.return_value = SearchResults( + documents=[], metadata=[], distances=[] + ) + + result = course_search_tool.execute( + "nonexistent topic", course_name="ML Course", lesson_number=5 + ) + + assert result == "No relevant content found in course 'ML Course' in lesson 5." + assert course_search_tool.last_sources == [] + + def test_execute_partial_course_filter_message(self, course_search_tool): + """Test error message construction with partial filters""" + course_search_tool.store.search.return_value = SearchResults( + documents=[], metadata=[], distances=[] + ) + + # Test with only course name + result = course_search_tool.execute("test query", course_name="Some Course") + assert result == "No relevant content found in course 'Some Course'." + + # Test with only lesson number + result = course_search_tool.execute("test query", lesson_number=3) + assert result == "No relevant content found in lesson 3." 
+ + def test_format_results_with_links(self, course_search_tool): + """Test result formatting with lesson and course links""" + # Configure mock to return links + course_search_tool.store.get_lesson_link.return_value = ( + "https://example.com/lesson1" + ) + course_search_tool.store.get_course_link.return_value = ( + "https://example.com/course" + ) + + # Create search results with multiple documents + search_results = SearchResults( + documents=[ + "Content about machine learning algorithms", + "More content about neural networks", + ], + metadata=[ + {"course_title": "ML Course", "lesson_number": 1}, + {"course_title": "ML Course", "lesson_number": 2}, + ], + distances=[0.1, 0.2], + ) + + course_search_tool.store.search.return_value = search_results + + result = course_search_tool.execute("algorithms") + + # Should format with lesson headers + assert "[ML Course - Lesson 1]" in result + assert "[ML Course - Lesson 2]" in result + assert "Content about machine learning algorithms" in result + assert "More content about neural networks" in result + + # Should track sources with links + expected_sources = [ + {"text": "ML Course - Lesson 1", "url": "https://example.com/lesson1"}, + { + "text": "ML Course - Lesson 2", + "url": "https://example.com/lesson1", + }, # Mock returns same link + ] + assert len(course_search_tool.last_sources) == 2 + + def test_format_results_without_links(self, course_search_tool): + """Test result formatting when links are not available""" + # Configure mock to return no links + course_search_tool.store.get_lesson_link.return_value = None + course_search_tool.store.get_course_link.return_value = None + + search_results = SearchResults( + documents=["Content without links"], + metadata=[{"course_title": "No Link Course", "lesson_number": 1}], + distances=[0.1], + ) + + course_search_tool.store.search.return_value = search_results + + result = course_search_tool.execute("test") + + # Should still format properly but sources should be plain 
text + assert "[No Link Course - Lesson 1]" in result + assert course_search_tool.last_sources == ["No Link Course - Lesson 1"] + + def test_format_results_course_level_content(self, course_search_tool): + """Test result formatting for course-level content (no lesson number)""" + course_search_tool.store.get_course_link.return_value = ( + "https://example.com/course" + ) + + search_results = SearchResults( + documents=["Course overview content"], + metadata=[{"course_title": "Overview Course"}], # No lesson_number + distances=[0.1], + ) + + course_search_tool.store.search.return_value = search_results + + result = course_search_tool.execute("overview") + + # Should format without lesson number + assert "[Overview Course]" in result + assert "Course overview content" in result + + # Should track course-level source with link + expected_source = { + "text": "Overview Course", + "url": "https://example.com/course", + } + assert course_search_tool.last_sources == [expected_source] + + def test_format_results_malformed_metadata(self, course_search_tool): + """Test handling of malformed metadata""" + search_results = SearchResults( + documents=["Some content"], + metadata=[{}], # Empty metadata + distances=[0.1], + ) + + course_search_tool.store.search.return_value = search_results + + result = course_search_tool.execute("test") + + # Should handle gracefully with unknown course + assert "[unknown]" in result + assert "Some content" in result + + def test_sources_reset_between_searches(self, course_search_tool): + """Test that sources are properly managed between searches""" + # First search + course_search_tool.execute("first query") + first_sources = course_search_tool.last_sources.copy() + assert len(first_sources) > 0 + + # Second search with different results + course_search_tool.store.search.return_value = SearchResults( + documents=["Different content"], + metadata=[{"course_title": "Different Course", "lesson_number": 2}], + distances=[0.3], + ) + + 
course_search_tool.execute("second query") + second_sources = course_search_tool.last_sources + + # Sources should be different and reflect the new search + assert second_sources != first_sources + assert any("Different Course" in str(source) for source in second_sources) + + +class TestCourseSearchToolEdgeCases: + """Test edge cases and error conditions""" + + def test_execute_with_none_query(self): + """Test behavior with None query - should return error message""" + mock_store = Mock() + tool = CourseSearchTool(mock_store) + + result = tool.execute(None) + assert result == "Error: Search query cannot be None." + + def test_execute_with_empty_string_query(self, course_search_tool): + """Test behavior with empty string query""" + result = course_search_tool.execute("") + + course_search_tool.store.search.assert_called_once_with( + query="", course_name=None, lesson_number=None + ) + + def test_execute_with_invalid_lesson_number(self, course_search_tool): + """Test behavior with invalid lesson number types""" + # Should handle string lesson numbers + result = course_search_tool.execute("test", lesson_number="not_a_number") + + course_search_tool.store.search.assert_called_once_with( + query="test", + course_name=None, + lesson_number="not_a_number", # Vector store should handle validation + ) + + def test_vector_store_exception_handling(self): + """Test handling when vector store raises unexpected exceptions""" + mock_store = Mock() + mock_store.search.side_effect = Exception("Unexpected database error") + + tool = CourseSearchTool(mock_store) + + result = tool.execute("test query") + + # Should handle gracefully and return error message + assert isinstance(result, str) + assert "Search failed due to an internal error" in result + assert "Unexpected database error" in result diff --git a/backend/tests/test_rag_system.py b/backend/tests/test_rag_system.py new file mode 100644 index 00000000..639a1a90 --- /dev/null +++ b/backend/tests/test_rag_system.py @@ -0,0 
+1,419 @@ +"""Integration tests for RAGSystem""" + +from unittest.mock import Mock, patch + +import pytest +from models import Course, CourseChunk, Lesson +from rag_system import RAGSystem + + +class TestRAGSystemIntegration: + """Test RAG system integration and complete query flow""" + + @pytest.fixture + def mock_rag_system(self, test_config): + """Create RAG system with mocked dependencies""" + with ( + patch("rag_system.DocumentProcessor"), + patch("rag_system.VectorStore") as mock_vector_store, + patch("rag_system.AIGenerator") as mock_ai_generator, + patch("rag_system.SessionManager"), + ): + rag = RAGSystem(test_config) + + # Configure mocks + rag.vector_store = mock_vector_store.return_value + rag.ai_generator = mock_ai_generator.return_value + + # Mock AI generator to return simple response + rag.ai_generator.generate_response.return_value = "Mocked AI response" + + return rag + + def test_rag_system_initialization(self, test_config): + """Test that RAG system initializes all components correctly""" + with ( + patch("rag_system.DocumentProcessor"), + patch("rag_system.VectorStore"), + patch("rag_system.AIGenerator"), + patch("rag_system.SessionManager"), + ): + rag = RAGSystem(test_config) + + # Verify tools are registered + assert "search_course_content" in rag.tool_manager.tools + assert "get_course_outline" in rag.tool_manager.tools + assert len(rag.tool_manager.tools) == 2 + + def test_query_without_session(self, mock_rag_system): + """Test query processing without session ID""" + response, sources = mock_rag_system.query("What is machine learning?") + + assert response == "Mocked AI response" + assert isinstance(sources, list) + + # Verify AI generator was called with correct parameters + mock_rag_system.ai_generator.generate_response.assert_called_once() + call_args = mock_rag_system.ai_generator.generate_response.call_args + + # Check query format + assert ( + "Answer this question about course materials: What is machine learning?" 
+ in call_args[0][0] + ) + + # Check tools are provided + assert call_args[1]["tools"] is not None + assert call_args[1]["tool_manager"] is not None + + def test_query_with_session(self, mock_rag_system): + """Test query processing with session ID""" + session_id = "test-session-123" + + # Mock session manager + mock_rag_system.session_manager.get_conversation_history.return_value = ( + "Previous context" + ) + + response, sources = mock_rag_system.query("Follow up question", session_id) + + assert response == "Mocked AI response" + + # Verify session history was retrieved + mock_rag_system.session_manager.get_conversation_history.assert_called_once_with( + session_id + ) + + # Verify conversation history was passed to AI generator + call_args = mock_rag_system.ai_generator.generate_response.call_args + assert call_args[1]["conversation_history"] == "Previous context" + + # Verify session was updated + mock_rag_system.session_manager.add_exchange.assert_called_once_with( + session_id, "Follow up question", "Mocked AI response" + ) + + def test_query_with_tool_execution(self, mock_rag_system): + """Test query that triggers tool execution""" + # Configure search tool to return results + mock_rag_system.search_tool.last_sources = [ + {"text": "ML Course - Lesson 1", "url": "https://example.com/lesson1"} + ] + + # Configure tool manager to return sources + mock_rag_system.tool_manager.get_last_sources.return_value = ( + mock_rag_system.search_tool.last_sources + ) + + response, sources = mock_rag_system.query("What is machine learning?") + + assert response == "Mocked AI response" + assert len(sources) == 1 + assert sources[0]["text"] == "ML Course - Lesson 1" + assert sources[0]["url"] == "https://example.com/lesson1" + + # Verify sources were reset after retrieval + mock_rag_system.tool_manager.reset_sources.assert_called_once() + + def test_query_ai_generator_exception(self, mock_rag_system): + """Test handling when AI generator raises exception""" + 
mock_rag_system.ai_generator.generate_response.side_effect = Exception( + "API error" + ) + + with pytest.raises(Exception) as exc_info: + mock_rag_system.query("test query") + + assert "API error" in str(exc_info.value) + + +class TestRAGSystemDocumentProcessing: + """Test document processing functionality""" + + @pytest.fixture + def mock_rag_with_docs(self, test_config): + """RAG system with document processing mocks""" + with ( + patch("rag_system.DocumentProcessor") as mock_doc_processor, + patch("rag_system.VectorStore") as mock_vector_store, + patch("rag_system.AIGenerator"), + patch("rag_system.SessionManager"), + ): + rag = RAGSystem(test_config) + + # Configure document processor mock + sample_course = Course( + title="Test Course", + course_link="https://example.com/course", + lessons=[Lesson(lesson_number=1, title="Test Lesson")], + ) + sample_chunks = [ + CourseChunk( + content="Test content", + course_title="Test Course", + lesson_number=1, + chunk_index=0, + ) + ] + + mock_doc_processor.return_value.process_course_document.return_value = ( + sample_course, + sample_chunks, + ) + rag.document_processor = mock_doc_processor.return_value + rag.vector_store = mock_vector_store.return_value + + return rag + + def test_add_course_document_success(self, mock_rag_with_docs): + """Test successful course document addition""" + course, chunk_count = mock_rag_with_docs.add_course_document( + "/path/to/course.pdf" + ) + + assert course.title == "Test Course" + assert chunk_count == 1 + + # Verify document was processed + mock_rag_with_docs.document_processor.process_course_document.assert_called_once_with( + "/path/to/course.pdf" + ) + + # Verify data was added to vector store + mock_rag_with_docs.vector_store.add_course_metadata.assert_called_once() + mock_rag_with_docs.vector_store.add_course_content.assert_called_once() + + def test_add_course_document_processing_error(self, mock_rag_with_docs): + """Test handling of document processing errors""" + 
mock_rag_with_docs.document_processor.process_course_document.side_effect = ( + Exception("Processing failed") + ) + + course, chunk_count = mock_rag_with_docs.add_course_document( + "/path/to/invalid.pdf" + ) + + assert course is None + assert chunk_count == 0 + + # Vector store should not be called + mock_rag_with_docs.vector_store.add_course_metadata.assert_not_called() + mock_rag_with_docs.vector_store.add_course_content.assert_not_called() + + @patch("os.path.exists") + @patch("os.listdir") + @patch("os.path.isfile") + def test_add_course_folder_success( + self, mock_isfile, mock_listdir, mock_exists, mock_rag_with_docs + ): + """Test successful course folder processing""" + # Mock file system + mock_exists.return_value = True + mock_listdir.return_value = ["course1.pdf", "course2.docx", "readme.txt"] + mock_isfile.return_value = True + + # Mock existing courses (empty) + mock_rag_with_docs.vector_store.get_existing_course_titles.return_value = [] + + courses, chunks = mock_rag_with_docs.add_course_folder("/docs") + + assert courses == 3 # All three files processed + assert chunks == 3 # One chunk per file + + # Verify all files were processed + assert ( + mock_rag_with_docs.document_processor.process_course_document.call_count + == 3 + ) + + @patch("os.path.exists") + def test_add_course_folder_missing_folder(self, mock_exists, mock_rag_with_docs): + """Test handling of missing course folder""" + mock_exists.return_value = False + + courses, chunks = mock_rag_with_docs.add_course_folder("/nonexistent") + + assert courses == 0 + assert chunks == 0 + + @patch("os.path.exists") + @patch("os.listdir") + @patch("os.path.isfile") + def test_add_course_folder_skip_existing( + self, mock_isfile, mock_listdir, mock_exists, mock_rag_with_docs + ): + """Test skipping existing courses when adding folder""" + mock_exists.return_value = True + mock_listdir.return_value = ["course1.pdf"] + mock_isfile.return_value = True + + # Mock existing courses to include the course 
we're trying to add + mock_rag_with_docs.vector_store.get_existing_course_titles.return_value = [ + "Test Course" + ] + + courses, chunks = mock_rag_with_docs.add_course_folder("/docs") + + assert courses == 0 # Should skip existing course + assert chunks == 0 + + # Document should still be processed to check if it's duplicate + mock_rag_with_docs.document_processor.process_course_document.assert_called_once() + + # But vector store should not be updated + mock_rag_with_docs.vector_store.add_course_metadata.assert_not_called() + mock_rag_with_docs.vector_store.add_course_content.assert_not_called() + + @patch("os.path.exists") + @patch("os.listdir") + @patch("os.path.isfile") + def test_add_course_folder_clear_existing( + self, mock_isfile, mock_listdir, mock_exists, mock_rag_with_docs + ): + """Test clearing existing data before adding folder""" + mock_exists.return_value = True + mock_listdir.return_value = ["course1.pdf"] + mock_isfile.return_value = True + + mock_rag_with_docs.vector_store.get_existing_course_titles.return_value = [] + + courses, chunks = mock_rag_with_docs.add_course_folder( + "/docs", clear_existing=True + ) + + # Verify data was cleared + mock_rag_with_docs.vector_store.clear_all_data.assert_called_once() + + assert courses == 1 + assert chunks == 1 + + +class TestRAGSystemAnalytics: + """Test RAG system analytics functionality""" + + def test_get_course_analytics(self, mock_rag_system): + """Test course analytics retrieval""" + # Configure vector store mock + mock_rag_system.vector_store.get_course_count.return_value = 5 + mock_rag_system.vector_store.get_existing_course_titles.return_value = [ + "Course 1", + "Course 2", + "Course 3", + "Course 4", + "Course 5", + ] + + analytics = mock_rag_system.get_course_analytics() + + assert analytics["total_courses"] == 5 + assert len(analytics["course_titles"]) == 5 + assert "Course 1" in analytics["course_titles"] + + +class TestRAGSystemRealIntegration: + """Test RAG system with real components 
(integration test)""" + + @pytest.fixture + def real_rag_system(self, test_config): + """RAG system with real components""" + # Only mock the Anthropic client to avoid real API calls + with patch("ai_generator.anthropic.Anthropic") as mock_anthropic: + mock_client = Mock() + mock_response = Mock() + mock_response.content = [Mock(text="Real integration test response")] + mock_response.stop_reason = "end_turn" + mock_client.messages.create.return_value = mock_response + mock_anthropic.return_value = mock_client + + return RAGSystem(test_config) + + def test_real_integration_query_flow( + self, real_rag_system, sample_course, sample_course_chunks + ): + """Test complete query flow with real components""" + # Add test data + real_rag_system.vector_store.add_course_metadata(sample_course) + real_rag_system.vector_store.add_course_content(sample_course_chunks) + + # Execute query + response, sources = real_rag_system.query("What is machine learning?") + + assert "Real integration test response" in response + assert isinstance(sources, list) + + # Verify vector store has the data + assert real_rag_system.vector_store.get_course_count() == 1 + assert ( + "Introduction to Machine Learning" + in real_rag_system.vector_store.get_existing_course_titles() + ) + + def test_real_tool_registration(self, real_rag_system): + """Test that tools are properly registered in real system""" + tool_definitions = real_rag_system.tool_manager.get_tool_definitions() + + assert len(tool_definitions) == 2 + + # Check search tool + search_tool = next( + ( + tool + for tool in tool_definitions + if tool["name"] == "search_course_content" + ), + None, + ) + assert search_tool is not None + assert "course materials" in search_tool["description"].lower() + + # Check outline tool + outline_tool = next( + (tool for tool in tool_definitions if tool["name"] == "get_course_outline"), + None, + ) + assert outline_tool is not None + assert "outline" in outline_tool["description"].lower() + + def 
test_real_search_tool_execution( + self, real_rag_system, sample_course, sample_course_chunks + ): + """Test that search tool actually works with real vector store""" + # Add test data + real_rag_system.vector_store.add_course_metadata(sample_course) + real_rag_system.vector_store.add_course_content(sample_course_chunks) + + # Execute search tool directly + result = real_rag_system.tool_manager.execute_tool( + "search_course_content", + query="machine learning", + course_name="Introduction to Machine Learning", + ) + + assert isinstance(result, str) + assert len(result) > 0 + assert "Introduction to Machine Learning" in result + + # Check that sources are tracked + sources = real_rag_system.tool_manager.get_last_sources() + assert len(sources) > 0 + + def test_real_outline_tool_execution( + self, real_rag_system, sample_course, sample_course_chunks + ): + """Test that outline tool actually works with real vector store""" + # Add test data + real_rag_system.vector_store.add_course_metadata(sample_course) + real_rag_system.vector_store.add_course_content(sample_course_chunks) + + # Execute outline tool directly + result = real_rag_system.tool_manager.execute_tool( + "get_course_outline", + course_name="Machine Learning", # Partial name to test fuzzy matching + ) + + assert isinstance(result, str) + assert "Introduction to Machine Learning" in result + assert "Lessons (3 total):" in result + assert "1. What is ML?" in result + assert "2. Types of ML" in result + assert "3. 
ML Algorithms" in result diff --git a/backend/tests/test_static_files.py b/backend/tests/test_static_files.py new file mode 100644 index 00000000..f74706f2 --- /dev/null +++ b/backend/tests/test_static_files.py @@ -0,0 +1,364 @@ +"""Tests for static file serving and frontend integration""" +import pytest +import tempfile +import os +from pathlib import Path +from unittest.mock import Mock, patch +from fastapi.testclient import TestClient + + +@pytest.mark.api +class TestStaticFileHandling: + """Test static file serving without requiring actual frontend files""" + + @pytest.fixture + def temp_frontend_dir(self): + """Create temporary frontend directory with test files""" + with tempfile.TemporaryDirectory() as temp_dir: + frontend_path = Path(temp_dir) / "frontend" + frontend_path.mkdir() + + # Create test HTML file + index_html = frontend_path / "index.html" + index_html.write_text(""" + + + RAG System Test + +

+                <h1>Course Materials RAG System</h1>
+                <div id="app">Test Application</div>
+ + + """) + + # Create test CSS file + styles_css = frontend_path / "styles.css" + styles_css.write_text(""" + body { font-family: Arial, sans-serif; } + .container { max-width: 1200px; margin: 0 auto; } + """) + + # Create test JS file + app_js = frontend_path / "app.js" + app_js.write_text(""" + console.log('RAG System Test App'); + document.addEventListener('DOMContentLoaded', function() { + console.log('App loaded'); + }); + """) + + yield frontend_path + + def test_static_app_with_frontend_files(self, temp_frontend_dir, test_app_factory): + """Test app with actual frontend files""" + from fastapi import FastAPI + from fastapi.staticfiles import StaticFiles + from fastapi.middleware.cors import CORSMiddleware + from fastapi.middleware.trustedhost import TrustedHostMiddleware + + # Create app with static files + mock_rag = Mock() + mock_rag.query.return_value = ("Test response", []) + mock_rag.session_manager.create_session.return_value = "test-session" + mock_rag.get_course_analytics.return_value = {"total_courses": 0, "course_titles": []} + + app = FastAPI(title="RAG System with Static Files") + app.add_middleware(TrustedHostMiddleware, allowed_hosts=["*"]) + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + # Add API routes (simplified) + @app.get("/api/courses") + async def get_courses(): + return {"total_courses": 0, "course_titles": []} + + # Mount static files + app.mount("/", StaticFiles(directory=str(temp_frontend_dir), html=True), name="static") + + client = TestClient(app) + + # Test that index.html is served + response = client.get("/") + assert response.status_code == 200 + assert "Course Materials RAG System" in response.text + assert "Test Application" in response.text + + # Test CSS file + response = client.get("/styles.css") + assert response.status_code == 200 + assert "font-family: Arial" in response.text + + # Test JS file + response = 
client.get("/app.js") + assert response.status_code == 200 + assert "RAG System Test App" in response.text + + # Test API still works + response = client.get("/api/courses") + assert response.status_code == 200 + + def test_static_file_not_found(self, temp_frontend_dir): + """Test 404 handling for missing static files""" + from fastapi import FastAPI + from fastapi.staticfiles import StaticFiles + + app = FastAPI() + app.mount("/", StaticFiles(directory=str(temp_frontend_dir), html=True), name="static") + + client = TestClient(app) + + # Request non-existent file + response = client.get("/nonexistent.html") + assert response.status_code == 404 + + def test_static_file_security(self, temp_frontend_dir): + """Test that static files don't expose sensitive paths""" + from fastapi import FastAPI + from fastapi.staticfiles import StaticFiles + + app = FastAPI() + app.mount("/", StaticFiles(directory=str(temp_frontend_dir), html=True), name="static") + + client = TestClient(app) + + # Try to access files outside the static directory + response = client.get("/../../../etc/passwd") + assert response.status_code == 404 + + response = client.get("/../../backend/config.py") + assert response.status_code == 404 + + +@pytest.mark.api +class TestDevStaticFiles: + """Test custom DevStaticFiles class with no-cache headers""" + + @pytest.fixture + def dev_app_with_static(self, temp_frontend_dir): + """Create app using DevStaticFiles class""" + from fastapi import FastAPI + + # Recreate the DevStaticFiles class from app.py + from fastapi.staticfiles import StaticFiles + from fastapi.responses import FileResponse + + class DevStaticFiles(StaticFiles): + async def get_response(self, path: str, scope): + response = await super().get_response(path, scope) + if isinstance(response, FileResponse): + response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" + response.headers["Pragma"] = "no-cache" + response.headers["Expires"] = "0" + return response + + app = FastAPI() + 
app.mount("/", DevStaticFiles(directory=str(temp_frontend_dir), html=True), name="static") + + return TestClient(app) + + def test_dev_static_no_cache_headers(self, dev_app_with_static): + """Test that DevStaticFiles adds no-cache headers""" + client = dev_app_with_static + + response = client.get("/index.html") + assert response.status_code == 200 + + # Check for no-cache headers + assert response.headers.get("Cache-Control") == "no-cache, no-store, must-revalidate" + assert response.headers.get("Pragma") == "no-cache" + assert response.headers.get("Expires") == "0" + + def test_dev_static_css_no_cache(self, dev_app_with_static): + """Test no-cache headers on CSS files""" + client = dev_app_with_static + + response = client.get("/styles.css") + assert response.status_code == 200 + assert "no-cache" in response.headers.get("Cache-Control", "") + + def test_dev_static_js_no_cache(self, dev_app_with_static): + """Test no-cache headers on JS files""" + client = dev_app_with_static + + response = client.get("/app.js") + assert response.status_code == 200 + assert "no-cache" in response.headers.get("Cache-Control", "") + + +@pytest.mark.integration +class TestFullAppWithStatic: + """Integration tests with both API and static file serving""" + + @pytest.fixture + def full_test_app(self, temp_frontend_dir): + """Create full app with both API and static files""" + from fastapi import FastAPI, HTTPException + from fastapi.staticfiles import StaticFiles + from fastapi.middleware.cors import CORSMiddleware + from fastapi.middleware.trustedhost import TrustedHostMiddleware + from pydantic import BaseModel + from typing import List, Dict + from unittest.mock import Mock + + # Create mock RAG system + mock_rag = Mock() + mock_rag.query.return_value = ("Integration test response", []) + mock_rag.session_manager.create_session.return_value = "integration-session" + mock_rag.get_course_analytics.return_value = { + "total_courses": 1, + "course_titles": ["Integration Test Course"] + 
} + + app = FastAPI(title="Full Integration Test App") + + # Add middleware + app.add_middleware(TrustedHostMiddleware, allowed_hosts=["*"]) + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + # Request/Response models + class QueryRequest(BaseModel): + query: str + session_id: str = None + + class QueryResponse(BaseModel): + answer: str + sources: List[Dict[str, str]] + session_id: str + + class CourseStats(BaseModel): + total_courses: int + course_titles: List[str] + + # API endpoints + @app.post("/api/query", response_model=QueryResponse) + async def query_documents(request: QueryRequest): + try: + session_id = request.session_id or mock_rag.session_manager.create_session() + answer, sources = mock_rag.query(request.query, session_id) + return QueryResponse(answer=answer, sources=sources, session_id=session_id) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @app.get("/api/courses", response_model=CourseStats) + async def get_course_stats(): + try: + analytics = mock_rag.get_course_analytics() + return CourseStats( + total_courses=analytics["total_courses"], + course_titles=analytics["course_titles"] + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + # Mount static files + app.mount("/", StaticFiles(directory=str(temp_frontend_dir), html=True), name="static") + + return TestClient(app), mock_rag + + def test_api_and_static_coexistence(self, full_test_app): + """Test that API and static files work together""" + client, mock_rag = full_test_app + + # Test API endpoints work + query_response = client.post("/api/query", json={"query": "test integration"}) + assert query_response.status_code == 200 + assert query_response.json()["answer"] == "Integration test response" + + courses_response = client.get("/api/courses") + assert courses_response.status_code == 200 + assert 
courses_response.json()["total_courses"] == 1 + + # Test static files work + static_response = client.get("/") + assert static_response.status_code == 200 + assert "Course Materials RAG System" in static_response.text + + css_response = client.get("/styles.css") + assert css_response.status_code == 200 + assert "font-family" in css_response.text + + def test_api_priority_over_static(self, full_test_app): + """Test that API routes have priority over static file paths""" + client, mock_rag = full_test_app + + # Create a file that could conflict with API path + # (This would be handled by FastAPI route precedence) + + # API should still work + response = client.get("/api/courses") + assert response.status_code == 200 + assert "total_courses" in response.json() + + def test_error_handling_with_static(self, full_test_app): + """Test error handling doesn't interfere with static files""" + client, mock_rag = full_test_app + + # Make API throw error + mock_rag.query.side_effect = Exception("Test error") + + # API should return 500 + api_response = client.post("/api/query", json={"query": "test"}) + assert api_response.status_code == 500 + + # Static files should still work + static_response = client.get("/index.html") + assert static_response.status_code == 200 + + +@pytest.mark.api +class TestStaticFileConfiguration: + """Test static file configuration and edge cases""" + + def test_html_fallback_behavior(self, temp_frontend_dir): + """Test HTML fallback for SPA routing""" + from fastapi import FastAPI + from fastapi.staticfiles import StaticFiles + + app = FastAPI() + # html=True enables SPA fallback to index.html + app.mount("/", StaticFiles(directory=str(temp_frontend_dir), html=True), name="static") + + client = TestClient(app) + + # Request to SPA route should fallback to index.html + response = client.get("/nonexistent-spa-route") + # This might be 404 or return index.html depending on StaticFiles implementation + # The important thing is it doesn't crash + assert 
response.status_code in [200, 404] + + def test_static_file_content_types(self, temp_frontend_dir): + """Test proper content types are set for different file types""" + from fastapi import FastAPI + from fastapi.staticfiles import StaticFiles + + app = FastAPI() + app.mount("/", StaticFiles(directory=str(temp_frontend_dir)), name="static") + + client = TestClient(app) + + # HTML file + html_response = client.get("/index.html") + if html_response.status_code == 200: + assert "text/html" in html_response.headers.get("content-type", "") + + # CSS file + css_response = client.get("/styles.css") + if css_response.status_code == 200: + assert "text/css" in css_response.headers.get("content-type", "") + + # JS file + js_response = client.get("/app.js") + if js_response.status_code == 200: + content_type = js_response.headers.get("content-type", "") + assert any(mime in content_type for mime in ["application/javascript", "text/javascript"]) \ No newline at end of file diff --git a/backend/tests/test_vector_store.py b/backend/tests/test_vector_store.py new file mode 100644 index 00000000..896a9ee1 --- /dev/null +++ b/backend/tests/test_vector_store.py @@ -0,0 +1,333 @@ +"""Unit tests for VectorStore""" + +from unittest.mock import patch + +import pytest +from vector_store import SearchResults, VectorStore + + +class TestVectorStoreSearch: + """Test VectorStore search functionality""" + + def test_search_basic_query_success(self, populated_vector_store): + """Test basic search without filters""" + results = populated_vector_store.search("machine learning") + + assert not results.is_empty() + assert results.error is None + assert len(results.documents) > 0 + assert len(results.metadata) == len(results.documents) + assert all("course_title" in meta for meta in results.metadata) + + def test_search_with_course_name_filter(self, populated_vector_store): + """Test search with course name filter""" + results = populated_vector_store.search( + "algorithms", 
course_name="Introduction to Machine Learning" + ) + + assert not results.is_empty() + assert results.error is None + # All results should be from the specified course + for meta in results.metadata: + assert meta["course_title"] == "Introduction to Machine Learning" + + def test_search_with_lesson_number_filter(self, populated_vector_store): + """Test search with lesson number filter""" + results = populated_vector_store.search("machine learning", lesson_number=1) + + assert not results.is_empty() + assert results.error is None + # All results should be from lesson 1 + for meta in results.metadata: + assert meta.get("lesson_number") == 1 + + def test_search_with_both_filters(self, populated_vector_store): + """Test search with both course name and lesson number filters""" + results = populated_vector_store.search( + "machine learning", + course_name="Introduction to Machine Learning", + lesson_number=1, + ) + + assert not results.is_empty() + assert results.error is None + # Results should match both filters + for meta in results.metadata: + assert meta["course_title"] == "Introduction to Machine Learning" + assert meta.get("lesson_number") == 1 + + def test_search_nonexistent_course(self, populated_vector_store): + """Test search with non-existent course name""" + results = populated_vector_store.search( + "machine learning", course_name="Nonexistent Course" + ) + + assert results.error is not None + assert "No course found matching 'Nonexistent Course'" in results.error + assert results.is_empty() + + def test_search_no_results_found(self, populated_vector_store): + """Test search that yields no results - semantic search may still return distant matches""" + results = populated_vector_store.search("completely unrelated topic xyz123") + + # Should return results without error (semantic search finds best matches even if distant) + assert results.error is None + # Note: Semantic search typically returns results even for unrelated queries, + # finding the "closest" 
match. This is expected behavior. + + def test_search_with_limit(self, populated_vector_store): + """Test search with custom result limit""" + results = populated_vector_store.search("machine learning", limit=1) + + assert not results.is_empty() + assert len(results.documents) == 1 + assert len(results.metadata) == 1 + assert len(results.distances) == 1 + + +class TestVectorStoreCourseNameResolution: + """Test course name resolution functionality""" + + def test_resolve_exact_course_name(self, populated_vector_store): + """Test resolving exact course name""" + resolved = populated_vector_store._resolve_course_name( + "Introduction to Machine Learning" + ) + assert resolved == "Introduction to Machine Learning" + + def test_resolve_partial_course_name(self, populated_vector_store): + """Test resolving partial course name""" + resolved = populated_vector_store._resolve_course_name("Machine Learning") + assert resolved == "Introduction to Machine Learning" + + def test_resolve_case_insensitive_course_name(self, populated_vector_store): + """Test case-insensitive course name resolution""" + resolved = populated_vector_store._resolve_course_name("machine learning") + assert resolved == "Introduction to Machine Learning" + + def test_resolve_nonexistent_course_name(self, populated_vector_store): + """Test resolving non-existent course name""" + resolved = populated_vector_store._resolve_course_name("Nonexistent Course") + assert resolved is None + + +class TestVectorStoreFilterBuilding: + """Test filter building for ChromaDB queries""" + + def test_build_filter_no_parameters(self, real_vector_store): + """Test filter building with no parameters""" + filter_dict = real_vector_store._build_filter(None, None) + assert filter_dict is None + + def test_build_filter_course_only(self, real_vector_store): + """Test filter building with course title only""" + filter_dict = real_vector_store._build_filter("Test Course", None) + expected = {"course_title": "Test Course"} + assert 
filter_dict == expected + + def test_build_filter_lesson_only(self, real_vector_store): + """Test filter building with lesson number only""" + filter_dict = real_vector_store._build_filter(None, 5) + expected = {"lesson_number": 5} + assert filter_dict == expected + + def test_build_filter_both_parameters(self, real_vector_store): + """Test filter building with both parameters""" + filter_dict = real_vector_store._build_filter("Test Course", 3) + expected = {"$and": [{"course_title": "Test Course"}, {"lesson_number": 3}]} + assert filter_dict == expected + + +class TestVectorStoreDataManagement: + """Test data addition and management""" + + def test_add_course_metadata(self, real_vector_store, sample_course): + """Test adding course metadata""" + real_vector_store.add_course_metadata(sample_course) + + # Verify course was added + existing_titles = real_vector_store.get_existing_course_titles() + assert sample_course.title in existing_titles + + def test_add_course_content(self, real_vector_store, sample_course_chunks): + """Test adding course content chunks""" + real_vector_store.add_course_content(sample_course_chunks) + + # Search should find the added content + results = real_vector_store.search("machine learning") + assert not results.is_empty() + + def test_get_course_count(self, populated_vector_store): + """Test getting course count""" + count = populated_vector_store.get_course_count() + assert count == 1 # One course from fixture + + def test_get_existing_course_titles(self, populated_vector_store): + """Test getting existing course titles""" + titles = populated_vector_store.get_existing_course_titles() + assert "Introduction to Machine Learning" in titles + + def test_clear_all_data(self, populated_vector_store): + """Test clearing all data""" + # Verify data exists + assert populated_vector_store.get_course_count() > 0 + + # Clear data + populated_vector_store.clear_all_data() + + # Verify data is cleared + assert 
populated_vector_store.get_course_count() == 0 + assert len(populated_vector_store.get_existing_course_titles()) == 0 + + +class TestVectorStoreLinkMethods: + """Test course and lesson link retrieval""" + + def test_get_course_link_existing(self, populated_vector_store): + """Test getting link for existing course""" + link = populated_vector_store.get_course_link( + "Introduction to Machine Learning" + ) + assert link == "https://example.com/ml-course" + + def test_get_course_link_nonexistent(self, populated_vector_store): + """Test getting link for non-existent course""" + link = populated_vector_store.get_course_link("Nonexistent Course") + assert link is None + + def test_get_lesson_link_existing(self, populated_vector_store): + """Test getting link for existing lesson""" + link = populated_vector_store.get_lesson_link( + "Introduction to Machine Learning", 1 + ) + assert link == "https://example.com/lesson1" + + def test_get_lesson_link_nonexistent_course(self, populated_vector_store): + """Test getting lesson link for non-existent course""" + link = populated_vector_store.get_lesson_link("Nonexistent Course", 1) + assert link is None + + def test_get_lesson_link_nonexistent_lesson(self, populated_vector_store): + """Test getting link for non-existent lesson""" + link = populated_vector_store.get_lesson_link( + "Introduction to Machine Learning", 999 + ) + assert link is None + + +class TestVectorStoreCourseOutline: + """Test course outline functionality""" + + def test_get_course_outline_existing(self, populated_vector_store): + """Test getting outline for existing course""" + outline = populated_vector_store.get_course_outline( + "Introduction to Machine Learning" + ) + + assert outline is not None + assert outline["course_title"] == "Introduction to Machine Learning" + assert outline["course_link"] == "https://example.com/ml-course" + assert len(outline["lessons"]) == 3 + + # Verify lesson structure + lesson1 = outline["lessons"][0] + assert 
lesson1["lesson_number"] == 1 + assert lesson1["lesson_title"] == "What is ML?" + + def test_get_course_outline_with_fuzzy_matching(self, populated_vector_store): + """Test getting outline with partial course name""" + outline = populated_vector_store.get_course_outline("Machine Learning") + + assert outline is not None + assert outline["course_title"] == "Introduction to Machine Learning" + + def test_get_course_outline_nonexistent(self, populated_vector_store): + """Test getting outline for non-existent course""" + outline = populated_vector_store.get_course_outline("Nonexistent Course") + assert outline is None + + +class TestVectorStoreErrorHandling: + """Test error handling in VectorStore""" + + @patch("chromadb.PersistentClient") + def test_chromadb_connection_error(self, mock_client_class, test_config): + """Test handling of ChromaDB connection errors""" + # Mock client to raise exception on creation + mock_client_class.side_effect = Exception("Cannot connect to ChromaDB") + + with pytest.raises(Exception): + VectorStore(test_config.CHROMA_PATH, test_config.EMBEDDING_MODEL) + + def test_search_with_chromadb_exception(self, real_vector_store): + """Test search behavior when ChromaDB raises exception""" + # Mock the collection to raise exception + with patch.object(real_vector_store.course_content, "query") as mock_query: + mock_query.side_effect = Exception("ChromaDB query failed") + + results = real_vector_store.search("test query") + + assert results.error is not None + assert "Search error" in results.error + assert results.is_empty() + + def test_course_name_resolution_exception(self, real_vector_store): + """Test course name resolution when ChromaDB raises exception""" + with patch.object(real_vector_store.course_catalog, "query") as mock_query: + mock_query.side_effect = Exception("ChromaDB query failed") + + resolved = real_vector_store._resolve_course_name("Test Course") + assert resolved is None + + def test_add_empty_course_content(self, 
real_vector_store): + """Test adding empty course content list""" + real_vector_store.add_course_content([]) + # Should not raise exception - verify by checking no crash occurs + + +class TestSearchResults: + """Test SearchResults data class""" + + def test_from_chroma_normal_results(self): + """Test creating SearchResults from normal ChromaDB results""" + chroma_results = { + "documents": [["doc1", "doc2"]], + "metadatas": [["meta1", "meta2"]], + "distances": [[0.1, 0.2]], + } + + results = SearchResults.from_chroma(chroma_results) + + assert results.documents == ["doc1", "doc2"] + assert results.metadata == ["meta1", "meta2"] + assert results.distances == [0.1, 0.2] + assert results.error is None + + def test_from_chroma_empty_results(self): + """Test creating SearchResults from empty ChromaDB results""" + chroma_results = {"documents": [[]], "metadatas": [[]], "distances": [[]]} + + results = SearchResults.from_chroma(chroma_results) + + assert results.documents == [] + assert results.metadata == [] + assert results.distances == [] + assert results.error is None + + def test_empty_with_error(self): + """Test creating empty SearchResults with error""" + results = SearchResults.empty("Test error message") + + assert results.documents == [] + assert results.metadata == [] + assert results.distances == [] + assert results.error == "Test error message" + assert results.is_empty() + + def test_is_empty_detection(self): + """Test is_empty method""" + empty_results = SearchResults([], [], []) + assert empty_results.is_empty() + + non_empty_results = SearchResults(["doc1"], [{"meta": "data"}], [0.1]) + assert not non_empty_results.is_empty() diff --git a/backend/vector_store.py b/backend/vector_store.py index 390abe71..5ed6ebb3 100644 --- a/backend/vector_store.py +++ b/backend/vector_store.py @@ -1,77 +1,92 @@ +from dataclasses import dataclass +from typing import Any + import chromadb from chromadb.config import Settings -from typing import List, Dict, Any, Optional 
-from dataclasses import dataclass from models import Course, CourseChunk -from sentence_transformers import SentenceTransformer + @dataclass class SearchResults: """Container for search results with metadata""" - documents: List[str] - metadata: List[Dict[str, Any]] - distances: List[float] - error: Optional[str] = None - + + documents: list[str] + metadata: list[dict[str, Any]] + distances: list[float] + error: str | None = None + @classmethod - def from_chroma(cls, chroma_results: Dict) -> 'SearchResults': + def from_chroma(cls, chroma_results: dict) -> "SearchResults": """Create SearchResults from ChromaDB query results""" return cls( - documents=chroma_results['documents'][0] if chroma_results['documents'] else [], - metadata=chroma_results['metadatas'][0] if chroma_results['metadatas'] else [], - distances=chroma_results['distances'][0] if chroma_results['distances'] else [] + documents=( + chroma_results["documents"][0] if chroma_results["documents"] else [] + ), + metadata=( + chroma_results["metadatas"][0] if chroma_results["metadatas"] else [] + ), + distances=( + chroma_results["distances"][0] if chroma_results["distances"] else [] + ), ) - + @classmethod - def empty(cls, error_msg: str) -> 'SearchResults': + def empty(cls, error_msg: str) -> "SearchResults": """Create empty results with error message""" return cls(documents=[], metadata=[], distances=[], error=error_msg) - + def is_empty(self) -> bool: """Check if results are empty""" return len(self.documents) == 0 + class VectorStore: """Vector storage using ChromaDB for course content and metadata""" - + def __init__(self, chroma_path: str, embedding_model: str, max_results: int = 5): self.max_results = max_results # Initialize ChromaDB client self.client = chromadb.PersistentClient( - path=chroma_path, - settings=Settings(anonymized_telemetry=False) + path=chroma_path, settings=Settings(anonymized_telemetry=False) ) - + # Set up sentence transformer embedding function - self.embedding_function = 
chromadb.utils.embedding_functions.SentenceTransformerEmbeddingFunction( - model_name=embedding_model + self.embedding_function = ( + chromadb.utils.embedding_functions.SentenceTransformerEmbeddingFunction( + model_name=embedding_model + ) ) - + # Create collections for different types of data - self.course_catalog = self._create_collection("course_catalog") # Course titles/instructors - self.course_content = self._create_collection("course_content") # Actual course material - + self.course_catalog = self._create_collection( + "course_catalog" + ) # Course titles/instructors + self.course_content = self._create_collection( + "course_content" + ) # Actual course material + def _create_collection(self, name: str): """Create or get a ChromaDB collection""" return self.client.get_or_create_collection( - name=name, - embedding_function=self.embedding_function + name=name, embedding_function=self.embedding_function ) - - def search(self, - query: str, - course_name: Optional[str] = None, - lesson_number: Optional[int] = None, - limit: Optional[int] = None) -> SearchResults: + + def search( + self, + query: str, + course_name: str | None = None, + lesson_number: int | None = None, + limit: int | None = None, + ) -> SearchResults: """ Main search interface that handles course resolution and content search. 
- + Args: query: What to search for in course content course_name: Optional course name/title to filter by lesson_number: Optional lesson number to filter by limit: Maximum results to return - + Returns: SearchResults object with documents and metadata """ @@ -81,104 +96,128 @@ def search(self, course_title = self._resolve_course_name(course_name) if not course_title: return SearchResults.empty(f"No course found matching '{course_name}'") - + # Step 2: Build filter for content search filter_dict = self._build_filter(course_title, lesson_number) - + # Step 3: Search course content # Use provided limit or fall back to configured max_results search_limit = limit if limit is not None else self.max_results - + try: results = self.course_content.query( - query_texts=[query], - n_results=search_limit, - where=filter_dict + query_texts=[query], n_results=search_limit, where=filter_dict ) return SearchResults.from_chroma(results) except Exception as e: return SearchResults.empty(f"Search error: {str(e)}") - - def _resolve_course_name(self, course_name: str) -> Optional[str]: - """Use vector search to find best matching course by name""" + + def _resolve_course_name(self, course_name: str) -> str | None: + """Use vector search to find best matching course by name with similarity threshold""" try: - results = self.course_catalog.query( - query_texts=[course_name], - n_results=1 - ) - - if results['documents'][0] and results['metadatas'][0]: - # Return the title (which is now the ID) - return results['metadatas'][0][0]['title'] + results = self.course_catalog.query(query_texts=[course_name], n_results=1) + + if ( + results["documents"][0] + and results["metadatas"][0] + and results["distances"][0] + ): + # Check similarity threshold (ChromaDB uses distance, lower is better) + # Distance of 1.0+ indicates poor match for short text like course names + distance = results["distances"][0][0] + + # For course names, be more strict with matching + SIMILARITY_THRESHOLD = 0.8 # Allow 
moderate similarity + + if distance <= SIMILARITY_THRESHOLD: + # Return the title (which is now the ID) + return results["metadatas"][0][0]["title"] + else: + print( + f"Course name '{course_name}' rejected due to poor similarity (distance: {distance})" + ) + except Exception as e: print(f"Error resolving course name: {e}") - + return None - - def _build_filter(self, course_title: Optional[str], lesson_number: Optional[int]) -> Optional[Dict]: + + def _build_filter( + self, course_title: str | None, lesson_number: int | None + ) -> dict | None: """Build ChromaDB filter from search parameters""" if not course_title and lesson_number is None: return None - + # Handle different filter combinations if course_title and lesson_number is not None: - return {"$and": [ - {"course_title": course_title}, - {"lesson_number": lesson_number} - ]} - + return { + "$and": [ + {"course_title": course_title}, + {"lesson_number": lesson_number}, + ] + } + if course_title: return {"course_title": course_title} - + return {"lesson_number": lesson_number} - + def add_course_metadata(self, course: Course): """Add course information to the catalog for semantic search""" import json course_text = course.title - + # Build lessons metadata and serialize as JSON string lessons_metadata = [] for lesson in course.lessons: - lessons_metadata.append({ - "lesson_number": lesson.lesson_number, - "lesson_title": lesson.title, - "lesson_link": lesson.lesson_link - }) - + lessons_metadata.append( + { + "lesson_number": lesson.lesson_number, + "lesson_title": lesson.title, + "lesson_link": lesson.lesson_link, + } + ) + self.course_catalog.add( documents=[course_text], - metadatas=[{ - "title": course.title, - "instructor": course.instructor, - "course_link": course.course_link, - "lessons_json": json.dumps(lessons_metadata), # Serialize as JSON string - "lesson_count": len(course.lessons) - }], - ids=[course.title] + metadatas=[ + { + "title": course.title, + "instructor": course.instructor, + 
"course_link": course.course_link, + "lessons_json": json.dumps( + lessons_metadata + ), # Serialize as JSON string + "lesson_count": len(course.lessons), + } + ], + ids=[course.title], ) - - def add_course_content(self, chunks: List[CourseChunk]): + + def add_course_content(self, chunks: list[CourseChunk]): """Add course content chunks to the vector store""" if not chunks: return - + documents = [chunk.content for chunk in chunks] - metadatas = [{ - "course_title": chunk.course_title, - "lesson_number": chunk.lesson_number, - "chunk_index": chunk.chunk_index - } for chunk in chunks] + metadatas = [ + { + "course_title": chunk.course_title, + "lesson_number": chunk.lesson_number, + "chunk_index": chunk.chunk_index, + } + for chunk in chunks + ] # Use title with chunk index for unique IDs - ids = [f"{chunk.course_title.replace(' ', '_')}_{chunk.chunk_index}" for chunk in chunks] - - self.course_content.add( - documents=documents, - metadatas=metadatas, - ids=ids - ) - + ids = [ + f"{chunk.course_title.replace(' ', '_')}_{chunk.chunk_index}" + for chunk in chunks + ] + + self.course_content.add(documents=documents, metadatas=metadatas, ids=ids) + def clear_all_data(self): """Clear all data from both collections""" try: @@ -189,43 +228,46 @@ def clear_all_data(self): self.course_content = self._create_collection("course_content") except Exception as e: print(f"Error clearing data: {e}") - - def get_existing_course_titles(self) -> List[str]: + + def get_existing_course_titles(self) -> list[str]: """Get all existing course titles from the vector store""" try: # Get all documents from the catalog results = self.course_catalog.get() - if results and 'ids' in results: - return results['ids'] + if results and "ids" in results: + return results["ids"] return [] except Exception as e: print(f"Error getting existing course titles: {e}") return [] - + def get_course_count(self) -> int: """Get the total number of courses in the vector store""" try: results = 
self.course_catalog.get() - if results and 'ids' in results: - return len(results['ids']) + if results and "ids" in results: + return len(results["ids"]) return 0 except Exception as e: print(f"Error getting course count: {e}") return 0 - - def get_all_courses_metadata(self) -> List[Dict[str, Any]]: + + def get_all_courses_metadata(self) -> list[dict[str, Any]]: """Get metadata for all courses in the vector store""" import json + try: results = self.course_catalog.get() - if results and 'metadatas' in results: + if results and "metadatas" in results: # Parse lessons JSON for each course parsed_metadata = [] - for metadata in results['metadatas']: + for metadata in results["metadatas"]: course_meta = metadata.copy() - if 'lessons_json' in course_meta: - course_meta['lessons'] = json.loads(course_meta['lessons_json']) - del course_meta['lessons_json'] # Remove the JSON string version + if "lessons_json" in course_meta: + course_meta["lessons"] = json.loads(course_meta["lessons_json"]) + del course_meta[ + "lessons_json" + ] # Remove the JSON string version parsed_metadata.append(course_meta) return parsed_metadata return [] @@ -233,35 +275,71 @@ def get_all_courses_metadata(self) -> List[Dict[str, Any]]: print(f"Error getting courses metadata: {e}") return [] - def get_course_link(self, course_title: str) -> Optional[str]: + def get_course_link(self, course_title: str) -> str | None: """Get course link for a given course title""" try: # Get course by ID (title is the ID) results = self.course_catalog.get(ids=[course_title]) - if results and 'metadatas' in results and results['metadatas']: - metadata = results['metadatas'][0] - return metadata.get('course_link') + if results and "metadatas" in results and results["metadatas"]: + metadata = results["metadatas"][0] + return metadata.get("course_link") return None except Exception as e: print(f"Error getting course link: {e}") return None - - def get_lesson_link(self, course_title: str, lesson_number: int) -> 
Optional[str]: + + def get_lesson_link(self, course_title: str, lesson_number: int) -> str | None: """Get lesson link for a given course title and lesson number""" import json + try: # Get course by ID (title is the ID) results = self.course_catalog.get(ids=[course_title]) - if results and 'metadatas' in results and results['metadatas']: - metadata = results['metadatas'][0] - lessons_json = metadata.get('lessons_json') + if results and "metadatas" in results and results["metadatas"]: + metadata = results["metadatas"][0] + lessons_json = metadata.get("lessons_json") if lessons_json: lessons = json.loads(lessons_json) # Find the lesson with matching number for lesson in lessons: - if lesson.get('lesson_number') == lesson_number: - return lesson.get('lesson_link') + if lesson.get("lesson_number") == lesson_number: + return lesson.get("lesson_link") return None except Exception as e: print(f"Error getting lesson link: {e}") - \ No newline at end of file + return None + + def get_course_outline(self, course_title: str) -> dict[str, Any] | None: + """ + Get complete course outline including title, link, and all lessons. 
+ + Args: + course_title: The course title to get outline for + + Returns: + Dict with course_title, course_link, and lessons list, or None if not found + """ + import json + + try: + # First resolve the course name to handle fuzzy matching + resolved_title = self._resolve_course_name(course_title) + if not resolved_title: + return None + + # Get course by resolved title (title is the ID) + results = self.course_catalog.get(ids=[resolved_title]) + if results and "metadatas" in results and results["metadatas"]: + metadata = results["metadatas"][0] + lessons_json = metadata.get("lessons_json", "[]") + lessons = json.loads(lessons_json) + + return { + "course_title": metadata.get("title"), + "course_link": metadata.get("course_link"), + "lessons": lessons, # Already parsed list of lesson objects + } + return None + except Exception as e: + print(f"Error getting course outline: {e}") + return None diff --git a/frontend/index.html b/frontend/index.html index f8e25a62..46eba5b8 100644 --- a/frontend/index.html +++ b/frontend/index.html @@ -12,13 +12,61 @@
-

Course Materials Assistant

-

Ask questions about courses, instructors, and content

+
+
+

Course Materials Assistant

+

Ask questions about courses, instructors, and content

+
+ +
+ +
+ +
+ + +
+ +
+
diff --git a/frontend/script.js b/frontend/script.js index 562a8a36..3de728df 100644 --- a/frontend/script.js +++ b/frontend/script.js @@ -1,104 +1,118 @@ // API base URL - use relative path to work from any host -const API_URL = '/api'; +const API_URL = "/api"; // Global state let currentSessionId = null; // DOM elements -let chatMessages, chatInput, sendButton, totalCourses, courseTitles; +let chatMessages, chatInput, sendButton, totalCourses, courseTitles, newChatButton, themeToggle, sidebarThemeToggle; // Initialize -document.addEventListener('DOMContentLoaded', () => { - // Get DOM elements after page loads - chatMessages = document.getElementById('chatMessages'); - chatInput = document.getElementById('chatInput'); - sendButton = document.getElementById('sendButton'); - totalCourses = document.getElementById('totalCourses'); - courseTitles = document.getElementById('courseTitles'); - - setupEventListeners(); - createNewSession(); - loadCourseStats(); +document.addEventListener("DOMContentLoaded", () => { + // Get DOM elements after page loads + chatMessages = document.getElementById("chatMessages"); + chatInput = document.getElementById("chatInput"); + sendButton = document.getElementById("sendButton"); + totalCourses = document.getElementById("totalCourses"); + courseTitles = document.getElementById("courseTitles"); + newChatButton = document.getElementById("newChatButton"); + themeToggle = document.getElementById("themeToggle"); + sidebarThemeToggle = document.getElementById("sidebarThemeToggle"); + + setupEventListeners(); + initializeTheme(); + createNewSession(); + loadCourseStats(); }); // Event Listeners function setupEventListeners() { - // Chat functionality - sendButton.addEventListener('click', sendMessage); - chatInput.addEventListener('keypress', (e) => { - if (e.key === 'Enter') sendMessage(); - }); - - - // Suggested questions - document.querySelectorAll('.suggested-item').forEach(button => { - button.addEventListener('click', (e) => { - const 
question = e.target.getAttribute('data-question'); - chatInput.value = question; - sendMessage(); - }); + // Chat functionality + sendButton.addEventListener("click", sendMessage); + chatInput.addEventListener("keypress", (e) => { + if (e.key === "Enter") sendMessage(); + }); + + // New Chat button + if (newChatButton) { + newChatButton.addEventListener("click", startNewChat); + } + + // Theme toggle buttons + if (themeToggle) { + themeToggle.addEventListener("click", toggleTheme); + } + if (sidebarThemeToggle) { + sidebarThemeToggle.addEventListener("click", toggleTheme); + } + + // Suggested questions + document.querySelectorAll(".suggested-item").forEach((button) => { + button.addEventListener("click", (e) => { + const question = e.target.getAttribute("data-question"); + chatInput.value = question; + sendMessage(); }); + }); } - // Chat Functions async function sendMessage() { - const query = chatInput.value.trim(); - if (!query) return; - - // Disable input - chatInput.value = ''; - chatInput.disabled = true; - sendButton.disabled = true; - - // Add user message - addMessage(query, 'user'); - - // Add loading message - create a unique container for it - const loadingMessage = createLoadingMessage(); - chatMessages.appendChild(loadingMessage); - chatMessages.scrollTop = chatMessages.scrollHeight; - - try { - const response = await fetch(`${API_URL}/query`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - query: query, - session_id: currentSessionId - }) - }); - - if (!response.ok) throw new Error('Query failed'); - - const data = await response.json(); - - // Update session ID if new - if (!currentSessionId) { - currentSessionId = data.session_id; - } - - // Replace loading message with response - loadingMessage.remove(); - addMessage(data.answer, 'assistant', data.sources); - - } catch (error) { - // Replace loading message with error - loadingMessage.remove(); - addMessage(`Error: ${error.message}`, 
'assistant'); - } finally { - chatInput.disabled = false; - sendButton.disabled = false; - chatInput.focus(); + const query = chatInput.value.trim(); + if (!query) return; + + // Disable input + chatInput.value = ""; + chatInput.disabled = true; + sendButton.disabled = true; + + // Add user message + addMessage(query, "user"); + + // Add loading message - create a unique container for it + const loadingMessage = createLoadingMessage(); + chatMessages.appendChild(loadingMessage); + chatMessages.scrollTop = chatMessages.scrollHeight; + + try { + const response = await fetch(`${API_URL}/query`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + query: query, + session_id: currentSessionId, + }), + }); + + if (!response.ok) throw new Error("Query failed"); + + const data = await response.json(); + + // Update session ID if new + if (!currentSessionId) { + currentSessionId = data.session_id; } + + // Replace loading message with response + loadingMessage.remove(); + addMessage(data.answer, "assistant", data.sources); + } catch (error) { + // Replace loading message with error + loadingMessage.remove(); + addMessage(`Error: ${error.message}`, "assistant"); + } finally { + chatInput.disabled = false; + sendButton.disabled = false; + chatInput.focus(); + } } function createLoadingMessage() { - const messageDiv = document.createElement('div'); - messageDiv.className = 'message assistant'; - messageDiv.innerHTML = ` + const messageDiv = document.createElement("div"); + messageDiv.className = "message assistant"; + messageDiv.innerHTML = `
@@ -107,85 +121,158 @@ function createLoadingMessage() {
`; - return messageDiv; + return messageDiv; } function addMessage(content, type, sources = null, isWelcome = false) { - const messageId = Date.now(); - const messageDiv = document.createElement('div'); - messageDiv.className = `message ${type}${isWelcome ? ' welcome-message' : ''}`; - messageDiv.id = `message-${messageId}`; - - // Convert markdown to HTML for assistant messages - const displayContent = type === 'assistant' ? marked.parse(content) : escapeHtml(content); - - let html = `
${displayContent}
`; - - if (sources && sources.length > 0) { - html += ` + const messageId = Date.now(); + const messageDiv = document.createElement("div"); + messageDiv.className = `message ${type}${ + isWelcome ? " welcome-message" : "" + }`; + messageDiv.id = `message-${messageId}`; + + // Convert markdown to HTML for assistant messages + const displayContent = + type === "assistant" ? marked.parse(content) : escapeHtml(content); + + let html = `
${displayContent}
`; + + if (sources && sources.length > 0) { + // Format sources as clickable links when URLs are available + const formattedSources = sources.map((source) => { + if (typeof source === "object" && source.text && source.url) { + // Create clickable link that opens in new tab + return `${source.text}`; + } else { + // Plain text source + return typeof source === "string" + ? source + : source.text || "Unknown source"; + } + }); + + html += `
Sources -
${sources.join(', ')}
+
${formattedSources.join( + ", " + )}
`; - } - - messageDiv.innerHTML = html; - chatMessages.appendChild(messageDiv); - chatMessages.scrollTop = chatMessages.scrollHeight; - - return messageId; + } + + messageDiv.innerHTML = html; + chatMessages.appendChild(messageDiv); + chatMessages.scrollTop = chatMessages.scrollHeight; + + return messageId; } // Helper function to escape HTML for user messages function escapeHtml(text) { - const div = document.createElement('div'); - div.textContent = text; - return div.innerHTML; + const div = document.createElement("div"); + div.textContent = text; + return div.innerHTML; } // Removed removeMessage function - no longer needed since we handle loading differently +function startNewChat() { + // Clear current session and chat + currentSessionId = null; + chatMessages.innerHTML = ""; + + // Re-enable input if disabled + chatInput.disabled = false; + sendButton.disabled = false; + chatInput.value = ""; + + // Show welcome message + addMessage( + "Welcome to the Course Materials Assistant! I can help you with questions about courses, lessons and specific content. What would you like to know?", + "assistant", + null, + true + ); + + // Focus on input for immediate use + chatInput.focus(); +} + async function createNewSession() { - currentSessionId = null; - chatMessages.innerHTML = ''; - addMessage('Welcome to the Course Materials Assistant! I can help you with questions about courses, lessons and specific content. What would you like to know?', 'assistant', null, true); + currentSessionId = null; + chatMessages.innerHTML = ""; + addMessage( + "Welcome to the Course Materials Assistant! I can help you with questions about courses, lessons and specific content. 
What would you like to know?", + "assistant", + null, + true + ); } // Load course statistics async function loadCourseStats() { - try { - console.log('Loading course stats...'); - const response = await fetch(`${API_URL}/courses`); - if (!response.ok) throw new Error('Failed to load course stats'); - - const data = await response.json(); - console.log('Course data received:', data); - - // Update stats in UI - if (totalCourses) { - totalCourses.textContent = data.total_courses; - } - - // Update course titles - if (courseTitles) { - if (data.course_titles && data.course_titles.length > 0) { - courseTitles.innerHTML = data.course_titles - .map(title => `
${title}
`) - .join(''); - } else { - courseTitles.innerHTML = 'No courses available'; - } - } - - } catch (error) { - console.error('Error loading course stats:', error); - // Set default values on error - if (totalCourses) { - totalCourses.textContent = '0'; - } - if (courseTitles) { - courseTitles.innerHTML = 'Failed to load courses'; - } + try { + console.log("Loading course stats..."); + const response = await fetch(`${API_URL}/courses`); + if (!response.ok) throw new Error("Failed to load course stats"); + + const data = await response.json(); + console.log("Course data received:", data); + + // Update stats in UI + if (totalCourses) { + totalCourses.textContent = data.total_courses; + } + + // Update course titles + if (courseTitles) { + if (data.course_titles && data.course_titles.length > 0) { + courseTitles.innerHTML = data.course_titles + .map((title) => `
${title}
`) + .join(""); + } else { + courseTitles.innerHTML = + 'No courses available'; + } } -} \ No newline at end of file + } catch (error) { + console.error("Error loading course stats:", error); + // Set default values on error + if (totalCourses) { + totalCourses.textContent = "0"; + } + if (courseTitles) { + courseTitles.innerHTML = + 'Failed to load courses'; + } + } +} + +// Theme Functions +function initializeTheme() { + // Check for saved theme preference or default to 'dark' + const savedTheme = localStorage.getItem('theme') || 'dark'; + document.documentElement.setAttribute('data-theme', savedTheme); +} + +function toggleTheme() { + const currentTheme = document.documentElement.getAttribute('data-theme'); + const newTheme = currentTheme === 'light' ? 'dark' : 'light'; + + // Update the theme + document.documentElement.setAttribute('data-theme', newTheme); + + // Save the preference + localStorage.setItem('theme', newTheme); +} + +// Add keyboard support for theme toggle +document.addEventListener('keydown', (e) => { + // Toggle theme with Ctrl+Shift+T (or Cmd+Shift+T on Mac) + if ((e.ctrlKey || e.metaKey) && e.shiftKey && e.key === 'T') { + e.preventDefault(); + toggleTheme(); + } +}); diff --git a/frontend/style.css b/frontend/style.css index 825d0367..33487688 100644 --- a/frontend/style.css +++ b/frontend/style.css @@ -7,6 +7,7 @@ /* CSS Variables */ :root { + /* Dark Theme (Default) */ --primary-color: #2563eb; --primary-hover: #1d4ed8; --background: #0f172a; @@ -22,6 +23,45 @@ --focus-ring: rgba(37, 99, 235, 0.2); --welcome-bg: #1e3a5f; --welcome-border: #2563eb; + --code-background: rgba(0, 0, 0, 0.2); + --blockquote-border: #2563eb; + --link-color: #38bdf8; + --link-hover: #0ea5e9; + --error-bg: rgba(239, 68, 68, 0.1); + --error-color: #f87171; + --error-border: rgba(239, 68, 68, 0.2); + --success-bg: rgba(34, 197, 94, 0.1); + --success-color: #4ade80; + --success-border: rgba(34, 197, 94, 0.2); +} + +/* Light Theme */ +[data-theme="light"] { + 
--primary-color: #2563eb; + --primary-hover: #1d4ed8; + --background: #ffffff; + --surface: #f8fafc; + --surface-hover: #e2e8f0; + --text-primary: #1e293b; + --text-secondary: #64748b; + --border-color: #e2e8f0; + --user-message: #2563eb; + --assistant-message: #f1f5f9; + --shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1); + --radius: 12px; + --focus-ring: rgba(37, 99, 235, 0.2); + --welcome-bg: #f0f9ff; + --welcome-border: #2563eb; + --code-background: rgba(0, 0, 0, 0.05); + --blockquote-border: #2563eb; + --link-color: #0ea5e9; + --link-hover: #0284c7; + --error-bg: rgba(239, 68, 68, 0.1); + --error-color: #dc2626; + --error-border: rgba(239, 68, 68, 0.2); + --success-bg: rgba(34, 197, 94, 0.1); + --success-color: #16a34a; + --success-border: rgba(34, 197, 94, 0.2); } /* Base Styles */ @@ -34,6 +74,7 @@ body { overflow: hidden; margin: 0; padding: 0; + transition: background-color 0.3s ease, color 0.3s ease; } /* Container - Full Screen */ @@ -245,6 +286,22 @@ header h1 { color: var(--text-secondary); } +.sources-content a { + color: var(--link-color); + text-decoration: none; + font-weight: 600; + border-bottom: 1px solid transparent; + transition: all 0.2s ease; + opacity: 1; +} + +.sources-content a:hover { + color: var(--link-hover); + border-bottom-color: var(--link-hover); + text-decoration: none; + opacity: 1; +} + /* Markdown formatting styles */ .message-content h1, .message-content h2, @@ -277,7 +334,7 @@ header h1 { } .message-content code { - background-color: rgba(0, 0, 0, 0.2); + background-color: var(--code-background); padding: 0.125rem 0.25rem; border-radius: 3px; font-family: 'Fira Code', 'Consolas', monospace; @@ -285,7 +342,7 @@ header h1 { } .message-content pre { - background-color: rgba(0, 0, 0, 0.2); + background-color: var(--code-background); padding: 0.75rem; border-radius: 4px; overflow-x: auto; @@ -298,7 +355,7 @@ header h1 { } .message-content blockquote { - border-left: 3px solid var(--primary); + border-left: 3px solid 
var(--blockquote-border); padding-left: 1rem; margin: 0.5rem 0; color: var(--text-secondary); @@ -427,21 +484,21 @@ header h1 { /* Error Message */ .error-message { - background: rgba(239, 68, 68, 0.1); - color: #f87171; + background: var(--error-bg); + color: var(--error-color); padding: 0.75rem 1.25rem; border-radius: 8px; - border: 1px solid rgba(239, 68, 68, 0.2); + border: 1px solid var(--error-border); margin: 0.5rem 0; } /* Success Message */ .success-message { - background: rgba(34, 197, 94, 0.1); - color: #4ade80; + background: var(--success-bg); + color: var(--success-color); padding: 0.75rem 1.25rem; border-radius: 8px; - border: 1px solid rgba(34, 197, 94, 0.2); + border: 1px solid var(--success-border); margin: 0.5rem 0; } @@ -601,6 +658,32 @@ details[open] .suggested-header::before { text-transform: none; } +/* New Chat Button */ +.new-chat-button { + width: 100%; + padding: 0.5rem 0; + background: none; + border: none; + color: var(--text-secondary); + font-size: 0.875rem; + font-weight: 600; + cursor: pointer; + transition: color 0.2s ease; + text-align: left; + text-transform: uppercase; + letter-spacing: 0.5px; + margin-bottom: 0.75rem; +} + +.new-chat-button:focus { + outline: none; + color: var(--primary-color); +} + +.new-chat-button:hover { + color: var(--primary-color); +} + /* Suggested Questions in Sidebar */ .suggested-items { display: flex; @@ -716,3 +799,129 @@ details[open] .suggested-header::before { width: 280px; } } + +/* Theme Toggle Button */ +.theme-toggle { + display: flex; + align-items: center; + gap: 0.5rem; + padding: 0.75rem 1rem; + background: var(--background); + border: 1px solid var(--border-color); + border-radius: 8px; + color: var(--text-primary); + cursor: pointer; + transition: all 0.2s ease; + font-size: 0.875rem; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.5px; + width: 100%; + justify-content: flex-start; +} + +.theme-toggle:focus { + outline: none; + box-shadow: 0 0 0 3px 
var(--focus-ring); +} + +.theme-toggle:hover { + background: var(--surface-hover); + border-color: var(--primary-color); + color: var(--primary-color); + transform: translateX(2px); +} + +/* Theme Toggle Icons */ +.theme-icon { + flex-shrink: 0; + transition: opacity 0.3s ease, transform 0.3s ease; +} + +.sun-icon { + opacity: 1; +} + +.moon-icon { + opacity: 0; + position: absolute; +} + +[data-theme="light"] .sun-icon { + opacity: 0; +} + +[data-theme="light"] .moon-icon { + opacity: 1; + position: static; +} + +.theme-label { + margin-left: 0.25rem; +} + +/* Header Theme Toggle (when header is visible) */ +header { + padding: 1.5rem 2rem; + background: var(--surface); + border-bottom: 1px solid var(--border-color); + transition: background-color 0.3s ease, border-color 0.3s ease; +} + +.header-content { + display: flex; + justify-content: space-between; + align-items: center; + max-width: 1200px; + margin: 0 auto; +} + +.header-text { + flex: 1; +} + +.theme-toggle:not(.sidebar-toggle) { + width: auto; + padding: 0.5rem; + border-radius: 50%; + justify-content: center; + min-width: 40px; + min-height: 40px; +} + +.theme-toggle:not(.sidebar-toggle) .theme-label { + display: none; +} + +/* Smooth Transitions for Theme Changes */ +* { + transition: + background-color 0.3s ease, + color 0.3s ease, + border-color 0.3s ease, + box-shadow 0.3s ease; +} + +/* Preserve existing animations */ +.message, +.loading span, +.theme-icon { + transition: + background-color 0.3s ease, + color 0.3s ease, + border-color 0.3s ease, + box-shadow 0.3s ease, + opacity 0.3s ease, + transform 0.3s ease; +} + +.theme-toggle:hover, +.suggested-item:hover, +#sendButton:hover:not(:disabled) { + transition: + background-color 0.2s ease, + color 0.2s ease, + border-color 0.2s ease, + box-shadow 0.2s ease, + transform 0.2s ease; +} diff --git a/pyproject.toml b/pyproject.toml index 3f05e2de..2be4e1c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "starting-codebase" 
version = "0.1.0" description = "Add your description here" readme = "README.md" -requires-python = ">=3.13" +requires-python = ">=3.12" dependencies = [ "chromadb==1.0.15", "anthropic==0.58.2", @@ -12,4 +12,90 @@ dependencies = [ "uvicorn==0.35.0", "python-multipart==0.0.20", "python-dotenv==1.1.1", + "pytest==7.4.4", + "pytest-asyncio==0.23.2", + "httpx==0.27.0", + "black==24.8.0", + "ruff==0.6.8", + "mypy==1.11.2", ] + +[tool.pytest.ini_options] +testpaths = ["backend/tests"] +python_files = ["test_*.py"] +python_classes = ["Test*"] +python_functions = ["test_*"] +addopts = [ + "-v", + "--tb=short", + "--strict-markers", + "--disable-warnings", + "--color=yes" +] +filterwarnings = [ + "ignore::UserWarning", + "ignore::DeprecationWarning", + "ignore:resource_tracker.*:UserWarning" +] +markers = [ + "unit: Unit tests for individual components", + "integration: Integration tests with real dependencies", + "api: API endpoint tests", + "slow: Slow-running tests" +] + +[tool.black] +line-length = 88 +target-version = ['py312'] +include = '\.pyi?$' +extend-exclude = ''' +/( + # directories + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | build + | dist +)/ +''' + +[tool.ruff] +line-length = 88 +target-version = "py312" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "UP", # pyupgrade +] +ignore = [ + "E501", # line too long, handled by black + "B008", # do not perform function calls in argument defaults + "C901", # too complex +] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +[tool.mypy] +python_version = "3.12" +check_untyped_defs = true +disallow_any_generics = true +disallow_incomplete_defs = true +disallow_untyped_defs = true +no_implicit_optional = true +warn_redundant_casts = true +warn_unused_ignores = true +warn_return_any = 
true +strict_optional = true diff --git a/query-flow-diagram.md b/query-flow-diagram.md new file mode 100644 index 00000000..a98ba4a3 --- /dev/null +++ b/query-flow-diagram.md @@ -0,0 +1,110 @@ +# RAG System Query Flow Diagram + +``` +┌─────────────────┐ +│ Frontend │ +│ (script.js) │ +└─────────────────┘ + │ + │ 1. User submits query + │ POST /api/query + │ { "query": "...", "session_id": "..." } + ▼ +┌─────────────────┐ +│ FastAPI App │ +│ (app.py) │ +│ │ +│ query_documents()│ +└─────────────────┘ + │ + │ 2. Create/get session + │ 3. Call rag_system.query() + ▼ +┌─────────────────┐ +│ RAG System │ +│ (rag_system.py) │ +│ │ +│ query() method │ +└─────────────────┘ + │ + │ 4. Get conversation history + │ 5. Prepare prompt + tools + ▼ +┌─────────────────┐ +│ AI Generator │ +│(ai_generator.py)│ +│ │ +│ Claude API + │ +│ Tool Manager │ +└─────────────────┘ + │ + │ 6. Claude decides to use tools + │ 7. Execute CourseSearchTool + ▼ +┌─────────────────┐ +│ Vector Store │ +│(vector_store.py)│ +│ │ +│ ChromaDB │ +│ Semantic │ +│ Search │ +└─────────────────┘ + │ + │ 8. Return relevant chunks + ▲ + │ +┌─────────────────┐ +│ Course Chunks │ +│ │ +│ "Course X │ +│ Lesson Y │ +│ content: ..." │ +└─────────────────┘ + │ + │ 9. Generate response using context + ▼ +┌─────────────────┐ +│ Claude Response │ +│ │ +│ + Sources List │ +└─────────────────┘ + │ + │ 10. Update session history + │ 11. Return answer + sources + ▼ +┌─────────────────┐ +│ Frontend │ +│ │ +│ Display answer │ +│ + collapsible │ +│ sources │ +└─────────────────┘ +``` + +## Data Flow Details + +### Request Flow (Frontend → Backend) +1. **User Input**: Types question in chat interface +2. **API Call**: POST to `/api/query` with JSON payload +3. **Session Management**: Create new session or use existing +4. **RAG Orchestration**: Main query processing logic +5. **AI Processing**: Claude with tool access for search + +### Search & Retrieval Flow +6. 
**Tool Decision**: Claude decides when to search for information +7. **Vector Search**: Semantic similarity search in ChromaDB +8. **Context Retrieval**: Get relevant course content chunks +9. **Response Generation**: Claude creates answer using retrieved context + +### Response Flow (Backend → Frontend) +10. **History Update**: Save conversation for context +11. **JSON Response**: Return structured answer + metadata +12. **UI Update**: Display formatted response with sources + +## Key Components + +- **Session Manager**: Maintains conversation context +- **Document Processor**: Chunks course content with context prefixes +- **Vector Store**: ChromaDB for semantic search +- **Tool Manager**: Enables Claude to search when needed +- **Course Search Tool**: Retrieves relevant content chunks \ No newline at end of file diff --git a/scripts/format.sh b/scripts/format.sh new file mode 100755 index 00000000..b933ffda --- /dev/null +++ b/scripts/format.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# Code formatting script for RAG chatbot project +set -e + +echo "🔧 Formatting Python code with Black..." +uv run black backend/ main.py + +echo "📋 Formatting and fixing imports with Ruff..." +uv run ruff format backend/ main.py +uv run ruff check --fix backend/ main.py + +echo "✅ Code formatting completed!" \ No newline at end of file diff --git a/scripts/lint.sh b/scripts/lint.sh new file mode 100755 index 00000000..79c99e6a --- /dev/null +++ b/scripts/lint.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# Linting script for RAG chatbot project +set -e + +echo "🔍 Running Ruff linting checks..." +uv run ruff check backend/ main.py + +echo "🏷️ Running MyPy type checking..." +uv run mypy backend/ main.py + +echo "✅ All linting checks completed!" 
\ No newline at end of file diff --git a/scripts/quality-check.sh b/scripts/quality-check.sh new file mode 100755 index 00000000..ae00df8d --- /dev/null +++ b/scripts/quality-check.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +# Complete code quality check script for RAG chatbot project +set -e + +echo "🚀 Starting complete code quality check..." + +echo "📋 Step 1: Formatting code..." +./scripts/format.sh + +echo "🔍 Step 2: Running linting and type checks..." +./scripts/lint.sh + +echo "🧪 Step 3: Running tests..." +cd backend && uv run pytest tests/ -v + +echo "✅ All quality checks passed!" \ No newline at end of file diff --git a/uv.lock b/uv.lock index 9ae65c55..115bd49b 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,10 @@ version = 1 -revision = 2 -requires-python = ">=3.13" +revision = 3 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", +] [[package]] name = "annotated-types" @@ -36,6 +40,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } wheels = [ @@ -110,6 +115,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, ] +[[package]] +name = "black" +version = "24.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" 
}, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/b0/46fb0d4e00372f4a86a6f8efa3cb193c9f64863615e39010b1477e010578/black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", size = 644810, upload-time = "2024-08-02T17:43:18.405Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/a8/05fb14195cfef32b7c8d4585a44b7499c2a4b205e1662c427b941ed87054/black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", size = 1646132, upload-time = "2024-08-02T17:49:52.843Z" }, + { url = "https://files.pythonhosted.org/packages/41/77/8d9ce42673e5cb9988f6df73c1c5c1d4e9e788053cccd7f5fb14ef100982/black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", size = 1448665, upload-time = "2024-08-02T17:47:54.479Z" }, + { url = "https://files.pythonhosted.org/packages/cc/94/eff1ddad2ce1d3cc26c162b3693043c6b6b575f538f602f26fe846dfdc75/black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", size = 1762458, upload-time = "2024-08-02T17:46:19.384Z" }, + { url = "https://files.pythonhosted.org/packages/28/ea/18b8d86a9ca19a6942e4e16759b2fa5fc02bbc0eb33c1b866fcd387640ab/black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", size = 1436109, upload-time = "2024-08-02T17:46:52.97Z" }, + { url = "https://files.pythonhosted.org/packages/27/1e/83fa8a787180e1632c3d831f7e58994d7aaf23a0961320d21e84f922f919/black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", size = 206504, upload-time = "2024-08-02T17:43:15.747Z" }, +] + [[package]] name = "build" version = "1.2.2.post1" @@ -148,6 +173,19 @@ version = "3.4.2" 
source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, @@ -330,6 +368,16 @@ version = "1.73.1" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/79/e8/b43b851537da2e2f03fa8be1aef207e5cbfb1a2e014fbb6b40d24c177cd3/grpcio-1.73.1.tar.gz", hash = "sha256:7fce2cd1c0c1116cf3850564ebfc3264fba75d3c74a7414373f1238ea365ef87", size = 12730355, upload-time = "2025-06-26T01:53:24.622Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/41/456caf570c55d5ac26f4c1f2db1f2ac1467d5bf3bcd660cba3e0a25b195f/grpcio-1.73.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:921b25618b084e75d424a9f8e6403bfeb7abef074bb6c3174701e0f2542debcf", size = 5334621, upload-time = "2025-06-26T01:52:23.602Z" }, + { url = "https://files.pythonhosted.org/packages/2a/c2/9a15e179e49f235bb5e63b01590658c03747a43c9775e20c4e13ca04f4c4/grpcio-1.73.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:277b426a0ed341e8447fbf6c1d6b68c952adddf585ea4685aa563de0f03df887", size = 10601131, upload-time = "2025-06-26T01:52:25.691Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1d/1d39e90ef6348a0964caa7c5c4d05f3bae2c51ab429eb7d2e21198ac9b6d/grpcio-1.73.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:96c112333309493c10e118d92f04594f9055774757f5d101b39f8150f8c25582", size = 5759268, upload-time = "2025-06-26T01:52:27.631Z" }, + { url = "https://files.pythonhosted.org/packages/8a/2b/2dfe9ae43de75616177bc576df4c36d6401e0959833b2e5b2d58d50c1f6b/grpcio-1.73.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f48e862aed925ae987eb7084409a80985de75243389dc9d9c271dd711e589918", size = 6409791, upload-time = "2025-06-26T01:52:29.711Z" }, + { url = "https://files.pythonhosted.org/packages/6e/66/e8fe779b23b5a26d1b6949e5c70bc0a5fd08f61a6ec5ac7760d589229511/grpcio-1.73.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a6c2cce218e28f5040429835fa34a29319071079e3169f9543c3fbeff166d2", size = 6003728, upload-time = "2025-06-26T01:52:31.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/39/57a18fcef567784108c4fc3f5441cb9938ae5a51378505aafe81e8e15ecc/grpcio-1.73.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:65b0458a10b100d815a8426b1442bd17001fdb77ea13665b2f7dc9e8587fdc6b", size = 6103364, upload-time = "2025-06-26T01:52:33.028Z" }, + { url = "https://files.pythonhosted.org/packages/c5/46/28919d2aa038712fc399d02fa83e998abd8c1f46c2680c5689deca06d1b2/grpcio-1.73.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0a9f3ea8dce9eae9d7cb36827200133a72b37a63896e0e61a9d5ec7d61a59ab1", size = 6749194, upload-time = "2025-06-26T01:52:34.734Z" }, + { url = "https://files.pythonhosted.org/packages/3d/56/3898526f1fad588c5d19a29ea0a3a4996fb4fa7d7c02dc1be0c9fd188b62/grpcio-1.73.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:de18769aea47f18e782bf6819a37c1c528914bfd5683b8782b9da356506190c8", size = 6283902, upload-time = "2025-06-26T01:52:36.503Z" }, + { url = "https://files.pythonhosted.org/packages/dc/64/18b77b89c5870d8ea91818feb0c3ffb5b31b48d1b0ee3e0f0d539730fea3/grpcio-1.73.1-cp312-cp312-win32.whl", hash = "sha256:24e06a5319e33041e322d32c62b1e728f18ab8c9dbc91729a3d9f9e3ed336642", size = 3668687, upload-time = "2025-06-26T01:52:38.678Z" }, + { url = "https://files.pythonhosted.org/packages/3c/52/302448ca6e52f2a77166b2e2ed75f5d08feca4f2145faf75cb768cccb25b/grpcio-1.73.1-cp312-cp312-win_amd64.whl", hash = "sha256:303c8135d8ab176f8038c14cc10d698ae1db9c480f2b2823f7a987aa2a4c5646", size = 4334887, upload-time = "2025-06-26T01:52:40.743Z" }, { url = "https://files.pythonhosted.org/packages/37/bf/4ca20d1acbefabcaba633ab17f4244cbbe8eca877df01517207bd6655914/grpcio-1.73.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:b310824ab5092cf74750ebd8a8a8981c1810cb2b363210e70d06ef37ad80d4f9", size = 5335615, upload-time = "2025-06-26T01:52:42.896Z" }, { url = 
"https://files.pythonhosted.org/packages/75/ed/45c345f284abec5d4f6d77cbca9c52c39b554397eb7de7d2fcf440bcd049/grpcio-1.73.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:8f5a6df3fba31a3485096ac85b2e34b9666ffb0590df0cd044f58694e6a1f6b5", size = 10595497, upload-time = "2025-06-26T01:52:44.695Z" }, { url = "https://files.pythonhosted.org/packages/a4/75/bff2c2728018f546d812b755455014bc718f8cdcbf5c84f1f6e5494443a8/grpcio-1.73.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:052e28fe9c41357da42250a91926a3e2f74c046575c070b69659467ca5aa976b", size = 5765321, upload-time = "2025-06-26T01:52:46.871Z" }, @@ -385,6 +433,13 @@ version = "0.6.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, + { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, + { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, + { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" }, { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" }, { url = 
"https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" }, @@ -470,6 +525,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" }, ] +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -488,6 +552,18 @@ version = "0.10.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, + { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, + { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, + { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, + { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, + { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, + { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, + { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, + { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, + { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, { url = "https://files.pythonhosted.org/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617, upload-time = "2025-05-18T19:04:02.078Z" }, { url = 
"https://files.pythonhosted.org/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947, upload-time = "2025-05-18T19:04:03.347Z" }, { url = "https://files.pythonhosted.org/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618, upload-time = "2025-05-18T19:04:04.709Z" }, @@ -594,6 +670,16 @@ version = "3.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, @@ -631,6 +717,22 @@ version = "5.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1fc6888c74cbd8abad1292dde2ddfcf8fc059e114c97dd6bf16d12f36293/mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c", size = 33728, upload-time = "2025-01-25T08:39:43.386Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/47/e5f452bdf16028bfd2edb4e2e35d0441e4a4740f30e68ccd4cfd2fb2c57e/mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d", size = 56152, upload-time = "2025-01-25T08:38:47.902Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/38/2132d537dc7a7fdd8d2e98df90186c7fcdbd3f14f95502a24ba443c92245/mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae", size = 40564, upload-time = "2025-01-25T08:38:48.839Z" }, + { url = "https://files.pythonhosted.org/packages/c0/2a/c52cf000581bfb8d94794f58865658e7accf2fa2e90789269d4ae9560b16/mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322", size = 40104, upload-time = "2025-01-25T08:38:49.773Z" }, + { url = "https://files.pythonhosted.org/packages/83/33/30d163ce538c54fc98258db5621447e3ab208d133cece5d2577cf913e708/mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00", size = 102634, upload-time = "2025-01-25T08:38:51.5Z" }, + { url = "https://files.pythonhosted.org/packages/94/5c/5a18acb6ecc6852be2d215c3d811aa61d7e425ab6596be940877355d7f3e/mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06", size = 108888, upload-time = "2025-01-25T08:38:52.542Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/11c556324c64a92aa12f28e221a727b6e082e426dc502e81f77056f6fc98/mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968", size = 106968, upload-time = "2025-01-25T08:38:54.286Z" }, + { url = "https://files.pythonhosted.org/packages/5d/61/ca0c196a685aba7808a5c00246f17b988a9c4f55c594ee0a02c273e404f3/mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83", size = 93771, upload-time = "2025-01-25T08:38:55.576Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/55/0927c33528710085ee77b808d85bbbafdb91a1db7c8eaa89cac16d6c513e/mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd", size = 101726, upload-time = "2025-01-25T08:38:56.654Z" }, + { url = "https://files.pythonhosted.org/packages/49/39/a92c60329fa470f41c18614a93c6cd88821412a12ee78c71c3f77e1cfc2d/mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559", size = 98523, upload-time = "2025-01-25T08:38:57.662Z" }, + { url = "https://files.pythonhosted.org/packages/81/90/26adb15345af8d9cf433ae1b6adcf12e0a4cad1e692de4fa9f8e8536c5ae/mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63", size = 96628, upload-time = "2025-01-25T08:38:59.505Z" }, + { url = "https://files.pythonhosted.org/packages/8a/4d/340d1e340df972a13fd4ec84c787367f425371720a1044220869c82364e9/mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3", size = 105190, upload-time = "2025-01-25T08:39:00.483Z" }, + { url = "https://files.pythonhosted.org/packages/d3/7c/65047d1cccd3782d809936db446430fc7758bda9def5b0979887e08302a2/mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b", size = 98439, upload-time = "2025-01-25T08:39:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/72/d2/3c259d43097c30f062050f7e861075099404e8886b5d4dd3cebf180d6e02/mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df", size = 97780, upload-time = "2025-01-25T08:39:02.444Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/29/831ea8d4abe96cdb3e28b79eab49cac7f04f9c6b6e36bfc686197ddba09d/mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76", size = 40835, upload-time = "2025-01-25T08:39:03.369Z" }, + { url = "https://files.pythonhosted.org/packages/12/dd/7cbc30153b73f08eeac43804c1dbc770538a01979b4094edbe1a4b8eb551/mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776", size = 41509, upload-time = "2025-01-25T08:39:04.284Z" }, + { url = "https://files.pythonhosted.org/packages/80/9d/627375bab4c90dd066093fc2c9a26b86f87e26d980dbf71667b44cbee3eb/mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c", size = 38888, upload-time = "2025-01-25T08:39:05.174Z" }, { url = "https://files.pythonhosted.org/packages/05/06/a098a42870db16c0a54a82c56a5bdc873de3165218cd5b3ca59dbc0d31a7/mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c", size = 56165, upload-time = "2025-01-25T08:39:06.887Z" }, { url = "https://files.pythonhosted.org/packages/5a/65/eaada79a67fde1f43e1156d9630e2fb70655e1d3f4e8f33d7ffa31eeacfd/mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40", size = 40569, upload-time = "2025-01-25T08:39:07.945Z" }, { url = "https://files.pythonhosted.org/packages/36/7e/2b6c43ed48be583acd68e34d16f19209a9f210e4669421b0321e326d8554/mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997", size = 40104, upload-time = "2025-01-25T08:39:09.598Z" }, @@ -658,6 +760,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = 
"sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] +[[package]] +name = "mypy" +version = "1.11.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/86/5d7cbc4974fd564550b80fbb8103c05501ea11aa7835edf3351d90095896/mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", size = 3078806, upload-time = "2024-08-24T22:50:11.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/3a/ed7b12ecc3f6db2f664ccf85cb2e004d3e90bec928e9d7be6aa2f16b7cdf/mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", size = 10990335, upload-time = "2024-08-24T22:49:54.245Z" }, + { url = "https://files.pythonhosted.org/packages/04/e4/1a9051e2ef10296d206519f1df13d2cc896aea39e8683302f89bf5792a59/mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", size = 10007119, upload-time = "2024-08-24T22:49:03.451Z" }, + { url = "https://files.pythonhosted.org/packages/f3/3c/350a9da895f8a7e87ade0028b962be0252d152e0c2fbaafa6f0658b4d0d4/mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", size = 12506856, upload-time = "2024-08-24T22:50:08.804Z" }, + { url = "https://files.pythonhosted.org/packages/b6/49/ee5adf6a49ff13f4202d949544d3d08abb0ea1f3e7f2a6d5b4c10ba0360a/mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", size = 12952066, upload-time = "2024-08-24T22:50:03.89Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/c0/b19d709a42b24004d720db37446a42abadf844d5c46a2c442e2a074d70d9/mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", size = 9664000, upload-time = "2024-08-24T22:49:59.703Z" }, + { url = "https://files.pythonhosted.org/packages/42/3a/bdf730640ac523229dd6578e8a581795720a9321399de494374afc437ec5/mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", size = 2619625, upload-time = "2024-08-24T22:50:01.842Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + [[package]] name = "networkx" version = "3.5" @@ -673,6 +802,17 @@ version = "2.3.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2e/19/d7c972dfe90a353dbd3efbbe1d14a5951de80c99c9dc1b93cd998d51dc0f/numpy-2.3.1.tar.gz", hash = "sha256:1ec9ae20a4226da374362cca3c62cd753faf2f951440b0e3b98e93c235441d2b", size = 20390372, upload-time = "2025-06-21T12:28:33.469Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/56/71ad5022e2f63cfe0ca93559403d0edef14aea70a841d640bd13cdba578e/numpy-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2959d8f268f3d8ee402b04a9ec4bb7604555aeacf78b360dc4ec27f1d508177d", size = 
20896664, upload-time = "2025-06-21T12:15:30.845Z" }, + { url = "https://files.pythonhosted.org/packages/25/65/2db52ba049813670f7f987cc5db6dac9be7cd95e923cc6832b3d32d87cef/numpy-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:762e0c0c6b56bdedfef9a8e1d4538556438288c4276901ea008ae44091954e29", size = 14131078, upload-time = "2025-06-21T12:15:52.23Z" }, + { url = "https://files.pythonhosted.org/packages/57/dd/28fa3c17b0e751047ac928c1e1b6990238faad76e9b147e585b573d9d1bd/numpy-2.3.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:867ef172a0976aaa1f1d1b63cf2090de8b636a7674607d514505fb7276ab08fc", size = 5112554, upload-time = "2025-06-21T12:16:01.434Z" }, + { url = "https://files.pythonhosted.org/packages/c9/fc/84ea0cba8e760c4644b708b6819d91784c290288c27aca916115e3311d17/numpy-2.3.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:4e602e1b8682c2b833af89ba641ad4176053aaa50f5cacda1a27004352dde943", size = 6646560, upload-time = "2025-06-21T12:16:11.895Z" }, + { url = "https://files.pythonhosted.org/packages/61/b2/512b0c2ddec985ad1e496b0bd853eeb572315c0f07cd6997473ced8f15e2/numpy-2.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8e333040d069eba1652fb08962ec5b76af7f2c7bce1df7e1418c8055cf776f25", size = 14260638, upload-time = "2025-06-21T12:16:32.611Z" }, + { url = "https://files.pythonhosted.org/packages/6e/45/c51cb248e679a6c6ab14b7a8e3ead3f4a3fe7425fc7a6f98b3f147bec532/numpy-2.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e7cbf5a5eafd8d230a3ce356d892512185230e4781a361229bd902ff403bc660", size = 16632729, upload-time = "2025-06-21T12:16:57.439Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ff/feb4be2e5c09a3da161b412019caf47183099cbea1132fd98061808c2df2/numpy-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1b8f26d1086835f442286c1d9b64bb3974b0b1e41bb105358fd07d20872952", size = 15565330, upload-time = "2025-06-21T12:17:20.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/6d/ceafe87587101e9ab0d370e4f6e5f3f3a85b9a697f2318738e5e7e176ce3/numpy-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee8340cb48c9b7a5899d1149eece41ca535513a9698098edbade2a8e7a84da77", size = 18361734, upload-time = "2025-06-21T12:17:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/2b/19/0fb49a3ea088be691f040c9bf1817e4669a339d6e98579f91859b902c636/numpy-2.3.1-cp312-cp312-win32.whl", hash = "sha256:e772dda20a6002ef7061713dc1e2585bc1b534e7909b2030b5a46dae8ff077ab", size = 6320411, upload-time = "2025-06-21T12:17:58.475Z" }, + { url = "https://files.pythonhosted.org/packages/b1/3e/e28f4c1dd9e042eb57a3eb652f200225e311b608632bc727ae378623d4f8/numpy-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cfecc7822543abdea6de08758091da655ea2210b8ffa1faf116b940693d3df76", size = 12734973, upload-time = "2025-06-21T12:18:17.601Z" }, + { url = "https://files.pythonhosted.org/packages/04/a8/8a5e9079dc722acf53522b8f8842e79541ea81835e9b5483388701421073/numpy-2.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:7be91b2239af2658653c5bb6f1b8bccafaf08226a258caf78ce44710a0160d30", size = 10191491, upload-time = "2025-06-21T12:18:33.585Z" }, { url = "https://files.pythonhosted.org/packages/d4/bd/35ad97006d8abff8631293f8ea6adf07b0108ce6fec68da3c3fcca1197f2/numpy-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25a1992b0a3fdcdaec9f552ef10d8103186f5397ab45e2d25f8ac51b1a6b97e8", size = 20889381, upload-time = "2025-06-21T12:19:04.103Z" }, { url = "https://files.pythonhosted.org/packages/f1/4f/df5923874d8095b6062495b39729178eef4a922119cee32a12ee1bd4664c/numpy-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dea630156d39b02a63c18f508f85010230409db5b2927ba59c8ba4ab3e8272e", size = 14152726, upload-time = "2025-06-21T12:19:25.599Z" }, { url = "https://files.pythonhosted.org/packages/8c/0f/a1f269b125806212a876f7efb049b06c6f8772cf0121139f97774cd95626/numpy-2.3.1-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:bada6058dd886061f10ea15f230ccf7dfff40572e99fef440a4a857c8728c9c0", size = 5105145, upload-time = "2025-06-21T12:19:34.782Z" }, @@ -852,6 +992,10 @@ dependencies = [ { name = "sympy" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/48/70/ca2a4d38a5deccd98caa145581becb20c53684f451e89eb3a39915620066/onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a", size = 34342883, upload-time = "2025-07-10T19:15:38.223Z" }, + { url = "https://files.pythonhosted.org/packages/29/e5/00b099b4d4f6223b610421080d0eed9327ef9986785c9141819bbba0d396/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928", size = 14473861, upload-time = "2025-07-10T19:15:42.911Z" }, + { url = "https://files.pythonhosted.org/packages/0a/50/519828a5292a6ccd8d5cd6d2f72c6b36ea528a2ef68eca69647732539ffa/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d", size = 16475713, upload-time = "2025-07-10T19:15:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/5d/54/7139d463bb0a312890c9a5db87d7815d4a8cce9e6f5f28d04f0b55fcb160/onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87", size = 12690910, upload-time = "2025-07-10T19:15:47.478Z" }, { url = "https://files.pythonhosted.org/packages/e0/39/77cefa829740bd830915095d8408dce6d731b244e24b1f64fe3df9f18e86/onnxruntime-1.22.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:d29c7d87b6cbed8fecfd09dca471832384d12a69e1ab873e5effbb94adc3e966", size = 34342026, upload-time = "2025-07-10T19:15:50.266Z" }, { url = 
"https://files.pythonhosted.org/packages/d2/a6/444291524cb52875b5de980a6e918072514df63a57a7120bf9dfae3aeed1/onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:460487d83b7056ba98f1f7bac80287224c31d8149b15712b0d6f5078fcc33d0f", size = 14474014, upload-time = "2025-07-10T19:15:53.991Z" }, { url = "https://files.pythonhosted.org/packages/87/9d/45a995437879c18beff26eacc2322f4227224d04c6ac3254dce2e8950190/onnxruntime-1.22.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0c37070268ba4e02a1a9d28560cd00cd1e94f0d4f275cbef283854f861a65fa", size = 16475427, upload-time = "2025-07-10T19:15:56.067Z" }, @@ -948,6 +1092,21 @@ version = "3.11.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/29/87/03ababa86d984952304ac8ce9fbd3a317afb4a225b9a81f9b606ac60c873/orjson-3.11.0.tar.gz", hash = "sha256:2e4c129da624f291bcc607016a99e7f04a353f6874f3bd8d9b47b88597d5f700", size = 5318246, upload-time = "2025-07-15T16:08:29.194Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/92/c9/241e304fb1e58ea70b720f1a9e5349c6bb7735ffac401ef1b94f422edd6d/orjson-3.11.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b4089f940c638bb1947d54e46c1cd58f4259072fcc97bc833ea9c78903150ac9", size = 240269, upload-time = "2025-07-15T16:07:08.173Z" }, + { url = "https://files.pythonhosted.org/packages/26/7c/289457cdf40be992b43f1d90ae213ebc03a31a8e2850271ecd79e79a3135/orjson-3.11.0-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:8335a0ba1c26359fb5c82d643b4c1abbee2bc62875e0f2b5bde6c8e9e25eb68c", size = 129276, upload-time = "2025-07-15T16:07:10.128Z" }, + { url = "https://files.pythonhosted.org/packages/66/de/5c0528d46ded965939b6b7f75b1fe93af42b9906b0039096fc92c9001c12/orjson-3.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:63c1c9772dafc811d16d6a7efa3369a739da15d1720d6e58ebe7562f54d6f4a2", size = 131966, upload-time = "2025-07-15T16:07:11.509Z" }, + { url = "https://files.pythonhosted.org/packages/ad/74/39822f267b5935fb6fc961ccc443f4968a74d34fc9270b83caa44e37d907/orjson-3.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9457ccbd8b241fb4ba516417a4c5b95ba0059df4ac801309bcb4ec3870f45ad9", size = 127028, upload-time = "2025-07-15T16:07:13.023Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e3/28f6ed7f03db69bddb3ef48621b2b05b394125188f5909ee0a43fcf4820e/orjson-3.11.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0846e13abe79daece94a00b92574f294acad1d362be766c04245b9b4dd0e47e1", size = 129105, upload-time = "2025-07-15T16:07:14.367Z" }, + { url = "https://files.pythonhosted.org/packages/cb/50/8867fd2fc92c0ab1c3e14673ec5d9d0191202e4ab8ba6256d7a1d6943ad3/orjson-3.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5587c85ae02f608a3f377b6af9eb04829606f518257cbffa8f5081c1aacf2e2f", size = 131902, upload-time = "2025-07-15T16:07:16.176Z" }, + { url = "https://files.pythonhosted.org/packages/13/65/c189deea10342afee08006331082ff67d11b98c2394989998b3ea060354a/orjson-3.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7a1964a71c1567b4570c932a0084ac24ad52c8cf6253d1881400936565ed438", size = 134042, upload-time = "2025-07-15T16:07:17.937Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e4/cf23c3f4231d2a9a043940ab045f799f84a6df1b4fb6c9b4412cdc3ebf8c/orjson-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5a8243e73690cc6e9151c9e1dd046a8f21778d775f7d478fa1eb4daa4897c61", size = 128260, upload-time = "2025-07-15T16:07:19.651Z" }, + { url = "https://files.pythonhosted.org/packages/de/b9/2cb94d3a67edb918d19bad4a831af99cd96c3657a23daa239611bcf335d7/orjson-3.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:51646f6d995df37b6e1b628f092f41c0feccf1d47e3452c6e95e2474b547d842", size = 130282, upload-time = "2025-07-15T16:07:21.022Z" }, + { url = "https://files.pythonhosted.org/packages/0b/96/df963cc973e689d4c56398647917b4ee95f47e5b6d2779338c09c015b23b/orjson-3.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:2fb8ca8f0b4e31b8aaec674c7540649b64ef02809410506a44dc68d31bd5647b", size = 403765, upload-time = "2025-07-15T16:07:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/fb/92/71429ee1badb69f53281602dbb270fa84fc2e51c83193a814d0208bb63b0/orjson-3.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:64a6a3e94a44856c3f6557e6aa56a6686544fed9816ae0afa8df9077f5759791", size = 144779, upload-time = "2025-07-15T16:07:27.339Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ab/3678b2e5ff0c622a974cb8664ed7cdda5ed26ae2b9d71ba66ec36f32d6cf/orjson-3.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69f95d484938d8fab5963e09131bcf9fbbb81fa4ec132e316eb2fb9adb8ce78", size = 132797, upload-time = "2025-07-15T16:07:28.717Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/74509f715ff189d2aca90ebb0bd5af6658e0f9aa2512abbe6feca4c78208/orjson-3.11.0-cp312-cp312-win32.whl", hash = "sha256:8514f9f9c667ce7d7ef709ab1a73e7fcab78c297270e90b1963df7126d2b0e23", size = 134695, upload-time = "2025-07-15T16:07:30.034Z" }, + { url = "https://files.pythonhosted.org/packages/82/ba/ef25e3e223f452a01eac6a5b38d05c152d037508dcbf87ad2858cbb7d82e/orjson-3.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:41b38a894520b8cb5344a35ffafdf6ae8042f56d16771b2c5eb107798cee85ee", size = 129446, upload-time = "2025-07-15T16:07:31.412Z" }, + { url = "https://files.pythonhosted.org/packages/e3/cd/6f4d93867c5d81bb4ab2d4ac870d3d6e9ba34fa580a03b8d04bf1ce1d8ad/orjson-3.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:5579acd235dd134467340b2f8a670c1c36023b5a69c6a3174c4792af7502bd92", size = 126400, upload-time = "2025-07-15T16:07:34.143Z" }, { url = 
"https://files.pythonhosted.org/packages/31/63/82d9b6b48624009d230bc6038e54778af8f84dfd54402f9504f477c5cfd5/orjson-3.11.0-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4a8ba9698655e16746fdf5266939427da0f9553305152aeb1a1cc14974a19cfb", size = 240125, upload-time = "2025-07-15T16:07:35.976Z" }, { url = "https://files.pythonhosted.org/packages/16/3a/d557ed87c63237d4c97a7bac7ac054c347ab8c4b6da09748d162ca287175/orjson-3.11.0-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:67133847f9a35a5ef5acfa3325d4a2f7fe05c11f1505c4117bb086fc06f2a58f", size = 129189, upload-time = "2025-07-15T16:07:37.486Z" }, { url = "https://files.pythonhosted.org/packages/69/5e/b2c9e22e2cd10aa7d76a629cee65d661e06a61fbaf4dc226386f5636dd44/orjson-3.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f797d57814975b78f5f5423acb003db6f9be5186b72d48bd97a1000e89d331d", size = 131953, upload-time = "2025-07-15T16:07:39.254Z" }, @@ -983,12 +1142,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" 
}, +] + [[package]] name = "pillow" version = "11.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, + { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, + { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, 
upload-time = "2025-07-01T09:14:21.63Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, { url = 
"https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, @@ -1038,6 +1217,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, ] +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + 
+[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + [[package]] name = "posthog" version = "5.4.0" @@ -1095,6 +1292,23 @@ version = "1.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/38/32/5d25a15256d2e80d1e92be821f19fc49190e65a90ea86733cb5af2285449/pybase64-1.4.1.tar.gz", hash = "sha256:03fc365c601671add4f9e0713c2bc2485fa4ab2b32f0d3bb060bd7e069cdaa43", size = 136836, upload-time = "2025-03-02T11:13:57.109Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/a9/43bac4f39401f7241d233ddaf9e6561860b2466798cfb83b9e7dbf89bc1b/pybase64-1.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbdcf77e424c91389f22bf10158851ce05c602c50a74ccf5943ee3f5ef4ba489", size = 38152, upload-time = "2025-03-02T11:11:07.576Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/d0ae801e31a5052dbb1744a45318f822078dd4ce4cc7f49bfe97e7768f7e/pybase64-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af41e2e6015f980d15eae0df0c365df94c7587790aea236ba0bf48c65a9fa04e", size = 31488, upload-time = "2025-03-02T11:11:09.758Z" }, + { url = "https://files.pythonhosted.org/packages/be/34/bf4119a88b2ad0536a8ed9d66ce4d70ff8152eac00ef8a27e5ae35da4328/pybase64-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9ac21c1943a15552347305943b1d0d6298fb64a98b67c750cb8fb2c190cdefd4", size = 59734, upload-time = "2025-03-02T11:11:11.493Z" }, + { url = "https://files.pythonhosted.org/packages/99/1c/1901547adc7d4f24bdcb2f75cb7dcd3975bff42f39da37d4bd218c608c60/pybase64-1.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:65567e8f4f31cf6e1a8cc570723cc6b18adda79b4387a18f8d93c157ff5f1979", size = 56529, upload-time = "2025-03-02T11:11:12.657Z" }, + { url = "https://files.pythonhosted.org/packages/c5/1e/1993e4b9a03e94fc53552285e3998079d864fff332798bf30c25afdac8f3/pybase64-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:988e987f8cfe2dfde7475baf5f12f82b2f454841aef3a174b694a57a92d5dfb0", size = 59114, upload-time = "2025-03-02T11:11:13.972Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f6/061fee5b7ba38b8824dd95752ab7115cf183ffbd3330d5fc1734a47b0f9e/pybase64-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92b2305ac2442b451e19d42c4650c3bb090d6aa9abd87c0c4d700267d8fa96b1", size = 60095, upload-time = "2025-03-02T11:11:15.182Z" }, + { url = "https://files.pythonhosted.org/packages/37/da/ccfe5d1a9f1188cd703390522e96a31045c5b93af84df04a98e69ada5c8b/pybase64-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1ff80e03357b09dab016f41b4c75cf06e9b19cda7f898e4f3681028a3dff29b", size = 68431, upload-time = "2025-03-02T11:11:17.059Z" }, + { url = "https://files.pythonhosted.org/packages/c3/d3/8ca4b0695876b52c0073a3557a65850b6d5c723333b5a271ab10a1085852/pybase64-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cdda297e668e118f6b9ba804e858ff49e3dd945d01fdd147de90445fd08927d", size = 71417, upload-time = "2025-03-02T11:11:19.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/34/5f8f72d1b7b4ddb64c48d60160f3f4f03cfd0bfd2e7068d4558499d948ed/pybase64-1.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51a24d21a21a959eb8884f24346a6480c4bd624aa7976c9761504d847a2f9364", size = 58429, upload-time = "2025-03-02T11:11:20.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/b7/edf53af308c6e8aada1e6d6a0a3789176af8cbae37a2ce084eb9da87bf33/pybase64-1.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b19e169ea1b8a15a03d3a379116eb7b17740803e89bc6eb3efcc74f532323cf7", size = 52228, upload-time = "2025-03-02T11:11:21.632Z" }, + { url = "https://files.pythonhosted.org/packages/0c/bf/c9df141e24a259f38a38bdda5a3b63206f13e612ecbd3880fa10625e0294/pybase64-1.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8a9f1b614efd41240c9bb2cf66031aa7a2c3c092c928f9d429511fe18d4a3fd1", size = 68632, upload-time = "2025-03-02T11:11:23.56Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ae/1aec72325a3c48f7776cc55a3bab8b168eb77aea821253da8b9f09713734/pybase64-1.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d9947b5e289e2c5b018ddc2aee2b9ed137b8aaaba7edfcb73623e576a2407740", size = 57682, upload-time = "2025-03-02T11:11:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/4d/7a/7ad2799c0b3c4e2f7b993e1636468445c30870ca5485110b589b8921808d/pybase64-1.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ba4184ea43aa88a5ab8d6d15db284689765c7487ff3810764d8d823b545158e6", size = 56308, upload-time = "2025-03-02T11:11:26.803Z" }, + { url = "https://files.pythonhosted.org/packages/be/01/6008a4fbda0c4308dab00b95aedde8748032d7620bd95b686619c66917fe/pybase64-1.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4471257628785296efb2d50077fb9dfdbd4d2732c3487795224dd2644216fb07", size = 70784, upload-time = "2025-03-02T11:11:28.427Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/31/913365a4f0e2922ec369ddaa3a1d6c11059acbe54531b003653efa007a48/pybase64-1.4.1-cp312-cp312-win32.whl", hash = "sha256:614561297ad14de315dd27381fd6ec3ea4de0d8206ba4c7678449afaff8a2009", size = 34271, upload-time = "2025-03-02T11:11:30.585Z" }, + { url = "https://files.pythonhosted.org/packages/d9/98/4d514d3e4c04819d80bccf9ea7b30d1cfc701832fa5ffca168f585004488/pybase64-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:35635db0d64fcbe9b3fad265314c052c47dc9bcef8dea17493ea8e3c15b2b972", size = 36496, upload-time = "2025-03-02T11:11:32.552Z" }, + { url = "https://files.pythonhosted.org/packages/c4/61/01353bc9c461e7b36d692daca3eee9616d8936ea6d8a64255ef7ec9ac307/pybase64-1.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b4ccb438c4208ff41a260b70994c30a8631051f3b025cdca48be586b068b8f49", size = 29692, upload-time = "2025-03-02T11:11:33.735Z" }, { url = "https://files.pythonhosted.org/packages/4b/1a/4e243ba702c07df3df3ba1795cfb02cf7a4242c53fc574b06a2bfa4f8478/pybase64-1.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1c38d9c4a7c132d45859af8d5364d3ce90975a42bd5995d18d174fb57621973", size = 38149, upload-time = "2025-03-02T11:11:35.537Z" }, { url = "https://files.pythonhosted.org/packages/9c/35/3eae81bc8688a83f8b5bb84979d88e2cc3c3279a3b870a506f277d746c56/pybase64-1.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ab0b93ea93cf1f56ca4727d678a9c0144c2653e9de4e93e789a92b4e098c07d9", size = 31485, upload-time = "2025-03-02T11:11:36.656Z" }, { url = "https://files.pythonhosted.org/packages/48/55/d99b9ff8083573bbf97fc433bbc20e2efb612792025f3bad0868c96c37ce/pybase64-1.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:644f393e9bb7f3bacc5cbd3534d02e1b660b258fc8315ecae74d2e23265e5c1f", size = 59738, upload-time = "2025-03-02T11:11:38.468Z" }, @@ -1155,6 +1369,20 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, @@ -1207,6 +1435,33 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, ] +[[package]] +name = "pytest" +version = "7.4.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/1f/9d8e98e4133ffb16c90f3b405c43e38d3abb715bb5d7a63a5a684f7e46a3/pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280", size = 1357116, upload-time = "2023-12-31T12:00:18.035Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8", size = 325287, upload-time = "2023-12-31T12:00:13.963Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.23.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/4a/9ed4762a08facf8dfb2e31b0393913e3dce391a7735e0965c0f2e66f0938/pytest-asyncio-0.23.2.tar.gz", hash = "sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc", size = 44174, upload-time = "2023-12-04T07:20:31.503Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/d4/47e991d09385ba7541e9bfdbbf49ff65d5e99400ef5590792021e5b21f40/pytest_asyncio-0.23.2-py3-none-any.whl", hash = "sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f", size = 17267, upload-time = "2023-12-04T07:20:29.048Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1243,6 +1498,15 @@ version = 
"6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, { url = 
"https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, @@ -1261,6 +1525,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } wheels = [ @@ -1273,6 +1538,21 @@ version = "2024.11.6" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781, upload-time = "2024-11-06T20:10:07.07Z" }, + { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455, upload-time = "2024-11-06T20:10:09.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759, upload-time = "2024-11-06T20:10:11.155Z" }, + { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, + { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, + { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, + { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, + { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = 
"2024-11-06T20:10:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, + { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, + { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, + { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135, upload-time = "2024-11-06T20:10:40.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567, upload-time = "2024-11-06T20:10:43.467Z" }, { url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84", size = 483525, upload-time = "2024-11-06T20:10:45.19Z" }, { url = "https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4", size = 288324, upload-time = "2024-11-06T20:10:47.177Z" }, { url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0", size = 284617, upload-time = "2024-11-06T20:10:49.312Z" }, @@ -1337,6 +1617,20 @@ version = "0.26.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, + { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, upload-time = "2025-07-01T15:54:18.101Z" }, + { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, + { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, + { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, + { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, + { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, @@ -1405,6 +1699,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, ] +[[package]] +name = "ruff" +version = "0.6.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/f9/4ce3e765a72ab8fe0f80f48508ea38b4196daab3da14d803c21349b2d367/ruff-0.6.8.tar.gz", hash = "sha256:a5bf44b1aa0adaf6d9d20f86162b34f7c593bfedabc51239953e446aefc8ce18", size = 3084543, upload-time = "2024-09-26T12:27:17.785Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/db/07/42ee57e8b76ca585297a663a552b4f6d6a99372ca47fdc2276ef72cc0f2f/ruff-0.6.8-py3-none-linux_armv6l.whl", hash = "sha256:77944bca110ff0a43b768f05a529fecd0706aac7bcce36d7f1eeb4cbfca5f0f2", size = 10404327, upload-time = "2024-09-26T12:26:31.274Z" }, + { url = "https://files.pythonhosted.org/packages/eb/51/d42571ff8156d65086acb72d39aa64cb24181db53b497d0ed6293f43f07a/ruff-0.6.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27b87e1801e786cd6ede4ada3faa5e254ce774de835e6723fd94551464c56b8c", size = 10018797, upload-time = "2024-09-26T12:26:34.68Z" }, + { url = "https://files.pythonhosted.org/packages/c1/d7/fa5514a60b03976af972b67fe345deb0335dc96b9f9a9fa4df9890472427/ruff-0.6.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd48f945da2a6334f1793d7f701725a76ba93bf3d73c36f6b21fb04d5338dcf5", size = 9691303, upload-time = "2024-09-26T12:26:37.311Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c4/d812a74976927e51d0782a47539069657ac78535779bfa4d061c4fc8d89d/ruff-0.6.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:677e03c00f37c66cea033274295a983c7c546edea5043d0c798833adf4cf4c6f", size = 10719452, upload-time = "2024-09-26T12:26:40.24Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b6/aa700c4ae6db9b3ee660e23f3c7db596e2b16a3034b797704fba33ddbc96/ruff-0.6.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f1476236b3eacfacfc0f66aa9e6cd39f2a624cb73ea99189556015f27c0bdeb", size = 10161353, upload-time = "2024-09-26T12:26:42.464Z" }, + { url = "https://files.pythonhosted.org/packages/ea/39/0b10075ffcd52ff3a581b9b69eac53579deb230aad300ce8f9d0b58e77bc/ruff-0.6.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f5a2f17c7d32991169195d52a04c95b256378bbf0de8cb98478351eb70d526f", size = 10980630, upload-time = "2024-09-26T12:26:45.157Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/af/9eb9efc98334f62652e2f9318f137b2667187851911fac3b395365a83708/ruff-0.6.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5fd0d4b7b1457c49e435ee1e437900ced9b35cb8dc5178921dfb7d98d65a08d0", size = 11768996, upload-time = "2024-09-26T12:26:47.987Z" }, + { url = "https://files.pythonhosted.org/packages/e0/59/8b1369cf7878358952b1c0a1559b4d6b5c824c003d09b0db26d26c9d094f/ruff-0.6.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8034b19b993e9601f2ddf2c517451e17a6ab5cdb1c13fdff50c1442a7171d87", size = 11317469, upload-time = "2024-09-26T12:26:50.487Z" }, + { url = "https://files.pythonhosted.org/packages/b9/6d/e252e9b11bbca4114c386ee41ad559d0dac13246201d77ea1223c6fea17f/ruff-0.6.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cfb227b932ba8ef6e56c9f875d987973cd5e35bc5d05f5abf045af78ad8e098", size = 12467185, upload-time = "2024-09-26T12:26:52.928Z" }, + { url = "https://files.pythonhosted.org/packages/48/44/7caa223af7d4ea0f0b2bd34acca65a7694a58317714675a2478815ab3f45/ruff-0.6.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef0411eccfc3909269fed47c61ffebdcb84a04504bafa6b6df9b85c27e813b0", size = 10887766, upload-time = "2024-09-26T12:26:56.585Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/394aff3a785f171869158b9d5be61eec9ffb823c3ad5d2bdf2e5f13cb029/ruff-0.6.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:007dee844738c3d2e6c24ab5bc7d43c99ba3e1943bd2d95d598582e9c1b27750", size = 10711609, upload-time = "2024-09-26T12:26:59.23Z" }, + { url = "https://files.pythonhosted.org/packages/47/31/f31d04c842e54699eab7e3b864538fea26e6c94b71806cd10aa49f13e1c1/ruff-0.6.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ce60058d3cdd8490e5e5471ef086b3f1e90ab872b548814e35930e21d848c9ce", size = 10237621, upload-time = "2024-09-26T12:27:01.862Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/95/a764e84acf11d425f2f23b8b78b4fd715e9c20be4aac157c6414ca859a67/ruff-0.6.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1085c455d1b3fdb8021ad534379c60353b81ba079712bce7a900e834859182fa", size = 10558329, upload-time = "2024-09-26T12:27:04.753Z" }, + { url = "https://files.pythonhosted.org/packages/2a/76/d4e38846ac9f6dd62dce858a54583911361b5339dcf8f84419241efac93a/ruff-0.6.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:70edf6a93b19481affd287d696d9e311388d808671bc209fb8907b46a8c3af44", size = 10954102, upload-time = "2024-09-26T12:27:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/f18c678da6c69f8d022480f3e8ddce6e4a52e07602c1d212056fbd234f8f/ruff-0.6.8-py3-none-win32.whl", hash = "sha256:792213f7be25316f9b46b854df80a77e0da87ec66691e8f012f887b4a671ab5a", size = 8511090, upload-time = "2024-09-26T12:27:10.199Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c4/0ca7d8ffa358b109db7d7d045a1a076fd8e5d9cbeae022242d3c060931da/ruff-0.6.8-py3-none-win_amd64.whl", hash = "sha256:ec0517dc0f37cad14a5319ba7bba6e7e339d03fbf967a6d69b0907d61be7a263", size = 9350079, upload-time = "2024-09-26T12:27:13.123Z" }, + { url = "https://files.pythonhosted.org/packages/d9/bd/a8b0c64945a92eaeeb8d0283f27a726a776a1c9d12734d990c5fc7a1278c/ruff-0.6.8-py3-none-win_arm64.whl", hash = "sha256:8d3bb2e3fbb9875172119021a13eed38849e762499e3cfde9588e4b4d70968dc", size = 8669595, upload-time = "2024-09-26T12:27:15.464Z" }, +] + [[package]] name = "safetensors" version = "0.5.3" @@ -1439,6 +1758,11 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/41/84/5f4af978fff619706b8961accac84780a6d298d82a8873446f72edb4ead0/scikit_learn-1.7.1.tar.gz", hash = "sha256:24b3f1e976a4665aa74ee0fcaac2b8fccc6ae77c8e07ab25da3ba6d3292b9802", size = 7190445, upload-time = "2025-07-18T08:01:54.5Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/16/57f176585b35ed865f51b04117947fe20f130f78940c6477b6d66279c9c2/scikit_learn-1.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3cee419b49b5bbae8796ecd690f97aa412ef1674410c23fc3257c6b8b85b8087", size = 9260431, upload-time = "2025-07-18T08:01:22.77Z" }, + { url = "https://files.pythonhosted.org/packages/67/4e/899317092f5efcab0e9bc929e3391341cec8fb0e816c4789686770024580/scikit_learn-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2fd8b8d35817b0d9ebf0b576f7d5ffbbabdb55536b0655a8aaae629d7ffd2e1f", size = 8637191, upload-time = "2025-07-18T08:01:24.731Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1b/998312db6d361ded1dd56b457ada371a8d8d77ca2195a7d18fd8a1736f21/scikit_learn-1.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:588410fa19a96a69763202f1d6b7b91d5d7a5d73be36e189bc6396bfb355bd87", size = 9486346, upload-time = "2025-07-18T08:01:26.713Z" }, + { url = "https://files.pythonhosted.org/packages/ad/09/a2aa0b4e644e5c4ede7006748f24e72863ba2ae71897fecfd832afea01b4/scikit_learn-1.7.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3142f0abe1ad1d1c31a2ae987621e41f6b578144a911ff4ac94781a583adad7", size = 9290988, upload-time = "2025-07-18T08:01:28.938Z" }, + { url = "https://files.pythonhosted.org/packages/15/fa/c61a787e35f05f17fc10523f567677ec4eeee5f95aa4798dbbbcd9625617/scikit_learn-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3ddd9092c1bd469acab337d87930067c87eac6bd544f8d5027430983f1e1ae88", size = 8735568, upload-time = "2025-07-18T08:01:30.936Z" }, { url = "https://files.pythonhosted.org/packages/52/f8/e0533303f318a0f37b88300d21f79b6ac067188d4824f1047a37214ab718/scikit_learn-1.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b7839687fa46d02e01035ad775982f2470be2668e13ddd151f0f55a5bf123bae", size = 9213143, upload-time = "2025-07-18T08:01:32.942Z" }, { url = 
"https://files.pythonhosted.org/packages/71/f3/f1df377d1bdfc3e3e2adc9c119c238b182293e6740df4cbeac6de2cc3e23/scikit_learn-1.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a10f276639195a96c86aa572ee0698ad64ee939a7b042060b98bd1930c261d10", size = 8591977, upload-time = "2025-07-18T08:01:34.967Z" }, { url = "https://files.pythonhosted.org/packages/99/72/c86a4cd867816350fe8dee13f30222340b9cd6b96173955819a5561810c5/scikit_learn-1.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:13679981fdaebc10cc4c13c43344416a86fcbc61449cb3e6517e1df9d12c8309", size = 9436142, upload-time = "2025-07-18T08:01:37.397Z" }, @@ -1460,6 +1784,15 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/81/18/b06a83f0c5ee8cddbde5e3f3d0bb9b702abfa5136ef6d4620ff67df7eee5/scipy-1.16.0.tar.gz", hash = "sha256:b5ef54021e832869c8cfb03bc3bf20366cbcd426e02a58e8a58d7584dfbb8f62", size = 30581216, upload-time = "2025-06-22T16:27:55.782Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/01/c0/c943bc8d2bbd28123ad0f4f1eef62525fa1723e84d136b32965dcb6bad3a/scipy-1.16.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:7eb6bd33cef4afb9fa5f1fb25df8feeb1e52d94f21a44f1d17805b41b1da3180", size = 36459071, upload-time = "2025-06-22T16:19:06.605Z" }, + { url = "https://files.pythonhosted.org/packages/99/0d/270e2e9f1a4db6ffbf84c9a0b648499842046e4e0d9b2275d150711b3aba/scipy-1.16.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:1dbc8fdba23e4d80394ddfab7a56808e3e6489176d559c6c71935b11a2d59db1", size = 28490500, upload-time = "2025-06-22T16:19:11.775Z" }, + { url = "https://files.pythonhosted.org/packages/1c/22/01d7ddb07cff937d4326198ec8d10831367a708c3da72dfd9b7ceaf13028/scipy-1.16.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:7dcf42c380e1e3737b343dec21095c9a9ad3f9cbe06f9c05830b44b1786c9e90", size = 20762345, upload-time = "2025-06-22T16:19:15.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/7f/87fd69856569ccdd2a5873fe5d7b5bbf2ad9289d7311d6a3605ebde3a94b/scipy-1.16.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26ec28675f4a9d41587266084c626b02899db373717d9312fa96ab17ca1ae94d", size = 23418563, upload-time = "2025-06-22T16:19:20.746Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f1/e4f4324fef7f54160ab749efbab6a4bf43678a9eb2e9817ed71a0a2fd8de/scipy-1.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:952358b7e58bd3197cfbd2f2f2ba829f258404bdf5db59514b515a8fe7a36c52", size = 33203951, upload-time = "2025-06-22T16:19:25.813Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f0/b6ac354a956384fd8abee2debbb624648125b298f2c4a7b4f0d6248048a5/scipy-1.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03931b4e870c6fef5b5c0970d52c9f6ddd8c8d3e934a98f09308377eba6f3824", size = 35070225, upload-time = "2025-06-22T16:19:31.416Z" }, + { url = "https://files.pythonhosted.org/packages/e5/73/5cbe4a3fd4bc3e2d67ffad02c88b83edc88f381b73ab982f48f3df1a7790/scipy-1.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:512c4f4f85912767c351a0306824ccca6fd91307a9f4318efe8fdbd9d30562ef", size = 35389070, upload-time = "2025-06-22T16:19:37.387Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/a60da80ab9ed68b31ea5a9c6dfd3c2f199347429f229bf7f939a90d96383/scipy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e69f798847e9add03d512eaf5081a9a5c9a98757d12e52e6186ed9681247a1ac", size = 37825287, upload-time = "2025-06-22T16:19:43.375Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b5/29fece1a74c6a94247f8a6fb93f5b28b533338e9c34fdcc9cfe7a939a767/scipy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:adf9b1999323ba335adc5d1dc7add4781cb5a4b0ef1e98b79768c05c796c4e49", size = 38431929, upload-time = "2025-06-22T16:19:49.385Z" }, { url = 
"https://files.pythonhosted.org/packages/46/95/0746417bc24be0c2a7b7563946d61f670a3b491b76adede420e9d173841f/scipy-1.16.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:e9f414cbe9ca289a73e0cc92e33a6a791469b6619c240aa32ee18abdce8ab451", size = 36418162, upload-time = "2025-06-22T16:19:56.3Z" }, { url = "https://files.pythonhosted.org/packages/19/5a/914355a74481b8e4bbccf67259bbde171348a3f160b67b4945fbc5f5c1e5/scipy-1.16.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:bbba55fb97ba3cdef9b1ee973f06b09d518c0c7c66a009c729c7d1592be1935e", size = 28465985, upload-time = "2025-06-22T16:20:01.238Z" }, { url = "https://files.pythonhosted.org/packages/58/46/63477fc1246063855969cbefdcee8c648ba4b17f67370bd542ba56368d0b/scipy-1.16.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:58e0d4354eacb6004e7aa1cd350e5514bd0270acaa8d5b36c0627bb3bb486974", size = 20737961, upload-time = "2025-06-22T16:20:05.913Z" }, @@ -1541,6 +1874,7 @@ version = "0.47.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0a/69/662169fdb92fb96ec3eaee218cf540a629d629c86d7993d9651226a6789b/starlette-0.47.1.tar.gz", hash = "sha256:aef012dd2b6be325ffa16698f9dc533614fb1cebd593a906b90dc1025529a79b", size = 2583072, upload-time = "2025-06-21T04:03:17.337Z" } wheels = [ @@ -1553,10 +1887,15 @@ version = "0.1.0" source = { virtual = "." 
} dependencies = [ { name = "anthropic" }, + { name = "black" }, { name = "chromadb" }, { name = "fastapi" }, + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, { name = "python-dotenv" }, { name = "python-multipart" }, + { name = "ruff" }, { name = "sentence-transformers" }, { name = "uvicorn" }, ] @@ -1564,10 +1903,15 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "anthropic", specifier = "==0.58.2" }, + { name = "black", specifier = "==24.8.0" }, { name = "chromadb", specifier = "==1.0.15" }, { name = "fastapi", specifier = "==0.116.1" }, + { name = "mypy", specifier = "==1.11.2" }, + { name = "pytest", specifier = "==7.4.4" }, + { name = "pytest-asyncio", specifier = "==0.23.2" }, { name = "python-dotenv", specifier = "==1.1.1" }, { name = "python-multipart", specifier = "==0.0.20" }, + { name = "ruff", specifier = "==0.6.8" }, { name = "sentence-transformers", specifier = "==5.0.0" }, { name = "uvicorn", specifier = "==0.35.0" }, ] @@ -1656,6 +2000,10 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/87/93/fb505a5022a2e908d81fe9a5e0aa84c86c0d5f408173be71c6018836f34e/torch-2.7.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:27ea1e518df4c9de73af7e8a720770f3628e7f667280bce2be7a16292697e3fa", size = 98948276, upload-time = "2025-06-04T17:39:12.852Z" }, + { url = "https://files.pythonhosted.org/packages/56/7e/67c3fe2b8c33f40af06326a3d6ae7776b3e3a01daa8f71d125d78594d874/torch-2.7.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c33360cfc2edd976c2633b3b66c769bdcbbf0e0b6550606d188431c81e7dd1fc", size = 821025792, upload-time = "2025-06-04T17:34:58.747Z" }, + { url = "https://files.pythonhosted.org/packages/a1/37/a37495502bc7a23bf34f89584fa5a78e25bae7b8da513bc1b8f97afb7009/torch-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:d8bf6e1856ddd1807e79dc57e54d3335f2b62e6f316ed13ed3ecfe1fc1df3d8b", size = 216050349, upload-time = 
"2025-06-04T17:38:59.709Z" }, + { url = "https://files.pythonhosted.org/packages/3a/60/04b77281c730bb13460628e518c52721257814ac6c298acd25757f6a175c/torch-2.7.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:787687087412c4bd68d315e39bc1223f08aae1d16a9e9771d95eabbb04ae98fb", size = 68645146, upload-time = "2025-06-04T17:38:52.97Z" }, { url = "https://files.pythonhosted.org/packages/66/81/e48c9edb655ee8eb8c2a6026abdb6f8d2146abd1f150979ede807bb75dcb/torch-2.7.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:03563603d931e70722dce0e11999d53aa80a375a3d78e6b39b9f6805ea0a8d28", size = 98946649, upload-time = "2025-06-04T17:38:43.031Z" }, { url = "https://files.pythonhosted.org/packages/3a/24/efe2f520d75274fc06b695c616415a1e8a1021d87a13c68ff9dce733d088/torch-2.7.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:d632f5417b6980f61404a125b999ca6ebd0b8b4bbdbb5fbbba44374ab619a412", size = 821033192, upload-time = "2025-06-04T17:38:09.146Z" }, { url = "https://files.pythonhosted.org/packages/dd/d9/9c24d230333ff4e9b6807274f6f8d52a864210b52ec794c5def7925f4495/torch-2.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:23660443e13995ee93e3d844786701ea4ca69f337027b05182f5ba053ce43b38", size = 216055668, upload-time = "2025-06-04T17:38:36.253Z" }, @@ -1707,6 +2055,7 @@ dependencies = [ { name = "setuptools" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/24/5f/950fb373bf9c01ad4eb5a8cd5eaf32cdf9e238c02f9293557a2129b9c4ac/triton-3.3.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9999e83aba21e1a78c1f36f21bce621b77bcaa530277a50484a7cb4a822f6e43", size = 155669138, upload-time = "2025-05-29T23:39:51.771Z" }, { url = "https://files.pythonhosted.org/packages/74/1f/dfb531f90a2d367d914adfee771babbd3f1a5b26c3f5fbc458dee21daa78/triton-3.3.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b89d846b5a4198317fec27a5d3a609ea96b6d557ff44b56c23176546023c4240", size = 155673035, upload-time = 
"2025-05-29T23:40:02.468Z" }, { url = "https://files.pythonhosted.org/packages/28/71/bd20ffcb7a64c753dc2463489a61bf69d531f308e390ad06390268c4ea04/triton-3.3.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3198adb9d78b77818a5388bff89fa72ff36f9da0bc689db2f0a651a67ce6a42", size = 155735832, upload-time = "2025-05-29T23:40:10.522Z" }, ] @@ -1786,6 +2135,12 @@ version = "0.21.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, + { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", 
size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, @@ -1803,6 +2158,19 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" }, + { url = "https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270, upload-time = "2025-06-15T19:05:27.466Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370, upload-time = "2025-06-15T19:05:28.548Z" }, + { url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654, upload-time = "2025-06-15T19:05:29.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667, upload-time = "2025-06-15T19:05:31.172Z" }, + { url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213, upload-time = "2025-06-15T19:05:32.299Z" }, + { url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718, upload-time = "2025-06-15T19:05:33.415Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098, upload-time = "2025-06-15T19:05:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209, upload-time = "2025-06-15T19:05:35.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786, upload-time = "2025-06-15T19:05:36.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343, upload-time = "2025-06-15T19:05:37.5Z" }, { url = "https://files.pythonhosted.org/packages/d3/42/fae874df96595556a9089ade83be34a2e04f0f11eb53a8dbf8a8a5e562b4/watchfiles-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30", size = 402004, upload-time = "2025-06-15T19:05:38.499Z" }, { url = "https://files.pythonhosted.org/packages/fa/55/a77e533e59c3003d9803c09c44c3651224067cbe7fb5d574ddbaa31e11ca/watchfiles-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a", size = 393671, upload-time = "2025-06-15T19:05:39.52Z" }, { url = "https://files.pythonhosted.org/packages/05/68/b0afb3f79c8e832e6571022611adbdc36e35a44e14f129ba09709aa4bb7a/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc", size = 449772, upload-time = "2025-06-15T19:05:40.897Z" }, @@ -1863,6 +2231,17 @@ version = "15.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = 
"2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, { url = 
"https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" },