Skip to content

Commit 1e3647b

Browse files
tbrand and tbrandaws authored
ruff for python linting (#1255)
Co-authored-by: Taichiro Suzuki <taichirs@amazon.co.jp>
1 parent 3aa4dd0 commit 1e3647b

File tree

13 files changed

+916
-707
lines changed

13 files changed

+916
-707
lines changed

.github/workflows/python.yml

Lines changed: 56 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,56 @@
1+
# This workflow will install Python dependencies and run linting checks
2+
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
3+
4+
name: Python CI
5+
6+
permissions:
7+
contents: read
8+
9+
on:
10+
push:
11+
branches: ['main']
12+
paths: ['packages/cdk/lambda-python/**']
13+
pull_request:
14+
branches: ['main']
15+
paths: ['packages/cdk/lambda-python/**']
16+
17+
jobs:
18+
lint:
19+
name: 'Python Lint Check'
20+
runs-on: ubuntu-latest
21+
22+
strategy:
23+
matrix:
24+
python-version: ['3.12']
25+
26+
steps:
27+
- uses: actions/checkout@v4
28+
- name: Set up Python ${{ matrix.python-version }}
29+
uses: actions/setup-python@v4
30+
with:
31+
python-version: ${{ matrix.python-version }}
32+
33+
- name: Install uv
34+
uses: astral-sh/setup-uv@v3
35+
with:
36+
version: 'latest'
37+
38+
- name: Install dependencies
39+
working-directory: packages/cdk/lambda-python/generic-agent-core-runtime
40+
run: uv sync --group=lint
41+
42+
- name: Run ruff check
43+
working-directory: packages/cdk/lambda-python/generic-agent-core-runtime
44+
run: uv run ruff check --fix .
45+
46+
- name: Run ruff format
47+
working-directory: packages/cdk/lambda-python/generic-agent-core-runtime
48+
run: uv run ruff format .
49+
50+
- name: Check for changes
51+
run: |
52+
if [ -n "$(git status --porcelain)" ]; then
53+
echo "Linting changes detected. Please run 'uv run ruff check --fix .' and 'uv run ruff format .' locally and commit the changes."
54+
git diff
55+
exit 1
56+
fi

packages/cdk/lambda-python/generic-agent-core-runtime/.dockerignore

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -86,4 +86,7 @@ venv/
8686
**/*.swp
8787

8888
# VS Code
89-
.vscode/
89+
.vscode/
90+
91+
# Ruff
92+
.ruff_cache

packages/cdk/lambda-python/generic-agent-core-runtime/.gitignore

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -51,6 +51,7 @@ logs/
5151
tmp/
5252
temp/
5353
.tmp/
54+
.ruff_cache/
5455

5556
# Docker
5657
.dockerignore

packages/cdk/lambda-python/generic-agent-core-runtime/app.py

Lines changed: 5 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -3,11 +3,12 @@
33
import json
44
import logging
55
import traceback
6+
67
from fastapi import FastAPI, Request
78
from fastapi.responses import StreamingResponse
9+
810
from src.agent import AgentManager
9-
from src.utils import create_ws_directory, clean_ws_directory, create_error_response
10-
from src.types import AgentCoreRequest
11+
from src.utils import clean_ws_directory, create_error_response, create_ws_directory
1112

1213
# Configure root logger
1314
logging.basicConfig(
@@ -71,12 +72,7 @@ async def invocations(request: Request):
7172
# Return streaming response
7273
async def generate():
7374
try:
74-
async for chunk in agent_manager.process_request_streaming(
75-
messages=messages,
76-
system_prompt=system_prompt,
77-
prompt=prompt,
78-
model_info=model_info
79-
):
75+
async for chunk in agent_manager.process_request_streaming(messages=messages, system_prompt=system_prompt, prompt=prompt, model_info=model_info):
8076
yield chunk
8177
finally:
8278
clean_ws_directory()
@@ -92,4 +88,5 @@ async def generate():
9288

9389
if __name__ == "__main__":
9490
import uvicorn
91+
9592
uvicorn.run(app, host="0.0.0.0", port=8080, log_level="warning", access_log=False)

packages/cdk/lambda-python/generic-agent-core-runtime/pyproject.toml

Lines changed: 25 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -12,3 +12,28 @@ dependencies = [
1212
"pydantic",
1313
"aws-opentelemetry-distro>=0.10.1",
1414
]
15+
16+
[dependency-groups]
17+
lint = [
18+
"ruff>=0.8.0",
19+
]
20+
21+
[tool.ruff]
22+
target-version = "py313"
23+
line-length = 500
24+
25+
[tool.ruff.lint]
26+
select = [
27+
"E", # pycodestyle errors
28+
"W", # pycodestyle warnings
29+
"F", # pyflakes
30+
"I", # isort
31+
"B", # flake8-bugbear
32+
"C4", # flake8-comprehensions
33+
"UP", # pyupgrade
34+
]
35+
ignore = []
36+
37+
[tool.ruff.format]
38+
quote-style = "double"
39+
indent-style = "space"
Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1 +1 @@
1-
# Empty init file for src package
1+
# Empty init file for src package

packages/cdk/lambda-python/generic-agent-core-runtime/src/agent.py

Lines changed: 18 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -1,20 +1,18 @@
11
"""Agent management for the agent core runtime."""
22

3-
import boto3
43
import json
54
import logging
6-
from strands.models import BedrockModel
5+
from collections.abc import AsyncGenerator
6+
from typing import Any
7+
8+
import boto3
79
from strands import Agent as StrandsAgent
8-
from typing import List, Dict, Union, Any, Optional, AsyncGenerator
9-
from .config import get_system_prompt, extract_model_info
10+
from strands.models import BedrockModel
11+
12+
from .config import extract_model_info, get_system_prompt
1013
from .tools import ToolManager
11-
from .utils import (
12-
create_empty_response,
13-
create_error_response,
14-
process_messages,
15-
process_prompt
16-
)
17-
from .types import ModelInfo, Message
14+
from .types import Message, ModelInfo
15+
from .utils import process_messages, process_prompt
1816

1917
logger = logging.getLogger(__name__)
2018

@@ -31,22 +29,22 @@ def set_session_info(self, session_id: str, trace_id: str):
3129

3230
async def process_request_streaming(
3331
self,
34-
messages: Union[List[Message], List[Dict[str, Any]]],
35-
system_prompt: Optional[str],
36-
prompt: Union[str, List[Dict[str, Any]]],
32+
messages: list[Message] | list[dict[str, Any]],
33+
system_prompt: str | None,
34+
prompt: str | list[dict[str, Any]],
3735
model_info: ModelInfo,
38-
) -> AsyncGenerator[str, None]:
36+
) -> AsyncGenerator[str]:
3937
"""Process a request and yield streaming responses as raw events"""
4038
try:
4139
# Get model info
4240
model_id, region = extract_model_info(model_info)
43-
41+
4442
# Combine system prompts
4543
combined_system_prompt = get_system_prompt(system_prompt)
46-
44+
4745
# Get all tools
4846
tools = self.tool_manager.get_all_tools()
49-
47+
5048
# Create boto3 session and Bedrock model
5149
session = boto3.Session(region_name=region)
5250
bedrock_model = BedrockModel(
@@ -55,11 +53,11 @@ async def process_request_streaming(
5553
cache_prompt="default",
5654
cache_tools="default",
5755
)
58-
56+
5957
# Process messages and prompt using utility functions
6058
processed_messages = process_messages(messages)
6159
processed_prompt = process_prompt(prompt)
62-
60+
6361
# Create Strands agent and stream response
6462
agent = StrandsAgent(
6563
system_prompt=combined_system_prompt,

packages/cdk/lambda-python/generic-agent-core-runtime/src/config.py

Lines changed: 7 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
"""Configuration and environment setup for the agent core runtime."""
22

3-
import os
43
import logging
5-
from typing import Dict, Any
4+
import os
5+
from typing import Any
66

77
# Configure root logger
88
logging.basicConfig(
@@ -21,7 +21,7 @@
2121
"""
2222

2323

24-
def get_aws_credentials() -> Dict[str, str]:
24+
def get_aws_credentials() -> dict[str, str]:
2525
"""Get AWS credentials from environment or IAM role"""
2626
credentials = {}
2727

@@ -37,7 +37,7 @@ def get_aws_credentials() -> Dict[str, str]:
3737
return credentials
3838

3939

40-
def get_uv_environment() -> Dict[str, str]:
40+
def get_uv_environment() -> dict[str, str]:
4141
"""Get UV environment with AWS credentials"""
4242
aws_creds = get_aws_credentials()
4343
return {
@@ -62,14 +62,12 @@ def get_system_prompt(user_system_prompt: str = None) -> str:
6262
def extract_model_info(model_info: Any) -> tuple[str, str]:
6363
"""Extract model ID and region from model info"""
6464
aws_creds = get_aws_credentials()
65-
65+
6666
if isinstance(model_info, str):
6767
model_id = model_info
6868
region = aws_creds.get("AWS_REGION", "us-east-1")
6969
else:
70-
model_id = model_info.get(
71-
"modelId", "us.anthropic.claude-3-5-sonnet-20241022-v2:0"
72-
)
70+
model_id = model_info.get("modelId", "us.anthropic.claude-3-5-sonnet-20241022-v2:0")
7371
region = model_info.get("region", aws_creds.get("AWS_REGION", "us-east-1"))
7472

75-
return model_id, region
73+
return model_id, region

0 commit comments

Comments (0)