
Commit 469320d

Use normalize_summary for document creation (#1981)
* Use normalize_summary for document creation
* Fix a minor linter issue
* Increase memory for functions
* Push normalize_summary to right after the LLM API call so devs don't have to remember to call it
* Not needed
1 parent e171ce0 commit 469320d

File tree: llm/backfill_summaries.py, llm/llm_functions.py, llm/main.py

3 files changed: +9 -6 lines changed


llm/backfill_summaries.py

Lines changed: 1 addition & 2 deletions

@@ -21,7 +21,6 @@
 from firebase_admin import firestore
 from bill_on_document_created import get_categories_from_topics, CATEGORY_BY_TOPIC
 import csv
-from normalize_summaries import normalize_summary
 
 # Module constants
 FIREBASE_COLLECTION_PATH = "generalCourts/194/bills"

@@ -73,7 +72,7 @@ def make_bill_summary(bill_id, status, summary, topics):
             continue
         # Note: `normalize_summary` does some post-processing to clean up the summaries
         # As of 2025-10-21 this was necessary due to the LLM prompt
-        summary = normalize_summary(summary["summary"])
+        summary = summary["summary"]
         bill.reference.update({"summary": summary})
 
         # If the topics are already populated, just make a note of it
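
With this change the backfill script no longer cleans summaries itself, because `get_summary_api_function` (next file) already returns them normalized. A minimal sketch of the resulting consumption pattern, assuming the dict shape visible in the diff; the `bills` iterable and `get_summary_for` lookup are illustrative placeholders, not the actual script:

```python
# Illustrative sketch only: `bills` and `get_summary_for` are assumed names,
# but the dict shape and the Firestore update mirror the diff above.
for bill in bills:
    result = get_summary_for(bill)  # hypothetical lookup returning {"status": ..., "summary": ...}
    if result["status"] != 1:
        continue  # skip bills whose summary generation failed
    # No normalize_summary() call here any more: the string arrives pre-cleaned
    # from get_summary_api_function.
    bill.reference.update({"summary": result["summary"]})
```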

llm/llm_functions.py

Lines changed: 4 additions & 2 deletions

@@ -63,6 +63,8 @@
 from prompts import *
 from tag_categories import *
 
+from normalize_summaries import normalize_summary
+
 GPT_MDOEL_VERSION = 'gpt-4o-mini'
 MAX_TOKEN_LIMIT = 128000
 

@@ -434,7 +436,7 @@ def get_summary_api_function(bill_id: str, bill_title: str, bill_text: str) -> dict:
     if status_code != 1:
         return {'status': status_code, 'summary': ''}
     else:
-        return {'status': status_code, 'summary': results.response}
+        return {'status': status_code, 'summary': normalize_summary(results.response)}
 
 def get_tags_api_function(bill_id: str, bill_title: str, bill_text: str) -> dict:
 

@@ -1042,4 +1044,4 @@ def large_docs(bill_details: BillDetails, query: str, llm: ChatOpenAI) -> str:
     except Exception as e:
         print(e)
 
-    return response
+    return response
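
The substantive change is that `get_summary_api_function` now normalizes the LLM response before returning it, so callers never see a raw summary and never have to remember a separate cleanup step. A sketch of the pattern under stated assumptions: `LLMResult` and `build_summary_payload` are hypothetical names that mirror the diff, and the `normalize_summary` body here is a placeholder for the real rules in `normalize_summaries.py`:

```python
from dataclasses import dataclass

@dataclass
class LLMResult:
    response: str  # mirrors the `results.response` attribute used in the diff

def normalize_summary(text: str) -> str:
    # Placeholder stand-in: the real cleanup rules live in normalize_summaries.py.
    return text.strip()

def build_summary_payload(status_code: int, results: LLMResult) -> dict:
    # Hypothetical helper illustrating the pattern from get_summary_api_function:
    # normalize right after the LLM call so every caller receives a cleaned string.
    if status_code != 1:
        return {'status': status_code, 'summary': ''}
    return {'status': status_code, 'summary': normalize_summary(results.response)}

# Example: a caller (e.g. the backfill script) uses the 'summary' field as-is.
payload = build_summary_payload(1, LLMResult(response="  An act relative to data privacy.  \n"))
print(payload['summary'])  # already normalized
```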

llm/main.py

Lines changed: 4 additions & 2 deletions

@@ -22,10 +22,10 @@ def is_intersection(keys, required_keys):
 def set_openai_api_key():
     match os.environ.get("MAPLE_DEV"):
         case "prod":
-            if os.environ.get("OPENAI_PROD") != None:
+            if os.environ.get("OPENAI_PROD") is not None:
                 os.environ["OPENAI_API_KEY"] = os.environ["OPENAI_PROD"]
         case _: # if "dev" or unspecified, use OPENAI_DEV
-            if os.environ.get("OPENAI_DEV") != None:
+            if os.environ.get("OPENAI_DEV") is not None:
                 os.environ["OPENAI_API_KEY"] = os.environ["OPENAI_DEV"]
 
 

@@ -81,6 +81,8 @@ def httpsflaskexample(req: https_fn.Request) -> https_fn.Response:
 
 @on_document_created(
     secrets=["OPENAI_DEV", "OPENAI_PROD"],
+    timeout_sec=300,
+    memory=options.MemoryOption.GB_1,
     document="generalCourts/{session_id}/bills/{bill_id}",
 )
 def add_summary_on_document_created(event: Event[DocumentSnapshot | None]) -> None:
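
The decorator change raises the per-invocation resources for the Firestore trigger: `timeout_sec` and `memory` are keyword arguments on `on_document_created` in the firebase_functions Python SDK, with sizes taken from `options.MemoryOption`. A sketch of the configured trigger as it looks after this commit (handler body omitted; import layout assumed to follow the usual SDK convention rather than copied from main.py):

```python
from firebase_functions import options
from firebase_functions.firestore_fn import DocumentSnapshot, Event, on_document_created

@on_document_created(
    secrets=["OPENAI_DEV", "OPENAI_PROD"],
    timeout_sec=300,                    # allow up to 5 minutes for the LLM round trip
    memory=options.MemoryOption.GB_1,   # 1 GiB instead of the default allocation
    document="generalCourts/{session_id}/bills/{bill_id}",
)
def add_summary_on_document_created(event: Event[DocumentSnapshot | None]) -> None:
    ...  # handler body unchanged by this commit
```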
