
Commit e049e72

Merge pull request #91 from seungwon2/master
[Fix] : fix context error
2 parents: c8becf6 + e54794e

File tree: 2 files changed (+214, -84 lines)


genai/aws-gen-ai-kr/20_applications/02_qa_chatbot/04_web_ui/bedrock.py

Lines changed: 8 additions & 4 deletions
@@ -5,7 +5,7 @@
 from utils.opensearch_summit import opensearch_utils
 from utils.ssm import parameter_store
 from langchain.embeddings import BedrockEmbeddings
-from langchain_community.chat_models import BedrockChat
+from langchain_aws import ChatBedrock
 from utils import bedrock
 from utils.bedrock import bedrock_info
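The import swap tracks LangChain's package split: BedrockChat in langchain_community.chat_models is deprecated in favor of ChatBedrock, which lives in the separate langchain-aws package. A minimal sketch of the migration, assuming langchain-aws is added to the project's dependencies (the install step is an assumption, not part of this commit):

```python
# Assumption: the langchain-aws package is installed, e.g. `pip install langchain-aws`.

# Before (deprecated community import):
# from langchain_community.chat_models import BedrockChat

# After (maintained class in the langchain-aws package):
from langchain_aws import ChatBedrock
```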

@@ -19,11 +19,11 @@ def get_llm(streaming_callback):
         endpoint_url=os.environ.get("BEDROCK_ENDPOINT_URL", None),
         region=os.environ.get("AWS_DEFAULT_REGION", None),
     )
-    llm = BedrockChat(
+    llm = ChatBedrock(
         model_id=bedrock_info.get_model_id(model_name="Claude-V3-Sonnet"),
         client=boto3_bedrock,
         model_kwargs={
-            "max_tokens": 1024,
+            "max_tokens": 1024,
             "stop_sequences": ["\n\nHuman"],
         },
         streaming=True,
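For orientation, here is a self-contained sketch of what the updated get_llm wiring amounts to outside this repository. The repo-specific helpers (the utils.bedrock client factory and bedrock_info.get_model_id) are replaced with a plain boto3 client and a hard-coded model id, and the callback handler is only an example default, so treat this as an approximation of the committed code rather than a copy of it:

```python
import boto3
from langchain_aws import ChatBedrock
from langchain_core.callbacks import StreamingStdOutCallbackHandler


def get_llm(streaming_callback=None):
    # The repo builds this client via utils.bedrock; a plain boto3 runtime client works the same way.
    boto3_bedrock = boto3.client("bedrock-runtime", region_name="us-east-1")

    # ChatBedrock (langchain-aws) replaces the deprecated BedrockChat (langchain_community).
    # The model id is hard-coded here instead of bedrock_info.get_model_id(model_name="Claude-V3-Sonnet").
    return ChatBedrock(
        model_id="anthropic.claude-3-sonnet-20240229-v1:0",
        client=boto3_bedrock,
        model_kwargs={
            "max_tokens": 1024,
            "stop_sequences": ["\n\nHuman"],
        },
        streaming=True,
        callbacks=[streaming_callback or StreamingStdOutCallbackHandler()],
    )
```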
@@ -123,7 +123,7 @@ def invoke(query, streaming_callback, parent, reranker, hyde, ragfusion, alpha):
     def extract_elements_and_print(pretty_contexts):
         for context in pretty_contexts:
             print("context: \n")
-            # print(context)
+            # print(context)

     # print("######### SEMANTIC #########")
     # extract_elements_and_print(pretty_contexts[0])
@@ -136,6 +136,10 @@ def extract_elements_and_print(pretty_contexts):
     # if hyde or ragfusion:
     #     print("######## 중간답변 ##########")
     #     print(augmentation)
+    if alpha == 0.0:
+        pretty_contexts[0].clear()
+    elif alpha == 1.0:
+        pretty_contexts[1].clear()
     if hyde or ragfusion:
         return response, pretty_contexts, augmentation
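These added branches appear to be the context fix referenced in the commit title: alpha looks like the hybrid-search weight passed into invoke(), and pretty_contexts seems to hold one context list per retriever for the web UI. At either extreme one retriever contributes nothing, so its list is emptied before the results are returned. Note that list.clear() empties the list in place, so every reference to that list, including the one handed back to the UI, sees the change. A small sketch of the behavior; the index meaning (0 = semantic, 1 = lexical/keyword) is inferred from the commented-out SEMANTIC block above and is an assumption:

```python
# Sketch only: the index mapping (0 = semantic, 1 = lexical/keyword) is an assumption
# inferred from the commented-out "SEMANTIC" block earlier in this file.
def filter_contexts_by_alpha(pretty_contexts, alpha):
    if alpha == 0.0:
        # Semantic weight is zero, so semantic contexts are irrelevant.
        pretty_contexts[0].clear()
    elif alpha == 1.0:
        # Lexical weight is zero, so keyword contexts are irrelevant.
        pretty_contexts[1].clear()
    return pretty_contexts


contexts = [["semantic hit"], ["keyword hit"]]
filter_contexts_by_alpha(contexts, 0.0)
print(contexts)  # [[], ['keyword hit']] -- clear() mutates the list in place
```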
