@@ -31,7 +31,7 @@ By default, the integration uses the same authentication method configured with
3131 ads.set_auth(auth="resource_principal")
3232
3333 llm = ChatOCIModelDeployment(
34- model="odsc-llm",
34+ model="odsc-llm", # default model name if deployed on AQUA
3535 endpoint= f"https://modeldeployment.oci.customer-oci.com/<OCID>/predict",
3636 # Optionally you can specify additional keyword arguments for the model, e.g. temperature and default_headers.
3737 temperature=0.1,
@@ -46,7 +46,7 @@ Alternatively, you may use specific authentication for the model:
4646 from ads.llm import ChatOCIModelDeployment
4747
4848 llm = ChatOCIModelDeployment(
49- model="odsc-llm",
49+ model="odsc-llm", # default model name if deployed on AQUA
5050 endpoint= f"https://modeldeployment.oci.customer-oci.com/<OCID>/predict",
5151 # Use security token authentication for the model
5252 auth=ads.auth.security_token(profile="my_profile"),
@@ -65,7 +65,7 @@ Completion models take a text string as input and return a string with comple
6565 from ads.llm import OCIModelDeploymentLLM
6666
6767 llm = OCIModelDeploymentLLM(
68- model="odsc-llm",
68+ model="odsc-llm", # default model name if deployed on AQUA
6969 endpoint= f"https://modeldeployment.oci.customer-oci.com/<OCID>/predict",
7070 # Optionally you can specify additional keyword arguments for the model.
7171 max_tokens=32,
@@ -98,7 +98,7 @@ Chat models take `chat messages <https://python.langchain.com/docs/concepts/#me
9898 from ads.llm import ChatOCIModelDeployment
9999
100100 llm = ChatOCIModelDeployment(
101- model="odsc-llm",
101+ model="odsc-llm", # default model name if deployed on AQUA
102102 endpoint=f"<oci_model_deployment_url>/predict",
103103 # Optionally you can specify additional keyword arguments for the model.
104104 max_tokens=32,
@@ -137,7 +137,7 @@ The vLLM container supports `tool/function calling <https://docs.vllm.ai/en/lates
137137 from ads.llm import ChatOCIModelDeploymentVLLM, ChatTemplates
138138
139139 llm = ChatOCIModelDeploymentVLLM(
140- model="odsc-llm",
140+ model="odsc-llm", # default model name if deployed on AQUA
141141 endpoint= f"https://modeldeployment.oci.customer-oci.com/<OCID>/predict",
142142 # Set tool_choice to "auto" to enable tool/function calling.
143143 tool_choice="auto",
0 commit comments