 from datetime import datetime, timedelta
 from pathlib import Path
 from threading import Lock
-from typing import Any, Dict, List, Union
+from typing import Any, Dict, List, Optional, Union

 import oci
 from cachetools import TTLCache
     is_valid_ocid,
     upload_local_to_os,
 )
+from ads.aqua.config.config import evaluation_service_config
 from ads.aqua.constants import (
     CONSOLE_LINK_RESOURCE_TYPE_MAPPING,
     EVALUATION_REPORT,
@@ -171,7 +172,7 @@ def create(
171172 "Specify either a model or model deployment id."
172173 )
173174 evaluation_source = None
174- eval_inference_configuration = None
175+ eval_inference_configuration : Dict = {}
175176 if (
176177 DataScienceResource .MODEL_DEPLOYMENT
177178 in create_aqua_evaluation_details .evaluation_source_id
@@ -187,17 +188,26 @@ def create(
                 runtime = ModelDeploymentContainerRuntime.from_dict(
                     evaluation_source.runtime.to_dict()
                 )
-                inference_config = AquaContainerConfig.from_container_index_json(
+                container_config = AquaContainerConfig.from_container_index_json(
                     enable_spec=True
-                ).inference
-                for container in inference_config.values():
-                    if container.name == runtime.image[:runtime.image.rfind(":")]:
+                )
+                for (
+                    inference_container_family,
+                    inference_container_info,
+                ) in container_config.inference.items():
+                    if (
+                        inference_container_info.name
+                        == runtime.image[: runtime.image.rfind(":")]
+                    ):
                         eval_inference_configuration = (
-                            container.spec.evaluation_configuration
+                            evaluation_service_config()
+                            .get_merged_inference_params(inference_container_family)
+                            .to_dict()
                         )
+
         except Exception:
             logger.debug(
-                f"Could not load inference config details for the evaluation id: "
+                f"Could not load inference config details for the evaluation source id: "
                 f"{create_aqua_evaluation_details.evaluation_source_id}. Please check if the container"
                 f" runtime has the correct SMC image information."
             )
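Note on the hunk above: the inference configuration is now resolved from the evaluation service config by container family, rather than read off the container spec attached to the deployment runtime. A minimal standalone sketch of the matching step, using made-up container families and image tags instead of the real `AquaContainerConfig` index and `evaluation_service_config()` call:

```python
# Simplified sketch of the family-matching loop above. The container families
# and image names here are hypothetical examples, not values from the real
# service container index.
inference_containers = {
    "odsc-vllm-serving": {"name": "dsmc://odsc-vllm-serving"},
    "odsc-tgi-serving": {"name": "dsmc://odsc-tgi-serving"},
}
deployment_image = "dsmc://odsc-vllm-serving:0.4.1.3"

eval_inference_configuration = {}
for family, info in inference_containers.items():
    # Compare the container name with the deployment image minus its ":<tag>" suffix.
    if info["name"] == deployment_image[: deployment_image.rfind(":")]:
        # In the diff, this is where evaluation_service_config()
        # .get_merged_inference_params(family).to_dict() is invoked.
        eval_inference_configuration = {"matched_container_family": family}

print(eval_inference_configuration)  # {'matched_container_family': 'odsc-vllm-serving'}
```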
@@ -416,9 +426,7 @@ def create(
                 report_path=create_aqua_evaluation_details.report_path,
                 model_parameters=create_aqua_evaluation_details.model_parameters,
                 metrics=create_aqua_evaluation_details.metrics,
-                inference_configuration=eval_inference_configuration.to_filtered_dict()
-                if eval_inference_configuration
-                else {},
+                inference_configuration=eval_inference_configuration or {},
             )
         ).create(**kwargs)  ## TODO: decide what parameters will be needed
         logger.debug(
@@ -1225,45 +1233,24 @@ def _delete_job_and_model(job, model):
12251233 f"Exception message: { ex } "
12261234 )
12271235
1228- def load_evaluation_config (self , eval_id ) :
1236+ def load_evaluation_config (self , container : Optional [ str ] = None ) -> Dict :
12291237 """Loads evaluation config."""
1238+
1239+ # retrieve the evaluation config by container family name
1240+ evaluation_config = evaluation_service_config (container )
1241+
1242+ # convert the new config representation to the old one
12301243 return {
1231- "model_params" : {
1232- "max_tokens" : 500 ,
1233- "temperature" : 0.7 ,
1234- "top_p" : 1.0 ,
1235- "top_k" : 50 ,
1236- "presence_penalty" : 0.0 ,
1237- "frequency_penalty" : 0.0 ,
1238- "stop" : [],
1239- },
1244+ "model_params" : evaluation_config .ui_config .model_params .default ,
12401245 "shape" : {
1241- "VM.Standard.E3.Flex" : {
1242- "ocpu" : 8 ,
1243- "memory_in_gbs" : 128 ,
1244- "block_storage_size" : 200 ,
1245- },
1246- "VM.Standard.E4.Flex" : {
1247- "ocpu" : 8 ,
1248- "memory_in_gbs" : 128 ,
1249- "block_storage_size" : 200 ,
1250- },
1251- "VM.Standard3.Flex" : {
1252- "ocpu" : 8 ,
1253- "memory_in_gbs" : 128 ,
1254- "block_storage_size" : 200 ,
1255- },
1256- "VM.Optimized3.Flex" : {
1257- "ocpu" : 8 ,
1258- "memory_in_gbs" : 128 ,
1259- "block_storage_size" : 200 ,
1260- },
1261- },
1262- "default" : {
1263- "ocpu" : 8 ,
1264- "memory_in_gbs" : 128 ,
1265- "block_storage_size" : 200 ,
1246+ shape .name : shape .to_dict ()
1247+ for shape in evaluation_config .ui_config .shapes
12661248 },
1249+ "default" : (
1250+ evaluation_config .ui_config .shapes [0 ].to_dict ()
1251+ if len (evaluation_config .ui_config .shapes ) > 0
1252+ else {}
1253+ ),
12671254 }
12681255
     def _get_attribute_from_model_metadata(
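The reworked `load_evaluation_config` keeps the legacy response layout (`model_params`, `shape`, `default`) while sourcing the values from the evaluation service config. A hypothetical example of the returned structure, hard-coded here only to show the dict shape (the real values come from `evaluation_config.ui_config`):

```python
# Hypothetical illustration of the dict returned by load_evaluation_config();
# the numbers mirror the old hard-coded defaults removed in this diff and are
# not guaranteed to match what the evaluation service actually serves.
example_evaluation_config = {
    "model_params": {"max_tokens": 500, "temperature": 0.7, "top_p": 1.0},
    "shape": {
        "VM.Standard.E4.Flex": {
            "ocpu": 8,
            "memory_in_gbs": 128,
            "block_storage_size": 200,
        },
    },
    # "default" is the first entry of ui_config.shapes, or {} when none are configured.
    "default": {"ocpu": 8, "memory_in_gbs": 128, "block_storage_size": 200},
}
```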