@@ -24,13 +24,10 @@
 import numpy as np
 
 from cortex.lib import util, Context, api_utils
-from cortex.lib.log import get_logger, debug_obj
+from cortex.lib.log import cx_logger, debug_obj
 from cortex.lib.exceptions import CortexException, UserRuntimeException, UserException
 from cortex.lib.stringify import truncate
 
-logger = get_logger()
-logger.propagate = False  # prevent double logging (flask modifies root logger)
-
 app = Flask(__name__)
 
 app.json_encoder = util.json_tricks_encoder
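Throughout the diff, the module-level `logger` (created once at import time, with `propagate = False` so Flask's root-logger changes don't cause double logging) is replaced by calls to a `cx_logger()` factory imported from `cortex.lib.log`. That function's body is not part of this diff; below is a minimal sketch of a lazily-initialized, non-propagating logger factory consistent with the old behavior. The internals are an assumption, only the name `cx_logger` comes from the import above.

```python
# Hypothetical sketch of cx_logger() -- the real implementation lives in
# cortex.lib.log and is not shown in this diff.
import logging

_logger = None

def cx_logger():
    global _logger
    if _logger is None:
        _logger = logging.getLogger("cortex")
        _logger.setLevel(logging.INFO)
        _logger.addHandler(logging.StreamHandler())
        # prevent double logging (flask modifies the root logger)
        _logger.propagate = False
    return _logger
```

Deferring logger construction to call time lets Flask finish mutating the root logger before the Cortex logger is configured, which is presumably why the module-level assignment was removed.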
@@ -79,7 +76,7 @@ def after_request(response):
     api = local_cache["api"]
     ctx = local_cache["ctx"]
 
-    logger.info(response.status)
+    cx_logger().info(response.status)
 
     prediction = None
     if "prediction" in g:
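For context, `after_request` reads the prediction that `predict()` stashes on Flask's request-scoped `g` object (visible further down at `g.prediction = result`). A standalone sketch of that hand-off pattern, with the route body and result value being illustrative rather than taken from the diff:

```python
# Minimal sketch of the Flask `g` hand-off pattern used above; not the Cortex code.
from flask import Flask, g, jsonify

app = Flask(__name__)

@app.route("/predict", methods=["POST"])
def predict():
    result = {"label": "positive"}  # stand-in for real inference output
    g.prediction = result           # stash for the after_request hook
    return jsonify(result)

@app.after_request
def after_request(response):
    if "prediction" in g:           # absent if predict() failed early
        print(g.prediction)         # e.g. publish to a prediction tracker
    return response
```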
@@ -94,7 +91,7 @@ def after_request(response):
 
 def prediction_failed(reason):
     message = "prediction failed: {}".format(reason)
-    logger.error(message)
+    cx_logger().error(message)
     return message, status.HTTP_406_NOT_ACCEPTABLE
 
 
@@ -215,7 +212,7 @@ def predict():
 
         debug_obj("post_inference", result, debug)
     except Exception as e:
-        logger.exception("prediction failed")
+        cx_logger().exception("prediction failed")
         return prediction_failed(str(e))
 
     g.prediction = result
@@ -238,7 +235,7 @@ def get_signature():
 
 @app.errorhandler(Exception)
 def exceptions(e):
-    logger.exception(e)
+    cx_logger().exception(e)
     return jsonify(error=str(e)), 500
 
 
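`@app.errorhandler(Exception)` is Flask's catch-all: any exception that escapes a view is logged with its traceback and converted into a JSON 500 instead of Flask's default HTML error page. A self-contained sketch of the pattern (the failing route is illustrative):

```python
# Standalone sketch of a catch-all JSON error handler; not the Cortex code.
from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/boom")
def boom():
    raise ValueError("something went wrong")   # escapes the view...

@app.errorhandler(Exception)
def exceptions(e):
    app.logger.exception(e)                    # the diff logs via cx_logger() instead
    return jsonify(error=str(e)), 500          # ...and becomes a JSON 500 here
```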
@@ -262,45 +259,45 @@ def start(args):
         request_handler = local_cache.get("request_handler")
 
         if request_handler is not None and util.has_function(request_handler, "pre_inference"):
-            logger.info(
+            cx_logger().info(
                 "using pre_inference request handler provided in {}".format(
                     api["onnx"]["request_handler"]
                 )
             )
         else:
-            logger.info("pre_inference request handler not found")
+            cx_logger().info("pre_inference request handler not found")
 
         if request_handler is not None and util.has_function(request_handler, "post_inference"):
-            logger.info(
+            cx_logger().info(
                 "using post_inference request handler provided in {}".format(
                     api["onnx"]["request_handler"]
                 )
             )
         else:
-            logger.info("post_inference request handler not found")
+            cx_logger().info("post_inference request handler not found")
 
         sess = rt.InferenceSession(model_path)
         local_cache["sess"] = sess
         local_cache["input_metadata"] = sess.get_inputs()
-        logger.info(
+        cx_logger().info(
             "input_metadata: {}".format(truncate(extract_signature(local_cache["input_metadata"])))
         )
         local_cache["output_metadata"] = sess.get_outputs()
-        logger.info(
+        cx_logger().info(
             "output_metadata: {}".format(
                 truncate(extract_signature(local_cache["output_metadata"]))
             )
         )
 
     except Exception as e:
-        logger.exception("failed to start api")
+        cx_logger().exception("failed to start api")
         sys.exit(1)
 
     if api.get("tracker") is not None and api["tracker"].get("model_type") == "classification":
         try:
             local_cache["class_set"] = api_utils.get_classes(ctx, api["name"])
         except Exception as e:
-            logger.warn("an error occurred while attempting to load classes", exc_info=True)
+            cx_logger().warn("an error occurred while attempting to load classes", exc_info=True)
 
     serve(app, listen="*:{}".format(args.port))
 
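`extract_signature` is not shown in this diff, but with onnxruntime each element of `sess.get_inputs()` / `sess.get_outputs()` is a `NodeArg` exposing `name`, `shape`, and `type`, so a plausible sketch of the helper being logged here is:

```python
# Hypothetical sketch of extract_signature(); the real helper is defined
# elsewhere in this file and may differ.
def extract_signature(metadata_list):
    # metadata_list: onnxruntime NodeArg objects from get_inputs()/get_outputs()
    return {node.name: {"shape": node.shape, "type": node.type} for node in metadata_list}
```

The final `serve(app, listen="*:{}".format(args.port))` call matches waitress's `serve` signature, though the corresponding import is outside this hunk.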