11import logging
2- import json
32
43import torch
54from my_model import InferenceAutoencoder
@@ -25,11 +24,11 @@ def initialize(self, context):
2524 self .model = InferenceAutoencoder (input_shape = (51 ,), l2_lambda = 1e-4 )
2625 self .model .load_state_dict (state_dict )
2726 self .model .eval ()
28- logger .info ("✅ Model Loaded Successfully!" )
2927
def preprocess(self, data):
    """Convert the first request of a TorchServe batch to a float32 tensor.

    Args:
        data: TorchServe batch — a list of request dicts. The payload is
            looked up under the "data" key, then the "body" key. It is
            expected to be either a dict of the form
            {"instances": [[...], ...]} or the list of instances itself.

    Returns:
        torch.Tensor: float32 tensor built from the instances.

    Raises:
        ValueError: if the request carries no "data"/"body" payload, or a
            dict payload has no "instances" entry.
    """
    # Use explicit None checks instead of `a or b` so a present-but-falsy
    # payload is not silently treated as missing.
    payload = data[0].get("data")
    if payload is None:
        payload = data[0].get("body")
    if payload is None:
        raise ValueError("Request contains neither 'data' nor 'body' payload")
    # NOTE(review): assumes the payload arrives already parsed (dict/list);
    # if the server can hand over raw JSON bytes, a json.loads step would be
    # needed here — confirm against the serving configuration.
    if isinstance(payload, dict):
        instances = payload.get("instances")
        if instances is None:
            raise ValueError("Payload dict is missing the 'instances' key")
    else:
        # Backward-compatible generalization: accept the bare instances list.
        instances = payload
    return torch.tensor(instances, dtype=torch.float32)
3433
3534 def inference (self , data ):
@@ -41,8 +40,9 @@ def inference(self, data):
4140 def postprocess (self , data ):
4241 """Convert output to JSON format"""
4342 # We have to return the same length as the input:
44- # If our input is: [[1,2,3], [1,2,3]]
45- # Ou output has to be [list|str|int|float, list|str|int|float]
43+ # If our input is: [[1,2,3], [1,2,3], ...]
44+ # Our output has to be [list|str|int|float, list|str|int|float, ...]
45+ # This way, the webser will but the payload directly on the response body
4646 reconstructed , avg_mse = data
4747 payload = []
4848 for idx in range (len (avg_mse )):
0 commit comments