1515
1616import dataclasses
1717import json
18- import os
1918import sys
2019import hmac
2120import hashlib
2928
3029from tblib import pickling_support
3130
31+ # NOTE: do not use os.path.join to build S3 URIs — on Windows it joins with
31+ # backslashes, producing malformed "s3://" URIs; use "/" string formatting instead.
32+
3233
3334def _get_python_version ():
3435 return f"{ sys .version_info .major } .{ sys .version_info .minor } .{ sys .version_info .micro } "
@@ -143,18 +144,15 @@ def serialize_func_to_s3(
143144 Raises:
144145 SerializationError: when fail to serialize function to bytes.
145146 """
146-
147147 bytes_to_upload = CloudpickleSerializer .serialize (func )
148148
149- _upload_bytes_to_s3 (
150- bytes_to_upload , os .path .join (s3_uri , "payload.pkl" ), s3_kms_key , sagemaker_session
151- )
149+ _upload_bytes_to_s3 (bytes_to_upload , f"{ s3_uri } /payload.pkl" , s3_kms_key , sagemaker_session )
152150
153151 sha256_hash = _compute_hash (bytes_to_upload , secret_key = hmac_key )
154152
155153 _upload_bytes_to_s3 (
156154 _MetaData (sha256_hash ).to_json (),
157- os . path . join ( s3_uri , " metadata.json") ,
155+ f" { s3_uri } / metadata.json" ,
158156 s3_kms_key ,
159157 sagemaker_session ,
160158 )
@@ -177,20 +175,16 @@ def deserialize_func_from_s3(sagemaker_session: Session, s3_uri: str, hmac_key:
177175 DeserializationError: when fail to serialize function to bytes.
178176 """
179177 metadata = _MetaData .from_json (
180- _read_bytes_from_s3 (os . path . join ( s3_uri , " metadata.json") , sagemaker_session )
178+ _read_bytes_from_s3 (f" { s3_uri } / metadata.json" , sagemaker_session )
181179 )
182180
183- bytes_to_deserialize = _read_bytes_from_s3 (
184- os .path .join (s3_uri , "payload.pkl" ), sagemaker_session
185- )
181+ bytes_to_deserialize = _read_bytes_from_s3 (f"{ s3_uri } /payload.pkl" , sagemaker_session )
186182
187183 _perform_integrity_check (
188184 expected_hash_value = metadata .sha256_hash , secret_key = hmac_key , buffer = bytes_to_deserialize
189185 )
190186
191- return CloudpickleSerializer .deserialize (
192- os .path .join (s3_uri , "payload.pkl" ), bytes_to_deserialize
193- )
187+ return CloudpickleSerializer .deserialize (f"{ s3_uri } /payload.pkl" , bytes_to_deserialize )
194188
195189
196190def serialize_obj_to_s3 (
@@ -211,15 +205,13 @@ def serialize_obj_to_s3(
211205
212206 bytes_to_upload = CloudpickleSerializer .serialize (obj )
213207
214- _upload_bytes_to_s3 (
215- bytes_to_upload , os .path .join (s3_uri , "payload.pkl" ), s3_kms_key , sagemaker_session
216- )
208+ _upload_bytes_to_s3 (bytes_to_upload , f"{ s3_uri } /payload.pkl" , s3_kms_key , sagemaker_session )
217209
218210 sha256_hash = _compute_hash (bytes_to_upload , secret_key = hmac_key )
219211
220212 _upload_bytes_to_s3 (
221213 _MetaData (sha256_hash ).to_json (),
222- os . path . join ( s3_uri , " metadata.json") ,
214+ f" { s3_uri } / metadata.json" ,
223215 s3_kms_key ,
224216 sagemaker_session ,
225217 )
@@ -240,20 +232,16 @@ def deserialize_obj_from_s3(sagemaker_session: Session, s3_uri: str, hmac_key: s
240232 """
241233
242234 metadata = _MetaData .from_json (
243- _read_bytes_from_s3 (os . path . join ( s3_uri , " metadata.json") , sagemaker_session )
235+ _read_bytes_from_s3 (f" { s3_uri } / metadata.json" , sagemaker_session )
244236 )
245237
246- bytes_to_deserialize = _read_bytes_from_s3 (
247- os .path .join (s3_uri , "payload.pkl" ), sagemaker_session
248- )
238+ bytes_to_deserialize = _read_bytes_from_s3 (f"{ s3_uri } /payload.pkl" , sagemaker_session )
249239
250240 _perform_integrity_check (
251241 expected_hash_value = metadata .sha256_hash , secret_key = hmac_key , buffer = bytes_to_deserialize
252242 )
253243
254- return CloudpickleSerializer .deserialize (
255- os .path .join (s3_uri , "payload.pkl" ), bytes_to_deserialize
256- )
244+ return CloudpickleSerializer .deserialize (f"{ s3_uri } /payload.pkl" , bytes_to_deserialize )
257245
258246
259247def serialize_exception_to_s3 (
@@ -275,15 +263,13 @@ def serialize_exception_to_s3(
275263
276264 bytes_to_upload = CloudpickleSerializer .serialize (exc )
277265
278- _upload_bytes_to_s3 (
279- bytes_to_upload , os .path .join (s3_uri , "payload.pkl" ), s3_kms_key , sagemaker_session
280- )
266+ _upload_bytes_to_s3 (bytes_to_upload , f"{ s3_uri } /payload.pkl" , s3_kms_key , sagemaker_session )
281267
282268 sha256_hash = _compute_hash (bytes_to_upload , secret_key = hmac_key )
283269
284270 _upload_bytes_to_s3 (
285271 _MetaData (sha256_hash ).to_json (),
286- os . path . join ( s3_uri , " metadata.json") ,
272+ f" { s3_uri } / metadata.json" ,
287273 s3_kms_key ,
288274 sagemaker_session ,
289275 )
@@ -304,20 +290,16 @@ def deserialize_exception_from_s3(sagemaker_session: Session, s3_uri: str, hmac_
304290 """
305291
306292 metadata = _MetaData .from_json (
307- _read_bytes_from_s3 (os . path . join ( s3_uri , " metadata.json") , sagemaker_session )
293+ _read_bytes_from_s3 (f" { s3_uri } / metadata.json" , sagemaker_session )
308294 )
309295
310- bytes_to_deserialize = _read_bytes_from_s3 (
311- os .path .join (s3_uri , "payload.pkl" ), sagemaker_session
312- )
296+ bytes_to_deserialize = _read_bytes_from_s3 (f"{ s3_uri } /payload.pkl" , sagemaker_session )
313297
314298 _perform_integrity_check (
315299 expected_hash_value = metadata .sha256_hash , secret_key = hmac_key , buffer = bytes_to_deserialize
316300 )
317301
318- return CloudpickleSerializer .deserialize (
319- os .path .join (s3_uri , "payload.pkl" ), bytes_to_deserialize
320- )
302+ return CloudpickleSerializer .deserialize (f"{ s3_uri } /payload.pkl" , bytes_to_deserialize )
321303
322304
323305def _upload_bytes_to_s3 (bytes , s3_uri , s3_kms_key , sagemaker_session ):
0 commit comments