16 | 16 | import os |
17 | 17 |
18 | 18 | import pytest |
| 19 | +from tests.integ import DATA_DIR |
| 20 | +from tests.integ.timeout import timeout_and_delete_endpoint_by_name |
19 | 21 |
20 | 22 | from sagemaker.amazon.amazon_estimator import get_image_uri |
21 | 23 | from sagemaker.content_types import CONTENT_TYPE_CSV |
22 | 24 | from sagemaker.model import Model |
23 | 25 | from sagemaker.pipeline import PipelineModel |
24 | 26 | from sagemaker.predictor import RealTimePredictor, json_serializer |
25 | | -from sagemaker.session import Session |
26 | 27 | from sagemaker.sparkml.model import SparkMLModel |
27 | 28 | from sagemaker.utils import sagemaker_timestamp |
28 | | -from tests.integ import DATA_DIR |
29 | | -from tests.integ.timeout import timeout_and_delete_endpoint_by_name |
30 | 29 |
31 | 30 |
32 | 31 | @pytest.mark.continuous_testing |
33 | 32 | @pytest.mark.regional_testing |
34 | 33 | def test_inference_pipeline_model_deploy(sagemaker_session): |
35 | | - # Creates a Pipeline model comprising of SparkML (serialized by MLeap) and XGBoost and deploys to one endpoint |
36 | 34 | sparkml_data_path = os.path.join(DATA_DIR, 'sparkml_model') |
37 | 35 | xgboost_data_path = os.path.join(DATA_DIR, 'xgboost_model') |
38 | 36 | endpoint_name = 'test-inference-pipeline-deploy-{}'.format(sagemaker_timestamp()) |
39 | | - sparkml_model_data = sagemaker_session.upload_data(path=os.path.join(sparkml_data_path, 'mleap_model.tar.gz'), |
40 | | - key_prefix='integ-test-data/sparkml/model') |
41 | | - xgb_model_data = sagemaker_session.upload_data(path=os.path.join(xgboost_data_path, 'xgb_model.tar.gz'), |
42 | | - key_prefix='integ-test-data/xgboost/model') |
| 37 | + sparkml_model_data = sagemaker_session.upload_data( |
| 38 | + path=os.path.join(sparkml_data_path, 'mleap_model.tar.gz'), |
| 39 | + key_prefix='integ-test-data/sparkml/model') |
| 40 | + xgb_model_data = sagemaker_session.upload_data( |
| 41 | + path=os.path.join(xgboost_data_path, 'xgb_model.tar.gz'), |
| 42 | + key_prefix='integ-test-data/xgboost/model') |
43 | 43 | schema = json.dumps({ |
44 | 44 | "input": [ |
45 | 45 | { |
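(The elided context, file lines 46-73, holds the rest of the column schema that is serialized into the SAGEMAKER_SPARKML_SCHEMA environment variable used below. For orientation only, a minimal sketch of the shape such a schema takes for the SageMaker SparkML serving container; the column names and types here are hypothetical placeholders, not the ones in the elided lines:

    import json

    # Hypothetical sketch of the SAGEMAKER_SPARKML_SCHEMA shape; the real
    # test's columns live in the elided file lines 46-73.
    schema = json.dumps({
        "input": [
            {"name": "feature_1", "type": "double"},  # placeholder column
            {"name": "feature_2", "type": "string"},  # placeholder column
        ],
        "output": {
            "name": "features",  # MLeap pipelines commonly emit a feature vector
            "type": "double",
            "struct": "vector"
        }
    })
)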
@@ -74,10 +74,12 @@ def test_inference_pipeline_model_deploy(sagemaker_session): |
74 | 74 | } |
75 | 75 | }) |
76 | 76 | with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session): |
77 | | - sparkml_model = SparkMLModel(model_data=sparkml_model_data, env={'SAGEMAKER_SPARKML_SCHEMA': schema}, |
| 77 | + sparkml_model = SparkMLModel(model_data=sparkml_model_data, |
| 78 | + env={'SAGEMAKER_SPARKML_SCHEMA': schema}, |
78 | 79 | sagemaker_session=sagemaker_session) |
79 | | - xgb_image = get_image_uri(Session().boto_region_name, 'xgboost') |
80 | | - xgb_model = Model(model_data=xgb_model_data, image=xgb_image, sagemaker_session=sagemaker_session) |
| 80 | + xgb_image = get_image_uri(sagemaker_session.boto_region_name, 'xgboost') |
| 81 | + xgb_model = Model(model_data=xgb_model_data, image=xgb_image, |
| 82 | + sagemaker_session=sagemaker_session) |
81 | 83 | model = PipelineModel(models=[sparkml_model, xgb_model], role='SageMakerRole', |
82 | 84 | sagemaker_session=sagemaker_session, name=endpoint_name) |
83 | 85 | model.deploy(1, 'ml.m4.xlarge', endpoint_name=endpoint_name) |
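(The shown hunks stop before the request/response assertions, but the imports kept at the top, RealTimePredictor, json_serializer, and CONTENT_TYPE_CSV, indicate how the deployed pipeline endpoint is exercised. A minimal sketch, assuming the v1 SageMaker Python SDK API those imports belong to and a hypothetical CSV payload:

    # Sketch only: invoke the two-container pipeline endpoint deployed above.
    # The payload is a hypothetical CSV row; the real test's input must match
    # the schema defined earlier in the file.
    predictor = RealTimePredictor(endpoint=endpoint_name,
                                  sagemaker_session=sagemaker_session,
                                  serializer=json_serializer,
                                  content_type=CONTENT_TYPE_CSV,
                                  accept=CONTENT_TYPE_CSV)
    sample_row = '1.0,28.0,C'  # placeholder values, one per schema input column
    assert predictor.predict(sample_row) is not None

Note also the one substantive change in this hunk: get_image_uri now reads the region from the sagemaker_session fixture instead of constructing a fresh Session(), which keeps the test pinned to the fixture's region and lets the now-removed sagemaker.session.Session import go.)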