Skip to content

Commit 15cb088

Browse files
vishalbollu authored and deliahu committed
Set API version in test (#51)
(cherry picked from commit 6d66da7)
1 parent 4944852 commit 15cb088

File tree

2 files changed

+15
-7
lines changed

2 files changed

+15
-7
lines changed

pkg/workloads/spark_job/test/integration/iris_context.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import consts
16+
1517
"""
1618
HOW TO GENERATE CONTEXT
1719
@@ -25,10 +27,16 @@
2527
from lib.storage import S3
2628
bucket, key = S3.deconstruct_s3_path('s3://<cortex-bucket>/apps/<app-name>/contexts/<context-id>.msgpack')
2729
S3(bucket, client_config={}).get_msgpack(key)
28-
29-
5. Modify environment_data.csv_data.path to point to the correct input data file
3030
"""
3131

32+
33+
def get(input_data_path):
34+
raw_ctx["environment_data"]["csv_data"]["path"] = input_data_path
35+
raw_ctx["cortex_config"]["api_version"] = consts.CORTEX_VERSION
36+
37+
return raw_ctx
38+
39+
3240
raw_ctx = {
3341
"raw_dataset": {
3442
"key": "apps/iris/data/2019-03-08-09-58-35-701834/3976c5679bcf7cb550453802f4c3a9333c5f193f6097f1f5642de48d2397554/data_raw/raw.parquet",

pkg/workloads/spark_job/test/integration/iris_test.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
from spark_job import spark_job
1818
from lib.exceptions import UserException
1919
from lib import Context
20-
from test.integration.iris_context import raw_ctx
20+
from test.integration import iris_context
2121

2222
import pytest
2323
from pyspark.sql.types import *
@@ -54,6 +54,10 @@ def test_simple_end_to_end(spark):
5454
local_storage_path = Path("/workspace/local_storage")
5555
local_storage_path.mkdir(parents=True, exist_ok=True)
5656
should_ingest = True
57+
input_data_path = os.path.join(str(local_storage_path), "iris.csv")
58+
59+
raw_ctx = iris_context.get(input_data_path)
60+
5761
workload_id = raw_ctx["raw_columns"]["raw_float_columns"]["sepal_length"]["workload_id"]
5862

5963
cols_to_validate = []
@@ -65,10 +69,6 @@ def test_simple_end_to_end(spark):
6569
iris_data_string = "\n".join(",".join(str(val) for val in line) for line in iris_data)
6670
Path(os.path.join(str(local_storage_path), "iris.csv")).write_text(iris_data_string)
6771

68-
raw_ctx["environment_data"]["csv_data"]["path"] = os.path.join(
69-
str(local_storage_path), "iris.csv"
70-
)
71-
7272
ctx = Context(
7373
raw_obj=raw_ctx, cache_dir="/workspace/cache", local_storage_path=str(local_storage_path)
7474
)

0 commit comments

Comments
 (0)