
Commit 3a29181

gustavocidornelas authored and whoseoyster committed
Send csv and config as request body. Add earliest and latest timestamps
1 parent 1cfb7ba commit 3a29181

File tree

1 file changed: +11 -11 lines changed

openlayer/__init__.py

Lines changed: 11 additions & 11 deletions
@@ -1997,23 +1997,23 @@ def publish_batch_data(
             config=batch_data, df=batch_df, column_name=column
         )
         # Get min and max timestamps
-        min_timestamp = batch_df[batch_data["timestampColumnName"]].min()
-        max_timestamp = batch_df[batch_data["timestampColumnName"]].max()
+        earliest_timestamp = batch_df[batch_data["timestampColumnName"]].min()
+        latest_timestamp = batch_df[batch_data["timestampColumnName"]].max()
 
         with tempfile.TemporaryDirectory() as tmp_dir:
-            # Copy relevant files to tmp dir
-            utils.write_yaml(batch_data, f"{tmp_dir}/dataset_config.yaml")
+            # Copy save files to tmp dir
             batch_df.to_csv(f"{tmp_dir}/dataset.csv", index=False)
-
-            tar_file_path = os.path.join(tmp_dir, "tarfile")
-            with tarfile.open(tar_file_path, mode="w:gz") as tar:
-                tar.add(tmp_dir, arcname=os.path.basename("reference_dataset"))
+            payload = {
+                "earliestTimestamp": earliest_timestamp,
+                "latestTimestamp": latest_timestamp,
+                **batch_data,
+            }
 
             self.api.upload(
                 endpoint=f"inference-pipelines/{inference_pipeline_id}/data",
-                file_path=tar_file_path,
-                object_name="tarfile",
-                body={},
+                file_path=f"{tmp_dir}/dataset.csv",
+                object_name="dataset.csv",
+                body=payload,
                 storage_uri_key="storageUri",
                 method="POST",
             )
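
For illustration, a minimal sketch of how the request body is assembled after this change: the timestamp bounds are computed from the batch DataFrame and merged with the dataset config, and the raw CSV is uploaded directly instead of a tarball. Only timestampColumnName, earliestTimestamp, and latestTimestamp come from the diff above; the other config keys and the toy DataFrame are made up for the example.

import pandas as pd

# Toy dataset config -- keys other than "timestampColumnName" are illustrative
batch_data = {
    "timestampColumnName": "timestamp",
    "classNames": ["negative", "positive"],
}

# Toy batch of production data (purely illustrative)
batch_df = pd.DataFrame(
    {
        "timestamp": [1700000000, 1700000060, 1700000120],
        "prediction": [0, 1, 1],
    }
)

# Timestamp bounds, computed the same way as in the new code
earliest_timestamp = batch_df[batch_data["timestampColumnName"]].min()
latest_timestamp = batch_df[batch_data["timestampColumnName"]].max()

# The request body now carries the config plus the timestamp bounds;
# the CSV file itself is uploaded separately as "dataset.csv"
payload = {
    "earliestTimestamp": earliest_timestamp,
    "latestTimestamp": latest_timestamp,
    **batch_data,
}
print(payload)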
