From 33a14c5e95058ca9100a7698b9878a744763b8d7 Mon Sep 17 00:00:00 2001
From: Michael
Date: Mon, 16 Jun 2025 21:30:32 +0300
Subject: [PATCH 1/2] run dataflow

---
 src/sempy_labs/_dataflows.py | 82 +++++++++++++++++++++++++++++++++++-
 1 file changed, 81 insertions(+), 1 deletion(-)

diff --git a/src/sempy_labs/_dataflows.py b/src/sempy_labs/_dataflows.py
index acc7232d..3feec92b 100644
--- a/src/sempy_labs/_dataflows.py
+++ b/src/sempy_labs/_dataflows.py
@@ -11,7 +11,7 @@
     _conv_b64,
     get_jsonpath_value,
 )
-from typing import Optional, Tuple
+from typing import Optional, Tuple, List, Literal
 import sempy_labs._icons as icons
 from uuid import UUID
 from jsonpath_ng.ext import parse
@@ -504,3 +504,83 @@ def create_dataflow(
     print(
         f"{icons.green_dot} The dataflow '{name}' has been created within the '{workspace_name}' workspace."
     )
+
+
+def run_dataflow(
+    dataflow: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    job_type: Literal["Execute", "ApplyChanges"] = "Execute",
+    parameters: Optional[List[dict]] = None,
+):
+    """
+    Executes a dataflow.
+
+    This is a wrapper function for the following APIs: `Background Jobs - Run On Demand Execute `_ and `Background Jobs - Run On Demand Apply Changes `_.
+
+    Parameters
+    ----------
+    dataflow : str | uuid.UUID
+        The name or ID of the dataflow.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    job_type : Literal["Execute", "ApplyChanges"], default="Execute"
+        The type of job to run. Can be either "Execute" or "ApplyChanges".
+    parameters : List[dict], default=None
+        A list of parameters to pass to the dataflow. Example:
+        [
+            {
+                "parameterName": "OrderKey",
+                "type": "Automatic",
+                "value": 25
+            },
+            {
+                "parameterName": "Threshold",
+                "type": "Automatic",
+                "value": "start"
+            }
+        ]
+    """
+    if job_type not in ["Execute", "ApplyChanges"]:
+        raise ValueError(
+            f"{icons.red_dot} The job_type parameter must be either 'Execute' or 'ApplyChanges'."
+        )
+    if job_type == "ApplyChanges" and parameters:
+        print(
+            f"The job type is set to '{job_type}'. Parameters are not accepted for this job type."
+        )
+        return
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    (dataflow_name, dataflow_id, generation) = (
+        _resolve_dataflow_name_and_id_and_generation(dataflow, workspace_id)
+    )
+
+    if generation != "Gen2 CI/CD":
+        print(
+            f"{icons.info} The dataflow '{dataflow_name}' is not a Fabric Dataflow Gen2 CI/CD item. This function only supports Dataflow Gen2 CI/CD."
+        )
+        return
+
+    payload = None
+    if parameters:
+        payload = {
+            "executionData": {
+                "executeOption": "ApplyChangesIfNeeded",
+                "parameters": parameters,
+            }
+        }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataflows/{dataflow_id}/jobs/instances?jobType={job_type}",
+        method="post",
+        payload=payload,
+        lro_return_json=True,
+        status_codes=[200, 202],
+    )
+
+    print(
+        f"{icons.green_dot} The dataflow '{dataflow_name}' has been run within the '{workspace_name}' workspace."
+    )

From e6e3b4beb7305f59fcb3c97fa61ffebe281c3e4b Mon Sep 17 00:00:00 2001
From: Michael
Date: Mon, 16 Jun 2025 21:45:57 +0300
Subject: [PATCH 2/2] update readme

---
 README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index ada681d1..ee74be78 100644
--- a/README.md
+++ b/README.md
@@ -101,11 +101,11 @@ for file_name, file_url in notebook_files.items():
 ## Once installed, run this code to import the library into your notebook
 ```python
 import sempy_labs as labs
-from sempy_labs import migration, directlake, admin, graph
-from sempy_labs import lakehouse as lake
-from sempy_labs import report as rep
+import sempy_labs.lakehouse as lake
+import sempy_labs.report as rep
+from sempy_labs import migration, directlake, admin, graph, mirrored_azure_databricks_catalog
 from sempy_labs.tom import connect_semantic_model
-from sempy_labs.report import ReportWrapper
+from sempy_labs.report import connect_report
 ```
 
 ## Load Semantic Link Labs into a custom [Fabric environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment)
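
A minimal usage sketch of the new `run_dataflow` helper added in PATCH 1/2. It imports from the private `_dataflows` module because a package-level re-export is not shown in this diff; the dataflow name, workspace name, and parameter values are hypothetical placeholders.

```python
# Import directly from the module touched by the patch; a top-level
# `sempy_labs.run_dataflow` export is not part of this diff.
from sempy_labs._dataflows import run_dataflow

# Hypothetical names - replace with a real Dataflow Gen2 (CI/CD) item and workspace.
dataflow_name = "Sales Dataflow"
workspace_name = "Sales Workspace"

# Run the dataflow on demand, overriding one of its parameters.
run_dataflow(
    dataflow=dataflow_name,
    workspace=workspace_name,
    job_type="Execute",
    parameters=[
        {"parameterName": "OrderKey", "type": "Automatic", "value": 25},
    ],
)

# Apply pending changes; per the checks in the function body, passing
# parameters with this job type prints a notice and returns without
# submitting a job.
run_dataflow(
    dataflow=dataflow_name,
    workspace=workspace_name,
    job_type="ApplyChanges",
)
```

Note that the function only submits jobs for Dataflow Gen2 CI/CD items; other generations are rejected with an informational message.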