

def get_execution_engine_type(
-    data_frame: Union[DataFrame, pd.DataFrame]
+    data_frame: Union[DataFrame, pd.DataFrame]
) -> ExecutionEngine:
    """
    Determines the execution engine type for a given DataFrame.
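As an aside for readers of this hunk: below is a minimal, self-contained sketch of the kind of type check get_execution_engine_type performs. The infer_engine helper and the "pandas"/"spark" strings are illustrative stand-ins; the real function returns an ExecutionEngine enum member.

import pandas as pd

def infer_engine(data_frame):
    # Illustration only: a pandas DataFrame maps to the pandas engine,
    # anything else (e.g. a pyspark.sql.DataFrame) is treated as Spark.
    return "pandas" if isinstance(data_frame, pd.DataFrame) else "spark"

print(infer_engine(pd.DataFrame({"a": [1, 2]})))  # -> pandas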
@@ -89,7 +89,7 @@ def get_metastore_id(feature_store_id: str):


def validate_delta_format_parameters(
-    timestamp: datetime = None, version_number: int = None, is_restore: bool = False
+    timestamp: datetime = None, version_number: int = None, is_restore: bool = False
):
    """
    Validate the user input provided as part of the preview and restore APIs for ingested data. Ingested data is
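For context, here is a toy validator illustrating one plausible rule for these parameters. The either/or check is an assumption made for illustration, not necessarily the check the library applies (the function body falls outside this hunk).

from datetime import datetime

def check_time_travel_args(timestamp=None, version_number=None):
    # Assumed rule for illustration: Delta time travel targets either a
    # timestamp or a version number, never both in the same call.
    if timestamp is not None and version_number is not None:
        raise ValueError("Pass either timestamp or version_number, not both.")

check_time_travel_args(timestamp=datetime(2023, 6, 1))  # ok
check_time_travel_args(version_number=3)                # ok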
@@ -123,9 +123,9 @@ def validate_delta_format_parameters(


def show_ingestion_summary(
-    entity_id: str,
-    entity_type: EntityType = EntityType.FEATURE_GROUP,
-    error_details: str = None,
+    entity_id: str,
+    entity_type: EntityType = EntityType.FEATURE_GROUP,
+    error_details: str = None,
):
    """
    Displays an ingestion summary table with the given entity type and error details.
@@ -165,7 +165,7 @@ def show_validation_summary(ingestion_status: str, validation_output, expectatio
    statistics = validation_output["statistics"]

    table_headers = (
-        ["expectation_type"] + list(statistics.keys()) + ["ingestion_status"]
+        ["expectation_type"] + list(statistics.keys()) + ["ingestion_status"]
    )

    table_values = [expectation_type] + list(statistics.values()) + [ingestion_status]
@@ -209,9 +209,9 @@ def show_validation_summary(ingestion_status: str, validation_output, expectatio


def get_features(
-    output_columns: List[dict],
-    parent_id: str,
-    entity_type: EntityType = EntityType.FEATURE_GROUP,
+    output_columns: List[dict],
+    parent_id: str,
+    entity_type: EntityType = EntityType.FEATURE_GROUP,
) -> List[Feature]:
    """
    Returns a list of features, given a list of output_columns and a feature_group_id.
@@ -268,7 +268,7 @@ def get_schema_from_spark_df(df: DataFrame):


def get_schema_from_df(
-    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
+    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
) -> List[dict]:
    """
    Given a DataFrame, returns a list of dictionaries that describe its schema.
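A self-contained sketch of what such a per-column schema listing can look like for a pandas DataFrame. The dict keys ("name", "type") are placeholders rather than the keys the real get_schema_from_df emits, and the feature_store_id handling is omitted.

import pandas as pd

def schema_as_dicts(df: pd.DataFrame):
    # One dict per column: the column name plus its pandas dtype as a string.
    return [{"name": col, "type": str(dtype)} for col, dtype in df.dtypes.items()]

print(schema_as_dicts(pd.DataFrame({"id": [1], "score": [0.5]})))
# [{'name': 'id', 'type': 'int64'}, {'name': 'score', 'type': 'float64'}]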
@@ -282,7 +282,7 @@ def get_schema_from_df(


def get_input_features_from_df(
-    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
+    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
) -> List[FeatureDetail]:
    """
    Given a DataFrame, returns a list of FeatureDetail objects that represent its input features.
@@ -299,7 +299,7 @@ def get_input_features_from_df(


def convert_expectation_suite_to_expectation(
-    expectation_suite: ExpectationSuite, expectation_type: ExpectationType
+    expectation_suite: ExpectationSuite, expectation_type: ExpectationType
):
    """
    Convert an ExpectationSuite object to an Expectation object with detailed rule information.
@@ -358,7 +358,7 @@ def largest_matching_subset_of_primary_keys(left_feature_group, right_feature_gr


def convert_pandas_datatype_with_schema(
-    raw_feature_details: List[dict], input_df: pd.DataFrame
+    raw_feature_details: List[dict], input_df: pd.DataFrame
) -> pd.DataFrame:
    feature_detail_map = {}
    columns_to_remove = []
@@ -383,7 +383,7 @@ def convert_pandas_datatype_with_schema(


def convert_spark_dataframe_with_schema(
-    raw_feature_details: List[dict], input_df: DataFrame
+    raw_feature_details: List[dict], input_df: DataFrame
) -> DataFrame:
    feature_detail_map = {}
    columns_to_remove = []
@@ -402,4 +402,4 @@ def convert_spark_dataframe_with_schema(
def validate_input_feature_details(input_feature_details, data_frame):
    if isinstance(data_frame, pd.DataFrame):
        return convert_pandas_datatype_with_schema(input_feature_details, data_frame)
-    return convert_spark_dataframe_with_schema(input_feature_details, data_frame)
+    return convert_spark_dataframe_with_schema(input_feature_details, data_frame)
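validate_input_feature_details is shown in full above: it dispatches on the concrete DataFrame type so callers never need to know which engine they are on. A toy mirror of that pattern, with stub converters standing in for the two real ones:

import pandas as pd

def convert_pandas(details, df):
    return f"pandas conversion of {len(df)} row(s)"   # stub, not the library code

def convert_spark(details, df):
    return f"spark conversion of {df}"                # stub, not the library code

def dispatch_by_frame_type(details, df):
    # Same shape as validate_input_feature_details: isinstance picks the converter.
    if isinstance(df, pd.DataFrame):
        return convert_pandas(details, df)
    return convert_spark(details, df)

print(dispatch_by_frame_type([], pd.DataFrame({"a": [1]})))  # pandas conversion of 1 row(s)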