@@ -30,12 +30,11 @@ def get_dtype(data):
     '''
     if hasattr(data, 'dtype'):
         return data.dtype
-    elif hasattr(data, 'dtypes'):
+    if hasattr(data, 'dtypes'):
         return str(data.dtypes[0])
-    elif hasattr(data, 'values'):
+    if hasattr(data, 'values'):
         return data.values.dtype
-    else:
-        raise ValueError(f'Impossible to get data type of {type(data)}')
+    raise ValueError(f'Impossible to get data type of {type(data)}')
 
 
 def sklearn_disable_finiteness_check():
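
This hunk only flattens the `elif`/`else` chain in `get_dtype` into early returns; behavior is unchanged. A minimal, self-contained sketch of the refactored dispatch, assuming numpy and pandas inputs (the benchmarks may pass other containers as well):

```python
import numpy as np
import pandas as pd

def get_dtype(data):
    if hasattr(data, 'dtype'):       # e.g. numpy ndarray
        return data.dtype
    if hasattr(data, 'dtypes'):      # e.g. pandas DataFrame
        return str(data.dtypes[0])
    if hasattr(data, 'values'):      # other wrappers exposing .values
        return data.values.dtype
    raise ValueError(f'Impossible to get data type of {type(data)}')

print(get_dtype(np.zeros(3, dtype=np.float32)))    # float32
print(get_dtype(pd.DataFrame({'a': [1.0, 2.0]})))  # float64
```
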
@@ -66,10 +65,7 @@ def _parse_size(string, dim=2):
 
 
 def float_or_int(string):
-    if '.' in string:
-        return float(string)
-    else:
-        return int(string)
+    return float(string) if '.' in string else int(string)
 
 
 def get_optimal_cache_size(n_rows, dtype=np.double, max_cache=64):
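
The `float_or_int` change is a pure rewrite to a conditional expression. A quick check of the unchanged behavior (the input strings below are illustrative):

```python
def float_or_int(string):
    # Returns a float when the literal has a decimal point, otherwise an int.
    return float(string) if '.' in string else int(string)

assert float_or_int('2.5') == 2.5 and isinstance(float_or_int('2.5'), float)
assert float_or_int('8') == 8 and isinstance(float_or_int('8'), int)
```
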
@@ -90,10 +86,8 @@ def get_optimal_cache_size(n_rows, dtype=np.double, max_cache=64):
     optimal_cache_size_bytes = byte_size * (n_rows ** 2)
     one_gb = 2 ** 30
     max_cache_bytes = max_cache * one_gb
-    if optimal_cache_size_bytes > max_cache_bytes:
-        return max_cache_bytes
-    else:
-        return optimal_cache_size_bytes
+    return max_cache_bytes \
+        if optimal_cache_size_bytes > max_cache_bytes else optimal_cache_size_bytes
 
 
 def parse_args(parser, size=None, loop_types=(),
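
For context on the single-expression return in `get_optimal_cache_size`: the ideal cache is `byte_size * n_rows ** 2` bytes, clamped to `max_cache` GiB. The sketch below fills in `byte_size` as the dtype's item size, which is an assumption since that part of the function sits outside this hunk:

```python
import numpy as np

def get_optimal_cache_size(n_rows, dtype=np.double, max_cache=64):
    byte_size = np.dtype(dtype).itemsize              # assumed: 8 for np.double
    optimal_cache_size_bytes = byte_size * (n_rows ** 2)
    one_gb = 2 ** 30
    max_cache_bytes = max_cache * one_gb
    return max_cache_bytes \
        if optimal_cache_size_bytes > max_cache_bytes else optimal_cache_size_bytes

# 100,000 double-precision rows would want ~75 GiB, so the 64 GiB cap applies.
print(get_optimal_cache_size(100_000))                # 68719476736
```
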
@@ -175,9 +169,10 @@ def parse_args(parser, size=None, loop_types=(),
                         help='Seed to pass as random_state')
     parser.add_argument('--dataset-name', type=str, default=None,
                         help='Dataset name')
-    parser.add_argument('--no-intel-optimized', default=False, action='store_true',
+    parser.add_argument('--no-intel-optimized', default=False,
+                        action='store_true',
                         help='Use no intel optimized version. '
-                             'Now avalible for scikit-learn benchmarks'),
+                             'Now avalible for scikit-learn benchmarks')
     parser.add_argument('--device', default='None', type=str,
                         choices=('host', 'cpu', 'gpu', 'None'),
                         help='Execution context device')
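
The argparse change only re-wraps the `--no-intel-optimized` declaration and drops a stray trailing comma after the call. A throwaway parser showing how the two flags touched in this hunk parse (this is not the benchmark's full parser):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--no-intel-optimized', default=False,
                    action='store_true',
                    help='Use no intel optimized version. '
                         'Now avalible for scikit-learn benchmarks')
parser.add_argument('--device', default='None', type=str,
                    choices=('host', 'cpu', 'gpu', 'None'),
                    help='Execution context device')

args = parser.parse_args(['--no-intel-optimized', '--device', 'gpu'])
print(args.no_intel_optimized, args.device)   # True gpu
```
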
@@ -519,8 +514,8 @@ def print_output(library, algorithm, stages, params, functions,
                  alg_params=None):
     if params.output_format == 'json':
         output = []
-        for i in range(len(stages)):
-            result = gen_basic_dict(library, algorithm, stages[i], params,
+        for i, stage in enumerate(stages):
+            result = gen_basic_dict(library, algorithm, stage, params,
                                     data[i], alg_instance, alg_params)
             result.update({'time[s]': times[i]})
             if metric_type is not None:
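
The loop rewrite swaps `range(len(stages))` for `enumerate`, keeping the index so the parallel `times` and `data` lists still line up. A stripped-down sketch of that pattern; `gen_basic_dict` here is a stand-in, not the benchmark's real helper:

```python
def gen_basic_dict(library, algorithm, stage, params, data, alg_instance, alg_params):
    # Stand-in: the real helper builds a richer per-stage record.
    return {'library': library, 'algorithm': algorithm, 'stage': stage}

def collect(library, algorithm, stages, times, data):
    output = []
    for i, stage in enumerate(stages):
        result = gen_basic_dict(library, algorithm, stage, None, data[i], None, None)
        result.update({'time[s]': times[i]})
        output.append(result)
    return output

print(collect('sklearn', 'svm', ['training', 'prediction'], [1.25, 0.07], [None, None]))
```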