  from copy import deepcopy
  import itertools
- from functools import reduce
+ from math import prod
  import logging
  import typing as ty
  from pydra.utils.typing import StateArray, TypeParser
@@ -1150,12 +1150,12 @@ def _processing_terms(self, term, previous_states_ind):
         else:
             container_ndim = self.container_ndim_all.get(term, 1)
             shape = input_shape(self.inputs[term], container_ndim=container_ndim)
-            var_ind = range(reduce(lambda x, y: x * y, shape))
+            var_ind = range(prod(shape))
             new_keys = [term]
             # checking if the term is in inner_inputs
             if term in self.inner_inputs:
                 # TODO: have to be changed if differ length
-                inner_len = [shape[-1]] * reduce(lambda x, y: x * y, shape[:-1])
+                inner_len = [shape[-1]] * prod(shape[:-1])
                 # this comes from the previous node
                 outer_ind = self.inner_inputs[term].ind_l
                 var_ind_out = itertools.chain.from_iterable(
@@ -1172,10 +1172,10 @@ def _single_op_splits(self, op_single):
             self.inputs[op_single],
             container_ndim=self.container_ndim_all.get(op_single, 1),
         )
-        val_ind = range(reduce(lambda x, y: x * y, shape))
+        val_ind = range(prod(shape))
         if op_single in self.inner_inputs:
             # TODO: have to be changed if differ length
-            inner_len = [shape[-1]] * reduce(lambda x, y: x * y, shape[:-1], 1)
+            inner_len = [shape[-1]] * prod(shape[:-1])

            # this comes from the previous node
            outer_ind = self.inner_inputs[op_single].ind_l
0 commit comments