@@ -2453,19 +2453,21 @@ async def probe_tip_inventory(
 
     results: Dict[str, bool] = {}
 
-    num_channels = self.backend.num_channels
     if use_channels is None:
-      use_channels = list(range(num_channels))
+      use_channels = list(range(self.backend.num_channels))
+    num_channels = len(use_channels)
 
     for i in range(0, len(tip_spots), num_channels):
       subset = tip_spots[i : i + num_channels]
-      use_channels = list(range(len(subset)))
+      use_channels = use_channels[: len(subset)]
       batch_result = await probing_fn(subset, use_channels)
       results.update(batch_result)
 
     return results
 
-  async def consolidate_tip_inventory(self, tip_racks: List[TipRack]):
+  async def consolidate_tip_inventory(
+    self, tip_racks: List[TipRack], use_channels: Optional[List[int]] = None
+  ):
     """
     Consolidate partial tip racks on the deck by redistributing tips.
 
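For reviewers, a minimal, self-contained sketch of the batch-and-slice pattern this hunk introduces. It is plain Python, not PyLabRobot code; `probe_in_batches` and the string tip-spot names are made up for illustration, and a dummy dict stands in for the real `probing_fn` call:

```python
from typing import Dict, List

def probe_in_batches(tip_spots: List[str], use_channels: List[int]) -> Dict[str, bool]:
  """Illustrative only: split tip_spots into groups of len(use_channels)."""
  results: Dict[str, bool] = {}
  num_channels = len(use_channels)
  for i in range(0, len(tip_spots), num_channels):
    subset = tip_spots[i : i + num_channels]
    channels = use_channels[: len(subset)]  # the final batch may need fewer channels
    # stand-in for the real `await probing_fn(subset, channels)`
    results.update({spot: True for spot, _ in zip(subset, channels)})
  return results

# 5 spots probed over 2 channels -> batches of size 2, 2, 1
print(probe_in_batches(["A1", "B1", "C1", "D1", "E1"], use_channels=[0, 1]))
```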
@@ -2475,6 +2477,11 @@ async def consolidate_tip_inventory(self, tip_racks: List[TipRack]):
     as possible, grouped by tip model.
     Tips are moved efficiently to minimize pipetting steps, avoiding redundant
     visits to the same drop columns.
+
+    Args:
+      tip_racks: List of TipRack objects to consolidate.
+      use_channels: Optional list of channels to use for consolidation. If not
+        provided, the first 8 available channels will be used.
     """
 
     def merge_sublists(lists: List[List[TipSpot]], max_len: int) -> List[List[TipSpot]]:
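A hedged usage sketch of the new parameter, to be run from an async context; `lh`, `tip_rack_1`, and `tip_rack_2` are hypothetical stand-ins for an already set-up LiquidHandler and the partially used racks on its deck:

```python
# Restrict consolidation to the first four channels; omitting use_channels keeps
# the previous behaviour of letting the method pick channels itself.
await lh.consolidate_tip_inventory(
  tip_racks=[tip_rack_1, tip_rack_2],
  use_channels=[0, 1, 2, 3],
)
```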
@@ -2595,13 +2602,16 @@ def key_for_tip_spot(tip_spot: TipSpot) -> Tuple[str, float]:
     current_tip_model = all_origin_tip_spots[0].tracker.get_tip()
 
     # Ensure there are channels that can pick up the tip model
-    num_channels_available = len(
-      [
-        c
-        for c in range(self.backend.num_channels)
-        if self.backend.can_pick_up_tip(c, current_tip_model)
-      ]
-    )
+    if use_channels is None:
+      num_channels_available = len(
+        [
+          c
+          for c in range(self.backend.num_channels)
+          if self.backend.can_pick_up_tip(c, current_tip_model)
+        ]
+      )
+      use_channels = list(range(num_channels_available))
+    num_channels_available = len(use_channels)
 
     # 5: Optimize speed
     if num_channels_available == 0:
@@ -2622,10 +2632,9 @@ def key_for_tip_spot(tip_spot: TipSpot) -> Tuple[str, float]:
     # 6: Execute tip movement/consolidation
     for idx, target_tip_spots in enumerate(merged_target_tip_clusters):
       print(f" - tip transfer cycle: {idx + 1} / {len_transfers}")
-      num_channels = len(target_tip_spots)
-      use_channels = list(range(num_channels))
 
-      origin_tip_spots = [all_origin_tip_spots.pop(0) for _ in range(num_channels)]
+      origin_tip_spots = [all_origin_tip_spots.pop(0) for _ in range(len(target_tip_spots))]
 
-      await self.pick_up_tips(origin_tip_spots, use_channels=use_channels)
-      await self.drop_tips(target_tip_spots, use_channels=use_channels)
+      these_channels = use_channels[: len(target_tip_spots)]
+      await self.pick_up_tips(origin_tip_spots, use_channels=these_channels)
+      await self.drop_tips(target_tip_spots, use_channels=these_channels)
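To make the per-cycle channel selection concrete, a tiny sketch with made-up values (not part of the patch): when the final cluster has fewer target spots than requested channels, only the leading channels are used.

```python
use_channels = [0, 1, 2, 3]        # channels requested for the whole consolidation
target_tip_spots = ["A1", "B1"]    # hypothetical final cluster with only two spots
these_channels = use_channels[: len(target_tip_spots)]
assert these_channels == [0, 1]    # only the first two requested channels are used
```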