@@ -123,7 +123,7 @@ def __init__(self, repo: 'Repo', file_path: Union[PathLike, None] = None) -> Non
         self.repo = repo
         self.version = self._VERSION
         self._extension_data = b''
-        self._file_path = file_path or self._index_path()  # type: PathLike
+        self._file_path: PathLike = file_path or self._index_path()

     def _set_cache_(self, attr: str) -> None:
         if attr == "entries":
@@ -136,7 +136,7 @@ def _set_cache_(self, attr: str) -> None:
                 ok = True
             except OSError:
                 # in new repositories, there may be no index, which means we are empty
-                self.entries = {}  # type: Dict[Tuple[PathLike, StageType], IndexEntry]
+                self.entries: Dict[Tuple[PathLike, StageType], IndexEntry] = {}
                 return None
             finally:
                 if not ok:
@@ -266,7 +266,7 @@ def merge_tree(self, rhs: Treeish, base: Union[None, Treeish] = None) -> 'IndexF
         # -i : ignore working tree status
         # --aggressive : handle more merge cases
         # -m : do an actual merge
-        args = ["--aggressive", "-i", "-m"]  # type: List[Union[Treeish, str]]
+        args: List[Union[Treeish, str]] = ["--aggressive", "-i", "-m"]
         if base is not None:
             args.append(base)
         args.append(rhs)
@@ -288,14 +288,14 @@ def new(cls, repo: 'Repo', *tree_sha: Union[str, Tree]) -> 'IndexFile':
             New IndexFile instance. Its path will be undefined.
             If you intend to write such a merged Index, supply an alternate file_path
             to its 'write' method."""
-        tree_sha_bytes = [to_bin_sha(str(t)) for t in tree_sha]  # List[bytes]
+        tree_sha_bytes: List[bytes] = [to_bin_sha(str(t)) for t in tree_sha]
         base_entries = aggressive_tree_merge(repo.odb, tree_sha_bytes)

         inst = cls(repo)
         # convert to entries dict
-        entries = dict(zip(
+        entries: Dict[Tuple[PathLike, int], IndexEntry] = dict(zip(
             ((e.path, e.stage) for e in base_entries),
-            (IndexEntry.from_base(e) for e in base_entries)))  # type: Dict[Tuple[PathLike, int], IndexEntry]
+            (IndexEntry.from_base(e) for e in base_entries)))

         inst.entries = entries
         return inst
@@ -338,7 +338,7 @@ def from_tree(cls, repo: 'Repo', *treeish: Treeish, **kwargs: Any) -> 'IndexFile
         if len(treeish) == 0 or len(treeish) > 3:
             raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish))

-        arg_list = []  # type: List[Union[Treeish, str]]
+        arg_list: List[Union[Treeish, str]] = []
         # ignore that working tree and index possibly are out of date
         if len(treeish) > 1:
             # drop unmerged entries when reading our index and merging
@@ -445,7 +445,7 @@ def _write_path_to_stdin(self, proc: 'Popen', filepath: PathLike, item: TBD, fma
             we will close stdin to break the pipe."""

         fprogress(filepath, False, item)
-        rval = None  # type: Union[None, str]
+        rval: Union[None, str] = None

         if proc.stdin is not None:
             try:
@@ -492,7 +492,7 @@ def unmerged_blobs(self) -> Dict[PathLike, List[Tuple[StageType, Blob]]]:
             are at stage 3 will not have a stage 3 entry.
         """
         is_unmerged_blob = lambda t: t[0] != 0
-        path_map = {}  # type: Dict[PathLike, List[Tuple[TBD, Blob]]]
+        path_map: Dict[PathLike, List[Tuple[TBD, Blob]]] = {}
         for stage, blob in self.iter_blobs(is_unmerged_blob):
             path_map.setdefault(blob.path, []).append((stage, blob))
         # END for each unmerged blob
@@ -624,8 +624,8 @@ def _store_path(self, filepath: PathLike, fprogress: Callable) -> BaseIndexEntry
         st = os.lstat(filepath)  # handles non-symlinks as well
         if S_ISLNK(st.st_mode):
             # in PY3, readlink is string, but we need bytes. In PY2, it's just OS encoded bytes, we assume UTF-8
-            open_stream = lambda: BytesIO(force_bytes(os.readlink(filepath),
-                                                      encoding=defenc))  # type: Callable[[], BinaryIO]
+            open_stream: Callable[[], BinaryIO] = lambda: BytesIO(force_bytes(os.readlink(filepath),
+                                                                              encoding=defenc))
         else:
             open_stream = lambda: open(filepath, 'rb')
         with open_stream() as stream:
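
Note: the hunk above is the one conversion in this change where the type cannot move onto the value itself, because a lambda cannot carry parameter or return annotations; the Callable type therefore goes on the assigned name, where it also covers the plain-file branch of the if/else. A minimal standalone sketch of that pattern, assuming made-up names and a fake link target that are not taken from GitPython:

from io import BytesIO
from typing import BinaryIO, Callable

def reader_for(path: str, is_link: bool) -> BinaryIO:
    # Annotate the variable, not the lambda: lambdas cannot carry their own
    # annotations, and one annotation types both assignments below.
    open_stream: Callable[[], BinaryIO] = lambda: BytesIO(b"fake link target")
    if not is_link:
        open_stream = lambda: open(path, 'rb')
    return open_stream()
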
@@ -1160,7 +1160,7 @@ def handle_stderr(proc: 'Popen[bytes]', iter_checked_out_files: Iterable[PathLik
             proc = self.repo.git.checkout_index(args, **kwargs)
             # FIXME: Reading from GIL!
             make_exc = lambda: GitCommandError(("git-checkout-index",) + tuple(args), 128, proc.stderr.read())
-            checked_out_files = []  # type: List[PathLike]
+            checked_out_files: List[PathLike] = []

             for path in paths:
                 co_path = to_native_path_linux(self._to_relative_path(path))
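
Every hunk in this change applies the same mechanical migration: a trailing # type: comment becomes an inline PEP 526 variable annotation, with no effect on runtime behaviour. A minimal before/after sketch of the pattern, using placeholder names and types rather than the module's own:

from typing import Dict, List, Tuple, Union

# Before: the type checker reads the trailing comment.
#   entries = {}  # type: Dict[Tuple[str, int], bytes]
#   rval = None   # type: Union[None, str]

# After: the annotation is part of the assignment itself (PEP 526).
entries: Dict[Tuple[str, int], bytes] = {}
args: List[Union[str, bytes]] = ["--aggressive", "-i", "-m"]
rval: Union[None, str] = None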