6363git_working_dir
6464)
6565
66+ # typing -----------------------------------------------------------------------------
67+
68+ from typing import Any ,Callable ,Dict ,IO ,Iterator ,List ,Sequence ,TYPE_CHECKING ,Tuple ,Union
69+
70+ from git .types import PathLike ,TBD
71+
72+ if TYPE_CHECKING :
73+ from subprocess import Popen
74+ from git .repo import Repo
75+
76+ StageType = int
77+ Treeish = Union [Tree ,Commit ,bytes ]
78+
6679
6780__all__ = ('IndexFile' ,'CheckoutError' )
6881
@@ -93,7 +106,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
93106_VERSION = 2 # latest version we support
94107S_IFGITLINK = S_IFGITLINK # a submodule
95108
96- def __init__ (self ,repo ,file_path = None ):
109+ def __init__ (self ,repo : 'Repo' ,file_path : PathLike = None )-> None :
97110"""Initialize this Index instance, optionally from the given ``file_path``.
98111 If no file_path is given, we will be created from the current index file.
99112
@@ -102,9 +115,9 @@ def __init__(self, repo, file_path=None):
102115self .repo = repo
103116self .version = self ._VERSION
104117self ._extension_data = b''
105- self ._file_path = file_path or self ._index_path ()
118+ self ._file_path = file_path or self ._index_path ()# type: PathLike
106119
107- def _set_cache_ (self ,attr ) :
120+ def _set_cache_ (self ,attr : str ) -> None :
108121if attr == "entries" :
109122# read the current index
110123# try memory map for speed
@@ -115,8 +128,8 @@ def _set_cache_(self, attr):
115128ok = True
116129except OSError :
117130# in new repositories, there may be no index, which means we are empty
118- self .entries = {}
119- return
131+ self .entries = {}# type: Dict[Tuple[PathLike, StageType], IndexEntry]
132+ return None
120133finally :
121134if not ok :
122135lfd .rollback ()
@@ -133,15 +146,18 @@ def _set_cache_(self, attr):
133146else :
134147super (IndexFile ,self )._set_cache_ (attr )
135148
def _index_path(self) -> 'PathLike':
    """Compute the default location of the index file: ``<git_dir>/index``.

    :raise GitCommandError: if the repository does not expose a git directory"""
    git_dir = self.repo.git_dir
    if not git_dir:
        # Without a .git directory there is nothing to join the index path onto.
        raise GitCommandError("No git directory given to join index path")
    return join_path_native(git_dir, "index")
138154
@property
def path(self) -> 'PathLike':
    """Path of the physical index file this instance represents."""
    return self._file_path
143159
144- def _delete_entries_cache (self ):
160+ def _delete_entries_cache (self )-> None :
145161"""Safely clear the entries cache so it can be recreated"""
146162try :
147163del (self .entries )
@@ -152,26 +168,26 @@ def _delete_entries_cache(self):
152168
153169#{ Serializable Interface
154170
def _deserialize(self, stream: IO) -> 'IndexFile':
    """Populate version, entries and extension data from the given stream.

    :param stream: binary stream positioned at the start of index data
    :return: self"""
    cache_info = read_cache(stream)
    # read_cache yields (version, entries, extension_data, content_sha);
    # the trailing checksum is not retained on the instance.
    self.version, self.entries, self._extension_data = cache_info[:3]
    return self
159175
160- def _entries_sorted (self ):
176+ def _entries_sorted (self )-> List [ TBD ] :
161177""":return: list of entries, in a sorted fashion, first by path, then by stage"""
162178return sorted (self .entries .values (),key = lambda e : (e .path ,e .stage ))
163179
def _serialize(self, stream: IO, ignore_extension_data: bool = False) -> 'IndexFile':
    """Write our sorted entries, and optionally the extension data, to ``stream``.

    :param stream: writable binary stream receiving the index data
    :param ignore_extension_data: if True, the cached TREE extension data is
        omitted from the output
    :return: self"""
    extension_data = None if ignore_extension_data else self._extension_data  # type: Union[None, bytes]
    write_cache(self._entries_sorted(), stream, extension_data)
    return self
171187
172188#} END serializable interface
173189
174- def write (self ,file_path = None ,ignore_extension_data = False ):
190+ def write (self ,file_path : Union [ None , PathLike ] = None ,ignore_extension_data : bool = False )-> None :
175191"""Write the current state to our file path or to the given one
176192
177193 :param file_path:
@@ -191,7 +207,7 @@ def write(self, file_path=None, ignore_extension_data=False):
191207 Alternatively, use IndexFile.write_tree() to handle this case
192208 automatically
193209
194- :return: self"""
210+ :return: None"""
195211# make sure we have our entries read before getting a write lock
196212# else it would be done when streaming. This can happen
197213# if one doesn't change the index, but writes it right away
@@ -215,7 +231,7 @@ def write(self, file_path=None, ignore_extension_data=False):
215231
216232@post_clear_cache
217233@default_index
218- def merge_tree (self ,rhs ,base = None ):
234+ def merge_tree (self ,rhs : Treeish ,base : Union [ None , Treeish ] = None )-> 'IndexFile' :
219235"""Merge the given rhs treeish into the current index, possibly taking
220236 a common base treeish into account.
221237
@@ -242,7 +258,7 @@ def merge_tree(self, rhs, base=None):
242258# -i : ignore working tree status
243259# --aggressive : handle more merge cases
244260# -m : do an actual merge
245- args = ["--aggressive" ,"-i" ,"-m" ]
261+ args = ["--aggressive" ,"-i" ,"-m" ]# type: List[Union[Treeish, str]]
246262if base is not None :
247263args .append (base )
248264args .append (rhs )
@@ -251,7 +267,7 @@ def merge_tree(self, rhs, base=None):
251267return self
252268
253269@classmethod
254- def new (cls ,repo ,* tree_sha ) :
270+ def new (cls ,repo : 'Repo' ,* tree_sha : bytes ) -> 'IndexFile' :
255271""" Merge the given treeish revisions into a new index which is returned.
256272 This method behaves like git-read-tree --aggressive when doing the merge.
257273
@@ -275,7 +291,7 @@ def new(cls, repo, *tree_sha):
275291return inst
276292
277293@classmethod
278- def from_tree (cls ,repo ,* treeish ,** kwargs ) :
294+ def from_tree (cls ,repo : 'Repo' ,* treeish : Treeish ,** kwargs : Any ) -> 'IndexFile' :
279295"""Merge the given treeish revisions into a new index which is returned.
280296 The original index will remain unaltered
281297
@@ -312,7 +328,7 @@ def from_tree(cls, repo, *treeish, **kwargs):
312328if len (treeish )== 0 or len (treeish )> 3 :
313329raise ValueError ("Please specify between 1 and 3 treeish, got %i" % len (treeish ))
314330
315- arg_list = []
331+ arg_list = []# type: List[Union[Treeish, str]]
316332# ignore that working tree and index possibly are out of date
317333if len (treeish )> 1 :
318334# drop unmerged entries when reading our index and merging
@@ -331,7 +347,8 @@ def from_tree(cls, repo, *treeish, **kwargs):
331347# as it considers existing entries. moving it essentially clears the index.
332348# Unfortunately there is no 'soft' way to do it.
333349# The TemporaryFileSwap assure the original file get put back
334- index_handler = TemporaryFileSwap (join_path_native (repo .git_dir ,'index' ))
350+ if repo .git_dir :
351+ index_handler = TemporaryFileSwap (join_path_native (repo .git_dir ,'index' ))
335352try :
336353repo .git .read_tree (* arg_list ,** kwargs )
337354index = cls (repo ,tmp_index )
@@ -346,18 +363,18 @@ def from_tree(cls, repo, *treeish, **kwargs):
346363
347364# UTILITIES
348365@unbare_repo
349- def _iter_expand_paths (self ,paths ) :
366+ def _iter_expand_paths (self ,paths : Sequence [ PathLike ]) -> Iterator [ PathLike ] :
350367"""Expand the directories in list of paths to the corresponding paths accordingly,
351368
352369 Note: git will add items multiple times even if a glob overlapped
353370 with manually specified paths or if paths where specified multiple
354371 times - we respect that and do not prune"""
355372def raise_exc (e ):
356373raise e
357- r = self .repo .working_tree_dir
374+ r = str ( self .repo .working_tree_dir )
358375rs = r + os .sep
359376for path in paths :
360- abs_path = path
377+ abs_path = str ( path )
361378if not osp .isabs (abs_path ):
362379abs_path = osp .join (r ,path )
363380# END make absolute path
@@ -374,7 +391,7 @@ def raise_exc(e):
374391# end check symlink
375392
376393# if the path is not already pointing to an existing file, resolve globs if possible
377- if not os .path .exists (path )and ('?' in path or '*' in path or '[' in path ):
394+ if not os .path .exists (abs_path )and ('?' in abs_path or '*' in abs_path or '[' in abs_path ):
378395resolved_paths = glob .glob (abs_path )
379396# not abs_path in resolved_paths:
380397# a glob() resolving to the same path we are feeding it with
@@ -396,12 +413,12 @@ def raise_exc(e):
396413# END for each subdirectory
397414except OSError :
398415# was a file or something that could not be iterated
399- yield path .replace (rs ,'' )
416+ yield abs_path .replace (rs ,'' )
400417# END path exception handling
401418# END for each path
402419
403- def _write_path_to_stdin (self ,proc ,filepath ,item ,fmakeexc ,fprogress ,
404- read_from_stdout = True ):
420+ def _write_path_to_stdin (self ,proc : 'Popen' ,filepath : PathLike ,item ,fmakeexc ,fprogress ,
421+ read_from_stdout : bool = True )-> Union [ None , str ] :
405422"""Write path to proc.stdin and make sure it processes the item, including progress.
406423
407424 :return: stdout string
@@ -417,20 +434,24 @@ def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress,
417434 we will close stdin to break the pipe."""
418435
419436fprogress (filepath ,False ,item )
420- rval = None
421- try :
422- proc .stdin .write (("%s\n " % filepath ).encode (defenc ))
423- except IOError as e :
424- # pipe broke, usually because some error happened
425- raise fmakeexc ()from e
426- # END write exception handling
427- proc .stdin .flush ()
428- if read_from_stdout :
437+ rval = None # type: Union[None, str]
438+
439+ if proc .stdin is not None :
440+ try :
441+ proc .stdin .write (("%s\n " % filepath ).encode (defenc ))
442+ except IOError as e :
443+ # pipe broke, usually because some error happened
444+ raise fmakeexc ()from e
445+ # END write exception handling
446+ proc .stdin .flush ()
447+
448+ if read_from_stdout and proc .stdout is not None :
429449rval = proc .stdout .readline ().strip ()
430450fprogress (filepath ,True ,item )
431451return rval
432452
433- def iter_blobs (self ,predicate = lambda t :True ):
453+ def iter_blobs (self ,predicate :Callable [[Tuple [StageType ,Blob ]],bool ]= lambda t :True
454+ )-> Iterator [Tuple [StageType ,Blob ]]:
434455"""
435456 :return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob)
436457
@@ -446,20 +467,21 @@ def iter_blobs(self, predicate=lambda t: True):
446467yield output
447468# END for each entry
448469
449- def unmerged_blobs (self ):
470+ def unmerged_blobs (self )-> Dict [ PathLike , List [ Tuple [ StageType , Blob ]]] :
450471"""
451472 :return:
452473 Iterator yielding dict(path : list( tuple( stage, Blob, ...))), being
453474 a dictionary associating a path in the index with a list containing
454475 sorted stage/blob pairs
476+ (This returns the fully-built dictionary itself — see ``return path_map`` — not an iterator.)
455477
456478 :note:
457479 Blobs that have been removed in one side simply do not exist in the
458480 given stage. I.e. a file removed on the 'other' branch whose entries
459481 are at stage 3 will not have a stage 3 entry.
460482 """
461483is_unmerged_blob = lambda t :t [0 ]!= 0
462- path_map = {}
484+ path_map = {}# type: Dict[PathLike, List[Tuple[TBD, Blob]]]
463485for stage ,blob in self .iter_blobs (is_unmerged_blob ):
464486path_map .setdefault (blob .path , []).append ((stage ,blob ))
465487# END for each unmerged blob
@@ -468,10 +490,10 @@ def unmerged_blobs(self):
468490return path_map
469491
@classmethod
def entry_key(cls, *entry: Union['BaseIndexEntry', 'PathLike', 'StageType']) -> Tuple['PathLike', 'StageType']:
    """Return the dictionary key used to store an entry in our ``entries`` dict.

    :param entry: either a single BaseIndexEntry instance, or a path and a
        stage given as two positional arguments
    :return: (path, stage) tuple

    The star-args signature is kept deliberately: narrowing it to a single
    tuple parameter (and forwarding the tuple unexpanded) would break callers
    that pass ``path, stage`` as two positional arguments."""
    return entry_key(*entry)
473495
474- def resolve_blobs (self ,iter_blobs ) :
496+ def resolve_blobs (self ,iter_blobs : Iterator [ Blob ]) -> 'IndexFile' :
475497"""Resolve the blobs given in blob iterator. This will effectively remove the
476498 index entries of the respective path at all non-null stages and add the given
477499 blob as new stage null blob.
@@ -489,9 +511,9 @@ def resolve_blobs(self, iter_blobs):
489511for blob in iter_blobs :
490512stage_null_key = (blob .path ,0 )
491513if stage_null_key in self .entries :
492- raise ValueError ("Path %r already exists at stage 0" % blob .path )
514+ raise ValueError ("Path %r already exists at stage 0" % str ( blob .path ) )
493515# END assert blob is not stage 0 already
494-
516+
495517# delete all possible stages
496518for stage in (1 ,2 ,3 ):
497519try :
@@ -506,7 +528,7 @@ def resolve_blobs(self, iter_blobs):
506528
507529return self
508530
509- def update (self ):
531+ def update (self )-> 'IndexFile' :
510532"""Reread the contents of our index file, discarding all cached information
511533 we might have.
512534
@@ -517,7 +539,7 @@ def update(self):
517539# allows to lazily reread on demand
518540return self
519541
520- def write_tree (self ):
542+ def write_tree (self )-> Tree :
521543"""Writes this index to a corresponding Tree object into the repository's
522544 object database and return it.
523545
@@ -542,22 +564,22 @@ def write_tree(self):
542564root_tree ._cache = tree_items
543565return root_tree
544566
545- def _process_diff_args (self ,args ) :
567+ def _process_diff_args (self ,args : Any ) -> List [ Any ] :
546568try :
547569args .pop (args .index (self ))
548570except IndexError :
549571pass
550572# END remove self
551573return args
552574
553- def _to_relative_path (self ,path ) :
575+ def _to_relative_path (self ,path : PathLike ) -> PathLike :
554576""":return: Version of path relative to our git directory or raise ValueError
555577 if it is not within our git direcotory"""
556578if not osp .isabs (path ):
557579return path
558580if self .repo .bare :
559581raise InvalidGitRepositoryError ("require non-bare repository" )
560- if not path .startswith (self .repo .working_tree_dir ):
582+ if not str ( path ) .startswith (str ( self .repo .working_tree_dir ) ):
561583raise ValueError ("Absolute path %r is not in git repository at %r" % (path ,self .repo .working_tree_dir ))
562584return os .path .relpath (path ,self .repo .working_tree_dir )
563585