Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit 9636459

Browse files
committed
Add initial types to IndexFile __init__() to _to_relative_path()
1 parent 33346b2 commit 9636459

File tree

3 files changed

+82
-57
lines changed

3 files changed

+82
-57
lines changed

‎git/index/base.py‎

Lines changed: 72 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,19 @@
6363
git_working_dir
6464
)
6565

66+
# typing -----------------------------------------------------------------------------
67+
68+
fromtypingimportAny,Callable,Dict,IO,Iterator,List,Sequence,TYPE_CHECKING,Tuple,Union
69+
70+
fromgit.typesimportPathLike,TBD
71+
72+
ifTYPE_CHECKING:
73+
fromsubprocessimportPopen
74+
fromgit.repoimportRepo
75+
76+
StageType=int
77+
Treeish=Union[Tree,Commit,bytes]
78+
6679

6780
__all__= ('IndexFile','CheckoutError')
6881

@@ -93,7 +106,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
93106
_VERSION=2# latest version we support
94107
S_IFGITLINK=S_IFGITLINK# a submodule
95108

96-
def__init__(self,repo,file_path=None):
109+
def__init__(self,repo:'Repo',file_path:PathLike=None)->None:
97110
"""Initialize this Index instance, optionally from the given ``file_path``.
98111
If no file_path is given, we will be created from the current index file.
99112
@@ -102,9 +115,9 @@ def __init__(self, repo, file_path=None):
102115
self.repo=repo
103116
self.version=self._VERSION
104117
self._extension_data=b''
105-
self._file_path=file_pathorself._index_path()
118+
self._file_path=file_pathorself._index_path()# type: PathLike
106119

107-
def_set_cache_(self,attr):
120+
def_set_cache_(self,attr:str)->None:
108121
ifattr=="entries":
109122
# read the current index
110123
# try memory map for speed
@@ -115,8 +128,8 @@ def _set_cache_(self, attr):
115128
ok=True
116129
exceptOSError:
117130
# in new repositories, there may be no index, which means we are empty
118-
self.entries= {}
119-
return
131+
self.entries= {}# type: Dict[Tuple[PathLike, StageType], IndexEntry]
132+
returnNone
120133
finally:
121134
ifnotok:
122135
lfd.rollback()
@@ -133,15 +146,18 @@ def _set_cache_(self, attr):
133146
else:
134147
super(IndexFile,self)._set_cache_(attr)
135148

136-
def_index_path(self):
137-
returnjoin_path_native(self.repo.git_dir,"index")
149+
def_index_path(self)->PathLike:
150+
ifself.repo.git_dir:
151+
returnjoin_path_native(self.repo.git_dir,"index")
152+
else:
153+
raiseGitCommandError("No git directory given to join index path")
138154

139155
@property
140-
defpath(self):
156+
defpath(self)->PathLike:
141157
""" :return: Path to the index file we are representing """
142158
returnself._file_path
143159

144-
def_delete_entries_cache(self):
160+
def_delete_entries_cache(self)->None:
145161
"""Safely clear the entries cache so it can be recreated"""
146162
try:
147163
del(self.entries)
@@ -152,26 +168,26 @@ def _delete_entries_cache(self):
152168

153169
#{ Serializable Interface
154170

155-
def_deserialize(self,stream):
171+
def_deserialize(self,stream:IO)->'IndexFile':
156172
"""Initialize this instance with index values read from the given stream"""
157173
self.version,self.entries,self._extension_data,_conten_sha=read_cache(stream)
158174
returnself
159175

160-
def_entries_sorted(self):
176+
def_entries_sorted(self)->List[TBD]:
161177
""":return: list of entries, in a sorted fashion, first by path, then by stage"""
162178
returnsorted(self.entries.values(),key=lambdae: (e.path,e.stage))
163179

164-
def_serialize(self,stream,ignore_extension_data=False):
180+
def_serialize(self,stream:IO,ignore_extension_data:bool=False)->'IndexFile':
165181
entries=self._entries_sorted()
166-
extension_data=self._extension_data
182+
extension_data=self._extension_data# type: Union[None, bytes]
167183
ifignore_extension_data:
168184
extension_data=None
169185
write_cache(entries,stream,extension_data)
170186
returnself
171187

172188
#} END serializable interface
173189

174-
defwrite(self,file_path=None,ignore_extension_data=False):
190+
defwrite(self,file_path:Union[None,PathLike]=None,ignore_extension_data:bool=False)->None:
175191
"""Write the current state to our file path or to the given one
176192
177193
:param file_path:
@@ -191,7 +207,7 @@ def write(self, file_path=None, ignore_extension_data=False):
191207
Alternatively, use IndexFile.write_tree() to handle this case
192208
automatically
193209
194-
:return: self"""
210+
:return: self # does it? or returns None?"""
195211
# make sure we have our entries read before getting a write lock
196212
# else it would be done when streaming. This can happen
197213
# if one doesn't change the index, but writes it right away
@@ -215,7 +231,7 @@ def write(self, file_path=None, ignore_extension_data=False):
215231

216232
@post_clear_cache
217233
@default_index
218-
defmerge_tree(self,rhs,base=None):
234+
defmerge_tree(self,rhs:Treeish,base:Union[None,Treeish]=None)->'IndexFile':
219235
"""Merge the given rhs treeish into the current index, possibly taking
220236
a common base treeish into account.
221237
@@ -242,7 +258,7 @@ def merge_tree(self, rhs, base=None):
242258
# -i : ignore working tree status
243259
# --aggressive : handle more merge cases
244260
# -m : do an actual merge
245-
args= ["--aggressive","-i","-m"]
261+
args= ["--aggressive","-i","-m"]# type: List[Union[Treeish, str]]
246262
ifbaseisnotNone:
247263
args.append(base)
248264
args.append(rhs)
@@ -251,7 +267,7 @@ def merge_tree(self, rhs, base=None):
251267
returnself
252268

253269
@classmethod
254-
defnew(cls,repo,*tree_sha):
270+
defnew(cls,repo:'Repo',*tree_sha:bytes)->'IndexFile':
255271
""" Merge the given treeish revisions into a new index which is returned.
256272
This method behaves like git-read-tree --aggressive when doing the merge.
257273
@@ -275,7 +291,7 @@ def new(cls, repo, *tree_sha):
275291
returninst
276292

277293
@classmethod
278-
deffrom_tree(cls,repo,*treeish,**kwargs):
294+
deffrom_tree(cls,repo:'Repo',*treeish:Treeish,**kwargs:Any)->'IndexFile':
279295
"""Merge the given treeish revisions into a new index which is returned.
280296
The original index will remain unaltered
281297
@@ -312,7 +328,7 @@ def from_tree(cls, repo, *treeish, **kwargs):
312328
iflen(treeish)==0orlen(treeish)>3:
313329
raiseValueError("Please specify between 1 and 3 treeish, got %i"%len(treeish))
314330

315-
arg_list= []
331+
arg_list= []# type: List[Union[Treeish, str]]
316332
# ignore that working tree and index possibly are out of date
317333
iflen(treeish)>1:
318334
# drop unmerged entries when reading our index and merging
@@ -331,7 +347,8 @@ def from_tree(cls, repo, *treeish, **kwargs):
331347
# as it considers existing entries. moving it essentially clears the index.
332348
# Unfortunately there is no 'soft' way to do it.
333349
# The TemporaryFileSwap assure the original file get put back
334-
index_handler=TemporaryFileSwap(join_path_native(repo.git_dir,'index'))
350+
ifrepo.git_dir:
351+
index_handler=TemporaryFileSwap(join_path_native(repo.git_dir,'index'))
335352
try:
336353
repo.git.read_tree(*arg_list,**kwargs)
337354
index=cls(repo,tmp_index)
@@ -346,18 +363,18 @@ def from_tree(cls, repo, *treeish, **kwargs):
346363

347364
# UTILITIES
348365
@unbare_repo
349-
def_iter_expand_paths(self,paths):
366+
def_iter_expand_paths(self,paths:Sequence[PathLike])->Iterator[PathLike]:
350367
"""Expand the directories in list of paths to the corresponding paths accordingly,
351368
352369
Note: git will add items multiple times even if a glob overlapped
353370
with manually specified paths or if paths where specified multiple
354371
times - we respect that and do not prune"""
355372
defraise_exc(e):
356373
raisee
357-
r=self.repo.working_tree_dir
374+
r=str(self.repo.working_tree_dir)
358375
rs=r+os.sep
359376
forpathinpaths:
360-
abs_path=path
377+
abs_path=str(path)
361378
ifnotosp.isabs(abs_path):
362379
abs_path=osp.join(r,path)
363380
# END make absolute path
@@ -374,7 +391,7 @@ def raise_exc(e):
374391
# end check symlink
375392

376393
# if the path is not already pointing to an existing file, resolve globs if possible
377-
ifnotos.path.exists(path)and ('?'inpathor'*'inpathor'['inpath):
394+
ifnotos.path.exists(abs_path)and ('?'inabs_pathor'*'inabs_pathor'['inabs_path):
378395
resolved_paths=glob.glob(abs_path)
379396
# not abs_path in resolved_paths:
380397
# a glob() resolving to the same path we are feeding it with
@@ -396,12 +413,12 @@ def raise_exc(e):
396413
# END for each subdirectory
397414
exceptOSError:
398415
# was a file or something that could not be iterated
399-
yieldpath.replace(rs,'')
416+
yieldabs_path.replace(rs,'')
400417
# END path exception handling
401418
# END for each path
402419

403-
def_write_path_to_stdin(self,proc,filepath,item,fmakeexc,fprogress,
404-
read_from_stdout=True):
420+
def_write_path_to_stdin(self,proc:'Popen',filepath:PathLike,item,fmakeexc,fprogress,
421+
read_from_stdout:bool=True)->Union[None,str]:
405422
"""Write path to proc.stdin and make sure it processes the item, including progress.
406423
407424
:return: stdout string
@@ -417,20 +434,24 @@ def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress,
417434
we will close stdin to break the pipe."""
418435

419436
fprogress(filepath,False,item)
420-
rval=None
421-
try:
422-
proc.stdin.write(("%s\n"%filepath).encode(defenc))
423-
exceptIOErrorase:
424-
# pipe broke, usually because some error happened
425-
raisefmakeexc()frome
426-
# END write exception handling
427-
proc.stdin.flush()
428-
ifread_from_stdout:
437+
rval=None# type: Union[None, str]
438+
439+
ifproc.stdinisnotNone:
440+
try:
441+
proc.stdin.write(("%s\n"%filepath).encode(defenc))
442+
exceptIOErrorase:
443+
# pipe broke, usually because some error happened
444+
raisefmakeexc()frome
445+
# END write exception handling
446+
proc.stdin.flush()
447+
448+
ifread_from_stdoutandproc.stdoutisnotNone:
429449
rval=proc.stdout.readline().strip()
430450
fprogress(filepath,True,item)
431451
returnrval
432452

433-
defiter_blobs(self,predicate=lambdat:True):
453+
defiter_blobs(self,predicate:Callable[[Tuple[StageType,Blob]],bool]=lambdat:True
454+
)->Iterator[Tuple[StageType,Blob]]:
434455
"""
435456
:return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob)
436457
@@ -446,20 +467,21 @@ def iter_blobs(self, predicate=lambda t: True):
446467
yieldoutput
447468
# END for each entry
448469

449-
defunmerged_blobs(self):
470+
defunmerged_blobs(self)->Dict[PathLike,List[Tuple[StageType,Blob]]]:
450471
"""
451472
:return:
452473
Iterator yielding dict(path : list( tuple( stage, Blob, ...))), being
453474
a dictionary associating a path in the index with a list containing
454475
sorted stage/blob pairs
476+
##### Does it return iterator? or just the Dict?
455477
456478
:note:
457479
Blobs that have been removed in one side simply do not exist in the
458480
given stage. I.e. a file removed on the 'other' branch whose entries
459481
are at stage 3 will not have a stage 3 entry.
460482
"""
461483
is_unmerged_blob=lambdat:t[0]!=0
462-
path_map= {}
484+
path_map= {}# type: Dict[PathLike, List[Tuple[TBD, Blob]]]
463485
forstage,blobinself.iter_blobs(is_unmerged_blob):
464486
path_map.setdefault(blob.path, []).append((stage,blob))
465487
# END for each unmerged blob
@@ -468,10 +490,10 @@ def unmerged_blobs(self):
468490
returnpath_map
469491

470492
@classmethod
471-
defentry_key(cls,*entry):
472-
returnentry_key(*entry)
493+
defentry_key(cls,entry:Union[Tuple[BaseIndexEntry],Tuple[PathLike,StageType]])->Tuple[PathLike,StageType]:
494+
returnentry_key(entry)
473495

474-
defresolve_blobs(self,iter_blobs):
496+
defresolve_blobs(self,iter_blobs:Iterator[Blob])->'IndexFile':
475497
"""Resolve the blobs given in blob iterator. This will effectively remove the
476498
index entries of the respective path at all non-null stages and add the given
477499
blob as new stage null blob.
@@ -489,9 +511,9 @@ def resolve_blobs(self, iter_blobs):
489511
forblobiniter_blobs:
490512
stage_null_key= (blob.path,0)
491513
ifstage_null_keyinself.entries:
492-
raiseValueError("Path %r already exists at stage 0"%blob.path)
514+
raiseValueError("Path %r already exists at stage 0"%str(blob.path))
493515
# END assert blob is not stage 0 already
494-
516+
495517
# delete all possible stages
496518
forstagein (1,2,3):
497519
try:
@@ -506,7 +528,7 @@ def resolve_blobs(self, iter_blobs):
506528

507529
returnself
508530

509-
defupdate(self):
531+
defupdate(self)->'IndexFile':
510532
"""Reread the contents of our index file, discarding all cached information
511533
we might have.
512534
@@ -517,7 +539,7 @@ def update(self):
517539
# allows to lazily reread on demand
518540
returnself
519541

520-
defwrite_tree(self):
542+
defwrite_tree(self)->Tree:
521543
"""Writes this index to a corresponding Tree object into the repository's
522544
object database and return it.
523545
@@ -542,22 +564,22 @@ def write_tree(self):
542564
root_tree._cache=tree_items
543565
returnroot_tree
544566

545-
def_process_diff_args(self,args):
567+
def_process_diff_args(self,args:Any)->List[Any]:
546568
try:
547569
args.pop(args.index(self))
548570
exceptIndexError:
549571
pass
550572
# END remove self
551573
returnargs
552574

553-
def_to_relative_path(self,path):
575+
def_to_relative_path(self,path:PathLike)->PathLike:
554576
""":return: Version of path relative to our git directory or raise ValueError
555577
if it is not within our git direcotory"""
556578
ifnotosp.isabs(path):
557579
returnpath
558580
ifself.repo.bare:
559581
raiseInvalidGitRepositoryError("require non-bare repository")
560-
ifnotpath.startswith(self.repo.working_tree_dir):
582+
ifnotstr(path).startswith(str(self.repo.working_tree_dir)):
561583
raiseValueError("Absolute path %r is not in git repository at %r"% (path,self.repo.working_tree_dir))
562584
returnos.path.relpath(path,self.repo.working_tree_dir)
563585

‎git/index/fun.py‎

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# Contains standalone functions to accompany the index implementation and make it
22
# more versatile
33
# NOTE: Autodoc hates it if this is a docstring
4+
fromgit.typesimportPathLike,TBD
45
fromioimportBytesIO
56
importos
67
fromstatimport (
@@ -12,6 +13,7 @@
1213
S_IFREG,
1314
)
1415
importsubprocess
16+
fromtypingimportList,Tuple,Union,cast
1517

1618
fromgit.cmdimportPROC_CREATIONFLAGS,handle_process_output
1719
fromgit.compatimport (
@@ -166,11 +168,12 @@ def read_header(stream):
166168
returnversion,num_entries
167169

168170

169-
defentry_key(*entry):
171+
defentry_key(entry:Union[Tuple[BaseIndexEntry],Tuple[PathLike,TBD]]):
170172
""":return: Key suitable to be used for the index.entries dictionary
171173
:param entry: One instance of type BaseIndexEntry or the path and the stage"""
172174
iflen(entry)==1:
173-
return (entry[0].path,entry[0].stage)
175+
entry_first=cast(BaseIndexEntry,entry[0])# type: BaseIndexEntry
176+
return (entry_first.path,entry_first.stage)
174177
returntuple(entry)
175178
# END handle entry
176179

@@ -283,7 +286,7 @@ def _tree_entry_to_baseindexentry(tree_entry, stage):
283286
returnBaseIndexEntry((tree_entry[1],tree_entry[0],stage<<CE_STAGESHIFT,tree_entry[2]))
284287

285288

286-
defaggressive_tree_merge(odb,tree_shas):
289+
defaggressive_tree_merge(odb,tree_shas)->List[BaseIndexEntry]:
287290
"""
288291
:return: list of BaseIndexEntries representing the aggressive merge of the given
289292
trees. All valid entries are on stage 0, whereas the conflicting ones are left
@@ -292,7 +295,7 @@ def aggressive_tree_merge(odb, tree_shas):
292295
:param tree_shas: 1, 2 or 3 trees as identified by their binary 20 byte shas
293296
If 1 or two, the entries will effectively correspond to the last given tree
294297
If 3 are given, a 3 way merge is performed"""
295-
out= []
298+
out= []# type: List[BaseIndexEntry]
296299
out_append=out.append
297300

298301
# one and two way is the same for us, as we don't have to handle an existing

0 commit comments

Comments
 (0)

[8]ページ先頭

©2009-2026 Movatter.jp