Commit 025fe17

add types to index.fun.py
1 parent 595181d commit 025fe17

5 files changed: +59 -39 lines changed


Diff for: git/exc.py (+7 -4)

@@ -11,7 +11,7 @@
 
 # typing ----------------------------------------------------
 
-from typing import List, Optional, Sequence, Tuple, Union, TYPE_CHECKING
+from typing import List, Sequence, Tuple, Union, TYPE_CHECKING
 from git.types import PathLike
 
 if TYPE_CHECKING:
@@ -113,7 +113,7 @@ class CheckoutError(GitError):
     were checked out successfully and hence match the version stored in the
     index"""
 
-    def __init__(self, message: str, failed_files: Sequence[PathLike], valid_files: List[PathLike],
+    def __init__(self, message: str, failed_files: Sequence[PathLike], valid_files: Sequence[PathLike],
                  failed_reasons: List[str]) -> None:
 
         Exception.__init__(self, message)
@@ -139,8 +139,11 @@ class HookExecutionError(CommandError):
     """Thrown if a hook exits with a non-zero exit code. It provides access to the exit code and the string returned
     via standard output"""
 
-    def __init__(self, command: Union[List[str], Tuple[str, ...], str], status: Optional[str],
-                 stderr: Optional[str] = None, stdout: Optional[str] = None) -> None:
+    def __init__(self, command: Union[List[str], Tuple[str, ...], str],
+                 status: Union[str, int, None, Exception],
+                 stderr: Union[bytes, str, None] = None,
+                 stdout: Union[bytes, str, None] = None) -> None:
+
         super(HookExecutionError, self).__init__(command, status, stderr, stdout)
         self._msg = "Hook('%s') failed%s"

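The widened HookExecutionError signature mirrors how the exception is actually raised from git/index/fun.py: status may be the hook's integer return code, or the original Exception when spawning the hook fails, and the captured output can arrive as bytes or str. A rough sketch of both call shapes (this helper and its hook_script argument are hypothetical, for illustration only):

import subprocess

from git.exc import HookExecutionError


def sketch_run_hook(hook_script: str) -> None:
    # Hypothetical helper; only shows the two shapes 'status' can take.
    try:
        proc = subprocess.Popen([hook_script], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except Exception as ex:
        # status is an Exception here, hence Union[str, int, None, Exception]
        raise HookExecutionError(hook_script, ex) from ex
    stdout, stderr = proc.communicate()  # both bytes, hence Union[bytes, str, None]
    if proc.returncode != 0:
        # status is an int here
        raise HookExecutionError(hook_script, proc.returncode, stderr, stdout)
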
Diff for: git/index/base.py (+3 -2)

@@ -285,7 +285,8 @@ def new(cls, repo: 'Repo', *tree_sha: Union[str, Tree]) -> 'IndexFile':
             New IndexFile instance. Its path will be undefined.
             If you intend to write such a merged Index, supply an alternate file_path
             to its 'write' method."""
-        base_entries = aggressive_tree_merge(repo.odb, [to_bin_sha(str(t)) for t in tree_sha])
+        tree_sha_bytes = [to_bin_sha(str(t)) for t in tree_sha]  # List[bytes]
+        base_entries = aggressive_tree_merge(repo.odb, tree_sha_bytes)
 
         inst = cls(repo)
         # convert to entries dict
@@ -1023,7 +1024,7 @@ def _flush_stdin_and_wait(cls, proc: 'Popen[bytes]', ignore_stdout: bool = False
     @default_index
     def checkout(self, paths: Union[None, Iterable[PathLike]] = None, force: bool = False,
                  fprogress: Callable = lambda *args: None, **kwargs: Any
-                 ) -> Union[None, Iterator[PathLike], List[PathLike]]:
+                 ) -> Union[None, Iterator[PathLike], Sequence[PathLike]]:
         """Checkout the given paths or all files from the version known to the index into
         the working tree.

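Replacing List with Sequence in these annotations is not cosmetic: List is invariant and implies mutability, while Sequence is read-only and covariant, so callers may pass tuples or other immutable sequences without a type error. A minimal illustration (both functions are hypothetical):

from typing import List, Sequence


def needs_list(paths: List[str]) -> None:
    pass


def needs_sequence(paths: Sequence[str]) -> None:
    pass


some_paths = ('git/exc.py', 'git/index/fun.py')
# needs_list(some_paths)    # rejected by mypy: a tuple is not a List[str]
needs_sequence(some_paths)  # accepted: a tuple satisfies Sequence[str]
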
Diff for: git/index/fun.py (+46 -31)

@@ -1,7 +1,7 @@
 # Contains standalone functions to accompany the index implementation and make it
 # more versatile
 # NOTE: Autodoc hates it if this is a docstring
-from git.types import PathLike
+
 from io import BytesIO
 import os
 from stat import (
@@ -13,7 +13,6 @@
     S_IFREG,
 )
 import subprocess
-from typing import List, Tuple, Union, cast
 
 from git.cmd import PROC_CREATIONFLAGS, handle_process_output
 from git.compat import (
@@ -49,6 +48,17 @@
     unpack
 )
 
+# typing -----------------------------------------------------------------------------
+
+from typing import (Dict, IO, List, Sequence, TYPE_CHECKING, Tuple, Type, Union, cast)
+
+from git.types import PathLike
+
+if TYPE_CHECKING:
+    from .base import IndexFile
+
+# ------------------------------------------------------------------------------------
+
 
 S_IFGITLINK = S_IFLNK | S_IFDIR  # a submodule
 CE_NAMEMASK_INV = ~CE_NAMEMASK
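
The new typing block imports IndexFile only under TYPE_CHECKING, so mypy sees the name while the runtime never executes the import; this avoids a circular import between git.index.fun and git.index.base. The general pattern looks like this (module and class names below are placeholders, not GitPython code):

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by the type checker, never at runtime,
    # so no import cycle can occur between the two modules.
    from mypackage.models import HeavyClass


def describe(obj: 'HeavyClass') -> str:
    # The quoted forward reference keeps the runtime happy even though
    # HeavyClass is not defined when this module is imported.
    return repr(obj)
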
@@ -57,23 +67,23 @@
            'stat_mode_to_index_mode', 'S_IFGITLINK', 'run_commit_hook', 'hook_path')
 
 
-def hook_path(name, git_dir):
+def hook_path(name: str, git_dir: PathLike) -> str:
     """:return: path to the given named hook in the given git repository directory"""
     return osp.join(git_dir, 'hooks', name)
 
 
-def run_commit_hook(name, index, *args):
+def run_commit_hook(name: str, index: IndexFile, *args: str) -> None:
     """Run the commit hook of the given name. Silently ignores hooks that do not exist.
     :param name: name of hook, like 'pre-commit'
     :param index: IndexFile instance
     :param args: arguments passed to hook file
     :raises HookExecutionError: """
     hp = hook_path(name, index.repo.git_dir)
     if not os.access(hp, os.X_OK):
-        return
+        return None
 
     env = os.environ.copy()
-    env['GIT_INDEX_FILE'] = safe_decode(index.path)
+    env['GIT_INDEX_FILE'] = safe_decode(str(index.path))
     env['GIT_EDITOR'] = ':'
     try:
         cmd = subprocess.Popen([hp] + list(args),
@@ -86,14 +96,14 @@ def run_commit_hook(name, index, *args):
     except Exception as ex:
         raise HookExecutionError(hp, ex) from ex
     else:
-        stdout = []
-        stderr = []
-        handle_process_output(cmd, stdout.append, stderr.append, finalize_process)
-        stdout = ''.join(stdout)
-        stderr = ''.join(stderr)
+        stdout_list = []  # type: List[str]
+        stderr_list = []  # type: List[str]
+        handle_process_output(cmd, stdout_list.append, stderr_list.append, finalize_process)
+        stdout_str = ''.join(stderr_list)
+        stderr_str = ''.join(stderr_list)
         if cmd.returncode != 0:
-            stdout = force_text(stdout, defenc)
-            stderr = force_text(stderr, defenc)
+            stdout = force_text(stdout_str, defenc)
+            stderr = force_text(stderr_str, defenc)
             raise HookExecutionError(hp, cmd.returncode, stderr, stdout)
     # end handle return code

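The stdout_list / stderr_list assignments above use comment-style annotations ("# type: List[str]") rather than PEP 526 syntax, because GitPython still supported Python 3.5 at this point (the git/util.py hunk below notes "improve these overloads when 3.5 dropped"); mypy treats both spellings identically. For comparison (illustrative only):

from typing import List

stdout_list = []  # type: List[str]   # Python 3.5-compatible type comment
stderr_list: List[str] = []           # equivalent PEP 526 form, Python 3.6+
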
@@ -108,7 +118,9 @@ def stat_mode_to_index_mode(mode):
     return S_IFREG | 0o644 | (mode & 0o111)  # blobs with or without executable bit
 
 
-def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer):
+def write_cache(entries: Sequence[Union[BaseIndexEntry, 'IndexEntry']], stream: IO[bytes],
+                extension_data: Union[None, bytes] = None,
+                ShaStreamCls: Type[IndexFileSHA1Writer] = IndexFileSHA1Writer) -> None:
     """Write the cache represented by entries to a stream
 
     :param entries: **sorted** list of entries
@@ -121,10 +133,10 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
     :param extension_data: any kind of data to write as a trailer, it must begin
         a 4 byte identifier, followed by its size ( 4 bytes )"""
     # wrap the stream into a compatible writer
-    stream = ShaStreamCls(stream)
+    stream_sha = ShaStreamCls(stream)
 
-    tell = stream.tell
-    write = stream.write
+    tell = stream_sha.tell
+    write = stream_sha.write
 
     # header
     version = 2
@@ -136,8 +148,8 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
         beginoffset = tell()
         write(entry[4])  # ctime
         write(entry[5])  # mtime
-        path = entry[3]
-        path = force_bytes(path, encoding=defenc)
+        path_str = entry[3]  # type: str
+        path = force_bytes(path_str, encoding=defenc)
         plen = len(path) & CE_NAMEMASK  # path length
         assert plen == len(path), "Path %s too long to fit into index" % entry[3]
         flags = plen | (entry[2] & CE_NAMEMASK_INV)  # clear possible previous values
@@ -150,18 +162,19 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
 
     # write previously cached extensions data
     if extension_data is not None:
-        stream.write(extension_data)
+        stream_sha.write(extension_data)
 
     # write the sha over the content
-    stream.write_sha()
+    stream_sha.write_sha()
 
 
-def read_header(stream):
+def read_header(stream: IO[bytes]) -> Tuple[int, int]:
     """Return tuple(version_long, num_entries) from the given stream"""
     type_id = stream.read(4)
     if type_id != b"DIRC":
         raise AssertionError("Invalid index file header: %r" % type_id)
-    version, num_entries = unpack(">LL", stream.read(4 * 2))
+    unpacked = cast(Tuple[int, int], unpack(">LL", stream.read(4 * 2)))
+    version, num_entries = unpacked
 
     # TODO: handle version 3: extended data, see read-cache.c
     assert version in (1, 2)
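
In the new write_cache signature, ShaStreamCls: Type[IndexFileSHA1Writer] declares that the argument is the class itself (or a subclass), not an instance; write_cache then calls it to wrap the raw stream. A generic sketch of the same pattern (Writer and wrap_stream are made-up names, not GitPython code):

from typing import IO, Type


class Writer:
    def __init__(self, stream: IO[bytes]) -> None:
        self.stream = stream


def wrap_stream(stream: IO[bytes], writer_cls: Type[Writer] = Writer) -> Writer:
    # writer_cls is a class object; calling it constructs an instance.
    return writer_cls(stream)
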
@@ -180,7 +193,7 @@ def entry_key(*entry: Union[BaseIndexEntry, PathLike, int]) -> Tuple[PathLike, i
     # END handle entry
 
 
-def read_cache(stream):
+def read_cache(stream: IO[bytes]) -> Tuple[int, Dict[Tuple[PathLike, int], 'IndexEntry'], bytes, bytes]:
     """Read a cache file from the given stream
     :return: tuple(version, entries_dict, extension_data, content_sha)
       * version is the integer version number
@@ -189,7 +202,7 @@ def read_cache(stream):
       * content_sha is a 20 byte sha on all cache file contents"""
     version, num_entries = read_header(stream)
     count = 0
-    entries = {}
+    entries = {}  # type: Dict[Tuple[PathLike, int], 'IndexEntry']
 
     read = stream.read
     tell = stream.tell
@@ -228,7 +241,8 @@ def read_cache(stream):
     return (version, entries, extension_data, content_sha)
 
 
-def write_tree_from_cache(entries, odb, sl, si=0):
+def write_tree_from_cache(entries: List[IndexEntry], odb, sl: slice, si: int = 0
+                          ) -> Tuple[bytes, List[Tuple[str, int, str]]]:
     """Create a tree from the given sorted list of entries and put the respective
     trees into the given object database
 
@@ -238,7 +252,7 @@ def write_tree_from_cache(entries, odb, sl, si=0):
     :param sl: slice indicating the range we should process on the entries list
     :return: tuple(binsha, list(tree_entry, ...)) a tuple of a sha and a list of
         tree entries being a tuple of hexsha, mode, name"""
-    tree_items = []
+    tree_items = []  # type: List[Tuple[Union[bytes, str], int, str]]
     tree_items_append = tree_items.append
     ci = sl.start
     end = sl.stop
@@ -277,18 +291,19 @@ def write_tree_from_cache(entries, odb, sl, si=0):
 
     # finally create the tree
     sio = BytesIO()
-    tree_to_stream(tree_items, sio.write)
+    tree_to_stream(tree_items, sio.write)  # converts bytes of each item[0] to str
+    tree_items_stringified = cast(List[Tuple[str, int, str]], tree_items)  # type: List[Tuple[str, int, str]]
     sio.seek(0)
 
     istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
-    return (istream.binsha, tree_items)
+    return (istream.binsha, tree_items_stringified)
 
 
-def _tree_entry_to_baseindexentry(tree_entry, stage):
+def _tree_entry_to_baseindexentry(tree_entry: Tuple[str, int, str], stage: int) -> BaseIndexEntry:
     return BaseIndexEntry((tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2]))
 
 
-def aggressive_tree_merge(odb, tree_shas) -> List[BaseIndexEntry]:
+def aggressive_tree_merge(odb, tree_shas: Sequence[bytes]) -> List[BaseIndexEntry]:
     """
     :return: list of BaseIndexEntries representing the aggressive merge of the given
         trees. All valid entries are on stage 0, whereas the conflicting ones are left

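Both cast() calls introduced in this file (around unpack() in read_header, and on tree_items near the end of write_tree_from_cache) are purely static: cast performs no conversion or checking at runtime, it only tells mypy to treat the value as the stated type. A small self-contained demonstration:

from struct import pack, unpack
from typing import List, Tuple, Union, cast

raw = pack(">LL", 2, 42)
# unpack() is typed as returning Tuple[Any, ...]; cast narrows that for mypy.
version, num_entries = cast(Tuple[int, int], unpack(">LL", raw))
assert (version, num_entries) == (2, 42)

mixed = ["abc", "def"]  # type: List[Union[bytes, str]]
narrowed = cast(List[str], mixed)  # mirrors the tree_items cast above
assert narrowed is mixed           # same object: nothing changed at runtime
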
Diff for: git/repo/base.py (+2 -2)

@@ -36,7 +36,7 @@
 
 from git.types import TBD, PathLike, Lit_config_levels
 from typing import (Any, BinaryIO, Callable, Dict,
-                    Iterator, List, Mapping, Optional,
+                    Iterator, List, Mapping, Optional, Sequence,
                     TextIO, Tuple, Type, Union,
                     NamedTuple, cast, TYPE_CHECKING)
 
@@ -536,7 +536,7 @@ def tree(self, rev: Union['Commit', 'Tree', None] = None) -> 'Tree':
             return self.head.commit.tree
         return self.rev_parse(str(rev) + "^{tree}")
 
-    def iter_commits(self, rev: Optional[TBD] = None, paths: Union[PathLike, List[PathLike]] = '',
+    def iter_commits(self, rev: Optional[TBD] = None, paths: Union[PathLike, Sequence[PathLike]] = '',
                      **kwargs: Any) -> Iterator[Commit]:
         """A list of Commit objects representing the history of a given ref/commit

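With paths loosened from List[PathLike] to Sequence[PathLike], type-checked callers of iter_commits may pass a tuple (or any other sequence) of paths as well as a single path. For example, assuming an existing repository checked out in the current directory:

from git import Repo

repo = Repo('.')
for commit in repo.iter_commits(paths=('git/index/fun.py', 'git/exc.py')):
    print(commit.hexsha, commit.summary)
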
Diff for: git/util.py (+1)

@@ -377,6 +377,7 @@ def expand_path(p: None, expand_vars: bool = ...) -> None:
 
 @overload
 def expand_path(p: PathLike, expand_vars: bool = ...) -> str:
+    # improve these overloads when 3.5 dropped
     ...
 

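The @overload stubs in git/util.py give expand_path a return type that tracks its argument: None in yields None out, while a real path yields str, which a single Optional-returning signature could not express. A self-contained sketch of the technique (a simplified stand-in, not the GitPython implementation):

import os.path
from typing import Optional, overload


@overload
def expand_path(p: None) -> None: ...


@overload
def expand_path(p: str) -> str: ...


def expand_path(p: Optional[str]) -> Optional[str]:
    # Single runtime implementation backing both overload stubs.
    if p is None:
        return None
    return os.path.normpath(os.path.abspath(os.path.expanduser(p)))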