
Commit ef5b771

update prek libraries, make lint (#2696)
# Rationale for this change

Updated the prek libraries (`poetry run prek auto-update`).

This also triggered updates that replace `Union` and `Optional` with the `X | Y` union syntax, since we no longer support Python 3.9.

Skipping [ruff `zip-without-explicit-strict (B905)`](https://docs.astral.sh/ruff/rules/zip-without-explicit-strict/) for now.

## Are these changes tested?

Yes, `make lint`.

## Are there any user-facing changes?
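As an illustration of the mechanical rewrite applied throughout the diff below, here is a minimal before/after sketch of the `Optional`/`Union` to PEP 604 `X | Y` conversion. The function and parameter names are hypothetical, not PyIceberg APIs:

```python
from typing import Optional, Union


# Pre-PEP 604 spelling, as removed throughout this commit (hypothetical function):
def lookup_codec_old(name: Union[str, bytes], default: Optional[str] = None) -> Optional[str]:
    return name.decode() if isinstance(name, bytes) else (name or default)


# PEP 604 spelling; the X | Y operator works at runtime on Python 3.10+:
def lookup_codec_new(name: str | bytes, default: str | None = None) -> str | None:
    return name.decode() if isinstance(name, bytes) else (name or default)
```

In modules that already use `from __future__ import annotations` (such as `pyiceberg/avro/codecs/__init__.py` below), the new spelling is also safe inside annotations; evaluating `X | Y` at runtime requires Python 3.10, which lines up with dropping Python 3.9 support.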
1 parent 9e16bc2 commit ef5b771


72 files changed: +918 −1005 lines

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions
@@ -27,13 +27,13 @@ repos:
       - id: check-yaml
       - id: check-ast
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.9
+    rev: v0.14.3
     hooks:
       - id: ruff
         args: [ --fix, --exit-non-zero-on-fix ]
       - id: ruff-format
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.17.1
+    rev: v1.18.2
     hooks:
       - id: mypy
         args:
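The PR description mentions skipping ruff's `zip-without-explicit-strict (B905)` rule, which the ruff hook bumped above can enforce when the bugbear rules are selected. A minimal sketch of what the rule flags; the variable names are illustrative only, and whether B905 fires in practice depends on the project's ruff configuration, which is not shown in this excerpt:

```python
names = ["a", "b", "c"]
sizes = [1, 2]  # note: shorter than names

# Flagged by B905: without strict=, zip() silently truncates to the shorter iterable.
pairs = list(zip(names, sizes))  # [("a", 1), ("b", 2)] -- "c" is dropped

# B905-compliant on Python 3.10+: an explicit strict= makes the intent visible,
# and strict=True raises ValueError on a length mismatch instead of truncating.
checked = list(zip(names[:2], sizes, strict=True))
```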

pyiceberg/avro/codecs/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@
 
 from __future__ import annotations
 
-from typing import Dict, Literal, Optional, Type
+from typing import Dict, Literal, Type
 
 from typing_extensions import TypeAlias
 
@@ -40,7 +40,7 @@
 
 AVRO_CODEC_KEY = "avro.codec"
 
-KNOWN_CODECS: Dict[AvroCompressionCodec, Optional[Type[Codec]]] = {
+KNOWN_CODECS: Dict[AvroCompressionCodec, Type[Codec] | None] = {
     "null": None,
     "bzip2": BZip2Codec,
     "snappy": SnappyCodec,

pyiceberg/avro/decoder.py

Lines changed: 1 addition & 2 deletions
@@ -21,7 +21,6 @@
     Dict,
     List,
     Tuple,
-    Union,
     cast,
 )
 
@@ -137,7 +136,7 @@ class StreamingBinaryDecoder(BinaryDecoder):
     __slots__ = "_input_stream"
     _input_stream: InputStream
 
-    def __init__(self, input_stream: Union[bytes, InputStream]) -> None:
+    def __init__(self, input_stream: bytes | InputStream) -> None:
         """Reader is a Python object on which we can call read, seek, and tell."""
         if isinstance(input_stream, bytes):
             # In the case of bytes, we wrap it into a BytesIO to make it a stream

pyiceberg/avro/file.py

Lines changed: 8 additions & 13 deletions
@@ -30,7 +30,6 @@
     Dict,
     Generic,
     List,
-    Optional,
     Type,
     TypeVar,
 )
@@ -85,7 +84,7 @@ def meta(self) -> Dict[str, str]:
     def sync(self) -> bytes:
         return self._data[2]
 
-    def compression_codec(self) -> Optional[Type[Codec]]:
+    def compression_codec(self) -> Type[Codec] | None:
         """Get the file's compression codec algorithm from the file's metadata.
 
         In the case of a null codec, we return a None indicating that we
@@ -146,20 +145,20 @@ class AvroFile(Generic[D]):
         "block",
     )
     input_file: InputFile
-    read_schema: Optional[Schema]
+    read_schema: Schema | None
     read_types: Dict[int, Callable[..., StructProtocol]]
     read_enums: Dict[int, Callable[..., Enum]]
    header: AvroFileHeader
    schema: Schema
    reader: Reader
 
     decoder: BinaryDecoder
-    block: Optional[Block[D]]
+    block: Block[D] | None
 
     def __init__(
         self,
         input_file: InputFile,
-        read_schema: Optional[Schema] = None,
+        read_schema: Schema | None = None,
         read_types: Dict[int, Callable[..., StructProtocol]] = EMPTY_DICT,
         read_enums: Dict[int, Callable[..., Enum]] = EMPTY_DICT,
     ) -> None:
@@ -186,9 +185,7 @@ def __enter__(self) -> AvroFile[D]:
 
         return self
 
-    def __exit__(
-        self, exctype: Optional[Type[BaseException]], excinst: Optional[BaseException], exctb: Optional[TracebackType]
-    ) -> None:
+    def __exit__(self, exctype: Type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None) -> None:
         """Perform cleanup when exiting the scope of a 'with' statement."""
 
     def __iter__(self) -> AvroFile[D]:
@@ -242,7 +239,7 @@ def __init__(
         output_file: OutputFile,
         file_schema: Schema,
         schema_name: str,
-        record_schema: Optional[Schema] = None,
+        record_schema: Schema | None = None,
         metadata: Dict[str, str] = EMPTY_DICT,
     ) -> None:
         self.output_file = output_file
@@ -270,9 +267,7 @@ def __enter__(self) -> AvroOutputFile[D]:
 
         return self
 
-    def __exit__(
-        self, exctype: Optional[Type[BaseException]], excinst: Optional[BaseException], exctb: Optional[TracebackType]
-    ) -> None:
+    def __exit__(self, exctype: Type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None) -> None:
         """Perform cleanup when exiting the scope of a 'with' statement."""
         self.output_stream.close()
 
@@ -289,7 +284,7 @@ def _write_header(self) -> None:
         header = AvroFileHeader(MAGIC, meta, self.sync_bytes)
         construct_writer(META_SCHEMA).write(self.encoder, header)
 
-    def compression_codec(self) -> Optional[Type[Codec]]:
+    def compression_codec(self) -> Type[Codec] | None:
         """Get the file's compression codec algorithm from the file's metadata.
 
         In the case of a null codec, we return a None indicating that we

pyiceberg/avro/reader.py

Lines changed: 5 additions & 6 deletions
@@ -35,7 +35,6 @@
     Callable,
     List,
     Mapping,
-    Optional,
     Tuple,
 )
 from uuid import UUID
@@ -292,7 +291,7 @@ def __repr__(self) -> str:
 class OptionReader(Reader):
     option: Reader = dataclassfield()
 
-    def read(self, decoder: BinaryDecoder) -> Optional[Any]:
+    def read(self, decoder: BinaryDecoder) -> Any | None:
         # For the Iceberg spec it is required to set the default value to null
         # From https://iceberg.apache.org/spec/#avro
         # Optional fields must always set the Avro field default value to null.
@@ -320,14 +319,14 @@ class StructReader(Reader):
         "_hash",
         "_max_pos",
     )
-    field_readers: Tuple[Tuple[Optional[int], Reader], ...]
+    field_readers: Tuple[Tuple[int | None, Reader], ...]
     create_struct: Callable[..., StructProtocol]
     struct: StructType
-    field_reader_functions = Tuple[Tuple[Optional[str], int, Optional[Callable[[BinaryDecoder], Any]]], ...]
+    field_reader_functions = Tuple[Tuple[str | None, int, Callable[[BinaryDecoder], Any] | None], ...]
 
     def __init__(
         self,
-        field_readers: Tuple[Tuple[Optional[int], Reader], ...],
+        field_readers: Tuple[Tuple[int | None, Reader], ...],
         create_struct: Callable[..., StructProtocol],
         struct: StructType,
     ) -> None:
@@ -339,7 +338,7 @@ def __init__(
         if not isinstance(self.create_struct(), StructProtocol):
             raise ValueError(f"Incompatible with StructProtocol: {self.create_struct}")
 
-        reading_callbacks: List[Tuple[Optional[int], Callable[[BinaryDecoder], Any]]] = []
+        reading_callbacks: List[Tuple[int | None, Callable[[BinaryDecoder], Any]]] = []
         max_pos = -1
         for pos, field in field_readers:
             if pos is not None:
