Skip to content

Commit ff0fa55

Browse files
Re-enable rule B028 (#2738)
<!-- Thanks for opening a pull request! --> Part of #2700 <!-- In the case this PR will resolve an issue, please replace ${GITHUB_ISSUE_ID} below with the actual Github issue id. --> <!-- Closes #${GITHUB_ISSUE_ID} --> # Rationale for this change This lets us enable rule B028 (no-explicit-stacklevel) on the linter by adding an explicit `stacklevel=2` to every `warnings.warn` call. ## Are these changes tested? `make lint` and `make test` should pass. ## Are there any user-facing changes? <!-- In the case of user-facing changes, please add the changelog label. --> Co-authored-by: Kevin Liu <kevinjqliu@users.noreply.github.com>
1 parent d6e978c commit ff0fa55

File tree

7 files changed

+10
-11
lines changed

7 files changed

+10
-11
lines changed

pyiceberg/avro/decoder.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,6 +181,6 @@ def new_decoder(b: bytes) -> BinaryDecoder:
181181
except ModuleNotFoundError:
182182
import warnings
183183

184-
warnings.warn("Falling back to pure Python Avro decoder, missing Cython implementation")
184+
warnings.warn("Falling back to pure Python Avro decoder, missing Cython implementation", stacklevel=2)
185185

186186
return StreamingBinaryDecoder(b)

pyiceberg/io/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -340,7 +340,7 @@ def _infer_file_io_from_scheme(path: str, properties: Properties) -> FileIO | No
340340
if file_io := _import_file_io(file_io_path, properties):
341341
return file_io
342342
else:
343-
warnings.warn(f"No preferred file implementation for scheme: {parsed_url.scheme}")
343+
warnings.warn(f"No preferred file implementation for scheme: {parsed_url.scheme}", stacklevel=2)
344344
return None
345345

346346

pyiceberg/io/pyarrow.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,7 @@ def _import_retry_strategy(impl: str) -> S3RetryStrategy | None:
232232
class_ = getattr(module, class_name)
233233
return class_()
234234
except (ModuleNotFoundError, AttributeError):
235-
warnings.warn(f"Could not initialize S3 retry strategy: {impl}")
235+
warnings.warn(f"Could not initialize S3 retry strategy: {impl}", stacklevel=2)
236236
return None
237237

238238

@@ -2768,7 +2768,7 @@ def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]:
27682768
f"{TableProperties.PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX}.*",
27692769
]:
27702770
if unsupported_keys := fnmatch.filter(table_properties, key_pattern):
2771-
warnings.warn(f"Parquet writer option(s) {unsupported_keys} not implemented")
2771+
warnings.warn(f"Parquet writer option(s) {unsupported_keys} not implemented", stacklevel=2)
27722772

27732773
compression_codec = table_properties.get(TableProperties.PARQUET_COMPRESSION, TableProperties.PARQUET_COMPRESSION_DEFAULT)
27742774
compression_level = property_as_int(

pyiceberg/table/__init__.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -663,7 +663,7 @@ def delete(
663663
self.table_metadata.properties.get(TableProperties.DELETE_MODE, TableProperties.DELETE_MODE_DEFAULT)
664664
== TableProperties.DELETE_MODE_MERGE_ON_READ
665665
):
666-
warnings.warn("Merge on read is not yet supported, falling back to copy-on-write")
666+
warnings.warn("Merge on read is not yet supported, falling back to copy-on-write", stacklevel=2)
667667

668668
if isinstance(delete_filter, str):
669669
delete_filter = _parse_row_filter(delete_filter)
@@ -731,7 +731,7 @@ def delete(
731731
overwrite_snapshot.append_data_file(replaced_data_file)
732732

733733
if not delete_snapshot.files_affected and not delete_snapshot.rewrites_needed:
734-
warnings.warn("Delete operation did not match any records")
734+
warnings.warn("Delete operation did not match any records", stacklevel=2)
735735

736736
def upsert(
737737
self,
@@ -1502,7 +1502,7 @@ def _do_commit(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[Table
15021502
try:
15031503
self.catalog._delete_old_metadata(self.io, self.metadata, response.metadata)
15041504
except Exception as e:
1505-
warnings.warn(f"Failed to delete old metadata after commit: {e}")
1505+
warnings.warn(f"Failed to delete old metadata after commit: {e}", stacklevel=2)
15061506

15071507
self.metadata = response.metadata
15081508
self.metadata_location = response.metadata_location
@@ -1728,7 +1728,7 @@ def projection(self) -> Schema:
17281728
schema for schema in self.table_metadata.schemas if schema.schema_id == snapshot.schema_id
17291729
)
17301730
except StopIteration:
1731-
warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}")
1731+
warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}", stacklevel=2)
17321732
else:
17331733
raise ValueError(f"Snapshot not found: {self.snapshot_id}")
17341734

pyiceberg/table/snapshots.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ class Summary(IcebergBaseModel, Mapping[str, str]):
187187

188188
def __init__(self, operation: Operation | None = None, **data: Any) -> None:
189189
if operation is None:
190-
warnings.warn("Encountered invalid snapshot summary: operation is missing, defaulting to overwrite")
190+
warnings.warn("Encountered invalid snapshot summary: operation is missing, defaulting to overwrite", stacklevel=2)
191191
operation = Operation.OVERWRITE
192192
super().__init__(operation=operation, **data)
193193
self._additional_properties = data

ruff.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,6 @@ select = [
5959
]
6060
ignore = [
6161
"E501",
62-
"B028",
6362
"UP037",
6463
"UP035",
6564
"UP006"

tests/io/test_pyarrow.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2804,7 +2804,7 @@ def test_pyarrow_io_multi_fs() -> None:
28042804
class SomeRetryStrategy(AwsDefaultS3RetryStrategy):
28052805
def __init__(self) -> None:
28062806
super().__init__()
2807-
warnings.warn("Initialized SomeRetryStrategy 👍")
2807+
warnings.warn("Initialized SomeRetryStrategy 👍", stacklevel=2)
28082808

28092809

28102810
def test_retry_strategy() -> None:

0 commit comments

Comments
 (0)