
Commit d661995

Add tests and github workflows

1 parent: ab42122

File tree: 8 files changed (+407, -4 lines changed)


.github/workflows/publish.yml

Lines changed: 51 additions & 0 deletions

@@ -0,0 +1,51 @@
# SPDX-FileCopyrightText: Contributors to PyPSA-Eur <https://github.com/pypsa/pypsa-eur>
#
# SPDX-License-Identifier: CC0-1.0

name: Publish to PyPI

on:
  release:
    types: [published]
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install uv
        uses: astral-sh/setup-uv@v3

      - name: Build package
        run: uv build

      - name: Store the distribution packages
        uses: actions/upload-artifact@v4
        with:
          name: python-package-distributions
          path: dist/

  publish:
    needs: build
    runs-on: ubuntu-latest
    environment:
      name: pypi
      url: https://pypi.org/p/snakemake-storage-plugin-cached-http
    permissions:
      id-token: write
    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v4
        with:
          name: python-package-distributions
          path: dist/

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
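
The publish job relies on PyPI trusted publishing: the id-token: write permission and the pypi environment let pypa/gh-action-pypi-publish authenticate via OIDC instead of an API token, uploading whatever the build job stored in dist/. As a quick local sanity check (a sketch, not part of the commit), the artifacts that uv build writes to dist/ can be listed before cutting a release:

    # Sketch: list the sdist and wheel that "uv build" places in dist/ and that
    # the publish job would upload; assumes "uv build" was run in the repo root.
    from pathlib import Path

    for artifact in sorted(Path("dist").glob("*")):
        size_kib = artifact.stat().st_size / 1024
        print(f"{artifact.name}  ({size_kib:.1f} KiB)")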

.github/workflows/test.yml

Lines changed: 37 additions & 0 deletions

@@ -0,0 +1,37 @@
# SPDX-FileCopyrightText: Contributors to PyPSA-Eur <https://github.com/pypsa/pypsa-eur>
#
# SPDX-License-Identifier: CC0-1.0

name: Tests

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.11', '3.12', '3.13']

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install uv
        uses: astral-sh/setup-uv@v3

      - name: Install dependencies
        run: |
          uv pip install --system -e ".[dev]"

      - name: Run tests
        run: |
          pytest -v
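
Outside CI, the same suite can be run once the package is installed with its dev extra (mirroring the uv pip install --system -e ".[dev]" step above). A minimal sketch, not part of the commit:

    # Programmatic equivalent of the workflow's "pytest -v" step; test discovery
    # follows the testpaths = ["tests"] setting added to pyproject.toml below.
    import sys

    import pytest

    if __name__ == "__main__":
        sys.exit(pytest.main(["-v"]))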

MANIFEST.in

Lines changed: 7 additions & 0 deletions

@@ -0,0 +1,7 @@
# SPDX-FileCopyrightText: Contributors to PyPSA-Eur <https://github.com/pypsa/pypsa-eur>
#
# SPDX-License-Identifier: MIT

include README.md
include LICENSE
recursive-include LICENSES *

pyproject.toml

Lines changed: 28 additions & 4 deletions

@@ -10,11 +10,12 @@ authors = [
     { name = "PyPSA-Eur Authors", email = "jonas.hoersch@openenergytransition.org" },
 ]
 readme = "README.md"
-license = { text = "MIT" }
+license = "MIT"
 keywords = ["snakemake", "plugin", "storage", "http", "cache"]
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 dependencies = [
     "httpx ~= 0.27",
+    "platformdirs ~= 4.0",
     "reretry ~= 0.11",
     "snakemake-interface-common ~= 1.14",
     "snakemake-interface-storage-plugins >=4.2,<5.0",
@@ -24,20 +25,43 @@ dependencies = [
 classifiers = [
     "Development Status :: 4 - Beta",
     "Intended Audience :: Science/Research",
-    "License :: OSI Approved :: MIT License",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
 ]
 
 [project.urls]
 homepage = "https://github.com/pypsa/pypsa-eur"
 repository = "https://github.com/pypsa/pypsa-eur"
 
+[project.optional-dependencies]
+dev = [
+    "pytest>=7.0",
+    "pytest-asyncio>=0.21",
+]
+
 [build-system]
 requires = ["setuptools>=61.0"]
 build-backend = "setuptools.build_meta"
 
 [project.entry-points."snakemake_storage_plugins"]
 cached-http = "snakemake_storage_plugin_cached_http:StorageProvider"
+
+[tool.pytest.ini_options]
+testpaths = ["tests"]
+markers = [
+    "asyncio: mark test as an asyncio test",
+]
+
+[tool.pytest_asyncio]
+asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"
+
+[dependency-groups]
+dev = [
+    "humanfriendly>=10.0",
+    "pytest>=8.4.2",
+    "pytest-asyncio>=1.2.0",
+    "snakemake>=9.13.4",
+]
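
The [project.entry-points."snakemake_storage_plugins"] table is what makes the provider discoverable by Snakemake under the name cached-http. As a rough illustration (not part of the commit), the registration can be checked with the standard library after installation; the group and entry-point names below come straight from the section above:

    # Sketch: confirm the entry point declared in pyproject.toml resolves to the
    # StorageProvider class once the package is installed.
    from importlib.metadata import entry_points

    for ep in entry_points(group="snakemake_storage_plugins"):
        if ep.name == "cached-http":
            provider_cls = ep.load()  # snakemake_storage_plugin_cached_http:StorageProvider
            print(f"{ep.name} -> {ep.value}: {provider_cls!r}")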

tests/__init__.py

Lines changed: 3 additions & 0 deletions

@@ -0,0 +1,3 @@
# SPDX-FileCopyrightText: Contributors to PyPSA-Eur <https://github.com/pypsa/pypsa-eur>
#
# SPDX-License-Identifier: MIT

tests/test_download.py

Lines changed: 206 additions & 0 deletions

@@ -0,0 +1,206 @@
# SPDX-FileCopyrightText: Contributors to PyPSA-Eur <https://github.com/pypsa/pypsa-eur>
#
# SPDX-License-Identifier: MIT

"""Functional tests for downloading and checksumming from Zenodo."""

import json
import logging
from pathlib import Path

import pytest

from snakemake_storage_plugin_cached_http import (
    StorageObject,
    StorageProvider,
    StorageProviderSettings,
    WrongChecksum,
)

TEST_URL = "https://zenodo.org/records/16810901/files/attributed_ports.json"


@pytest.fixture
def storage_provider(tmp_path):
    """Create a StorageProvider instance for testing."""
    cache_dir = tmp_path / "cache"
    cache_dir.mkdir()
    local_prefix = tmp_path / "local"
    local_prefix.mkdir()

    settings = StorageProviderSettings(
        cache=str(cache_dir),
        skip_remote_checks=False,
        max_concurrent_downloads=3,
    )

    logger = logging.getLogger("test")

    provider = StorageProvider(
        local_prefix=local_prefix,
        logger=logger,
        settings=settings,
    )

    return provider


@pytest.fixture
def storage_object(storage_provider):
    """Create a StorageObject for the test file."""
    # Create storage object
    obj = StorageObject(
        query=TEST_URL,
        keep_local=False,
        retrieve=True,
        provider=storage_provider,
    )

    yield obj


@pytest.mark.asyncio
async def test_zenodo_metadata_fetch(storage_provider):
    """Test that we can fetch metadata from Zenodo API."""
    record_id = "16810901"
    netloc = "zenodo.org"

    metadata = await storage_provider.get_metadata(record_id, netloc)

    assert "attributed_ports.json" in metadata
    file_meta = metadata["attributed_ports.json"]
    assert file_meta.checksum is not None
    assert file_meta.size > 0
    assert file_meta.checksum.startswith("md5:")


@pytest.mark.asyncio
async def test_storage_object_exists(storage_object):
    """Test that the storage object reports existence correctly."""
    exists = await storage_object.managed_exists()
    assert exists is True


@pytest.mark.asyncio
async def test_storage_object_size(storage_object):
    """Test that the storage object reports size correctly."""
    size = await storage_object.managed_size()
    assert size > 0
    # The file is a small JSON file, should be less than 1MB
    assert size < 1_000_000


@pytest.mark.asyncio
async def test_storage_object_mtime(storage_object):
    """Test that mtime is 0 for immutable Zenodo URLs."""
    mtime = await storage_object.managed_mtime()
    assert mtime == 0


@pytest.mark.asyncio
async def test_download_and_checksum(storage_object, tmp_path):
    """Test downloading a file and verifying its checksum."""
    local_path = tmp_path / "test_download" / "attributed_ports.json"
    local_path.parent.mkdir(parents=True, exist_ok=True)

    # Mock the local_path method to return our test path
    storage_object.local_path = lambda: local_path

    # Download the file
    await storage_object.managed_retrieve()

    # Verify file was downloaded
    assert local_path.exists()
    assert local_path.stat().st_size > 0

    # Verify it's valid JSON (use utf-8 with error handling for any encoding issues)
    with open(local_path, encoding="utf-8", errors="replace") as f:
        data = json.load(f)
    assert isinstance(data, dict)

    # Verify checksum (should not raise WrongChecksum exception)
    await storage_object.verify_checksum(local_path)


@pytest.mark.asyncio
async def test_cache_functionality(storage_provider, tmp_path):
    """Test that files are cached after download."""
    # First download
    obj1 = StorageObject(
        query=TEST_URL,
        keep_local=False,
        retrieve=True,
        provider=storage_provider,
    )

    local_path1 = tmp_path / "download1" / "attributed_ports.json"
    local_path1.parent.mkdir(parents=True, exist_ok=True)
    obj1.local_path = lambda: local_path1

    await obj1.managed_retrieve()

    # Verify cache was populated
    assert obj1.query_path is not None
    assert obj1.query_path.exists()

    # Second download should use cache
    obj2 = StorageObject(
        query=TEST_URL,
        keep_local=False,
        retrieve=True,
        provider=storage_provider,
    )

    local_path2 = tmp_path / "download2" / "attributed_ports.json"
    local_path2.parent.mkdir(parents=True, exist_ok=True)
    obj2.local_path = lambda: local_path2

    await obj2.managed_retrieve()

    # Both files should be identical
    assert local_path1.read_bytes() == local_path2.read_bytes()


@pytest.mark.asyncio
async def test_skip_remote_checks(storage_provider, tmp_path):
    """Test that skip_remote_checks works correctly."""
    local_prefix = tmp_path / "local"
    local_prefix.mkdir()

    # Create provider with skip_remote_checks enabled
    settings = StorageProviderSettings(
        cache="",  # No cache
        skip_remote_checks=True,
        max_concurrent_downloads=3,
    )

    logger = logging.getLogger("test")
    provider_skip = StorageProvider(
        local_prefix=local_prefix,
        logger=logger,
        settings=settings,
    )

    obj = StorageObject(
        query=TEST_URL,
        keep_local=False,
        retrieve=True,
        provider=provider_skip,
    )

    # With skip_remote_checks, these should return default values without API calls
    assert await obj.managed_exists() is True
    assert await obj.managed_mtime() == 0
    assert await obj.managed_size() == 0


@pytest.mark.asyncio
async def test_wrong_checksum_detection(storage_object, tmp_path):
    """Test that corrupted files are detected via checksum."""
    # Create a corrupted file
    corrupted_path = tmp_path / "corrupted.json"
    corrupted_path.write_text('{"corrupted": "data"}')

    # Verify checksum should raise WrongChecksum
    with pytest.raises(WrongChecksum):
        await storage_object.verify_checksum(corrupted_path)
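
test_zenodo_metadata_fetch asserts that Zenodo reports checksums in "md5:<hexdigest>" form, and verify_checksum is expected to raise WrongChecksum when a file does not match. As a hedged sketch of that idea (an assumption, not the plugin's actual implementation, which may stream the file or handle other algorithms):

    # Sketch: check a local file against an "md5:<hexdigest>" checksum string as
    # returned by the Zenodo records API.
    import hashlib
    from pathlib import Path


    def matches_md5_checksum(path: Path, checksum: str) -> bool:
        """Return True if the file at "path" matches an "md5:<hexdigest>" checksum."""
        algorithm, _, expected = checksum.partition(":")
        if algorithm != "md5":
            raise ValueError(f"unsupported checksum algorithm: {algorithm}")
        return hashlib.md5(path.read_bytes()).hexdigest() == expected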
