diff --git a/.gitattributes b/.gitattributes index 7a79ddd6b0b..3c57696a336 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,3 +2,5 @@ doc/whats-new.rst merge=union # allow installing from git archives .git_archival.txt export-subst +# SCM syntax highlighting & preventing 3-way merges +pixi.lock merge=binary linguist-language=YAML linguist-generated=true diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index 90c3aff8531..66a26de2cf4 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -9,7 +9,10 @@ env: PR_HEAD_LABEL: ${{ github.event.pull_request.head.label }} jobs: + cache-pixi-lock: + uses: ./.github/workflows/cache-pixi-lock.yml benchmark: + needs: cache-pixi-lock if: ${{ contains( github.event.pull_request.labels.*.name, 'run-benchmark') && github.event_name == 'pull_request' || contains( github.event.pull_request.labels.*.name, 'topic-performance') && github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }} name: Linux runs-on: ubuntu-latest @@ -24,6 +27,18 @@ jobs: with: fetch-depth: 0 + - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock + with: + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + cache: true + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + - name: Set up conda environment uses: mamba-org/setup-micromamba@v2 with: diff --git a/.github/workflows/cache-pixi-lock.yml b/.github/workflows/cache-pixi-lock.yml new file mode 100644 index 00000000000..bf1e190935e --- /dev/null +++ b/.github/workflows/cache-pixi-lock.yml @@ -0,0 +1,52 @@ +name: Generate and cache Pixi lockfile + +on: + workflow_call: + inputs: + pixi-version: + type: string + outputs: + cache-id: + description: "The lock file contents" + value: ${{ jobs.cache-pixi-lock.outputs.cache-id }} + +jobs: + cache-pixi-lock: + name: Generate output + runs-on: ubuntu-latest + outputs: + cache-id: ${{ steps.restore.outputs.cache-primary-key }} + steps: + - uses: actions/checkout@v5 + with: + fetch-depth: 0 + submodules: recursive + - name: Get current date + id: date + run: echo "date=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT" + - uses: actions/cache/restore@v4 + id: restore + with: + path: | + pixi.lock + key: ${{ steps.date.outputs.date }}_${{ inputs.pixi-version }}_${{hashFiles('pixi.toml')}} + - uses: prefix-dev/setup-pixi@v0.9.0 + if: ${{ !steps.restore.outputs.cache-hit }} + with: + pixi-version: ${{ inputs.pixi-version }} + run-install: false + - name: Run pixi lock + if: ${{ !steps.restore.outputs.cache-hit }} + run: pixi lock + - uses: actions/cache/save@v4 + if: ${{ !steps.restore.outputs.cache-hit }} + id: cache + with: + path: | + pixi.lock + key: ${{ steps.restore.outputs.cache-primary-key }} + - name: Upload pixi.lock + uses: actions/upload-artifact@v4 + with: + name: pixi-lock + path: pixi.lock diff --git a/.github/workflows/ci-additional.yaml b/.github/workflows/ci-additional.yaml index bb5a1217c18..b61dfc87d47 100644 --- a/.github/workflows/ci-additional.yaml +++ b/.github/workflows/ci-additional.yaml @@ -14,6 +14,7 @@ concurrency: env: FORCE_COLOR: 3 + PIXI_VERSION: "v0.58.0" jobs: detect-ci-trigger: @@ -32,19 +33,21 @@ jobs: id: detect-trigger with: keyword: "[skip-ci]" - + cache-pixi-lock: + uses: ./.github/workflows/cache-pixi-lock.yml + with: + pixi-version: "v0.58.0" # keep in sync with env var above doctest: name: Doctests runs-on: "ubuntu-latest" - needs: 
detect-ci-trigger + needs: [detect-ci-trigger, cache-pixi-lock] if: needs.detect-ci-trigger.outputs.triggered == 'false' defaults: run: shell: bash -l {0} env: - CONDA_ENV_FILE: ci/requirements/environment.yml - PYTHON_VERSION: "3.12" + PIXI_ENV: test-all-deps-py313 steps: - uses: actions/checkout@v5 with: @@ -54,22 +57,24 @@ jobs: run: | echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v2 + - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock with: - environment-file: ${{env.CONDA_ENV_FILE}} - environment-name: xarray-tests - create-args: >- - python=${{env.PYTHON_VERSION}} - cache-environment: true - cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" + enableCrossOsArchive: true + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + pixi-version: ${{ env.PIXI_VERSION }} + cache: true + environments: ${{ env.PIXI_ENV }} + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} - - name: Install xarray - run: | - python -m pip install --no-deps -e . - name: Version info run: | - python xarray/util/print_versions.py + pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py - name: Run doctests run: | # Raise an error if there are warnings in the doctests, with `-Werror`. @@ -78,49 +83,47 @@ jobs: # # If dependencies emit warnings we can't do anything about, add ignores to # `xarray/tests/__init__.py`. - python -m pytest --doctest-modules xarray --ignore xarray/tests -Werror + pixi run -e ${{env.PIXI_ENV}} python -m pytest --doctest-modules xarray --ignore xarray/tests -Werror mypy: name: Mypy runs-on: "ubuntu-latest" - needs: detect-ci-trigger + needs: [detect-ci-trigger, cache-pixi-lock] defaults: run: shell: bash -l {0} env: - CONDA_ENV_FILE: ci/requirements/environment.yml - PYTHON_VERSION: "3.12" + PIXI_ENV: test-with-typing-py313 steps: - uses: actions/checkout@v5 with: fetch-depth: 0 # Fetch all history for all branches and tags. - + - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock + with: + enableCrossOsArchive: true + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + pixi-version: ${{ env.PIXI_VERSION }} + cache: true + environments: ${{ env.PIXI_ENV }} + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} - name: set environment variables run: | echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v2 - with: - environment-file: ${{env.CONDA_ENV_FILE}} - environment-name: xarray-tests - create-args: >- - python=${{env.PYTHON_VERSION}} - cache-environment: true - cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" - - name: Install xarray - run: | - python -m pip install --no-deps -e . + echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. 
-f1,2)" >> $GITHUB_ENV - name: Version info run: | - python xarray/util/print_versions.py - - name: Install mypy - run: | - python -m pip install "mypy==1.18.1" --force-reinstall + pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py - name: Run mypy run: | - python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report + pixi run -e ${{env.PIXI_ENV}} python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report - name: Upload mypy coverage to Codecov uses: codecov/codecov-action@v5.5.1 @@ -134,44 +137,42 @@ jobs: mypy-min: name: Mypy 3.11 runs-on: "ubuntu-latest" - needs: detect-ci-trigger + needs: [detect-ci-trigger, cache-pixi-lock] defaults: run: shell: bash -l {0} env: - CONDA_ENV_FILE: ci/requirements/environment.yml - PYTHON_VERSION: "3.11" + PIXI_ENV: test-with-typing-py311 steps: - uses: actions/checkout@v5 with: fetch-depth: 0 # Fetch all history for all branches and tags. - + - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock + with: + enableCrossOsArchive: true + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + pixi-version: ${{ env.PIXI_VERSION }} + cache: true + environments: ${{ env.PIXI_ENV }} + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} - name: set environment variables run: | echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v2 - with: - environment-file: ${{env.CONDA_ENV_FILE}} - environment-name: xarray-tests - create-args: >- - python=${{env.PYTHON_VERSION}} - cache-environment: true - cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" - - name: Install xarray - run: | - python -m pip install --no-deps -e . + echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV - name: Version info run: | - python xarray/util/print_versions.py - - name: Install mypy - run: | - python -m pip install "mypy==1.18.1" --force-reinstall + pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py - name: Run mypy run: | - python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report + pixi run -e ${{env.PIXI_ENV}} python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report - name: Upload mypy coverage to Codecov uses: codecov/codecov-action@v5.5.1 @@ -185,7 +186,7 @@ jobs: pyright: name: Pyright runs-on: "ubuntu-latest" - needs: detect-ci-trigger + needs: [detect-ci-trigger, cache-pixi-lock] if: | always() && ( @@ -195,39 +196,38 @@ jobs: run: shell: bash -l {0} env: - CONDA_ENV_FILE: ci/requirements/environment.yml - PYTHON_VERSION: "3.12" + PIXI_ENV: test-all-deps-py313 steps: - uses: actions/checkout@v5 with: fetch-depth: 0 # Fetch all history for all branches and tags. 
+ - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock + with: + enableCrossOsArchive: true + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + pixi-version: ${{ env.PIXI_VERSION }} + cache: true + environments: ${{ env.PIXI_ENV }} + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} - name: set environment variables run: | echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v2 - with: - environment-file: ${{env.CONDA_ENV_FILE}} - environment-name: xarray-tests - create-args: >- - python=${{env.PYTHON_VERSION}} - cache-environment: true - cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" - - name: Install xarray - run: | - python -m pip install --no-deps -e . + echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV - name: Version info run: | - python xarray/util/print_versions.py - - name: Install pyright - run: | - python -m pip install pyright --force-reinstall + pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py - name: Run pyright run: | - python -m pyright xarray/ + pixi run -e ${{env.PIXI_ENV}} python -m pyright xarray/ - name: Upload pyright coverage to Codecov uses: codecov/codecov-action@v5.5.1 @@ -241,7 +241,7 @@ jobs: pyright39: name: Pyright 3.11 runs-on: "ubuntu-latest" - needs: detect-ci-trigger + needs: [detect-ci-trigger, cache-pixi-lock] if: | always() && ( @@ -251,39 +251,37 @@ jobs: run: shell: bash -l {0} env: - CONDA_ENV_FILE: ci/requirements/environment.yml - PYTHON_VERSION: "3.11" + PIXI_ENV: test-all-deps-py313 steps: - uses: actions/checkout@v5 with: fetch-depth: 0 # Fetch all history for all branches and tags. - + - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock + with: + enableCrossOsArchive: true + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + pixi-version: ${{ env.PIXI_VERSION }} + cache: true + environments: ${{ env.PIXI_ENV }} + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} - name: set environment variables run: | echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v2 - with: - environment-file: ${{env.CONDA_ENV_FILE}} - environment-name: xarray-tests - create-args: >- - python=${{env.PYTHON_VERSION}} - cache-environment: true - cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" - - name: Install xarray - run: | - python -m pip install --no-deps -e . + echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. 
-f1,2)" >> $GITHUB_ENV - name: Version info run: | - python xarray/util/print_versions.py - - name: Install pyright - run: | - python -m pip install pyright --force-reinstall + pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py - name: Run pyright run: | - python -m pyright xarray/ + pixi run -e ${{env.PIXI_ENV}} python -m pyright xarray/ - name: Upload pyright coverage to Codecov uses: codecov/codecov-action@v5.5.1 @@ -319,10 +317,10 @@ jobs: uses: xarray-contrib/minimum-dependency-versions@e2ac8ff0a76e8603d8536ef5d64743a375961ce9 # v0.1.1 with: policy: ci/policy.yaml - environment-paths: ci/requirements/min-all-deps.yml + environment-paths: ci/requirement-linting/min-all-deps.yml - name: Bare minimum versions policy uses: xarray-contrib/minimum-dependency-versions@e2ac8ff0a76e8603d8536ef5d64743a375961ce9 # v0.1.1 with: policy: ci/policy.yaml - environment-paths: ci/requirements/bare-minimum.yml + environment-paths: ci/requirement-linting/bare-minimum.yml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 414d5ad2549..1445076a111 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,6 +14,7 @@ concurrency: env: FORCE_COLOR: 3 + PIXI_VERSION: "v0.58.0" jobs: detect-ci-trigger: @@ -32,10 +33,15 @@ jobs: id: detect-trigger with: keyword: "[skip-ci]" + + cache-pixi-lock: + uses: ./.github/workflows/cache-pixi-lock.yml + with: + pixi-version: "v0.58.0" # keep in sync with env var above test: - name: ${{ matrix.os }} py${{ matrix.python-version }} ${{ matrix.env }} + name: "${{ matrix.os }} | pixi shell -e ${{ matrix.pixi-env }}" runs-on: ${{ matrix.os }} - needs: detect-ci-trigger + needs: [detect-ci-trigger, cache-pixi-lock] if: needs.detect-ci-trigger.outputs.triggered == 'false' defaults: run: @@ -45,110 +51,87 @@ jobs: matrix: os: ["ubuntu-latest", "macos-latest", "windows-latest"] # Bookend python versions - python-version: ["3.11", "3.13"] - env: [""] + pixi-env: ["test-all-deps-py311", "test-all-deps-py313"] + pytest-addopts: [""] include: # Minimum python version: - - env: "bare-minimum" - python-version: "3.11" - os: ubuntu-latest - - env: "bare-min-and-scipy" - python-version: "3.11" + - pixi-env: "test-bare-minimum" os: ubuntu-latest - - env: "min-all-deps" - python-version: "3.11" + - pixi-env: "test-bare-min-and-scipy" os: ubuntu-latest + # - pixi-env: "min-all-deps" # TODO: include later by duplicating old workflow and using conda. Not using Pixi for now. + # python-version: "3.11" + # os: ubuntu-latest # Latest python version: - - env: "all-but-numba" - python-version: "3.13" + - pixi-env: "test-all-but-numba" os: ubuntu-latest - - env: "all-but-dask" - python-version: "3.12" + - pixi-env: "test-all-but-dask" os: ubuntu-latest - - env: "flaky" - python-version: "3.13" + - pixi-env: "test-all-deps-py313" + pytest-addopts: "flaky" os: ubuntu-latest # The mypy tests must be executed using only 1 process in order to guarantee # predictable mypy output messages for comparison to expectations. - - env: "mypy" - python-version: "3.11" + - pixi-env: "test-with-typing-py311" + pytest-addopts: "mypy" numprocesses: 1 os: ubuntu-latest - - env: "mypy" - python-version: "3.13" + - pixi-env: "test-with-typing-py313" numprocesses: 1 os: ubuntu-latest steps: - uses: actions/checkout@v5 with: fetch-depth: 0 # Fetch all history for all branches and tags. 
+ - name: Restore cached pixi lockfile + uses: actions/cache/restore@v4 + id: restore-pixi-lock + with: + enableCrossOsArchive: true + path: | + pixi.lock + key: ${{ needs.cache-pixi-lock.outputs.cache-id }} + - uses: prefix-dev/setup-pixi@v0.9.0 + with: + pixi-version: ${{ env.PIXI_VERSION }} + cache: true + environments: ${{ matrix.pixi-env }} + cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + - name: Set environment variables run: | echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV - if [[ ${{ matrix.os }} == windows* ]] ; - then - if [[ ${{ matrix.python-version }} != "3.14" ]]; then - echo "CONDA_ENV_FILE=ci/requirements/environment-windows.yml" >> $GITHUB_ENV - else - echo "CONDA_ENV_FILE=ci/requirements/environment-windows-3.14.yml" >> $GITHUB_ENV - fi - elif [[ "${{ matrix.env }}" != "" ]] ; + if [[ "${{ matrix.pytest-addopts }}" != "" ]] ; then - if [[ "${{ matrix.env }}" == "flaky" ]] ; + if [[ "${{ matrix.pytest-addopts }}" == "flaky" ]] ; then - echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV echo "PYTEST_ADDOPTS=-m 'flaky or network' --run-flaky --run-network-tests -W default" >> $GITHUB_ENV - elif [[ "${{ matrix.env }}" == "mypy" ]] ; + elif [[ "${{ matrix.pytest-addopts }}" == "mypy" ]] ; then - echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV echo "PYTEST_ADDOPTS=-n 1 -m 'mypy' --run-mypy -W default" >> $GITHUB_ENV - else - echo "CONDA_ENV_FILE=ci/requirements/${{ matrix.env }}.yml" >> $GITHUB_ENV fi - if [[ "${{ matrix.env }}" == "min-all-deps" ]] ; + if [[ "${{ matrix.pixi-env }}" == "min-all-deps" ]] ; then # Don't raise on warnings echo "PYTEST_ADDOPTS=-W default" >> $GITHUB_ENV fi - else - if [[ ${{ matrix.python-version }} != "3.14" ]]; then - echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV - else - echo "CONDA_ENV_FILE=ci/requirements/environment-3.14.yml" >> $GITHUB_ENV - fi fi - echo "PYTHON_VERSION=${{ matrix.python-version }}" >> $GITHUB_ENV - - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v2 - with: - environment-file: ${{ env.CONDA_ENV_FILE }} - environment-name: xarray-tests - cache-environment: true - cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{matrix.python-version}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" - create-args: >- - python=${{matrix.python-version}} - # We only want to install this on one run, because otherwise we'll have # duplicate annotations. - name: Install error reporter - if: ${{ matrix.os }} == 'ubuntu-latest' and ${{ matrix.python-version }} == '3.12' + if: ${{ matrix.os }} == 'ubuntu-latest' and ${{ matrix.pixi-env}} == 'test-all-deps-py313' run: | - python -m pip install pytest-github-actions-annotate-failures - - - name: Install xarray - run: | - python -m pip install --no-deps -e . 
+ pixi add --pypi pytest-github-actions-annotate-failures - name: Version info run: | - python xarray/util/print_versions.py + pixi run -e ${{ matrix.pixi-env }} python xarray/util/print_versions.py - name: Import xarray run: | - python -c "import xarray" + pixi run -e ${{ matrix.pixi-env }} python -c "import xarray" - name: Restore cached hypothesis directory uses: actions/cache@v4 @@ -159,7 +142,8 @@ jobs: save-always: true - name: Run tests - run: python -m pytest -n ${{ matrix.numprocesses || 4 }} + run: | + pixi run -e ${{ matrix.pixi-env }} python -m pytest -n ${{ matrix.numprocesses || 4 }} --timeout 180 --cov=xarray --cov-report=xml @@ -169,7 +153,7 @@ jobs: if: always() uses: actions/upload-artifact@v5 with: - name: Test results for ${{ runner.os }}-${{ matrix.python-version }} ${{ matrix.env }} + name: Test results for OS ${{ runner.os }} pixi-env -${{ matrix.pixi-env }} path: pytest.xml - name: Upload code coverage to Codecov @@ -179,7 +163,7 @@ jobs: with: file: ./coverage.xml flags: unittests - env_vars: RUNNER_OS,PYTHON_VERSION + env_vars: RUNNER_OS name: codecov-umbrella fail_ci_if_error: false diff --git a/.gitignore b/.gitignore index 19dceefd192..bb2b49c2cd4 100644 --- a/.gitignore +++ b/.gitignore @@ -91,3 +91,7 @@ doc/videos-gallery.txt uv.lock mypy_report/ xarray-docs/ + +# pixi environments +.pixi +pixi.lock diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 57f93911f5d..9da07cf3bf9 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,16 +7,23 @@ sphinx: build: os: ubuntu-lts-latest tools: - python: mambaforge-latest + # just so RTD stops complaining + python: "latest" jobs: + create_environment: + - asdf plugin add pixi + - asdf install pixi latest + - asdf global pixi latest post_checkout: - (git --no-pager log --pretty="tformat:%s" -1 | grep -vqF "[skip-rtd]") || exit 183 - git fetch --unshallow || true pre_install: - - git update-index --assume-unchanged doc/conf.py ci/requirements/doc.yml - -conda: - environment: ci/requirements/doc.yml + - git update-index --assume-unchanged doc/conf.py + install: + - pixi install -e doc + build: + html: + - BUILDDIR=$READTHEDOCS_OUTPUT pixi run -e doc make html formats: - htmlzip diff --git a/README.md b/README.md index e7c168d8f0d..14d621aabf7 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # xarray: N-D labeled arrays and datasets +[![Xarray](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydata/xarray/refs/heads/main/doc/badge.json)](https://xarray.dev) +[![Powered by Pixi](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/prefix-dev/pixi/main/assets/badge/v0.json)](https://pixi.sh) [![CI](https://github.com/pydata/xarray/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/pydata/xarray/actions/workflows/ci.yaml?query=branch%3Amain) [![Code coverage](https://codecov.io/gh/pydata/xarray/branch/main/graph/badge.svg?flag=unittests)](https://codecov.io/gh/pydata/xarray) [![Docs](https://readthedocs.org/projects/xray/badge/?version=latest)](https://docs.xarray.dev/) @@ -12,7 +14,6 @@ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.598201.svg)](https://doi.org/10.5281/zenodo.598201) [![Examples on 
binder](https://img.shields.io/badge/launch-binder-579ACA.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFkAAABZCAMAAABi1XidAAAB8lBMVEX///9XmsrmZYH1olJXmsr1olJXmsrmZYH1olJXmsr1olJXmsrmZYH1olL1olJXmsr1olJXmsrmZYH1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olJXmsrmZYH1olL1olL0nFf1olJXmsrmZYH1olJXmsq8dZb1olJXmsrmZYH1olJXmspXmspXmsr1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olLeaIVXmsrmZYH1olL1olL1olJXmsrmZYH1olLna31Xmsr1olJXmsr1olJXmsrmZYH1olLqoVr1olJXmsr1olJXmsrmZYH1olL1olKkfaPobXvviGabgadXmsqThKuofKHmZ4Dobnr1olJXmsr1olJXmspXmsr1olJXmsrfZ4TuhWn1olL1olJXmsqBi7X1olJXmspZmslbmMhbmsdemsVfl8ZgmsNim8Jpk8F0m7R4m7F5nLB6jbh7jbiDirOEibOGnKaMhq+PnaCVg6qWg6qegKaff6WhnpKofKGtnomxeZy3noG6dZi+n3vCcpPDcpPGn3bLb4/Mb47UbIrVa4rYoGjdaIbeaIXhoWHmZYHobXvpcHjqdHXreHLroVrsfG/uhGnuh2bwj2Hxk17yl1vzmljzm1j0nlX1olL3AJXWAAAAbXRSTlMAEBAQHx8gICAuLjAwMDw9PUBAQEpQUFBXV1hgYGBkcHBwcXl8gICAgoiIkJCQlJicnJ2goKCmqK+wsLC4usDAwMjP0NDQ1NbW3Nzg4ODi5+3v8PDw8/T09PX29vb39/f5+fr7+/z8/Pz9/v7+zczCxgAABC5JREFUeAHN1ul3k0UUBvCb1CTVpmpaitAGSLSpSuKCLWpbTKNJFGlcSMAFF63iUmRccNG6gLbuxkXU66JAUef/9LSpmXnyLr3T5AO/rzl5zj137p136BISy44fKJXuGN/d19PUfYeO67Znqtf2KH33Id1psXoFdW30sPZ1sMvs2D060AHqws4FHeJojLZqnw53cmfvg+XR8mC0OEjuxrXEkX5ydeVJLVIlV0e10PXk5k7dYeHu7Cj1j+49uKg7uLU61tGLw1lq27ugQYlclHC4bgv7VQ+TAyj5Zc/UjsPvs1sd5cWryWObtvWT2EPa4rtnWW3JkpjggEpbOsPr7F7EyNewtpBIslA7p43HCsnwooXTEc3UmPmCNn5lrqTJxy6nRmcavGZVt/3Da2pD5NHvsOHJCrdc1G2r3DITpU7yic7w/7Rxnjc0kt5GC4djiv2Sz3Fb2iEZg41/ddsFDoyuYrIkmFehz0HR2thPgQqMyQYb2OtB0WxsZ3BeG3+wpRb1vzl2UYBog8FfGhttFKjtAclnZYrRo9ryG9uG/FZQU4AEg8ZE9LjGMzTmqKXPLnlWVnIlQQTvxJf8ip7VgjZjyVPrjw1te5otM7RmP7xm+sK2Gv9I8Gi++BRbEkR9EBw8zRUcKxwp73xkaLiqQb+kGduJTNHG72zcW9LoJgqQxpP3/Tj//c3yB0tqzaml05/+orHLksVO+95kX7/7qgJvnjlrfr2Ggsyx0eoy9uPzN5SPd86aXggOsEKW2Prz7du3VID3/tzs/sSRs2w7ovVHKtjrX2pd7ZMlTxAYfBAL9jiDwfLkq55Tm7ifhMlTGPyCAs7RFRhn47JnlcB9RM5T97ASuZXIcVNuUDIndpDbdsfrqsOppeXl5Y+XVKdjFCTh+zGaVuj0d9zy05PPK3QzBamxdwtTCrzyg/2Rvf2EstUjordGwa/kx9mSJLr8mLLtCW8HHGJc2R5hS219IiF6PnTusOqcMl57gm0Z8kanKMAQg0qSyuZfn7zItsbGyO9QlnxY0eCuD1XL2ys/MsrQhltE7Ug0uFOzufJFE2PxBo/YAx8XPPdDwWN0MrDRYIZF0mSMKCNHgaIVFoBbNoLJ7tEQDKxGF0kcLQimojCZopv0OkNOyWCCg9XMVAi7ARJzQdM2QUh0gmBozjc3Skg6dSBRqDGYSUOu66Zg+I2fNZs/M3/f/Grl/XnyF1Gw3VKCez0PN5IUfFLqvgUN4C0qNqYs5YhPL+aVZYDE4IpUk57oSFnJm4FyCqqOE0jhY2SMyLFoo56zyo6becOS5UVDdj7Vih0zp+tcMhwRpBeLyqtIjlJKAIZSbI8SGSF3k0pA3mR5tHuwPFoa7N7reoq2bqCsAk1HqCu5uvI1n6JuRXI+S1Mco54YmYTwcn6Aeic+kssXi8XpXC4V3t7/ADuTNKaQJdScAAAAAElFTkSuQmCC)](https://mybinder.org/v2/gh/pydata/xarray/main?urlpath=lab/tree/doc/examples/weather-data.ipynb) [![Twitter](https://img.shields.io/twitter/follow/xarray_dev?style=social)](https://x.com/xarray_dev) -[![image](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydata/xarray/refs/heads/main/doc/badge.json)](https://xarray.dev) **xarray** (pronounced "ex-array", formerly known as **xray**) is an open source project and Python package that makes working with labelled multi-dimensional arrays diff --git a/ci/requirement-linting/bare-minimum.yml b/ci/requirement-linting/bare-minimum.yml new file mode 100644 index 00000000000..5ba31759969 --- /dev/null +++ b/ci/requirement-linting/bare-minimum.yml @@ -0,0 +1,18 @@ +name: xarray-tests #! 
keep this file in sync with pixi.toml +channels: + - conda-forge + - nodefaults +dependencies: + - python=3.11 + - coveralls + - pip + - pytest + - pytest-asyncio + - pytest-cov + - pytest-env + - pytest-mypy-plugins + - pytest-timeout + - pytest-xdist + - numpy=1.26 + - packaging=24.1 + - pandas=2.2 diff --git a/ci/requirements/min-all-deps.yml b/ci/requirement-linting/min-all-deps.yml similarity index 95% rename from ci/requirements/min-all-deps.yml rename to ci/requirement-linting/min-all-deps.yml index add738630f1..4f90228294b 100644 --- a/ci/requirements/min-all-deps.yml +++ b/ci/requirement-linting/min-all-deps.yml @@ -1,4 +1,4 @@ -name: xarray-tests +name: xarray-tests #! keep this file in sync with pixi.toml channels: - conda-forge - nodefaults diff --git a/ci/requirements/all-but-dask.yml b/ci/requirements/all-but-dask.yml index 65780d91949..e69de29bb2d 100644 --- a/ci/requirements/all-but-dask.yml +++ b/ci/requirements/all-but-dask.yml @@ -1,43 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - - aiobotocore - - array-api-strict<2.4 - - boto3 - - bottleneck - - cartopy - - cftime - - coveralls - - flox - - h5netcdf - - h5py - - hdf5 - - hypothesis - - lxml # Optional dep of pydap - - matplotlib-base - - nc-time-axis - - netcdf4 - - numba - - numbagg - - numpy - - packaging - - pandas - - pint>=0.22 - - pip - - pydap - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - rasterio - - scipy - - seaborn - - sparse - - toolz - - typing_extensions - - zarr diff --git a/ci/requirements/all-but-numba.yml b/ci/requirements/all-but-numba.yml index 23c38cc8267..e69de29bb2d 100644 --- a/ci/requirements/all-but-numba.yml +++ b/ci/requirements/all-but-numba.yml @@ -1,55 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - # Pin a "very new numpy" (updated Sept 24, 2024) - - numpy>=2.2 - - aiobotocore - - array-api-strict<2.4 - - boto3 - - bottleneck - - cartopy - - cftime - - dask-core - - distributed - - flox - - fsspec - - h5netcdf - - h5py - - hdf5 - - hypothesis - - iris - - lxml # Optional dep of pydap - - matplotlib-base - - nc-time-axis - - netcdf4 - # numba, sparse, numbagg, numexpr often conflicts with newer versions of numpy. 
- # This environment helps us test xarray with the latest versions - # of numpy - # - numba - # - numbagg - # - numexpr - # - sparse - - opt_einsum - - packaging - - pandas - # - pint>=0.22 - - pip - - pooch - - pre-commit - - pyarrow # pandas raises a deprecation warning without this, breaking doctests - - pydap - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - rasterio - - scipy - - seaborn - - toolz - - typing_extensions - - zarr diff --git a/ci/requirements/bare-min-and-scipy.yml b/ci/requirements/bare-min-and-scipy.yml index d4a61586d82..e69de29bb2d 100644 --- a/ci/requirements/bare-min-and-scipy.yml +++ b/ci/requirements/bare-min-and-scipy.yml @@ -1,19 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - - python=3.11 - - coveralls - - pip - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - numpy=1.26 - - packaging=24.1 - - pandas=2.2 - - scipy=1.13 diff --git a/ci/requirements/bare-minimum.yml b/ci/requirements/bare-minimum.yml index 777ff09b3e6..e69de29bb2d 100644 --- a/ci/requirements/bare-minimum.yml +++ b/ci/requirements/bare-minimum.yml @@ -1,18 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - - python=3.11 - - coveralls - - pip - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - numpy=1.26 - - packaging=24.1 - - pandas=2.2 diff --git a/ci/requirements/doc.yml b/ci/requirements/doc.yml deleted file mode 100644 index 64ea08b73ff..00000000000 --- a/ci/requirements/doc.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: xarray-docs -channels: - # Don't change to pkgs/main, as it causes random timeouts in readthedocs - - conda-forge - - nodefaults -dependencies: - - python - - bottleneck - - cartopy - - cfgrib - - kerchunk - - dask-core - - hypothesis - - h5netcdf - - ipykernel - - ipywidgets # silence nbsphinx warning - - ipython - - iris - - jupyter_client - - jupyter_sphinx - - matplotlib-base - - nbsphinx - - ncdata - - netcdf4 - - numba - - numpy>=2.2 - - packaging - - pandas - - pooch - - pip - - pre-commit - - pyarrow - - pydata-sphinx-theme - - pyproj - - rich # for Zarr tree() - - scipy - - seaborn - - setuptools - - sparse - - sphinx-autosummary-accessors - - sphinx-copybutton - - sphinx-design - - sphinx-inline-tabs - - sphinx>=6,<8 - - sphinxcontrib-mermaid - - sphinxcontrib-srclinks - - sphinx-remove-toctrees - - sphinxext-opengraph - - sphinxext-rediraffe - - zarr - - pip: - # relative to this file. Needs to be editable to be accepted. - - -e ../.. 
diff --git a/ci/requirements/environment-3.14.yml b/ci/requirements/environment-3.14.yml index d4d47d85536..e69de29bb2d 100644 --- a/ci/requirements/environment-3.14.yml +++ b/ci/requirements/environment-3.14.yml @@ -1,64 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - - aiobotocore - - array-api-strict<2.4 - - boto3 - - bottleneck - - cartopy - - cftime - - dask-core - - distributed - - flox - - fsspec - - h5netcdf - - h5py - - hdf5 - - hypothesis - - iris - - lxml # Optional dep of pydap - - matplotlib-base - - nc-time-axis - - netcdf4 - # - numba - # - numbagg - - numexpr - - numpy - - opt_einsum - - packaging - - pandas - - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110 - # - pint>=0.22 - - pip - - pooch - - pre-commit - - pyarrow # pandas raises a deprecation warning without this, breaking doctests - - pydap - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - rasterio - - scipy - - seaborn - # - sparse - - toolz - - types-colorama - - types-docutils - - types-psutil - - types-Pygments - - types-python-dateutil - - types-pytz - - types-PyYAML - - types-setuptools - - typing_extensions - - zarr - - pip: - - jax # no way to get cpu-only jaxlib from conda if gpu is present - - types-defusedxml - - types-pexpect diff --git a/ci/requirements/environment-windows-3.14.yml b/ci/requirements/environment-windows-3.14.yml index e86d57beb95..e69de29bb2d 100644 --- a/ci/requirements/environment-windows-3.14.yml +++ b/ci/requirements/environment-windows-3.14.yml @@ -1,58 +0,0 @@ -name: xarray-tests -channels: - - conda-forge -dependencies: - - array-api-strict<2.4 - - boto3 - - bottleneck - - cartopy - - cftime - - dask-core - - distributed - - flox - - fsspec - - h5netcdf - - h5py - - hdf5 - - hypothesis - - iris - - lxml # Optional dep of pydap - - matplotlib-base - - nc-time-axis - - netcdf4 - # - numba - # - numbagg - - numpy - - packaging - - pandas - - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110 - # - pint>=0.22 - - pip - - pre-commit - - pyarrow # importing dask.dataframe raises an ImportError without this - - pydap - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - rasterio - - scipy - - seaborn - # - sparse - - toolz - - types-colorama - - types-docutils - - types-psutil - - types-Pygments - - types-python-dateutil - - types-pytz - - types-PyYAML - - types-setuptools - - typing_extensions - - zarr - - pip: - - types-defusedxml - - types-pexpect diff --git a/ci/requirements/environment-windows.yml b/ci/requirements/environment-windows.yml index 7c0d4dd9231..e69de29bb2d 100644 --- a/ci/requirements/environment-windows.yml +++ b/ci/requirements/environment-windows.yml @@ -1,58 +0,0 @@ -name: xarray-tests -channels: - - conda-forge -dependencies: - - array-api-strict<2.4 - - boto3 - - bottleneck - - cartopy - - cftime - - dask-core - - distributed - - flox - - fsspec - - h5netcdf - - h5py - - hdf5 - - hypothesis - - iris - - lxml # Optional dep of pydap - - matplotlib-base - - nc-time-axis - - netcdf4 - - numba - - numbagg - - numpy - - packaging - - pandas - - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110 - # - pint>=0.22 - - pip - - pre-commit - - pyarrow # importing dask.dataframe raises an ImportError without this - - pydap - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - 
pytest-timeout - - pytest-xdist - - rasterio - - scipy - - seaborn - - sparse - - toolz - - types-colorama - - types-docutils - - types-psutil - - types-Pygments - - types-python-dateutil - - types-pytz - - types-PyYAML - - types-setuptools - - typing_extensions - - zarr - - pip: - - types-defusedxml - - types-pexpect diff --git a/ci/requirements/environment.yml b/ci/requirements/environment.yml index f56b2bc1d1c..e69de29bb2d 100644 --- a/ci/requirements/environment.yml +++ b/ci/requirements/environment.yml @@ -1,68 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - - aiobotocore - - array-api-strict<2.4 - - boto3 - - bottleneck - - cartopy - - cftime - - dask-core - - distributed - - flox - - fsspec - - h5netcdf - - h5py - - hdf5 - - hypothesis - - iris - - lxml # Optional dep of pydap - - matplotlib-base - - mypy==1.18.1 - - nc-time-axis - - netcdf4 - - numba - - numbagg - - numexpr - - numpy>=2.2 - - opt_einsum - - packaging - - pandas - - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110 - # - pint>=0.22 - - pip - - pooch - - pre-commit - - pyarrow # pandas raises a deprecation warning without this, breaking doctests - - pydap - - pydap-server - - pytest - - pytest-asyncio - - pytest-cov - - pytest-env - - pytest-mypy-plugins - - pytest-timeout - - pytest-xdist - - rasterio - - scipy - - seaborn - - sparse - - toolz - - types-colorama - - types-docutils - - types-psutil - - types-Pygments - - types-python-dateutil - - types-pytz - - types-PyYAML - - types-requests - - types-setuptools - - types-openpyxl - - typing_extensions - - zarr - - pip: - - jax # no way to get cpu-only jaxlib from conda if gpu is present - - types-defusedxml - - types-pexpect diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 00000000000..4c04164e839 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,277 @@ +[workspace] +preview = ["pixi-build"] +channels = ["conda-forge", "nodefaults"] +platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"] + +[tasks.test-all] +depends-on = [ + { task = "test", environment = "test-default" }, + { task = "test", environment = "test-bare-min" }, + { task = "test", environment = "test-all-deps-py313" }, +] + +[environments] +# Testing +test-default = { features = ["test"] } +test-all-but-numba = { features = [ + "py313", + "test", + "backends", + "accel", + "dask", + "plotting", + "extras", +] } +test-all-but-dask = { features = [ + "py312", + "test", + "backends", + "accel", + "numba", + "plotting", + "extras", +] } +test-all-deps-py313 = { features = [ + "py313", + "test", + "backends", + "accel", + "numba", + "dask", + "plotting", + "extras", +] } +test-all-deps-py311 = { features = [ + "py311", + "test", + "backends", + "accel", + "numba", + "dask", + "plotting", + "extras", +] } + +test-with-typing-py311 = { features = [ + "py311", + "test", + "backends", + "accel", + "numba", + "dask", + "plotting", + "extras", + "typing", +] } + +test-with-typing-py313 = { features = [ + "py313", + "test", + "backends", + "accel", + "numba", + "dask", + "plotting", + "extras", + "typing", +] } + +test-bare-minimum = { features = ["test", "minimal"] } +test-bare-min-and-scipy = { features = [ + "test", + "minimal", + "minimum-scipy", +] } + +# Extra +typing = { features = ["typing"] } +doc = { features = [ + "doc", + "backends", + "test", + "accel", + "plotting", + "extras", +], solve-group = "doc" } +pre-commit = { features = ["pre-commit"], no-default-feature = true } + +[package] +name = "xarray" +version = "dynamic" # 
dynamic versioning needs better support in pixi https://github.com/prefix-dev/pixi/issues/2923#issuecomment-2598460666 . Putting `version = "dynamic"` here for now until pixi recommends something else. + +[package.build] +backend = { name = "pixi-build-python", version = "==0.4.0" } + +[package.host-dependencies] +setuptools = "*" +setuptools_scm = "*" + +[package.run-dependencies] +python = "*" +numpy = "*" +pandas = "*" + +packaging = "24.1.*" #? Can be removed? +git = "*" # needed for dynamic versioning + +[dependencies] +xarray = { path = "." } + +[target.linux-64.dependencies] +pydap-server = "*" + +[feature.minimal.dependencies] +# minimal versions +python = "3.11.*" +numpy = "1.26.*" +pandas = "2.2.*" + +[feature.minimum-scipy.dependencies] +scipy = "1.13.*" + +[feature.py311.dependencies] +python = "3.11.*" + +[feature.py312.dependencies] +python = "3.12.*" + +[feature.py313.dependencies] +python = "3.13.*" + +[feature.backends.dependencies] +# files +h5netcdf = "*" +h5py = "*" +hdf5 = "*" +netcdf4 = "*" +zarr = "*" +rasterio = "*" + +# opendap +pydap = "*" +lxml = "*" # Optional dep of pydap + +# s3 +boto3 = "*" +fsspec = "*" +aiobotocore = "*" + +[feature.numba.dependencies] +numba = "*" +numbagg = "*" + +[feature.dask.dependencies] +dask = "*" +distributed = "*" + +[feature.accel.dependencies] +flox = "*" +bottleneck = "*" +numexpr = "*" +pyarrow = "*" +opt_einsum = "*" + +[feature.plotting.dependencies] +cartopy = "*" +matplotlib-base = "*" +nc-time-axis = "*" +seaborn = "*" + +[feature.extras.dependencies] +# array +array-api-strict = "<2.4" +sparse = "*" + +# algorithms +scipy = "*" +toolz = "*" + +# tutorial +pooch = "*" + +# other +cftime = "*" +pint = "*" +iris = "*" + +[feature.extras.pypi-dependencies] +# array +jax = "*" # no way to get cpu-only jaxlib from conda if gpu is present + +[feature.test.dependencies] +pytest = "*" +pytest-asyncio = "*" +pytest-cov = "*" +pytest-env = "*" +pytest-mypy-plugins = "*" +pytest-timeout = "*" +pytest-xdist = "*" +hypothesis = "*" +cftime = "*" # https://github.com/pydata/xarray/pull/10888#issuecomment-3481432315 +coveralls = "*" + +[feature.test.tasks] +test = { cmd = "pytest" } + +[feature.doc.dependencies] +kerchunk = "*" +ipykernel = "*" +ipywidgets = "*" # silence nbsphinx warning +ipython = '*' +jupyter_client = '*' +jupyter_sphinx = '*' +nbsphinx = '*' +ncdata = '*' +pydata-sphinx-theme = "*" +pyproj = "*" +rich = "*" # for Zarr tree() +setuptools = "*" +sphinx-autosummary-accessors = "*" +sphinx-copybutton = "*" +sphinx-design = "*" +sphinx-inline-tabs = "*" +sphinx = ">=6,<8" +sphinxcontrib-mermaid = "*" +sphinxcontrib-srclinks = "*" +sphinx-remove-toctrees = "*" +sphinxext-opengraph = "*" +sphinxext-rediraffe = "*" + +[feature.doc.pypi-dependencies] +cfgrib = "*" # pypi dep because of https://github.com/prefix-dev/pixi/issues/3032#issuecomment-3302638043 + +[feature.doc.tasks] +doc = { cmd = "make clean && make html", cwd = "doc" } + + +[feature.typing.dependencies] +mypy = "==1.18.1" +pyright = "*" +hypothesis = "*" +lxml = "*" +pandas-stubs = "<=2.2.3.241126" # https://github.com/pydata/xarray/issues/10110 +types-colorama = "*" +types-docutils = "*" +types-psutil = "*" +types-Pygments = "*" +types-python-dateutil = "*" +types-pytz = "*" +types-PyYAML = "*" +types-requests = "*" +types-setuptools = "*" +types-openpyxl = "*" +typing_extensions = "*" +pip = "*" + +[feature.typing.pypi-dependencies] +types-defusedxml = "*" +types-pexpect = "*" + +[feature.typing.tasks] +mypy = "mypy --install-types --non-interactive 
--cobertura-xml-report mypy_report" + +[feature.pre-commit.dependencies] +pre-commit = "*" + +[feature.pre-commit.tasks] +pre-commit = { cmd = "pre-commit" }
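
For reference, a minimal local-usage sketch based on the environments and tasks declared in the `pixi.toml` added above. The environment names (`test-all-deps-py313`, `test-with-typing-py313`, `doc`, `pre-commit`) and task names (`test`, `mypy`, `doc`) are taken from this diff; the `pixi` subcommands and the `-e/--environment` flag are standard pixi CLI usage, shown here only as an illustration and not as part of the patch:

```sh
# Solve and install one of the environments defined under [environments]
pixi install -e test-all-deps-py313

# Run the test suite via the "test" task defined under [feature.test.tasks]
pixi run -e test-all-deps-py313 test

# Type checking and the docs build use their own environments and tasks
pixi run -e test-with-typing-py313 mypy
pixi run -e doc doc

# pre-commit lives in a dedicated environment with no default feature
pixi run -e pre-commit pre-commit run --all-files

# Regenerate pixi.lock (gitignored; CI caches it via cache-pixi-lock.yml)
pixi lock
```

CI follows the same pattern: each workflow job restores a cached `pixi.lock` keyed on the date, pixi version, and `hashFiles('pixi.toml')`, installs the selected environment with `prefix-dev/setup-pixi`, and then invokes commands through `pixi run -e $PIXI_ENV ...`.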