diff --git a/.gitattributes b/.gitattributes
index 7a79ddd6b0b..3c57696a336 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -2,3 +2,5 @@
doc/whats-new.rst merge=union
# allow installing from git archives
.git_archival.txt export-subst
+# SCM syntax highlighting & preventing 3-way merges
+pixi.lock merge=binary linguist-language=YAML linguist-generated=true
diff --git a/.github/workflows/cache-pixi-lock.yml b/.github/workflows/cache-pixi-lock.yml
new file mode 100644
index 00000000000..bf1e190935e
--- /dev/null
+++ b/.github/workflows/cache-pixi-lock.yml
@@ -0,0 +1,52 @@
+name: Generate and cache Pixi lockfile
+
+on:
+ workflow_call:
+ inputs:
+ pixi-version:
+ type: string
+ outputs:
+ cache-id:
+      description: "Cache key under which the generated pixi.lock was stored"
+ value: ${{ jobs.cache-pixi-lock.outputs.cache-id }}
+
+jobs:
+ cache-pixi-lock:
+ name: Generate output
+ runs-on: ubuntu-latest
+ outputs:
+ cache-id: ${{ steps.restore.outputs.cache-primary-key }}
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Get current date
+ id: date
+ run: echo "date=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"
+ - uses: actions/cache/restore@v4
+ id: restore
+ with:
+ path: |
+ pixi.lock
+ key: ${{ steps.date.outputs.date }}_${{ inputs.pixi-version }}_${{hashFiles('pixi.toml')}}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ if: ${{ !steps.restore.outputs.cache-hit }}
+ with:
+ pixi-version: ${{ inputs.pixi-version }}
+ run-install: false
+ - name: Run pixi lock
+ if: ${{ !steps.restore.outputs.cache-hit }}
+ run: pixi lock
+ - uses: actions/cache/save@v4
+ if: ${{ !steps.restore.outputs.cache-hit }}
+ id: cache
+ with:
+ path: |
+ pixi.lock
+ key: ${{ steps.restore.outputs.cache-primary-key }}
+ - name: Upload pixi.lock
+ uses: actions/upload-artifact@v4
+ with:
+ name: pixi-lock
+ path: pixi.lock
diff --git a/.github/workflows/ci-additional.yaml b/.github/workflows/ci-additional.yaml
index bb5a1217c18..049febb0a12 100644
--- a/.github/workflows/ci-additional.yaml
+++ b/.github/workflows/ci-additional.yaml
@@ -14,6 +14,7 @@ concurrency:
env:
FORCE_COLOR: 3
+ PIXI_VERSION: "v0.58.0"
jobs:
detect-ci-trigger:
@@ -32,19 +33,21 @@ jobs:
id: detect-trigger
with:
keyword: "[skip-ci]"
-
+ cache-pixi-lock:
+ uses: ./.github/workflows/cache-pixi-lock.yml
+ with:
+ pixi-version: "v0.58.0" # keep in sync with env var above
doctest:
name: Doctests
runs-on: "ubuntu-latest"
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
if: needs.detect-ci-trigger.outputs.triggered == 'false'
defaults:
run:
shell: bash -l {0}
env:
- CONDA_ENV_FILE: ci/requirements/environment.yml
- PYTHON_VERSION: "3.12"
+ PIXI_ENV: test-all-deps-py313
steps:
- uses: actions/checkout@v5
with:
@@ -54,22 +57,24 @@ jobs:
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
with:
- environment-file: ${{env.CONDA_ENV_FILE}}
- environment-name: xarray-tests
- create-args: >-
- python=${{env.PYTHON_VERSION}}
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ with:
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ env.PIXI_ENV }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
- name: Version info
run: |
- python xarray/util/print_versions.py
+ pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py
- name: Run doctests
run: |
# Raise an error if there are warnings in the doctests, with `-Werror`.
@@ -78,49 +83,47 @@ jobs:
#
# If dependencies emit warnings we can't do anything about, add ignores to
# `xarray/tests/__init__.py`.
- python -m pytest --doctest-modules xarray --ignore xarray/tests -Werror
+ pixi run -e ${{env.PIXI_ENV}} python -m pytest --doctest-modules xarray --ignore xarray/tests -Werror
mypy:
name: Mypy
runs-on: "ubuntu-latest"
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
defaults:
run:
shell: bash -l {0}
env:
- CONDA_ENV_FILE: ci/requirements/environment.yml
- PYTHON_VERSION: "3.12"
+ PIXI_ENV: test-with-typing-py313
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
-
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
+ with:
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ with:
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ env.PIXI_ENV }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
- name: set environment variables
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-file: ${{env.CONDA_ENV_FILE}}
- environment-name: xarray-tests
- create-args: >-
- python=${{env.PYTHON_VERSION}}
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
+ echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV
- name: Version info
run: |
- python xarray/util/print_versions.py
- - name: Install mypy
- run: |
- python -m pip install "mypy==1.18.1" --force-reinstall
+ pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py
- name: Run mypy
run: |
- python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
+ pixi run -e ${{env.PIXI_ENV}} python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/codecov-action@v5.5.1
@@ -134,44 +137,42 @@ jobs:
mypy-min:
name: Mypy 3.11
runs-on: "ubuntu-latest"
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
defaults:
run:
shell: bash -l {0}
env:
- CONDA_ENV_FILE: ci/requirements/environment.yml
- PYTHON_VERSION: "3.11"
+ PIXI_ENV: test-with-typing-py311
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
-
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
+ with:
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ with:
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ env.PIXI_ENV }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
- name: set environment variables
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-file: ${{env.CONDA_ENV_FILE}}
- environment-name: xarray-tests
- create-args: >-
- python=${{env.PYTHON_VERSION}}
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
+ echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV
- name: Version info
run: |
- python xarray/util/print_versions.py
- - name: Install mypy
- run: |
- python -m pip install "mypy==1.18.1" --force-reinstall
+ pixi run -e ${{env.PIXI_ENV}} python xarray/util/print_versions.py
- name: Run mypy
run: |
- python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
+ pixi run -e ${{env.PIXI_ENV}} python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/codecov-action@v5.5.1
@@ -183,9 +184,13 @@ jobs:
fail_ci_if_error: false
pyright:
- name: Pyright
+    name: "Pyright | pixi shell -e ${{ matrix.pixi-env }}"
runs-on: "ubuntu-latest"
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
+ strategy:
+ fail-fast: false
+ matrix:
+ pixi-env: ["test-with-typing-py313", "test-with-typing-py311"]
if: |
always()
&& (
@@ -194,102 +199,43 @@ jobs:
defaults:
run:
shell: bash -l {0}
- env:
- CONDA_ENV_FILE: ci/requirements/environment.yml
- PYTHON_VERSION: "3.12"
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
- - name: set environment variables
- run: |
- echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
with:
- environment-file: ${{env.CONDA_ENV_FILE}}
- environment-name: xarray-tests
- create-args: >-
- python=${{env.PYTHON_VERSION}}
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
- - name: Version info
- run: |
- python xarray/util/print_versions.py
- - name: Install pyright
- run: |
- python -m pip install pyright --force-reinstall
-
- - name: Run pyright
- run: |
- python -m pyright xarray/
-
- - name: Upload pyright coverage to Codecov
- uses: codecov/codecov-action@v5.5.1
- with:
- file: pyright_report/cobertura.xml
- flags: pyright
- env_vars: PYTHON_VERSION
- name: codecov-umbrella
- fail_ci_if_error: false
-
- pyright39:
- name: Pyright 3.11
- runs-on: "ubuntu-latest"
- needs: detect-ci-trigger
- if: |
- always()
- && (
- contains( github.event.pull_request.labels.*.name, 'run-pyright')
- )
- defaults:
- run:
- shell: bash -l {0}
- env:
- CONDA_ENV_FILE: ci/requirements/environment.yml
- PYTHON_VERSION: "3.11"
-
- steps:
- - uses: actions/checkout@v5
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
with:
- fetch-depth: 0 # Fetch all history for all branches and tags.
-
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ matrix.pixi-env }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
- name: set environment variables
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-file: ${{env.CONDA_ENV_FILE}}
- environment-name: xarray-tests
- create-args: >-
- python=${{env.PYTHON_VERSION}}
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
+ echo "PYTHON_VERSION=$(pixi run -e ${{ matrix.pixi-env }} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV
- name: Version info
run: |
- python xarray/util/print_versions.py
- - name: Install pyright
- run: |
- python -m pip install pyright --force-reinstall
+ pixi run -e ${{ matrix.pixi-env }} python xarray/util/print_versions.py
- name: Run pyright
run: |
- python -m pyright xarray/
+ pixi run -e ${{ matrix.pixi-env }} python -m pyright xarray/
- name: Upload pyright coverage to Codecov
uses: codecov/codecov-action@v5.5.1
with:
file: pyright_report/cobertura.xml
- flags: pyright39
+ flags: pyright
env_vars: PYTHON_VERSION
name: codecov-umbrella
fail_ci_if_error: false
@@ -298,7 +244,9 @@ jobs:
name: Minimum Version Policy
runs-on: "ubuntu-latest"
needs: detect-ci-trigger
- if: needs.detect-ci-trigger.outputs.triggered == 'false'
+ # min-version-policy doesn't work with Pixi yet https://github.com/pydata/xarray/pull/10888#discussion_r2504335457
+ if: false
+ # if: needs.detect-ci-trigger.outputs.triggered == 'false'
defaults:
run:
shell: bash -l {0}
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 414d5ad2549..ca07a882328 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -14,6 +14,7 @@ concurrency:
env:
FORCE_COLOR: 3
+ PIXI_VERSION: "v0.58.0"
jobs:
detect-ci-trigger:
@@ -32,10 +33,15 @@ jobs:
id: detect-trigger
with:
keyword: "[skip-ci]"
+
+ cache-pixi-lock:
+ uses: ./.github/workflows/cache-pixi-lock.yml
+ with:
+ pixi-version: "v0.58.0" # keep in sync with env var above
test:
- name: ${{ matrix.os }} py${{ matrix.python-version }} ${{ matrix.env }}
+ name: "${{ matrix.os }} | pixi shell -e ${{ matrix.pixi-env }}"
runs-on: ${{ matrix.os }}
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
if: needs.detect-ci-trigger.outputs.triggered == 'false'
defaults:
run:
@@ -45,110 +51,87 @@ jobs:
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
# Bookend python versions
- python-version: ["3.11", "3.13"]
- env: [""]
+ pixi-env: ["test-all-deps-py311", "test-all-deps-py313"]
+ pytest-addopts: [""]
include:
# Minimum python version:
- - env: "bare-minimum"
- python-version: "3.11"
+ - pixi-env: "test-bare-minimum"
os: ubuntu-latest
- - env: "bare-min-and-scipy"
- python-version: "3.11"
+ - pixi-env: "test-bare-min-and-scipy"
os: ubuntu-latest
- - env: "min-all-deps"
- python-version: "3.11"
+ - pixi-env: "test-min-versions"
os: ubuntu-latest
# Latest python version:
- - env: "all-but-numba"
- python-version: "3.13"
+ - pixi-env: "test-all-but-numba"
os: ubuntu-latest
- - env: "all-but-dask"
- python-version: "3.12"
+ - pixi-env: "test-all-but-dask"
os: ubuntu-latest
- - env: "flaky"
- python-version: "3.13"
+ - pixi-env: "test-all-deps-py313"
+ pytest-addopts: "flaky"
os: ubuntu-latest
# The mypy tests must be executed using only 1 process in order to guarantee
# predictable mypy output messages for comparison to expectations.
- - env: "mypy"
- python-version: "3.11"
+ - pixi-env: "test-with-typing-py311"
+ pytest-addopts: "mypy"
numprocesses: 1
os: ubuntu-latest
- - env: "mypy"
- python-version: "3.13"
+ - pixi-env: "test-with-typing-py313"
numprocesses: 1
os: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
+ with:
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ with:
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ matrix.pixi-env }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
+
- name: Set environment variables
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
+ echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV
- if [[ ${{ matrix.os }} == windows* ]] ;
- then
- if [[ ${{ matrix.python-version }} != "3.14" ]]; then
- echo "CONDA_ENV_FILE=ci/requirements/environment-windows.yml" >> $GITHUB_ENV
- else
- echo "CONDA_ENV_FILE=ci/requirements/environment-windows-3.14.yml" >> $GITHUB_ENV
- fi
- elif [[ "${{ matrix.env }}" != "" ]] ;
+ if [[ "${{ matrix.pytest-addopts }}" != "" ]] ;
then
- if [[ "${{ matrix.env }}" == "flaky" ]] ;
+ if [[ "${{ matrix.pytest-addopts }}" == "flaky" ]] ;
then
- echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV
echo "PYTEST_ADDOPTS=-m 'flaky or network' --run-flaky --run-network-tests -W default" >> $GITHUB_ENV
- elif [[ "${{ matrix.env }}" == "mypy" ]] ;
+ elif [[ "${{ matrix.pytest-addopts }}" == "mypy" ]] ;
then
- echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV
echo "PYTEST_ADDOPTS=-n 1 -m 'mypy' --run-mypy -W default" >> $GITHUB_ENV
- else
- echo "CONDA_ENV_FILE=ci/requirements/${{ matrix.env }}.yml" >> $GITHUB_ENV
fi
- if [[ "${{ matrix.env }}" == "min-all-deps" ]] ;
+ if [[ "${{ matrix.pixi-env }}" == "min-all-deps" ]] ;
then
# Don't raise on warnings
echo "PYTEST_ADDOPTS=-W default" >> $GITHUB_ENV
fi
- else
- if [[ ${{ matrix.python-version }} != "3.14" ]]; then
- echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV
- else
- echo "CONDA_ENV_FILE=ci/requirements/environment-3.14.yml" >> $GITHUB_ENV
- fi
fi
- echo "PYTHON_VERSION=${{ matrix.python-version }}" >> $GITHUB_ENV
-
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-file: ${{ env.CONDA_ENV_FILE }}
- environment-name: xarray-tests
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{matrix.python-version}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
- create-args: >-
- python=${{matrix.python-version}}
-
# We only want to install this on one run, because otherwise we'll have
# duplicate annotations.
- name: Install error reporter
- if: ${{ matrix.os }} == 'ubuntu-latest' and ${{ matrix.python-version }} == '3.12'
- run: |
- python -m pip install pytest-github-actions-annotate-failures
-
- - name: Install xarray
+      if: ${{ matrix.os == 'ubuntu-latest' && matrix.pixi-env == 'test-all-deps-py313' }}
run: |
- python -m pip install --no-deps -e .
+ pixi add --pypi pytest-github-actions-annotate-failures
- name: Version info
run: |
- python xarray/util/print_versions.py
+ pixi run -e ${{ matrix.pixi-env }} python xarray/util/print_versions.py
- name: Import xarray
run: |
- python -c "import xarray"
+ pixi run -e ${{ matrix.pixi-env }} python -c "import xarray"
- name: Restore cached hypothesis directory
uses: actions/cache@v4
@@ -159,7 +142,8 @@ jobs:
save-always: true
- name: Run tests
- run: python -m pytest -n ${{ matrix.numprocesses || 4 }}
+ run:
+ pixi run -e ${{ matrix.pixi-env }} python -m pytest -n ${{ matrix.numprocesses || 4 }}
--timeout 180
--cov=xarray
--cov-report=xml
@@ -169,7 +153,7 @@ jobs:
if: always()
uses: actions/upload-artifact@v5
with:
- name: Test results for ${{ runner.os }}-${{ matrix.python-version }} ${{ matrix.env }}
+ name: Test results for OS ${{ runner.os }} pixi-env ${{ matrix.pixi-env }} pytest-addopts ${{ matrix.pytest-addopts }}
path: pytest.xml
- name: Upload code coverage to Codecov
diff --git a/.github/workflows/hypothesis.yaml b/.github/workflows/hypothesis.yaml
index 50e6557aea8..d782860e917 100644
--- a/.github/workflows/hypothesis.yaml
+++ b/.github/workflows/hypothesis.yaml
@@ -13,6 +13,7 @@ on:
env:
FORCE_COLOR: 3
+ PIXI_VERSION: "v0.58.0"
jobs:
detect-ci-trigger:
@@ -32,10 +33,15 @@ jobs:
with:
keyword: "[skip-ci]"
+ cache-pixi-lock:
+ uses: ./.github/workflows/cache-pixi-lock.yml
+ with:
+ pixi-version: "v0.58.0" # keep in sync with env var above
+
hypothesis:
name: Slow Hypothesis Tests
runs-on: "ubuntu-latest"
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
if: |
always()
&& (
@@ -48,7 +54,7 @@ jobs:
shell: bash -l {0}
env:
- CONDA_ENV_FILE: ci/requirements/environment.yml
+ PIXI_ENV: test-all-deps-py313
PYTHON_VERSION: "3.12"
steps:
@@ -56,27 +62,29 @@ jobs:
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
+ with:
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ with:
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ env.PIXI_ENV }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
+
- name: set environment variables
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
+ echo "PYTHON_VERSION=$(pixi run -e ${{env.PIXI_ENV}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV
- - name: Setup micromamba
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-file: ci/requirements/environment.yml
- environment-name: xarray-tests
- create-args: >-
- python=${{env.PYTHON_VERSION}}
- pytest-reportlog
- cache-environment: true
- cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
-
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
- name: Version info
run: |
- python xarray/util/print_versions.py
+ pixi run -e ${{ env.PIXI_ENV }} python xarray/util/print_versions.py
# https://github.com/actions/cache/blob/main/tips-and-workarounds.md#update-a-cache
- name: Restore cached hypothesis directory
@@ -92,7 +100,7 @@ jobs:
if: success()
id: status
run: |
- python -m pytest --hypothesis-show-statistics --run-slow-hypothesis properties/*.py \
+ pixi run -e ${{ env.PIXI_ENV }} python -m pytest --hypothesis-show-statistics --run-slow-hypothesis properties/*.py \
--report-log output-${{ matrix.python-version }}-log.jsonl
# explicitly save the cache so it gets updated, also do this even if it fails.
diff --git a/.github/workflows/upstream-dev-ci.yaml b/.github/workflows/upstream-dev-ci.yaml
index bd026da2272..a0e41bacd28 100644
--- a/.github/workflows/upstream-dev-ci.yaml
+++ b/.github/workflows/upstream-dev-ci.yaml
@@ -17,6 +17,7 @@ concurrency:
env:
FORCE_COLOR: 3
+ PIXI_VERSION: "v0.58.0"
jobs:
detect-ci-trigger:
@@ -35,11 +36,14 @@ jobs:
id: detect-trigger
with:
keyword: "[test-upstream]"
-
+ cache-pixi-lock:
+ uses: ./.github/workflows/cache-pixi-lock.yml
+ with:
+ pixi-version: "v0.58.0" # keep in sync with env var above
upstream-dev:
name: upstream-dev
runs-on: ubuntu-latest
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
if: |
always()
&& (
@@ -53,37 +57,24 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.12"]
+ pixi-env: ["test-nightly"]
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
- - name: Set up conda environment
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-file: ci/requirements/environment.yml
- environment-name: xarray-tests
- create-args: >-
- python=${{ matrix.python-version }}
- pytest-reportlog
- - name: Install upstream versions
- run: |
- bash ci/install-upstream-wheels.sh
- - name: Install xarray
- run: |
- python -m pip install --no-deps -e .
+
- name: Version info
run: |
- python xarray/util/print_versions.py
+ pixi run -e ${{matrix.pixi-env}} python xarray/util/print_versions.py
- name: Import xarray
run: |
- python -c 'import xarray'
+ pixi run -e ${{matrix.pixi-env}} python -c 'import xarray'
- name: Run Tests
if: success()
id: status
run: |
- python -m pytest --timeout=60 -rf -nauto \
- --report-log output-${{ matrix.python-version }}-log.jsonl
+ pixi run -e ${{matrix.pixi-env}} python -m pytest --timeout=60 -rf -nauto \
+ --report-log output-${{ matrix.pixi-env }}-log.jsonl
- name: Generate and publish the report
if: |
failure()
@@ -92,12 +83,12 @@ jobs:
&& github.repository_owner == 'pydata'
uses: scientific-python/issue-from-pytest-log-action@v1
with:
- log-path: output-${{ matrix.python-version }}-log.jsonl
+ log-path: output-${{ matrix.pixi-env }}-log.jsonl
mypy-upstream-dev:
name: mypy-upstream-dev
runs-on: ubuntu-latest
- needs: detect-ci-trigger
+ needs: [detect-ci-trigger, cache-pixi-lock]
if: |
always()
&& (
@@ -109,34 +100,37 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.11"]
+ pixi-env: ["test-nightly"]
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Fetch all history for all branches and tags.
- - name: Set up conda environment
- uses: mamba-org/setup-micromamba@v2
+
+ - name: Restore cached pixi lockfile
+ uses: actions/cache/restore@v4
+ id: restore-pixi-lock
with:
- environment-file: ci/requirements/environment.yml
- environment-name: xarray-tests
- create-args: >-
- python=${{ matrix.python-version }}
- pytest-reportlog
- - name: Install upstream versions
- run: |
- bash ci/install-upstream-wheels.sh
- - name: Install xarray
+ enableCrossOsArchive: true
+ path: |
+ pixi.lock
+ key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+ - uses: prefix-dev/setup-pixi@v0.9.0
+ with:
+ pixi-version: ${{ env.PIXI_VERSION }}
+ cache: true
+ environments: ${{ matrix.pixi-env }}
+ cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
+
+ - name: set environment variables
run: |
- python -m pip install --no-deps -e .
+ echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
+ echo "PYTHON_VERSION=$(pixi run -e ${{matrix.pixi-env}} python --version | cut -d' ' -f2 | cut -d. -f1,2)" >> $GITHUB_ENV
- name: Version info
run: |
- python xarray/util/print_versions.py
- - name: Install mypy
- run: |
- python -m pip install mypy --force-reinstall
+ pixi run -e ${{matrix.pixi-env}} python xarray/util/print_versions.py
- name: Run mypy
run: |
- python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
+ pixi run -e ${{matrix.pixi-env}} python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/codecov-action@v5.5.1
with:
diff --git a/.gitignore b/.gitignore
index 19dceefd192..bb2b49c2cd4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -91,3 +91,7 @@ doc/videos-gallery.txt
uv.lock
mypy_report/
xarray-docs/
+
+# pixi environments
+.pixi
+pixi.lock
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 57f93911f5d..f55a64213a1 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -7,16 +7,23 @@ sphinx:
build:
os: ubuntu-lts-latest
tools:
- python: mambaforge-latest
+ # just so RTD stops complaining
+ python: "latest"
jobs:
+ create_environment:
+ - asdf plugin add pixi
+ - asdf install pixi latest
+ - asdf global pixi latest
post_checkout:
- (git --no-pager log --pretty="tformat:%s" -1 | grep -vqF "[skip-rtd]") || exit 183
- git fetch --unshallow || true
pre_install:
- - git update-index --assume-unchanged doc/conf.py ci/requirements/doc.yml
-
-conda:
- environment: ci/requirements/doc.yml
+ - git update-index --assume-unchanged doc/conf.py
+ install:
+ - pixi install -e doc
+ build:
+ html:
+ - pixi run doc BUILDDIR=$READTHEDOCS_OUTPUT
formats:
- htmlzip
diff --git a/README.md b/README.md
index e7c168d8f0d..14d621aabf7 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,7 @@
# xarray: N-D labeled arrays and datasets
+[](https://xarray.dev)
+[](https://pixi.sh)
[](https://github.com/pydata/xarray/actions/workflows/ci.yaml?query=branch%3Amain)
[](https://codecov.io/gh/pydata/xarray)
[](https://docs.xarray.dev/)
@@ -12,7 +14,6 @@
[](https://doi.org/10.5281/zenodo.598201)
[](https://mybinder.org/v2/gh/pydata/xarray/main?urlpath=lab/tree/doc/examples/weather-data.ipynb)
[](https://x.com/xarray_dev)
-[](https://xarray.dev)
**xarray** (pronounced "ex-array", formerly known as **xray**) is an open source project and Python
package that makes working with labelled multi-dimensional arrays
diff --git a/ci/install-upstream-wheels.sh b/ci/install-upstream-wheels.sh
deleted file mode 100755
index f474fbb0df9..00000000000
--- a/ci/install-upstream-wheels.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-
-if which micromamba >/dev/null; then
- conda=micromamba
-elif which mamba >/dev/null; then
- conda=mamba
-else
- conda=conda
-fi
-
-# temporarily (?) remove numbagg and numba
-$conda remove -y numba numbagg sparse
-# temporarily remove numexpr
-$conda remove -y numexpr
-# forcibly remove packages to avoid artifacts
-$conda remove -y --force \
- numpy \
- scipy \
- pandas \
- distributed \
- fsspec \
- zarr \
- cftime \
- packaging \
- bottleneck \
- flox
- # pint
-
-# to limit the runtime of Upstream CI
-python -m pip install \
- -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \
- --no-deps \
- --pre \
- --upgrade \
- numpy \
- scipy \
- matplotlib \
- pandas \
- pyarrow
-# manually install `pint`, `donfig`, and `crc32c` to pull in new dependencies
-python -m pip install --upgrade pint donfig google_crc32c
-python -m pip install \
- --no-deps \
- --upgrade \
- git+https://github.com/dask/dask \
- git+https://github.com/dask/dask-expr \
- git+https://github.com/dask/distributed \
- git+https://github.com/zarr-developers/zarr-python \
- git+https://github.com/Unidata/cftime \
- git+https://github.com/pypa/packaging \
- git+https://github.com/hgrecco/pint \
- git+https://github.com/pydata/bottleneck \
- git+https://github.com/intake/filesystem_spec \
- git+https://github.com/SciTools/nc-time-axis \
- git+https://github.com/xarray-contrib/flox \
- git+https://github.com/h5netcdf/h5netcdf \
- git+https://github.com/dgasmith/opt_einsum
- # git+https://github.com/pydata/sparse
diff --git a/ci/requirements/all-but-dask.yml b/ci/requirements/all-but-dask.yml
deleted file mode 100644
index 65780d91949..00000000000
--- a/ci/requirements/all-but-dask.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
- - nodefaults
-dependencies:
- - aiobotocore
- - array-api-strict<2.4
- - boto3
- - bottleneck
- - cartopy
- - cftime
- - coveralls
- - flox
- - h5netcdf
- - h5py
- - hdf5
- - hypothesis
- - lxml # Optional dep of pydap
- - matplotlib-base
- - nc-time-axis
- - netcdf4
- - numba
- - numbagg
- - numpy
- - packaging
- - pandas
- - pint>=0.22
- - pip
- - pydap
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - rasterio
- - scipy
- - seaborn
- - sparse
- - toolz
- - typing_extensions
- - zarr
diff --git a/ci/requirements/all-but-numba.yml b/ci/requirements/all-but-numba.yml
deleted file mode 100644
index 23c38cc8267..00000000000
--- a/ci/requirements/all-but-numba.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
- - nodefaults
-dependencies:
- # Pin a "very new numpy" (updated Sept 24, 2024)
- - numpy>=2.2
- - aiobotocore
- - array-api-strict<2.4
- - boto3
- - bottleneck
- - cartopy
- - cftime
- - dask-core
- - distributed
- - flox
- - fsspec
- - h5netcdf
- - h5py
- - hdf5
- - hypothesis
- - iris
- - lxml # Optional dep of pydap
- - matplotlib-base
- - nc-time-axis
- - netcdf4
- # numba, sparse, numbagg, numexpr often conflicts with newer versions of numpy.
- # This environment helps us test xarray with the latest versions
- # of numpy
- # - numba
- # - numbagg
- # - numexpr
- # - sparse
- - opt_einsum
- - packaging
- - pandas
- # - pint>=0.22
- - pip
- - pooch
- - pre-commit
- - pyarrow # pandas raises a deprecation warning without this, breaking doctests
- - pydap
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - rasterio
- - scipy
- - seaborn
- - toolz
- - typing_extensions
- - zarr
diff --git a/ci/requirements/bare-min-and-scipy.yml b/ci/requirements/bare-min-and-scipy.yml
deleted file mode 100644
index d4a61586d82..00000000000
--- a/ci/requirements/bare-min-and-scipy.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
- - nodefaults
-dependencies:
- - python=3.11
- - coveralls
- - pip
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - numpy=1.26
- - packaging=24.1
- - pandas=2.2
- - scipy=1.13
diff --git a/ci/requirements/bare-minimum.yml b/ci/requirements/bare-minimum.yml
deleted file mode 100644
index 777ff09b3e6..00000000000
--- a/ci/requirements/bare-minimum.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
- - nodefaults
-dependencies:
- - python=3.11
- - coveralls
- - pip
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - numpy=1.26
- - packaging=24.1
- - pandas=2.2
diff --git a/ci/requirements/doc.yml b/ci/requirements/doc.yml
deleted file mode 100644
index 64ea08b73ff..00000000000
--- a/ci/requirements/doc.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-name: xarray-docs
-channels:
- # Don't change to pkgs/main, as it causes random timeouts in readthedocs
- - conda-forge
- - nodefaults
-dependencies:
- - python
- - bottleneck
- - cartopy
- - cfgrib
- - kerchunk
- - dask-core
- - hypothesis
- - h5netcdf
- - ipykernel
- - ipywidgets # silence nbsphinx warning
- - ipython
- - iris
- - jupyter_client
- - jupyter_sphinx
- - matplotlib-base
- - nbsphinx
- - ncdata
- - netcdf4
- - numba
- - numpy>=2.2
- - packaging
- - pandas
- - pooch
- - pip
- - pre-commit
- - pyarrow
- - pydata-sphinx-theme
- - pyproj
- - rich # for Zarr tree()
- - scipy
- - seaborn
- - setuptools
- - sparse
- - sphinx-autosummary-accessors
- - sphinx-copybutton
- - sphinx-design
- - sphinx-inline-tabs
- - sphinx>=6,<8
- - sphinxcontrib-mermaid
- - sphinxcontrib-srclinks
- - sphinx-remove-toctrees
- - sphinxext-opengraph
- - sphinxext-rediraffe
- - zarr
- - pip:
- # relative to this file. Needs to be editable to be accepted.
- - -e ../..
diff --git a/ci/requirements/environment-3.14.yml b/ci/requirements/environment-3.14.yml
deleted file mode 100644
index d4d47d85536..00000000000
--- a/ci/requirements/environment-3.14.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
- - nodefaults
-dependencies:
- - aiobotocore
- - array-api-strict<2.4
- - boto3
- - bottleneck
- - cartopy
- - cftime
- - dask-core
- - distributed
- - flox
- - fsspec
- - h5netcdf
- - h5py
- - hdf5
- - hypothesis
- - iris
- - lxml # Optional dep of pydap
- - matplotlib-base
- - nc-time-axis
- - netcdf4
- # - numba
- # - numbagg
- - numexpr
- - numpy
- - opt_einsum
- - packaging
- - pandas
- - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110
- # - pint>=0.22
- - pip
- - pooch
- - pre-commit
- - pyarrow # pandas raises a deprecation warning without this, breaking doctests
- - pydap
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - rasterio
- - scipy
- - seaborn
- # - sparse
- - toolz
- - types-colorama
- - types-docutils
- - types-psutil
- - types-Pygments
- - types-python-dateutil
- - types-pytz
- - types-PyYAML
- - types-setuptools
- - typing_extensions
- - zarr
- - pip:
- - jax # no way to get cpu-only jaxlib from conda if gpu is present
- - types-defusedxml
- - types-pexpect
diff --git a/ci/requirements/environment-windows-3.14.yml b/ci/requirements/environment-windows-3.14.yml
deleted file mode 100644
index e86d57beb95..00000000000
--- a/ci/requirements/environment-windows-3.14.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
-dependencies:
- - array-api-strict<2.4
- - boto3
- - bottleneck
- - cartopy
- - cftime
- - dask-core
- - distributed
- - flox
- - fsspec
- - h5netcdf
- - h5py
- - hdf5
- - hypothesis
- - iris
- - lxml # Optional dep of pydap
- - matplotlib-base
- - nc-time-axis
- - netcdf4
- # - numba
- # - numbagg
- - numpy
- - packaging
- - pandas
- - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110
- # - pint>=0.22
- - pip
- - pre-commit
- - pyarrow # importing dask.dataframe raises an ImportError without this
- - pydap
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - rasterio
- - scipy
- - seaborn
- # - sparse
- - toolz
- - types-colorama
- - types-docutils
- - types-psutil
- - types-Pygments
- - types-python-dateutil
- - types-pytz
- - types-PyYAML
- - types-setuptools
- - typing_extensions
- - zarr
- - pip:
- - types-defusedxml
- - types-pexpect
diff --git a/ci/requirements/environment-windows.yml b/ci/requirements/environment-windows.yml
deleted file mode 100644
index 7c0d4dd9231..00000000000
--- a/ci/requirements/environment-windows.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
-dependencies:
- - array-api-strict<2.4
- - boto3
- - bottleneck
- - cartopy
- - cftime
- - dask-core
- - distributed
- - flox
- - fsspec
- - h5netcdf
- - h5py
- - hdf5
- - hypothesis
- - iris
- - lxml # Optional dep of pydap
- - matplotlib-base
- - nc-time-axis
- - netcdf4
- - numba
- - numbagg
- - numpy
- - packaging
- - pandas
- - pandas-stubs<=2.2.3.241126 # https://github.com/pydata/xarray/issues/10110
- # - pint>=0.22
- - pip
- - pre-commit
- - pyarrow # importing dask.dataframe raises an ImportError without this
- - pydap
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - rasterio
- - scipy
- - seaborn
- - sparse
- - toolz
- - types-colorama
- - types-docutils
- - types-psutil
- - types-Pygments
- - types-python-dateutil
- - types-pytz
- - types-PyYAML
- - types-setuptools
- - typing_extensions
- - zarr
- - pip:
- - types-defusedxml
- - types-pexpect
diff --git a/ci/requirements/min-all-deps.yml b/ci/requirements/min-all-deps.yml
deleted file mode 100644
index add738630f1..00000000000
--- a/ci/requirements/min-all-deps.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: xarray-tests
-channels:
- - conda-forge
- - nodefaults
-dependencies:
- # MINIMUM VERSIONS POLICY: see doc/user-guide/installing.rst
- # Run ci/min_deps_check.py to verify that this file respects the policy.
- # When upgrading python, numpy, or pandas, must also change
- # doc/user-guide/installing.rst, doc/user-guide/plotting.rst and setup.py.
- - python=3.11
- - array-api-strict=1.1 # dependency for testing the array api compat
- - boto3=1.34
- - bottleneck=1.4
- - cartopy=0.23
- - cftime=1.6
- - coveralls
- - dask-core=2024.6
- - distributed=2024.6
- - flox=0.9
- - h5netcdf=1.3
- # h5py and hdf5 tend to cause conflicts
- # for e.g. hdf5 1.12 conflicts with h5py=3.1
- # prioritize bumping other packages instead
- - h5py=3.11
- - hdf5=1.14
- - hypothesis
- - iris=3.9
- - lxml=5.1 # Optional dep of pydap
- - matplotlib-base=3.8
- - nc-time-axis=1.4
- # netcdf follows a 1.major.minor[.patch] convention
- # (see https://github.com/Unidata/netcdf4-python/issues/1090)
- - netcdf4=1.6
- - numba=0.60
- - numbagg=0.8
- - numpy=1.26
- - packaging=24.1
- - pandas=2.2
- - pint=0.24
- - pip
- - pydap=3.5.0
- - pytest
- - pytest-asyncio
- - pytest-cov
- - pytest-env
- - pytest-mypy-plugins
- - pytest-timeout
- - pytest-xdist
- - rasterio=1.3
- - scipy=1.13
- - seaborn=0.13
- - sparse=0.15
- - toolz=0.12
- - zarr=2.18
diff --git a/doc/conf.py b/doc/conf.py
index 4a0cca2ddab..6848794fcea 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -20,14 +20,6 @@
print("python exec:", sys.executable)
print("sys.path:", sys.path)
-
-if "CONDA_DEFAULT_ENV" in os.environ or "conda" in sys.executable:
- print("conda environment:")
- subprocess.run([os.environ.get("CONDA_EXE", "conda"), "list"])
-else:
- print("pip environment:")
- subprocess.run([sys.executable, "-m", "pip", "list"])
-
print(f"xarray: {xarray.__version__}, {xarray.__file__}")
with suppress(ImportError):
diff --git a/doc/contribute/contributing.rst b/doc/contribute/contributing.rst
index 7733e73a120..832932d2459 100644
--- a/doc/contribute/contributing.rst
+++ b/doc/contribute/contributing.rst
@@ -186,77 +186,73 @@ documentation locally before pushing your changes.
Creating a Python Environment
-----------------------------
+Xarray uses `Pixi `_ to manage development environments.
Before starting any development, you'll need to create an isolated xarray
development environment:
-- Install either `Anaconda `_ or `miniconda
- `_
-- Make sure your conda is up to date (``conda update conda``)
+- Install `Pixi `_
+- Make sure your Pixi is up to date (``pixi self-update``)
- Make sure that you have :ref:`cloned the repository `
- ``cd`` to the *xarray* source directory
-We'll now kick off a two-step process:
+That's it! Now you're ready to contribute to Xarray.
-1. Install the build dependencies
-2. Build and install xarray
+Pixi defines multiple environments as well as tasks to help you with development. These include tasks for:
-.. code-block:: sh
-
- # Create and activate the build environment
- conda create -c conda-forge -n xarray-tests python=3.11
+- running the test suite
+- building the documentation
+- running the static type checker
+- running code formatters and linters
- # This is for Linux and MacOS
- conda env update -f ci/requirements/environment.yml
+Some of these tasks can be run in several environments (e.g., the test suite is run in environments with different
+dependencies as well as different Python versions to make sure we have wide support for Xarray). Some of these tasks
+are only run in a single environment (e.g., building the documentation or running pre-commit hooks).
- # On windows, use environment-windows.yml instead
- conda env update -f ci/requirements/environment-windows.yml
+You can see all available environments and tasks by running::
- conda activate xarray-tests
+ pixi list
- # or with older versions of Anaconda:
- source activate xarray-tests
+For example:
- # Build and install xarray
- pip install -e .
+- ``pixi run doc`` will build the documentation
+- ``pixi run mypy`` will run the static type checker
+- ``pixi run test`` will run the test suite
+- ``pixi run pre-commit`` will run all code formatters and linters - defined via the pre-commit hooks
-At this point you should be able to import *xarray* from your locally
-built version:
+When running ``pixi run test`` you will be prompted to select which environment you want to use. You can specify the environment
+directly by providing the ``-e`` flag, e.g., ``pixi run -e my_environment test``. Our CI setup uses Pixi as well - you can easily
+reproduce CI tests by running the same tasks in the same environments as defined in the CI.
-.. code-block:: sh
+You can enter any of the defined environments with::
- $ python # start an interpreter
- >>> import xarray
- >>> xarray.__version__
- '2025.7.2.dev14+g5ce69b2b.d20250725'
+ pixi shell -e my_environment
-This will create the new environment, and not touch any of your existing environments,
-nor any existing Python installation.
+This is similar to "activating" an environment in Conda. To exit this shell type ``exit`` or press ``Ctrl-D``.
-To view your environments::
+All these Pixi environments and tasks are defined in the ``pixi.toml`` file in the root of the repository.
- conda info -e
-To return to your root environment::
+Install pre-commit hooks
+-------------------------
- conda deactivate
+You can either run pre-commit manually via Pixi as described above, or set up git hooks to run pre-commit automatically.
-See the full `conda docs here `__.
+This is done by:
-Install pre-commit hooks
-------------------------
+.. code-block:: sh
+ pixi shell -e pre-commit # enter the pre-commit environment
+ pre-commit install # install the git hooks
-We highly recommend that you setup `pre-commit `_ hooks to automatically
-run all the above tools every time you make a git commit. To install the hooks::
+ # or
- python -m pip install pre-commit
- pre-commit install
+ pre-commit uninstall # uninstall the git hooks
-This can be done by running: ::
+Now, every time you make a git commit, all the pre-commit hooks will be run automatically using the pre-commit that comes
+with Pixi.
- pre-commit run
+Alternatively you can use a separate installation of ``pre-commit`` (e.g., install globally using Pixi (``pixi global install pre-commit``), or via `Homebrew `_).
-from the root of the xarray repository. You can skip the pre-commit checks with
-``git commit --no-verify``.
+If you want to commit without running ``pre-commit`` hooks, you can use ``git commit --no-verify``.
Update the ``main`` branch
@@ -276,11 +272,6 @@ request. If you have uncommitted changes, you will need to ``git stash`` them
prior to updating. This will effectively store your changes, which can be
reapplied after updating.
-If the *xarray* ``main`` branch version has updated since you last fetched changes,
-you may also wish to reinstall xarray so that the pip version reflects the *xarray*
-version::
-
- pip install -e .
Create a new feature branch
---------------------------
@@ -433,29 +424,10 @@ How to build the *xarray* documentation
Requirements
~~~~~~~~~~~~
-Make sure to follow the instructions on :ref:`creating a development environment` above, but
-to build the docs you need to use the environment file ``ci/requirements/doc.yml``.
-You should also use this environment and these steps if you want to view changes you've made to the docstrings.
-
-.. code-block:: sh
-
- # Create and activate the docs environment
- conda env create -f ci/requirements/doc.yml
- conda activate xarray-docs
-
- # or with older versions of Anaconda:
- source activate xarray-docs
-
- # Build and install a local, editable version of xarray
- pip install -e .
-
-Building the documentation
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To build the documentation run::
+Make sure to follow the instructions on :ref:`creating a development environment` above. Once you
+have Pixi installed - you can build the documentation using the command::
- cd doc/
- make html
+ pixi run doc
Then you can find the HTML output files in the folder ``xarray/doc/_build/html/``.
@@ -475,8 +447,7 @@ evocations, Sphinx will try to only build the pages that have been modified.
If you want to do a full clean build, do::
- make clean
- make html
+ pixi run doc-clean
Writing ReST pages
------------------
diff --git a/pixi.toml b/pixi.toml
new file mode 100644
index 00000000000..5686bb941e5
--- /dev/null
+++ b/pixi.toml
@@ -0,0 +1,372 @@
+[workspace]
+preview = ["pixi-build"]
+channels = ["conda-forge", "nodefaults"]
+platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
+
+[tasks.test-all]
+depends-on = [
+ # { task = "test", environment = "test-just-xarray" }, # https://github.com/pydata/xarray/pull/10888/files#r2511336147
+ { task = "test", environment = "test-bare-minimum" },
+ { task = "test", environment = "test-all-deps-py313" },
+]
+
+[environments]
+# Testing
+# test-just-xarray = { features = ["test"] } # https://github.com/pydata/xarray/pull/10888/files#r2511336147
+test-all-but-numba = { features = [
+ "py313",
+ "test",
+ "backends",
+ "accel",
+ "dask",
+ "viz",
+ "extras",
+] }
+test-all-but-dask = { features = [
+ "py312",
+ "test",
+ "backends",
+ "accel",
+ "numba",
+ "viz",
+ "extras",
+] }
+test-all-deps-py313 = { features = [
+ "py313",
+ "test",
+ "backends",
+ "accel",
+ "numba",
+ "dask",
+ "viz",
+ "extras",
+] }
+test-nightly = { features = [
+ "test",
+ "nightly",
+ "typing",
+], no-default-feature = true }
+
+
+test-all-deps-py311 = { features = [
+ "py311",
+ "test",
+ "backends",
+ "accel",
+ "numba",
+ "dask",
+ "viz",
+ "extras",
+] }
+
+test-with-typing-py311 = { features = [
+ "py311",
+ "test",
+ "backends",
+ "accel",
+ "numba",
+ "dask",
+ "viz",
+ "extras",
+ "typing",
+] }
+
+test-with-typing-py313 = { features = [
+ "py313",
+ "test",
+ "backends",
+ "accel",
+ "numba",
+ "dask",
+ "viz",
+ "extras",
+ "typing",
+] }
+
+test-bare-minimum = { features = ["test", "minimal"] }
+test-bare-min-and-scipy = { features = [
+ "test",
+ "minimal",
+ "minimum-scipy",
+] }
+test-min-versions = { features = [
+ "test",
+ "min-versions",
+] }
+
+
+# Extra
+typing = { features = ["typing"] }
+doc = { features = [
+ "doc",
+ "backends",
+ "test",
+ "accel",
+ "viz",
+ "extras",
+], solve-group = "doc" }
+pre-commit = { features = ["pre-commit"], no-default-feature = true }
+
+[package]
+name = "xarray"
+version = "dynamic" # dynamic versioning needs better support in pixi https://github.com/prefix-dev/pixi/issues/2923#issuecomment-2598460666 . Putting `version = "dynamic"` here for now until pixi recommends something else.
+
+[package.build]
+backend = { name = "pixi-build-python", version = "==0.4.0" }
+
+[package.host-dependencies]
+setuptools = "*"
+setuptools_scm = "*"
+
+[package.run-dependencies]
+python = "*"
+numpy = "*"
+pandas = "*"
+
+packaging = "24.1.*" # NOTE(review): run-dependency pinned to the 24.1 series — should this be ">=24.1"? Confirm before release.
+git = "*" # needed for dynamic versioning
+
+[dependencies]
+xarray = { path = "." }
+
+[target.linux-64.dependencies]
+pydap-server = "*"
+
+[feature.minimal.dependencies]
+# minimal versions
+python = "3.11.*"
+numpy = "1.26.*"
+pandas = "2.2.*"
+
+[feature.minimum-scipy.dependencies]
+scipy = "1.13.*"
+
+[feature.py311.dependencies]
+python = "3.11.*"
+
+[feature.py312.dependencies]
+python = "3.12.*"
+
+[feature.py313.dependencies]
+python = "3.13.*"
+
+[feature.backends.dependencies]
+# files
+h5netcdf = "*"
+h5py = "*"
+hdf5 = "*"
+netcdf4 = "*"
+zarr = "*"
+rasterio = "*"
+
+# opendap
+pydap = "*"
+lxml = "*" # Optional dep of pydap
+
+# s3
+boto3 = "*"
+fsspec = "*"
+aiobotocore = "*"
+
+[feature.numba.dependencies]
+numba = "*"
+numbagg = "*"
+
+[feature.dask.dependencies]
+dask = "*"
+distributed = "*"
+
+[feature.accel.dependencies]
+flox = "*"
+bottleneck = "*"
+numexpr = "*"
+pyarrow = "*"
+opt_einsum = "*"
+
+[feature.viz.dependencies]
+cartopy = "*"
+matplotlib-base = "*"
+nc-time-axis = "*"
+seaborn = "*"
+
+[feature.extras.dependencies]
+# array
+array-api-strict = "<2.4"
+sparse = "*"
+
+# algorithms
+scipy = "*"
+toolz = "*"
+
+# tutorial
+pooch = "*"
+
+# other
+cftime = "*"
+pint = "*"
+iris = "*"
+
+[feature.extras.pypi-dependencies]
+# array
+jax = "*" # no way to get cpu-only jaxlib from conda if gpu is present
+
+[feature.min-versions.dependencies]
+# minimal versions for all dependencies
+# Note that when you update min-supported versions, you should:
+# - Update the min version lower-bound in the corresponding feature(s) where applicable
+# - Update this section to pin to the min version
+
+python = "3.11.*"
+array-api-strict = "1.1.*" # dependency for testing the array api compat
+boto3 = "1.34.*"
+bottleneck = "1.4.*"
+cartopy = "0.23.*"
+cftime = "1.6.*"
+dask-core = "2024.6.*"
+distributed = "2024.6.*"
+flox = "0.9.*"
+h5netcdf = "1.3.*"
+# h5py and hdf5 tend to cause conflicts
+# for e.g. hdf5 1.12 conflicts with h5py=3.1
+# prioritize bumping other packages instead
+h5py = "3.11.*"
+hdf5 = "1.14.*"
+iris = "3.9.*"
+lxml = "5.1.*" # Optional dep of pydap
+matplotlib-base = "3.8.*"
+nc-time-axis = "1.4.*"
+# netcdf follows a 1.major.minor[.patch] convention
+# (see https://github.com/Unidata/netcdf4-python/issues/1090)
+netcdf4 = "1.6.*"
+numba = "0.60.*"
+numbagg = "0.8.*"
+numpy = "1.26.*"
+packaging = "24.1.*"
+pandas = "2.2.*"
+pint = "0.24.*"
+pydap = "3.5.*"
+rasterio = "1.3.*"
+scipy = "1.13.*"
+seaborn = "0.13.*"
+sparse = "0.15.*"
+toolz = "0.12.*"
+zarr = "2.18.*"
+
+[feature.nightly.pypi-options]
+extra-index-urls = [
+ "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple",
+]
+
+[feature.nightly.dependencies]
+python = "*"
+
+[feature.nightly.pypi-dependencies]
+xarray = { path = ".", editable = true }
+
+numpy = "*"
+scipy = "*"
+matplotlib = "*"
+pandas = "*"
+pyarrow = "*"
+
+# # Dask stuff not working at the moment https://github.com/pydata/xarray/pull/10888#issuecomment-3516452010
+# dask = { git = "https://github.com/dask/dask" }
+# distributed = { git = "https://github.com/dask/distributed"}
+# dask-expr = { git = "https://github.com/dask/dask-expr"}
+dask = "*"
+distributed = "*"
+dask-expr = "*"
+
+zarr = { git = "https://github.com/zarr-developers/zarr-python" }
+numcodecs = { git = "https://github.com/zarr-developers/numcodecs" }
+# cftime = { git = "https://github.com/Unidata/cftime"} # TODO: Enable once this is removed as a test dep
+# packaging = { git = "https://github.com/pypa/packaging"} #? Pixi warns if this is enabled
+pint = { git = "https://github.com/hgrecco/pint" }
+bottleneck = { git = "https://github.com/pydata/bottleneck" }
+fsspec = { git = "https://github.com/intake/filesystem_spec" }
+nc-time-axis = { git = "https://github.com/SciTools/nc-time-axis" }
+flox = { git = "https://github.com/xarray-contrib/flox" }
+h5netcdf = { git = "https://github.com/h5netcdf/h5netcdf" }
+opt_einsum = { git = "https://github.com/dgasmith/opt_einsum" }
+# sparse = { git = "https://github.com/pydata/sparse"}
+
+
+[feature.test.dependencies]
+pytest = "*"
+pytest-asyncio = "*"
+pytest-cov = "*"
+pytest-env = "*"
+pytest-mypy-plugins = "*"
+pytest-timeout = "*"
+pytest-xdist = "*"
+pytz = "*"
+hypothesis = "*"
+cftime = "*" # https://github.com/pydata/xarray/pull/10888#issuecomment-3481432315
+coveralls = "*"
+
+[feature.test.tasks]
+test = { cmd = "pytest" }
+
+[feature.doc.dependencies]
+kerchunk = "*"
+ipykernel = "*"
+ipywidgets = "*" # silence nbsphinx warning
+ipython = '*'
+jupyter_client = '*'
+jupyter_sphinx = '*'
+nbsphinx = '*'
+ncdata = '*'
+pydata-sphinx-theme = "*"
+pyproj = "*"
+rich = "*" # for Zarr tree()
+setuptools = "*"
+sphinx-autosummary-accessors = "*"
+sphinx-copybutton = "*"
+sphinx-design = "*"
+sphinx-inline-tabs = "*"
+sphinx = ">=6,<8"
+sphinxcontrib-mermaid = "*"
+sphinxcontrib-srclinks = "*"
+sphinx-remove-toctrees = "*"
+sphinxext-opengraph = "*"
+sphinxext-rediraffe = "*"
+
+[feature.doc.pypi-dependencies]
+cfgrib = "*" # pypi dep because of https://github.com/prefix-dev/pixi/issues/3032#issuecomment-3302638043
+
+[feature.doc.tasks]
+doc = { cmd = "make html", cwd = "doc" }
+doc-clean = { cmd = "make clean && make html", cwd = "doc" }
+
+
+[feature.typing.dependencies]
+mypy = "==1.18.1"
+pyright = "*"
+hypothesis = "*"
+lxml = "*"
+pandas-stubs = "<=2.2.3.241126" # https://github.com/pydata/xarray/issues/10110
+types-colorama = "*"
+types-docutils = "*"
+types-psutil = "*"
+types-Pygments = "*"
+types-python-dateutil = "*"
+types-pytz = "*"
+types-PyYAML = "*"
+types-requests = "*"
+types-setuptools = "*"
+types-openpyxl = "*"
+typing_extensions = "*"
+pip = "*"
+
+[feature.typing.pypi-dependencies]
+types-defusedxml = "*"
+types-pexpect = "*"
+
+[feature.typing.tasks]
+mypy = "mypy --install-types --non-interactive --cobertura-xml-report mypy_report"
+
+[feature.pre-commit.dependencies]
+pre-commit = "*"
+
+[feature.pre-commit.tasks]
+pre-commit = { cmd = "pre-commit" }
diff --git a/xarray/computation/rolling.py b/xarray/computation/rolling.py
index adb8a5e6380..25ad5b8f6a9 100644
--- a/xarray/computation/rolling.py
+++ b/xarray/computation/rolling.py
@@ -168,7 +168,7 @@ def _reduce_method( # type: ignore[misc]
bottleneck_move_func = getattr(bottleneck, "move_" + name, None)
if module_available("numbagg"):
- import numbagg
+ import numbagg # type: ignore[import-not-found, unused-ignore]
numbagg_move_func = getattr(numbagg, "move_" + name, None)
else:
diff --git a/xarray/computation/rolling_exp.py b/xarray/computation/rolling_exp.py
index 010cf1fe31a..100742d38d2 100644
--- a/xarray/computation/rolling_exp.py
+++ b/xarray/computation/rolling_exp.py
@@ -111,7 +111,7 @@ def mean(self, keep_attrs: bool | None = None) -> T_DataWithCoords:
Dimensions without coordinates: x
"""
- import numbagg
+ import numbagg # type: ignore[import-not-found, unused-ignore]
if keep_attrs is None:
keep_attrs = _get_keep_attrs(default=True)
diff --git a/xarray/core/duck_array_ops.py b/xarray/core/duck_array_ops.py
index b8a4011a72e..e1b7dbeb4d7 100644
--- a/xarray/core/duck_array_ops.py
+++ b/xarray/core/duck_array_ops.py
@@ -861,7 +861,7 @@ def _push(array, n: int | None = None, axis: int = -1):
" Call `xr.set_options(use_bottleneck=True)` or `xr.set_options(use_numbagg=True)` to enable one."
)
if OPTIONS["use_numbagg"] and module_available("numbagg"):
- import numbagg
+ import numbagg # type: ignore[import-not-found, unused-ignore]
return numbagg.ffill(array, limit=n, axis=axis)
diff --git a/xarray/core/nputils.py b/xarray/core/nputils.py
index b517c67bca9..a70d132f7ed 100644
--- a/xarray/core/nputils.py
+++ b/xarray/core/nputils.py
@@ -208,7 +208,7 @@ def f(values, axis=None, **kwargs):
)
)
):
- import numbagg
+ import numbagg # type: ignore[import-not-found, unused-ignore]
nba_func = getattr(numbagg, name, None)
if nba_func is not None:
diff --git a/xarray/plot/dataarray_plot.py b/xarray/plot/dataarray_plot.py
index 921f3dcae31..3a140ad46ca 100644
--- a/xarray/plot/dataarray_plot.py
+++ b/xarray/plot/dataarray_plot.py
@@ -1000,7 +1000,7 @@ def newplotfunc(
ckw = {vv: cmap_params[vv] for vv in ("vmin", "vmax", "norm", "cmap")}
cmap_params_subset.update(**ckw)
- with plt.rc_context(_styles):
+ with plt.rc_context(_styles): # type: ignore[arg-type, unused-ignore]
if z is not None:
import mpl_toolkits
diff --git a/xarray/tests/test_assertions.py b/xarray/tests/test_assertions.py
index 6e18e47cc81..eb1a9492a49 100644
--- a/xarray/tests/test_assertions.py
+++ b/xarray/tests/test_assertions.py
@@ -16,7 +16,7 @@
try:
import pint
- unit_registry = pint.UnitRegistry(force_ndarray_like=True)
+ unit_registry: pint.UnitRegistry = pint.UnitRegistry(force_ndarray_like=True)
def quantity(x):
return unit_registry.Quantity(x, "m")
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 36a1e354d9c..c0433b6645f 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -209,7 +209,7 @@ def _check_compression_codec_available(codec: str | None) -> bool:
# Attempt to create a variable with the compression
if codec and codec.startswith("blosc"):
- nc.createVariable( # type: ignore[call-overload]
+ nc.createVariable( # type: ignore[call-overload, unused-ignore]
varname="test",
datatype="f4",
dimensions=("x",),
@@ -217,7 +217,7 @@ def _check_compression_codec_available(codec: str | None) -> bool:
blosc_shuffle=1,
)
else:
- nc.createVariable( # type: ignore[call-overload]
+ nc.createVariable( # type: ignore[call-overload, unused-ignore]
varname="test", datatype="f4", dimensions=("x",), compression=codec
)
diff --git a/xarray/tests/test_dask.py b/xarray/tests/test_dask.py
index ccbfc06eeb0..2d103994410 100644
--- a/xarray/tests/test_dask.py
+++ b/xarray/tests/test_dask.py
@@ -312,7 +312,7 @@ def test_persist(self):
def test_tokenize_duck_dask_array(self):
import pint
- unit_registry = pint.UnitRegistry()
+ unit_registry: pint.UnitRegistry = pint.UnitRegistry()
q = unit_registry.Quantity(self.data, "meter")
variable = xr.Variable(("x", "y"), q)
@@ -791,7 +791,7 @@ def test_from_dask_variable(self):
def test_tokenize_duck_dask_array(self):
import pint
- unit_registry = pint.UnitRegistry()
+ unit_registry: pint.UnitRegistry = pint.UnitRegistry()
q = unit_registry.Quantity(self.data, unit_registry.meter)
data_array = xr.DataArray(