diff --git a/.binder/apt.txt b/.binder/apt.txt new file mode 100644 index 00000000..4d956093 --- /dev/null +++ b/.binder/apt.txt @@ -0,0 +1 @@ +graphviz diff --git a/.binder/environment.yml b/.binder/environment.yml index fca9d0ae..759f2e69 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -1,42 +1,33 @@ -name: default +name: xarray channels: - conda-forge - - nodefaults dependencies: - - jupyter-book >=1.0.3,<2 - - pre-commit >=4.1.0,<5 - - dask-labextension >=7.0.0,<8 - - jupyterlab >=4.3.5,<5 - - jupyter_bokeh >=4.0.5,<5 - - jupyterlab-myst >=2.4.2,<3 - - jupyter-resource-usage >=1.1.1,<2 - - cartopy >=0.24.0,<0.25 - - cf_xarray >=0.10.0,<0.11 - - dask >=2025.2.0,<2026 - - datashader >=0.17.0,<0.18 - - distributed >=2025.2.0,<2026 - - gcsfs >=2025.2.0,<2026 - - geoviews-core >=1.14.0,<2 - - gsw >=3.6.19,<4 - - hvplot >=0.11.2,<0.12 - - h5netcdf >=1.5.0,<2 - - ipykernel >=6.29.5,<7 - - matplotlib-base >=3.10.0,<4 - - netcdf4 >=1.7.2,<2 - - numpy >=2.1.3,<3 - - pint-xarray >=0.4,<0.5 - - pydap >=3.5.3,<4 - - python-graphviz >=0.20.3,<0.21 - - pooch >=1.8.2,<2 - - rioxarray >=0.18.2,<0.19 - - scipy >=1.15.2,<2 - - sphinx-codeautolink >=0.17.0,<0.18 - - sphinxcontrib-mermaid >=1.0.0,<2 - - sphinx-notfound-page >=1.0.4,<2 - - sphinxext-rediraffe >=0.2.7,<0.3 - - s3fs >=2025.2.0,<2026 - - xarray >=2025.1.2,<2026 - - zarr >=3.0.3,<4 - - flox >=0.10.0,<0.11 - - numbagg >=0.9.0,<0.10 - - python >=3.10 + - bokeh>=2.0.0 + - cartopy + - cf_xarray + - dask + - dask-labextension + - distributed + - fsspec + - gcsfs + - geoviews + - gsw + - hvplot + - ipywidgets>=7.5 + - jupyter-server-proxy + - jupyterlab>=2.0.0 + - matplotlib!=3.3.1 + - nbgitpuller + - netcdf4 + - nodejs + - notebook + - numpy>=1.18.1 + - pandas + - pip + - pydap + - python-graphviz + - python=3.8 + - scipy>=1.3.0 + - zarr + - pip: + - expectexception diff --git a/.binder/jupyterlab-workspace.json b/.binder/jupyterlab-workspace.json new file mode 100644 index 00000000..d499b299 --- /dev/null +++ b/.binder/jupyterlab-workspace.json @@ -0,0 +1,115 @@ +{ + "data": { + "file-browser-filebrowser:cwd": { + "path": "scipy-tutorial" + }, + "dask-dashboard-launcher:individual-progress": { + "data": { + "route": "individual-progress", + "label": "Progress" + } + }, + "dask-dashboard-launcher:individual-task-stream": { + "data": { + "route": "individual-task-stream", + "label": "Task Stream" + } + }, + "layout-restorer:data": { + "main": { + "dock": { + "type": "split-area", + "orientation": "horizontal", + "sizes": [ + 0.736625105372905, + 0.263374894627095 + ], + "children": [ + { + "type": "tab-area", + "currentIndex": 0, + "widgets": [ + "notebook:scipy-tutorial/00_overview.ipynb" + ] + }, + { + "type": "split-area", + "orientation": "vertical", + "sizes": [ + 0.5, + 0.5 + ], + "children": [ + { + "type": "tab-area", + "currentIndex": 0, + "widgets": [ + "dask-dashboard-launcher:individual-progress" + ] + }, + { + "type": "tab-area", + "currentIndex": 1, + "widgets": [ + "dask-dashboard-launcher:individual-task-stream" + ] + } + ] + } + ] + }, + "mode": "multiple-document", + "current": "notebook:scipy-tutorial/00_overview.ipynb" + }, + "left": { + "collapsed": false, + "current": "filebrowser", + "widgets": [ + "filebrowser", + "running-sessions", + "dask-dashboard-launcher", + "command-palette", + "jp-property-inspector", + "tab-manager", + "extensionmanager.main-view" + ] + }, + "right": { + "collapsed": true, + "widgets": [] + } + }, + "notebook:00_overview.ipynb": { + "data": { + "path": 
"scipy-tutorial/00_overview.ipynb", + "factory": "Notebook" + } + }, + "notebook:scipy-tutorial/00_overview.ipynb": { + "data": { + "path": "scipy-tutorial/00_overview.ipynb", + "factory": "Notebook" + } + }, + "dask-dashboard-launcher": { + "url": "DASK_DASHBOARD_URL", + "cluster": "" + }, + "@jupyterlab/settingeditor-extension:plugin": { + "sizes": [ + 0.18606224627875506, + 0.8139377537212449 + ], + "container": { + "plugin": "@jupyterlab/terminal-extension:plugin", + "sizes": [ + 0.5, + 0.5 + ] + } + } + }, + "metadata": { + "id": "/lab" + } +} diff --git a/.binder/postBuild b/.binder/postBuild new file mode 100755 index 00000000..2e5d882a --- /dev/null +++ b/.binder/postBuild @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +# Install the JupyterLab dask-labextension +jupyter labextension install dask-labextension +jupyter labextension install @jupyter-widgets/jupyterlab-manager +jupyter labextension install @bokeh/jupyter_bokeh +jupyter labextension install @pyviz/jupyterlab_pyviz +jupyter labextension install @jupyterlab/toc + + +export DASK_TICK_MAXIMUM_DELAY=5s +export DASK_DISTRIBUTED__DIAGNOSTICS_LINK={JUPYTERHUB_SERVICE_PREFIX}proxy/{port}/status + +#EOF diff --git a/.binder/start b/.binder/start new file mode 100644 index 00000000..7b1fff22 --- /dev/null +++ b/.binder/start @@ -0,0 +1,9 @@ +#!/bin/bash + +# Replace DASK_DASHBOARD_URL with the proxy location +sed -i -e "s|DASK_DASHBOARD_URL|/user/${JUPYTERHUB_USER}/proxy/8787|g" .binder/jupyterlab-workspace.json + +# Import the workspace +jupyter lab workspaces import .binder/jupyterlab-workspace.json + +exec "$@" diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json deleted file mode 100644 index 39a55761..00000000 --- a/.devcontainer/devcontainer.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "image": "quay.io/pangeo/pangeo-notebook:latest", - "postCreateCommand": { - "vscode": "mkdir ${containerWorkspaceFolder}/.vscode && cp ${containerWorkspaceFolder}/.devcontainer/tasks.json ${containerWorkspaceFolder}/.vscode/tasks.json" - }, - "hostRequirements": { - "cpus": 2 - }, - "customizations": { - "codespaces": { - "openFiles": ["README.md"] - }, - "vscode": { - "extensions": ["ms-toolsai.jupyter", "ms-python.python"] - } - } -} diff --git a/.devcontainer/scipy2023/devcontainer.json b/.devcontainer/scipy2023/devcontainer.json deleted file mode 100644 index 22f85b77..00000000 --- a/.devcontainer/scipy2023/devcontainer.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "image": "quay.io/pangeo/pangeo-notebook:2023.07.05", - "postCreateCommand": { - "jupyterlab": "mkdir /home/jovyan/.jupyter && cp ${containerWorkspaceFolder}/.devcontainer/scipy2023/jupyter_lab_config.py /home/jovyan/.jupyter/jupyter_lab_config.py", - "vscode": "mkdir ${containerWorkspaceFolder}/.vscode && cp ${containerWorkspaceFolder}/.devcontainer/scipy2023/tasks.json ${containerWorkspaceFolder}/.vscode/tasks.json" - }, - "hostRequirements": { - "cpus": 2 - }, - "customizations": { - "codespaces": { - "openFiles": ["workshops/scipy2023/README.md"] - }, - "vscode": { - "extensions": ["ms-toolsai.jupyter", "ms-python.python"] - } - } -} diff --git a/.devcontainer/scipy2023/jupyter_lab_config.py b/.devcontainer/scipy2023/jupyter_lab_config.py deleted file mode 100644 index 60b8642e..00000000 --- a/.devcontainer/scipy2023/jupyter_lab_config.py +++ /dev/null @@ -1,3 +0,0 @@ -c = get_config() # noqa -c.LabApp.default_url = '/lab/tree/workshops/scipy2023/index.ipynb' -c.ServerApp.allow_origin = '*' diff --git a/.devcontainer/scipy2023/tasks.json 
b/.devcontainer/scipy2023/tasks.json deleted file mode 100644 index 5660e45b..00000000 --- a/.devcontainer/scipy2023/tasks.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "jupyterlab", - "type": "shell", - "command": "/srv/conda/envs/notebook/bin/jupyter lab --no-browser", - "presentation": { - "reveal": "always" - }, - "runOptions": { - "runOn": "folderOpen" - } - } - ] -} diff --git a/.devcontainer/scipy2024/devcontainer.json b/.devcontainer/scipy2024/devcontainer.json deleted file mode 100644 index 83912331..00000000 --- a/.devcontainer/scipy2024/devcontainer.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "image": "quay.io/pangeo/pangeo-notebook:2024.07.08", - "postCreateCommand": { - "jupyterlab": "mkdir /home/jovyan/.jupyter && cp ${containerWorkspaceFolder}/.devcontainer/scipy2024/jupyter_lab_config.py /home/jovyan/.jupyter/jupyter_lab_config.py", - "vscode": "mkdir ${containerWorkspaceFolder}/.vscode && cp ${containerWorkspaceFolder}/.devcontainer/scipy2024/tasks.json ${containerWorkspaceFolder}/.vscode/tasks.json" - }, - "hostRequirements": { - "cpus": 2 - }, - "customizations": { - "codespaces": { - "openFiles": ["workshops/scipy2024/index.ipynb"] - }, - "vscode": { - "extensions": ["ms-toolsai.jupyter", "ms-python.python"] - } - } -} diff --git a/.devcontainer/scipy2024/jupyter_lab_config.py b/.devcontainer/scipy2024/jupyter_lab_config.py deleted file mode 100644 index 836bf30c..00000000 --- a/.devcontainer/scipy2024/jupyter_lab_config.py +++ /dev/null @@ -1,3 +0,0 @@ -c = get_config() # noqa -c.LabApp.default_url = '/lab/tree/workshops/scipy2024/index.ipynb' -c.ServerApp.allow_origin = '*' diff --git a/.devcontainer/scipy2024/tasks.json b/.devcontainer/scipy2024/tasks.json deleted file mode 100644 index 5660e45b..00000000 --- a/.devcontainer/scipy2024/tasks.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "jupyterlab", - "type": "shell", - "command": "/srv/conda/envs/notebook/bin/jupyter lab --no-browser", - "presentation": { - "reveal": "always" - }, - "runOptions": { - "runOn": "folderOpen" - } - } - ] -} diff --git a/.devcontainer/tasks.json b/.devcontainer/tasks.json deleted file mode 100644 index 3b034156..00000000 --- a/.devcontainer/tasks.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "jupyterlab", - "type": "shell", - "command": "/srv/conda/envs/notebook/bin/jupyter lab --LabApp.default_url='/lab/tree/overview/xarray-in-45-min.ipynb' --ServerApp.allow_origin='*' --no-browser", - "presentation": { - "reveal": "always" - }, - "runOptions": { - "runOn": "folderOpen" - } - } - ] -} diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 887a2c18..00000000 --- a/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -# SCM syntax highlighting & preventing 3-way merges -pixi.lock merge=binary linguist-language=YAML linguist-generated=true diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index d00361ed..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,7 +0,0 @@ -# Regularly update Docker tags and Actions steps -version: 2 -updates: - - package-ecosystem: "github-actions" - directory: "/.github" - schedule: - interval: "monthly" diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml deleted file mode 100644 index 541b5413..00000000 --- a/.github/workflows/main.yaml +++ /dev/null @@ -1,77 +0,0 @@ -name: Deploy Website to GitHub Pages - -on: - push: - branches: main - paths-ignore: - - 
".devcontainer/**" - -# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages -permissions: - contents: write - pages: write - id-token: write - -# Allow one concurrent deployment -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup JupyterBook Cache - uses: actions/cache@v4 - with: - path: _build - # NOTE: change key to "jupyterbook-DATE" to force rebuilding cache - key: jupyterbook-20250221 - - - uses: prefix-dev/setup-pixi@v0.8.2 - with: - manifest-path: pyproject.toml - cache: true - activate-environment: true - - - name: Build JupyterBook - run: | - jupyter-book build ./ --warningiserror --keep-going - - - name: Dump Build Logs - if: always() - run: | - if (test -a _build/html/reports/*log); then cat _build/html/reports/*log ; fi - - - name: Save Build Folder - if: always() - uses: actions/upload-artifact@v4 - with: - name: build - path: _build/ - - - name: Upload Pages Artifact - uses: actions/upload-pages-artifact@v3 - with: - path: _build/html - - # Publish Website to GitHub Pages if built successfully - deploy: - needs: build - if: github.ref == 'refs/heads/main' - runs-on: ubuntu-latest - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - - steps: - - name: Setup Pages - uses: actions/configure-pages@v5 - - - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v4 diff --git a/.github/workflows/nocache.yaml b/.github/workflows/nocache.yaml deleted file mode 100644 index a33d0b22..00000000 --- a/.github/workflows/nocache.yaml +++ /dev/null @@ -1,45 +0,0 @@ -name: Rebuild Entire Jupyter Book on all Platforms - -on: - workflow_dispatch: - -# Allow one concurrent deployment -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - build: - name: Build on ${{ matrix.runs-on }} - runs-on: ${{ matrix.runs-on }} - strategy: - fail-fast: false - matrix: - runs-on: [ubuntu-latest, macos-latest, windows-latest] - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - uses: prefix-dev/setup-pixi@v0.8.2 - with: - manifest-path: pyproject.toml - cache: true - activate-environment: true - - # https://github.com/xarray-contrib/xarray-tutorial/issues/311 - - name: Configure graphviz - if: matrix.runs-on == 'macos-latest' - run: | - dot -c - - - name: Build JupyterBook - id: jb-build - continue-on-error: true - run: | - jupyter-book build ./ --warningiserror --keep-going - - - name: Dump Build Logs - if: steps.jb-build.outcome == 'failure' - run: | - cat _build/html/reports/**/*.log diff --git a/.github/workflows/pull_request.yaml b/.github/workflows/pull_request.yaml deleted file mode 100644 index b969d043..00000000 --- a/.github/workflows/pull_request.yaml +++ /dev/null @@ -1,50 +0,0 @@ -name: Pull Request Build - -on: - pull_request: - types: [opened, synchronize, reopened, closed] - paths-ignore: - - ".devcontainer/**" - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - preview: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - if: github.event.action != 'closed' - uses: actions/checkout@v4 - - - name: Setup JupyterBook Cache - if: github.event.action != 'closed' - uses: actions/cache@v4 - with: - path: _build - # NOTE: change key to "jupyterbook-DATE" to force rebuilding cache - key: jupyterbook-20250221 - - - uses: 
prefix-dev/setup-pixi@v0.8.2 - with: - manifest-path: pyproject.toml - cache: true - activate-environment: true - - - name: Build JupyterBook - if: github.event.action != 'closed' - run: | - jupyter-book build ./ --warningiserror --keep-going - - - name: Dump Build Logs - if: github.event.action != 'closed' - run: | - if (test -a _build/html/reports/*log); then cat _build/html/reports/*log ; fi - - - name: Upload artifact - if: github.event.action != 'closed' - uses: actions/upload-artifact@v4 - with: - name: html - path: _build/html diff --git a/.github/workflows/qaqc.yaml b/.github/workflows/qaqc.yaml deleted file mode 100644 index 1b22eb99..00000000 --- a/.github/workflows/qaqc.yaml +++ /dev/null @@ -1,56 +0,0 @@ -name: QualityContol - -on: - workflow_dispatch: - pull_request: - branches: - - main - paths-ignore: - - ".devcontainer/**" - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - quality-control: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - uses: prefix-dev/setup-pixi@v0.8.2 - with: - manifest-path: pyproject.toml - cache: true - activate-environment: true - - # NOTE: this isn't a comprehensive spellcheck, just common typos - - name: Spellcheck - if: always() - uses: codespell-project/actions-codespell@v2 - with: - check_filenames: true - check_hidden: true - skip: ".git,*.js,qaqc.yml" - ignore_words_list: hist,nd - - # borrowed from https://github.com/ProjectPythia/pythia-foundations/blob/main/.github/workflows/link-checker.yaml - - name: Disable Notebook Execution Before Linkcheck - if: always() - shell: python - run: | - import yaml - with open('./_config.yml') as f: - data = yaml.safe_load(f) - data['execute']['execute_notebooks'] = 'off' - with open('./_config.yml', 'w') as f: - yaml.dump(data, f) - - # Checking links is flaky, so continue-on-error: true - - name: Check External Links - timeout-minutes: 5 - continue-on-error: true - if: always() - run: | - jupyter-book build ./ --builder linkcheck diff --git a/.github/workflows/surge_preview.yml b/.github/workflows/surge_preview.yml deleted file mode 100644 index 69937ab7..00000000 --- a/.github/workflows/surge_preview.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: Pull Request Preview - -on: - workflow_run: - workflows: ["Pull Request Build"] - types: - - completed - -permissions: - pull-requests: write # allow surge-preview to create/update PR comments - -concurrency: - group: ${{ github.workflow }}-${{ github.event.workflow_run.id }} - cancel-in-progress: true - -jobs: - # NOTE: match job name in pull_request.yaml - preview: - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }} - - steps: - # Ensure folder exists for PR 'closed' case - - run: mkdir html - - # Download built HTML from PR Build workflow - - uses: actions/download-artifact@v4 - continue-on-error: true - with: - github-token: ${{ github.token }} - run-id: ${{ github.event.workflow_run.id }} - - - name: Manage Surge.sh Deployment - id: preview_step - uses: afc163/surge-preview@v1 - with: - surge_token: ${{ secrets.SURGE_TOKEN }} - github_token: ${{ secrets.GITHUB_TOKEN }} - build: echo 'Uploading html/ folder contents to Surge.sh...' 
- dist: html # NOTE: match upload_artifact name in pull_request.yaml - failOnError: true - teardown: true diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 5de41283..00000000 --- a/.gitignore +++ /dev/null @@ -1,124 +0,0 @@ -# project/repo specific -advanced/backends/*.bin -scipy-tutorial/dask-report-large-chunk.html -mydask.png -dask-report.html -_build/ -*.zarr -*.nc -*.tiff -*.tif -dask-worker-space/ - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ - -# misc -.DS_Store -.vscode/ - -# pixi environments -.pixi -*.egg-info diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index cf491035..00000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,51 +0,0 @@ -ci: - autoupdate_schedule: monthly - -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-docstring-first - - id: check-json - exclude: ^.devcontainer/ - - id: check-yaml - - id: double-quote-string-fixer - - - repo: https://github.com/codespell-project/codespell - rev: "v2.4.1" - hooks: - - id: codespell - - - repo: https://github.com/psf/black - rev: 25.1.0 - hooks: - - id: black - - id: black-jupyter - - - repo: https://github.com/keewis/blackdoc - rev: v0.3.9 - hooks: - - id: blackdoc - - - repo: https://github.com/PyCQA/flake8 - rev: 7.1.2 - hooks: - - id: flake8 - - - repo: https://github.com/PyCQA/isort - rev: 6.0.1 - hooks: - - id: isort - - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v4.0.0-alpha.8 - hooks: - - id: prettier - - - repo: https://github.com/kynan/nbstripout - rev: 0.8.1 - hooks: - - id: nbstripout - args: [--extra-keys=metadata.kernelspec metadata.language_info.version] diff --git a/.prettierrc.toml b/.prettierrc.toml deleted file mode 100644 index addd6d36..00000000 --- a/.prettierrc.toml +++ /dev/null @@ -1,3 +0,0 @@ -tabWidth = 2 -semi = false -singleQuote = true diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index f9940fab..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,108 +0,0 @@ -# Contributing Guide - -This tutorial repository is a great opportunity to start contributing to Xarray. 
- -- Report bugs, request features or submit feedback as a [GitHub Issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/about-issues). First check existing [issues](https://github.com/xarray-contrib/xarray-tutorial/issues) ! - -- Make fixes, add content or improvements using [GitHub Pull Requests](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests), the sections below go over this process in more detail: - -```{seealso} -The Project Pythia Foundations material on [Github](https://foundations.projectpythia.org/foundations/getting-started-github.html) and Github workflows is a great place to start if you are new to this. -``` - -## Content Guidelines - -Please note that examples submitted to this repository should follow these -guidelines: - -1. Run top-to-bottom without intervention from the user -1. Not require external data sources that may disappear over time (external data sources that are highly unlikely to disappear are fine). Small datasets for tutorial purposes can be added [here](https://github.com/pydata/xarray-data/) if necessary. -1. Not be resource intensive, and should run within 2GB of memory -1. Be clear and contain enough prose to explain the topic at hand -1. Be concise and limited to one or two topics, such that a reader can get through the example within a few minutes of reading -1. Be of general relevance to Xarray users, and so not too specific on a particular problem or use case. - -## Contribution process - -### Fork this repository - -We recommend first forking this repository and creating a local copy: - -``` -git clone https://github.com/YOURACCOUNT/xarray-tutorial.git -cd xarray-tutorial -``` - -### Create a Python environment - -You'll need `pixi` or `conda` or `mamba`, which can be installed from https://github.com/conda-forge/miniforge - -We also use [pre-commit hooks](https://pre-commit.com) to run styling and other checks before committing code. - -#### Using pixi (recommended) - -``` -pixi install -pixi shell # exit -``` - -#### Using conda - -``` -mamba env create -f .binder/environment.yml -n xarray-tutorial -conda activate xarray-tutorial # conda deactivate -pre-commit install -``` - -### Add content - -Develop your new content on a branch. See [JupyterBook Docs](https://jupyterbook.org/en/stable/intro.html) for guides on adding `.md`, `.ipynb` and other content. - -``` -git checkout -b newcontent -git add . -git commit -m "added pages x,y and improved z" -``` - -### Preview your changes - -Running jupyterbook will execute notebooks and render HTML pages for the website. Be sure to fix any execution errors and preview the website in your web browser to make sure everything looks good! - -``` -jupyter-book build ./ --warningiserror --keep-going -# Or "pixi run build" -``` - -### Open a pull request - -``` -git push -``` - -Follow the link reported in a terminal to open a pull request! - -## Instructions for environment management - -[`pixi`](https://pixi.sh) can be used to create and update a multi-platform lockfile, so a reproducible set of package versions is installed across different operating systems. - -Dependencies (with optional pins) are specified in the `pyproject.toml` file, and specific locked versions for all platforms are kept in `pixi.lock`. 
- -Install environment from the lockfile - -``` -pixi install -pixi shell # activate environment, "exit" to deactivate -``` - -Upgrade all packages to latest versions: - -``` -pixi upgrade -``` - -## Render conda/mamba environment files - -``` -pixi project export conda-environment -p linux-64 .binder/environment.yml -pixi project export conda-explicit-spec -p linux-64 /tmp -``` diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 20e4bd85..00000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/README.md b/README.md deleted file mode 100644 index 92dbc0e9..00000000 --- a/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Xarray Tutorial - -[![CI](https://github.com/xarray-contrib/xarray-tutorial/workflows/CI/badge.svg?branch=main)](https://github.com/xarray-contrib/xarray-tutorial/actions?query=branch%3Amain) -[![Jupyter Book Badge](https://jupyterbook.org/badge.svg)](https://tutorial.xarray.dev) -[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/xarray-contrib/xarray-tutorial/HEAD?labpath=workshops/scipy2024/index.ipynb) - -This is the repository for a Jupyter Book website with tutorial material for [Xarray](https://github.com/pydata/xarray), _an open source project and Python package that makes working with labelled multi-dimensional arrays simple, efficient, and fun!_ - -The website is hosted at https://tutorial.xarray.dev - -Tutorials are written as interactive Jupyter Notebooks with executable code examples that you can easily run and modify: - -#### On the Cloud - -All notebooks can be run via the Mybinder.org 'Launch Binder' badge at the top of this page. This will load a pre-configured JupyterLab interface with all tutorial notebooks for you to run. _You have minimal computing resources and any changes you make will not be saved._ - -#### Github Codespaces - -This tutorial is available to run within [Github Codespaces](https://github.com/features/codespaces) - "a development environment that's hosted in the cloud" - -[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/xarray-contrib/xarray-tutorial/tree/main) - -☝️ Click the button above to go to options window to launch a Github codespace. - -GitHub currently gives every user [120 vCPU hours per month for free](https://docs.github.com/en/billing/managing-billing-for-github-codespaces/about-billing-for-github-codespaces#monthly-included-storage-and-core-hours-for-personal-accounts), beyond that you must pay. **So be sure to explicitly stop or shut down your codespace when you are done by going to this page (https://github.com/codespaces).** - -Once your codespace is launched, the following happens: - -- [Visual Studio Code](https://code.visualstudio.com/) Interface will open up within your browser. -- A built in terminal will open and it will execute `jupyter lab` automatically. 
-- Once you see a url to click within the terminal, simply `cmd + click` the given url. -- This will open up another tab in your browser, leading to a [Jupyter Lab](https://jupyterlab.readthedocs.io/en/latest/) Interface. - -#### Locally - -You can also run these notebooks on your own computer! We recommend using [`pixi`](https://pixi.sh/latest/#installation) to ensure a fully reproducible Python environment: - -```bash -git clone https://github.com/xarray-contrib/xarray-tutorial.git -cd xarray-tutorial -pixi run tutorial -``` - -## Contributing - -Contributions are welcome and greatly appreciated! See our [CONTRIBUTING.md](./CONTRIBUTING.md) document. - -Thanks to our contributors so far! - -[![Contributors](https://contrib.rocks/image?repo=xarray-contrib/xarray-tutorial)](https://github.com/xarray-contrib/xarray-tutorial/graphs/contributors) - -## Acknowledgements - -This website is the result of many contributions from the Xarray community! We're very grateful for everyone's volunteered effort as well as [sponsored development](https://xarray.dev/#sponsors). Funding for SciPy 2022, SciPy 2023 tutorial material development specifically was supported by NASA's Open Source Tools, Frameworks, and Libraries Program (award 80NSSC22K0345). diff --git a/_config.yml b/_config.yml deleted file mode 100644 index 0e1f89c8..00000000 --- a/_config.yml +++ /dev/null @@ -1,113 +0,0 @@ -# Learn more at https://jupyterbook.org/customize/config.html -title: "" -author: The Xarray Community -copyright: "2025" -logo: images/logo.png -only_build_toc_files: true -exclude_patterns: [.github, .pixi] - -# See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository -html: - # NOTE: this announcement shows up on all pages - #announcement: 'The Xarray 2024 User Survey is live. Please take ~5 minutes to fill it out and help us improve Xarray.' - #announcement: 'ℹ️ SciPy 2024 Tutorial Attendees. Click here .' - home_page_in_navbar: false - use_edit_page_button: true - use_issues_button: true - use_repository_button: true - extra_footer: '

Xarray is a fiscally sponsored project of NumFOCUS, a nonprofit dedicated to supporting the open-source scientific computing community.
Theme by the Executable Book Project.

Content licensed under the terms of the Apache 2.0 License.' - analytics: - google_analytics_id: G-JRQHYVFQR7 - -parse: - # https://jupyterbook.org/content/content-blocks.html?highlight=myst%20substitution#define-substitutions-for-your-whole-book - # https://jupyterbook.org/content/content-blocks.html#using-substitutions-in-links - myst_substitutions: - xarray_homepage: https://xarray.dev - xarray_docs: https://docs.xarray.dev - xarray_repo: https://github.com/pydata/xarray - xarray_forum: https://github.com/pydata/xarray/discussions - myst_enable_extensions: - # Defaults - - dollarmath - - linkify - - substitution - - colon_fence - # Extras - - html_image - -# Force re-execution of notebooks on each build. -# See https://jupyterbook.org/content/execute.html -execute: - execute_notebooks: "cache" - allow_errors: false - # Per-cell notebook execution limit (seconds) - timeout: 300 - -# Define the name of the latex output file for PDF builds -latex: - latex_documents: - targetname: book.tex - -# Configure your Binder links, such as the URL of the BinderHub. -launch_buttons: - notebook_interface: jupyterlab - binderhub_url: "https://mybinder.org" - -# Information about where the book exists on the web -repository: - url: "https://github.com/xarray-contrib/xarray-tutorial" - branch: main - -# Bibliography -bibtex_bibfiles: - - reference/references.bib - -# Advanced configuration -sphinx: - extra_extensions: - # 404 not found page - - notfound.extension - # maintain old paths and redirect them (so google results dont go to 404) - # https://github.com/wpilibsuite/sphinxext-rediraffe - - sphinxext.rediraffe - - sphinx_codeautolink - - sphinxcontrib.mermaid - - config: - language: en # accessibility - # application/vnd.holoviews_load.v0+json, application/vnd.holoviews_exec.v0+json - suppress_warnings: ["mystnb.unknown_mime_type", "misc.highlighting_failure"] - codeautolink_concat_default: True - notfound_context: - body: "

Whoops! 404 Page Not Found

\n\n

Sorry, this page doesn't exist. Many sections of this book have been updated recently.

Try the search box 🔎 to find what you're looking for!

" - notfound_urls_prefix: / - rediraffe_redirects: - scipy-tutorial/00_overview.ipynb: overview/get-started.md - workshops/scipy2022/README.md: overview/fundamental-path/README.md - fundamentals/02.1_working_with_labeled_data.ipynb: fundamentals/02.1_indexing_Basic.ipynb - - bibtex_reference_style: author_year # or label, super, \supercite - - intersphinx_mapping: - xarray: - - https://docs.xarray.dev/en/latest/ - - null - numpy: - - https://numpy.org/doc/stable - - null - scipy: - - https://docs.scipy.org/doc/scipy - - null - matplotlib: - - https://matplotlib.org/stable/ - - null - dask: - - https://docs.dask.org/en/latest - - null - python: - - https://docs.python.org/3/ - - null - pandas: - - https://pandas.pydata.org/pandas-docs/stable - - null diff --git a/_static/style.css b/_static/style.css deleted file mode 100644 index b1024666..00000000 --- a/_static/style.css +++ /dev/null @@ -1,3 +0,0 @@ -.bd-header-announcement { - background-color: var(--pst-color-info-bg); -} diff --git a/_toc.yml b/_toc.yml deleted file mode 100644 index 6c0aeda8..00000000 --- a/_toc.yml +++ /dev/null @@ -1,104 +0,0 @@ -# Learn more at https://jupyterbook.org/customize/toc.html -root: intro -format: jb-book -parts: - - caption: Overview - chapters: - - file: overview/get-started.md - - file: overview/xarray-in-45-min - - file: overview/learning-paths.md - sections: - - file: overview/fundamental-path/README.md - - file: overview/intermediate-path/README.md - - - caption: Fundamentals - chapters: - - file: fundamentals/01_data_structures.md - sections: - - file: fundamentals/01_datastructures - - file: fundamentals/01.1_creating_data_structures - - file: fundamentals/01.1_io - - file: fundamentals/02_labeled_data.md - sections: - - file: fundamentals/02.1_indexing_Basic.ipynb - - file: fundamentals/02.2_manipulating_dimensions - - file: fundamentals/03_computation.md - sections: - - file: fundamentals/03.1_computation_with_xarray - - file: fundamentals/02.3_aligning_data_objects - - file: fundamentals/03.2_groupby_with_xarray - - file: fundamentals/03.3_windowed - - file: fundamentals/03.4_weighted - - file: fundamentals/04.0_plotting.md - sections: - - file: fundamentals/04.1_basic_plotting - - file: fundamentals/04.2_faceting - - file: fundamentals/04.3_geographic_plotting - - - caption: Intermediate - chapters: - - file: intermediate/01-high-level-computation-patterns - - file: intermediate/indexing/indexing - sections: - - file: intermediate/indexing/advanced-indexing.ipynb - - file: intermediate/indexing/boolean-masking-indexing.ipynb - - file: intermediate/xarray_and_dask - - file: intermediate/xarray_ecosystem - - file: intermediate/hvplot - - file: intermediate/remote_data/index - sections: - - file: intermediate/remote_data/cmip6-cloud.ipynb - - file: intermediate/remote_data/remote-data.ipynb - - file: intermediate/data_cleaning/05.1_intro.md - sections: - - file: intermediate/data_cleaning/05.2_examples.md - - file: intermediate/data_cleaning/05.3_ice_velocity - - file: intermediate/data_cleaning/05.4_contributing.md - - file: intermediate/data_cleaning/05.5_scipy_talk.md - - - caption: Advanced - chapters: - - file: advanced/parallel-intro.md - - file: advanced/apply_ufunc/apply_ufunc.md - sections: - - file: advanced/apply_ufunc/simple_numpy_apply_ufunc - - file: advanced/apply_ufunc/core-dimensions - - file: advanced/apply_ufunc/complex-output-numpy - - file: advanced/apply_ufunc/automatic-vectorizing-numpy - - file: advanced/apply_ufunc/dask_apply_ufunc - - file: 
advanced/apply_ufunc/numba-vectorization - - file: advanced/apply_ufunc/example-interp - - file: advanced/map_blocks/map_blocks.md - sections: - - file: advanced/map_blocks/simple_map_blocks - - file: advanced/backends/backends.md - sections: - - file: advanced/backends/1.Backend_without_Lazy_Loading.ipynb - - file: advanced/backends/2.Backend_with_Lazy_Loading.ipynb - - file: advanced/accessors/accessors.md - sections: - - file: advanced/accessors/01_accessor_examples.ipynb - - - caption: Workshops - chapters: - - file: workshops/scipy2024/index.ipynb - - file: workshops/scipy2023/README - - file: workshops/thinking-like-xarray/README - sections: - - url: https://tutorial.xarray.dev/intermediate/01-high-level-computation-patterns - title: High-level computation patterns - - file: workshops/oceanhackweek2020/README - sections: - - url: https://tutorial.xarray.dev/overview/xarray-in-45-min - title: Xarray in 45 minutes - - file: workshops/online-tutorial-series/README - sections: - - file: workshops/online-tutorial-series/01_xarray_fundamentals - - file: workshops/online-tutorial-series/02_indexing - - file: workshops/online-tutorial-series/03_computation - - - caption: Reference - chapters: - - file: CONTRIBUTING - - file: reference/resources - - file: reference/glossary diff --git a/advanced/accessors/01_accessor_examples.ipynb b/advanced/accessors/01_accessor_examples.ipynb deleted file mode 100644 index 8efce1c8..00000000 --- a/advanced/accessors/01_accessor_examples.ipynb +++ /dev/null @@ -1,429 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Creating custom accessors" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Introduction\n", - "\n", - "An accessor is a way of attaching a custom function to xarray objects so that it can be called as if it were a method while retaining a clear separation between the \"core\" xarray API and custom API. It enables you to easily *extend* (which is why you'll sometimes see it referred to as an extension) and customize xarray's functionality while limiting naming conflicts and minimizing the chances of your code breaking with xarray upgrades.\n", - "\n", - "If you've used [rioxarray](https://corteva.github.io/rioxarray/stable/) (e.g. `da.rio.crs`) or [hvplot](https://hvplot.holoviz.org/) (e.g. `ds.hvplot()`), you may have already used an xarray accessor without knowing it!\n", - "\n", - "The [Xarray documentation](https://docs.xarray.dev/en/stable/internals/extending-xarray.html) has some more technical details, and this tutorial provides example custom accessors and their uses." - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Why create a custom accessor\n", - "\n", - "- You can easily create a custom suite of tools that work on Xarray objects\n", - "- It keeps your workflows cleaner and simpler\n", - "- Your project-specific code is easy to share\n", - "- It's easy to implement: you don't need to integrate any code into Xarray\n", - "- It makes it easier to perform checks and write code documentation because you only have to create them once!" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Easy steps to create your own accessor\n", - "\n", - "1. Create your custom class, including the mandatory `__init__` method\n", - "2. Add the `xr.register_dataarray_accessor()` or `xr.register_dataset_accessor()` \n", - "3. 
Use your custom functions " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example 1: accessing scipy functionality" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For example, imagine you're a statistician who regularly uses a special `skewness` function which acts on dataarrays but is only of interest to people in your specific field.\n", - "\n", - "You can create a method which applies this skewness function to an xarray object and then register the method under a custom `stats` accessor like this:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "from scipy.stats import skew\n", - "\n", - "xr.set_options(display_expand_attrs=False, display_expand_coords=False)\n", - "\n", - "\n", - "@xr.register_dataarray_accessor(\"stats\")\n", - "class StatsAccessor:\n", - " def __init__(self, da):\n", - " self._da = da\n", - "\n", - " def skewness(self, dim):\n", - " return self._da.reduce(func=skew, dim=dim)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we can conveniently access this functionality via the `stats` accessor" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds[\"skewair\"] = ds['air'].stats.skewness(dim=\"time\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice how the presence of `.stats` clearly differentiates our new \"accessor method\" from core xarray methods." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example 2: creating your own workflows\n", - "\n", - "Perhaps you find yourself running similar code for multiple xarray objects or across related projects. By packing your code into an extension, it makes it easy to repeat the same operation while reducing the likelihood of [human introduced] errors.\n", - "\n", - "Here we wrap the reorganization of InSAR ice velocity data illustrated in [this tutorial](https://tutorial.xarray.dev/intermediate/data_cleaning/05.3_ice_velocity.html) into a custom Xarray extension that makes it easy to re-apply each time you begin working with a new InSAR velocity dataset. Please see the linked tutorial for details on the data, applications, and each step in this process." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import os\n", - "import pandas as pd\n", - "import xarray as xr\n", - "\n", - "\n", - "@xr.register_dataset_accessor(\"insar_vel\")\n", - "class InsarReorg:\n", - " \"\"\"\n", - " An extension for an XArray dataset that will prepare InSAR data for analysis.\n", - "\n", - " Re-organize the data from its native structure to have x and y velocity and error along a time dimension.\n", - " \"\"\"\n", - "\n", - " # ----------------------------------------------------------------------\n", - " # Constructors\n", - "\n", - " def __init__(self, xrds):\n", - " self._xrds = xrds\n", - "\n", - " # ----------------------------------------------------------------------\n", - " # Methods\n", - "\n", - " @staticmethod\n", - " def _validate(self, req_dim=None, req_vars=None):\n", - " '''\n", - " Make sure the xarray dataset has the correct dimensions and variables.\n", - "\n", - " Running this function will check that my dataset has all the needed dimensions and variables\n", - " for a given function, saving time and headache later if they were missing and the computation fails\n", - " partway through.\n", - "\n", - " Parameters\n", - " ----------\n", - " req_dim : list of str\n", - " List of all required dimension names\n", - " req_vars : list of str\n", - " List of all required variable names\n", - " '''\n", - "\n", - " if req_dim is not None:\n", - " if all([dim not in list(self._xrds.dims) for dim in req_dim]):\n", - " raise AttributeError(\"Required dimensions are missing\")\n", - " if req_vars is not None:\n", - " if all([var not in self._xrds.variables for var in req_vars.keys()]):\n", - " raise AttributeError(\"Required variables are missing\")\n", - " # print(\"successfully validated your dataset\")\n", - "\n", - " # ----------------------------------------------------------------------\n", - " # Functions\n", - "\n", - " def change_vars_to_coords(\n", - " self,\n", - " req_dim=['ny', 'nx'],\n", - " req_vars={'xaxis': ['nx'], 'yaxis': ['ny']},\n", - " ):\n", - " \"\"\"\n", - " Turn the xaxis and y axis variables into coordinates.\n", - "\n", - " Parameters\n", - " ----------\n", - " req_dim : list of str\n", - " List of all required dimension names.\n", - " req_vars : list of str\n", - " List of all required variable names\n", - " \"\"\"\n", - "\n", - " self._validate(self, req_dim, req_vars)\n", - "\n", - " self._xrds = self._xrds.swap_dims({'ny': 'yaxis', 'nx': 'xaxis'})\n", - " self._xrds = self._xrds.rename({'xaxis': 'x', 'yaxis': 'y'})\n", - "\n", - " return self._xrds\n", - "\n", - " def reorg_dataset(self):\n", - " \"\"\"\n", - " Reorganize the data by time for each of the desired end variables (here vx, vy, err)\n", - "\n", - " \"\"\"\n", - "\n", - " reorged = []\n", - " for reorg_var in ['vx', 'vy', 'err']:\n", - " ds = self.reorg_var_time(reorg_var)\n", - " reorged.append(ds)\n", - "\n", - " reorged_ds = xr.merge(reorged)\n", - "\n", - " return reorged_ds\n", - "\n", - " def reorg_var_time(self, reorg_var):\n", - " \"\"\"\n", - " Repeat the process for a given variable.\n", - "\n", - " Figure out which of the original variables are time steps for this variable and turn each one into a dataarray.\n", - " Add a time dimension and update the variable name for each dataarray.\n", - " Combine the modified data arrays back into a single dataset.\n", - " \"\"\"\n", - "\n", - " # create storage list for reorganizing\n", - " var_ls = list(self._xrds)\n", - " 
to_reorg = [var for var in var_ls if reorg_var in var]\n", - "\n", - " # list the arrays from the original dataset that correspond to the variable\n", - " das_to_reorg = [self._xrds[var] for var in to_reorg]\n", - "\n", - " # add the time dimension\n", - " das_to_reorg = [das_to_reorg[var].expand_dims('time') for var in range(len(das_to_reorg))]\n", - "\n", - " # update variable name to remove time\n", - " das_to_reorg = [das_to_reorg[var].rename(reorg_var) for var in range(len(das_to_reorg))]\n", - "\n", - " ds = xr.concat(das_to_reorg, dim='time')\n", - "\n", - " return ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset('ASE_ice_velocity.nc')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = ds.insar_vel.change_vars_to_coords()\n", - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = ds.insar_vel.reorg_dataset()\n", - "ds" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example 3: creating your own workflows with locally stored corrections\n", - "\n", - "Consider someone who frequently converts their elevations to be relative to the geoid (rather than the ellipsoid) using a custom, local conversion (otherwise, we'd recommend using an established conversion library like [pyproj](https://pypi.org/project/pyproj/) to switch between datums).\n", - "\n", - "An accessor provides an elegant way to build (once) and apply (as often as needed!) this custom conversion on top of the existing xarray ecosystem without the need to copy-paste the code into the start of each project. By standardizing our approach and adding a few sanity checks within the accessor, we also eliminate the risk of accidentally applying the correction multiple times." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import rasterio\n", - "import xarray as xr\n", - "\n", - "\n", - "@xr.register_dataset_accessor(\"geoidxr\")\n", - "class GeoidXR:\n", - " \"\"\"\n", - " An extension for an XArray dataset that will calculate geoidal elevations from a local source file.\n", - " \"\"\"\n", - "\n", - " # ----------------------------------------------------------------------\n", - " # Constructors\n", - "\n", - " def __init__(\n", - " self,\n", - " xrds,\n", - " ):\n", - " self._xrds = xrds\n", - " # Running this function on init will check that my dataset has all the needed dimensions and variables\n", - " # as specific to my workflow, saving time and headache later if they were missing and the computation fails\n", - " # partway through.\n", - " self._validate(\n", - " self, req_dim=['x', 'y', 'dtime'], req_vars={'elevation': ['x', 'y', 'dtime']}\n", - " )\n", - "\n", - " # ----------------------------------------------------------------------\n", - " # Methods\n", - "\n", - " @staticmethod\n", - " def _validate(self, req_dim=None, req_vars=None):\n", - " '''\n", - " Make sure the xarray dataset has the correct dimensions and variables\n", - "\n", - " Parameters\n", - " ----------\n", - " req_dim : list of str\n", - " List of all required dimension names\n", - " req_vars : list of str\n", - " List of all required variable names\n", - " '''\n", - "\n", - " if req_dim is not None:\n", - " if all([dim not in list(self._xrds.dims) for dim in req_dim]):\n", - " raise AttributeError(\"Required dimensions are missing\")\n", - " if req_vars is not None:\n", - " if all([var not in self._xrds.variables for var in req_vars.keys()]):\n", - " raise AttributeError(\"Required variables are missing\")\n", - "\n", - " # Notice that 'geoid' has been added to the req_vars list\n", - " def to_geoid(\n", - " self,\n", - " req_dim=['dtime', 'x', 'y'],\n", - " req_vars={'elevation': ['x', 'y', 'dtime', 'geoid']},\n", - " source=None,\n", - " ):\n", - " \"\"\"\n", - " Get geoid layer from your local file, which is provided to the function as \"source\",\n", - " and apply the offset to all elevation values.\n", - " Adds 'geoid_offset' keyword to \"offsets\" attribute so you know the geoid offset was applied.\n", - "\n", - " Parameters\n", - " ----------\n", - " req_dim : list of str\n", - " List of all required dimension names.\n", - " req_vars : list of str\n", - " List of all required variable names\n", - " source : str\n", - " Full path to your source file containing geoid offsets\n", - " \"\"\"\n", - "\n", - " # check to make sure you haven't already run this function (and are thus applying the offset twice)\n", - " try:\n", - " values = self._xrds.attrs['offset_names']\n", - " assert 'geoid_offset' not in values, \"You've already applied the geoid offset!\"\n", - " values = list([values]) + ['geoid_offset']\n", - " except KeyError:\n", - " values = ['geoid_offset']\n", - "\n", - " self._validate(self, req_dim, req_vars)\n", - "\n", - " # read in your geoid values\n", - " # WARNING: this implementation assumes your geoid values are in the same CRS and grid as the data you are applying\n", - " # them to. 
If not, you will need to reproject and/or resample them to match the data to which you are applying them.\n", - " # That step is not included here to emphasize the accessor aspect of the workflow.\n", - " with rasterio.open(source) as src:\n", - " geoid = src['geoid_varname']\n", - "\n", - " # As noted above, this step will fail or produce unreliable results if your data is not properly gridded\n", - " self._xrds['elevation'] = self._xrds.elevation - geoid\n", - "\n", - " self._xrds.attrs['offset_names'] = values\n", - "\n", - " return self._xrds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, each time we want to convert our ellipsoid data to the geoid, we only have to run one line of code, and it will also perform a multitude of checks for us to make sure we're performing exactly the operation we expect. Imagine the possibilities (and decrease in frustration)!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "ds = ds.geoidxr.to_geoid(source='/Path/to/Custom/source/file.nc')\n", - "ds" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "vscode": { - "interpreter": { - "hash": "eeef546aa85c5aee566c457bd2890cafb9e11a3b514b94bbf230bf44d1caf251" - } - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/advanced/accessors/accessors.md b/advanced/accessors/accessors.md deleted file mode 100644 index f49df5d2..00000000 --- a/advanced/accessors/accessors.md +++ /dev/null @@ -1,3 +0,0 @@ -```{tableofcontents} - -``` diff --git a/advanced/apply_ufunc/apply_ufunc.md b/advanced/apply_ufunc/apply_ufunc.md deleted file mode 100644 index 4fb0811d..00000000 --- a/advanced/apply_ufunc/apply_ufunc.md +++ /dev/null @@ -1,5 +0,0 @@ -# apply_ufunc - -```{tableofcontents} - -``` diff --git a/advanced/apply_ufunc/automatic-vectorizing-numpy.ipynb b/advanced/apply_ufunc/automatic-vectorizing-numpy.ipynb deleted file mode 100644 index 4fd42fdc..00000000 --- a/advanced/apply_ufunc/automatic-vectorizing-numpy.ipynb +++ /dev/null @@ -1,358 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "(vectorize)=\n", - "# Automatic Vectorization" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": { - "tags": [] - }, - "source": [ - "Previously we looked at [applying functions](gentle-intro) on numpy arrays, and the concept of [core dimensions](core-dimensions).\n", - "We learned that functions commonly support specifying \"core dimensions\" through the `axis` keyword\n", - "argument. \n", - "\n", - "However many functions exist, that implicitly have core dimensions, but do not provide an `axis` keyword\n", - "argument. 
Applying such functions to a nD array usually involves one or multiple loops over the other dimensions\n", - "--- termed \"loop dimensions\" or \"broadcast dimensions\".\n", - "\n", - "\n", - "A good example is numpy's 1D interpolate function `numpy.interp`:\n", - "\n", - "```\n", - " Signature: np.interp(x, xp, fp, left=None, right=None, period=None)\n", - " Docstring:\n", - " One-dimensional linear interpolation.\n", - "\n", - " Returns the one-dimensional piecewise linear interpolant to a function\n", - " with given discrete data points (`xp`, `fp`), evaluated at `x`.\n", - "```\n", - "\n", - "This function expects 1D arrays as input, so there is one core dimension and we cannot easily apply \n", - "it to a nD array since there is no `axis` keyword argument. \n", - "\n", - "\n", - "Our goal here is to \n", - "1. Understand the difference between core dimensions and loop dimensions\n", - "1. Understand vectorization\n", - "1. Learn how to apply such functions without loops using `apply_ufunc` by providing the `vectorize` keyword argument.\n", - "\n", - "## Core dimensions and looping\n", - "\n", - "Let's say we want to\n", - "interpolate an array with two dimensions (`space`, `time`) over the `time` dimension, we might \n", - "1. loop over the `space` dimension, \n", - "1. subset the array to a 1D array at that `space` location, \n", - "1. Interpolate the 1D arrays to the new `time` vector, and\n", - "1. Assign that new interpolated 1D array to the appropriate location of a 2D output array\n", - "\n", - "In pseudo-code this might look like\n", - "\n", - "```python\n", - "for index in range(size_of_space_axis):\n", - " out[index, :] = np.interp(..., array[index, :], ...)\n", - "```\n", - "\n", - "::::{admonition} Exercise\n", - ":class: tip\n", - "Consider the example problem of interpolating a 2D array with dimensions `space` and `time` along the `time` dimension.\n", - "Which dimension is the core dimension, and which is the \"loop dimension\"?\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "`time` is the core dimension, and `space` is the loop dimension.\n", - ":::\n", - "::::\n", - "\n", - "## Vectorization\n", - "\n", - "The pattern of looping over any number of \"loop dimensions\" and applying a function along \"core dimensions\" \n", - "is so common that numpy provides wrappers that automate these steps: \n", - "1. [numpy.apply_along_axis](https://numpy.org/doc/stable/reference/generated/numpy.apply_along_axis.html)\n", - "1. [numpy.apply_over_axes](https://numpy.org/doc/stable/reference/generated/numpy.apply_over_axes.html)\n", - "1. [numpy.vectorize](https://numpy.org/doc/stable/reference/generated/numpy.vectorize.html)\n", - "\n", - "\n", - "`apply_ufunc` provides an easy interface to `numpy.vectorize` through the keyword argument `vectorize`. 
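To make the connection concrete, here is a small sketch (not from the original notebook) of what `numpy.vectorize` does when given a gufunc-style `signature`: it loops over the leading "loop" dimension and calls `np.interp` on 1D slices.

```python
import numpy as np

# wrap np.interp so the 1D core dimension is handled slice-by-slice
interp_1d = np.vectorize(np.interp, signature="(n),(m),(m)->(n)")

new_x = np.linspace(0, 1, 50)   # 50 target points (core dim n)
old_x = np.linspace(0, 1, 10)   # 10 original points (core dim m)
data = np.random.rand(4, 10)    # (space: 4, time: 10); space is the loop dimension

out = interp_1d(new_x, old_x, data)
print(out.shape)  # (4, 50): looped over space, interpolated along the last axis
```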
Here we see how to use\n", - "that to automatically apply `np.interp` along a single axis of a nD array\n", - "\n", - "## Load data\n", - "\n", - "First lets load an example dataset\n", - "\n", - "```{tip}\n", - "We'll reduce the length of error messages using `%xmode minimal` See the [ipython documentation](https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-xmode) for details.\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import xarray as xr\n", - "import numpy as np\n", - "\n", - "xr.set_options(display_expand_data=False)\n", - "\n", - "air = (\n", - " xr.tutorial.load_dataset(\"air_temperature\")\n", - " .air.sortby(\"lat\") # np.interp needs coordinate in ascending order\n", - " .isel(time=slice(4), lon=slice(3)) # choose a small subset for convenience\n", - ")\n", - "air" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": { - "tags": [] - }, - "source": [ - "## Review\n", - "\n", - "\n", - "We'll work with the `apply_ufunc` call from the section on [handling dimensions that change size](complex-output-change-size). See the \"Handling Complex Output\" section for how to get here.\n", - "\n", - "This version only works with 1D vectors. We will expand that to work with inputs of any number of dimensions." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " air.lat,\n", - " air.isel(lon=0, time=0), # this version only works with 1D vectors\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": { - "tags": [] - }, - "source": [ - "## Try nD input\n", - "\n", - "Our goal is to interpolate latitude at every longitude and time, such that we go from a dataset with dimensions `(time: 4, lat: 25, lon: 3)` to `(time: 4, lat: 100, lon: 3)`. 
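For reference, here is a sketch (not part of the original notebook) of the explicit loops we are trying to avoid, using the `air` and `newlat` objects defined above:

```python
# loop over the two loop dimensions (time, lon) and interpolate along lat
out = np.empty((air.sizes["time"], len(newlat), air.sizes["lon"]))
for t in range(air.sizes["time"]):
    for l in range(air.sizes["lon"]):
        out[t, :, l] = np.interp(newlat, air.lat.values, air.isel(time=t, lon=l).values)
print(out.shape)  # (4, 100, 3)
```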
\n", - "\n", - "If we blindly try passing `air` (a 3D DataArray), we get a hard-to-understand error" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " air.lat,\n", - " air,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": { - "tags": [] - }, - "source": [ - "We will use a \"wrapper\" function `debug_interp` to examine what gets passed to `numpy.interp`.\n", - "\n", - "```{tip}\n", - "Such wrapper functions are a great way to understand and debug `apply_ufunc` use cases.\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "def debug_interp(xi, x, data):\n", - " print(f\"data: {data.shape} | x: {x.shape} | xi: {xi.shape}\")\n", - " return np.interp(xi, x, data)\n", - "\n", - "\n", - "interped = xr.apply_ufunc(\n", - " debug_interp, # first the function\n", - " newlat,\n", - " air.lat,\n", - " air,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"}, # dimensions allowed to change size. Must be set!\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": { - "tags": [] - }, - "source": [ - "That's a hard-to-interpret error from NumPy but our `print` call helpfully printed the shapes of the input data: \n", - "\n", - " data: (4, 3, 25) | x: (25,) | xi: (100,)\n", - "\n", - "We see that `apply_ufunc` passes the full 3D array to `interp1d_np` which in turn passes that on to `numpy.interp`. But `numpy.interp` requires a 1D input, and thus the error.\n", - "\n", - "Instead of passing the full 3D array we want loop over all combinations of `lon` and `time`; and apply our function to each corresponding vector of data along `lat`." - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": { - "tags": [] - }, - "source": [ - "## Vectorization with `np.vectorize`\n" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": { - "tags": [] - }, - "source": [ - "`apply_ufunc` makes it easy to loop over the loop dimensions by specifying `vectorize=True`:\n", - "\n", - " vectorize : bool, optional\n", - " If True, then assume ``func`` only takes arrays defined over core\n", - " dimensions as input and vectorize it automatically with\n", - " :py:func:`numpy.vectorize`. This option exists for convenience, but is\n", - " almost always slower than supplying a pre-vectorized function.\n", - " Using this option requires NumPy version 1.12 or newer.\n", - " \n", - "\n", - "```{warning}\n", - "Also see the numpy documentation for [numpy.vectorize](https://numpy.org/doc/stable/reference/generated/numpy.vectorize.html). Most importantly\n", - "\n", - " The vectorize function is provided primarily for convenience, not for performance. 
\n", - " The implementation is essentially a for loop.\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": { - "tags": [], - "user_expressions": [] - }, - "outputs": [], - "source": [ - "interped = xr.apply_ufunc(\n", - " debug_interp, # first the function\n", - " newlat,\n", - " air.lat,\n", - " air,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"}, # dimensions allowed to change size. Must be set!\n", - " vectorize=True,\n", - ")\n", - "interped" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": { - "tags": [] - }, - "source": [ - "Wow that worked!\n", - "\n", - "Notice that \n", - "1. the printed input shapes are all 1D and correspond to one vector of size 25 along the `lat` dimension.\n", - "2. `debug_interp` was called 4x3 = 12 times which is the total number `lat` vectors since the size along `time` is 4, and the size along `lon` is 3.\n", - "3. The result `interped` is now an xarray object with coordinate values copied over from `data`. \n", - "\n", - "\n", - "```{note}\n", - "`lat` is now the *last* dimension in `interped`. This is a \"property\" of core dimensions: they are moved to the end before being sent to `interp1d_np` as noted in the docstring for `input_core_dims`\n", - "\n", - " Core dimensions are automatically moved to the last axes of input\n", - " variables before applying ``func``, which facilitates using NumPy style\n", - " generalized ufuncs [2]_.\n", - "```\n", - "\n", - "## Conclusion\n", - "This is why `apply_ufunc` is so convenient; it takes care of a lot of code necessary to apply functions that consume and produce numpy arrays to xarray objects.\n", - "\n", - "The `vectorize` keyword argument, when set to True, will use `numpy.vectorize` to apply the function by looping over the \"loop dimensions\" --- dimensions that are not the core dimensions for the applied function." - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/apply_ufunc/complex-output-numpy.ipynb b/advanced/apply_ufunc/complex-output-numpy.ipynb deleted file mode 100644 index 0ad4c244..00000000 --- a/advanced/apply_ufunc/complex-output-numpy.ipynb +++ /dev/null @@ -1,376 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "(complex-output)=\n", - "# Handling complex output\n", - "\n", - "We've seen how to use `apply_ufunc` to handle relatively simple functions that transform every element, or reduce along a single dimension.\n", - "\n", - "This lesson will show you how to handle cases where the output is more complex in two ways:\n", - "1. Handle adding a new dimension by specifying `output_core_dims`\n", - "1. 
Handling the change in size of an existing dimension by specifying `exclude_dims` in addition to `output_core_dims`\n", - "\n", - "\n", - "## Introduction\n", - "\n", - "A good example of a function that returns relatively complex output is numpy's 1D interpolate function `numpy.interp`:\n", - "\n", - "```\n", - " Signature: np.interp(x, xp, fp, left=None, right=None, period=None)\n", - " Docstring:\n", - " One-dimensional linear interpolation.\n", - "\n", - " Returns the one-dimensional piecewise linear interpolant to a function\n", - " with given discrete data points (`xp`, `fp`), evaluated at `x`.\n", - "```\n", - "\n", - "This function expects a 1D array as input, and returns a 1D array as output. That is, `numpy.interp` has one core dimension.\n", - "\n", - "\n", - "```{tip}\n", - "We'll reduce the length of error messages using `%xmode minimal` See the [ipython documentation](https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-xmode) for details.\n", - "```\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import xarray as xr\n", - "import numpy as np\n", - "\n", - "np.set_printoptions(threshold=10, edgeitems=2)\n", - "xr.set_options(display_expand_data=False)\n", - "\n", - "air = (\n", - " xr.tutorial.load_dataset(\"air_temperature\")\n", - " .air.sortby(\"lat\") # np.interp needs coordinate in ascending order\n", - " .isel(time=-0, lon=0) # choose a 1D subset\n", - ")\n", - "air" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Our goal is to densify from 25 to 100 coordinate values:s\n", - "newlat = np.linspace(15, 75, 100)\n", - "np.interp(newlat, air.lat.data, air.data)" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": { - "tags": [] - }, - "source": [ - "(interp-add-new-dim)=\n", - "## Adding a new dimension\n", - "\n", - "1D interpolation transforms the size of the input along a single dimension.\n", - "\n", - "Logically, we can think of this as removing the old dimension and adding a new dimension.\n", - "\n", - "We provide this information to `apply_ufunc` using the `output_core_dims` keyword argument\n", - "\n", - "```\n", - " output_core_dims : List[tuple], optional\n", - " List of the same length as the number of output arguments from\n", - " ``func``, giving the list of core dimensions on each output that were\n", - " not broadcast on the inputs. 
By default, we assume that ``func``\n", - " outputs exactly one array, with axes corresponding to each broadcast\n", - " dimension.\n", - "\n", - " Core dimensions are assumed to appear as the last dimensions of each\n", - " output in the provided order.\n", - "```\n", - "\n", - "For `interp` we expect one returned output with one new core dimension that we will call `\"lat_interp\"`.\n", - "\n", - "Specify this using `output_core_dims=[[\"lat_interp\"]]`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # function to apply\n", - " newlat, # 1st input to np.interp\n", - " air.lat, # 2nd input to np.interp\n", - " air, # 3rd input to np.interp\n", - " input_core_dims=[[\"lat_interp\"], [\"lat\"], [\"lat\"]], # one entry per function input, 3 in total!\n", - " output_core_dims=[[\"lat_interp\"]],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Apply the following function using `apply_ufunc`. It adds a new dimension to the input array, let's call it `newdim`. Specify the new dimension using `output_core_dims`. Do you need any `input_core_dims`?\n", - "\n", - "```python\n", - "def add_new_dim(array):\n", - " return np.expand_dims(array, axis=-1)\n", - "```\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "def add_new_dim(array):\n", - " return np.expand_dims(array, axis=-1)\n", - "\n", - "\n", - "xr.apply_ufunc(\n", - " add_new_dim,\n", - " air,\n", - " output_core_dims=[[\"newdim\"]],\n", - ")\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": { - "tags": [], - "user_expressions": [] - }, - "source": [ - "(complex-output-change-size)=\n", - "## Dimensions that change size\n", - "\n", - "Imagine that you want the output to have the same dimension name `\"lat\"` i.e. applying`np.interp` changes the size of the `\"lat\"` dimension.\n", - "\n", - "We get an a error if we specify `\"lat\"` in `output_core_dims`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " air.lat,\n", - " air,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": { - "tags": [], - "user_expressions": [] - }, - "source": [ - "As the error message points out,\n", - "```\n", - "Only dimensions specified in ``exclude_dims`` with xarray.apply_ufunc are allowed to change size.\n", - "```\n", - "\n", - "Looking at the docstring we need to specify `exclude_dims` as a \"set\":\n", - "\n", - "```\n", - "exclude_dims : set, optional\n", - " Core dimensions on the inputs to exclude from alignment and\n", - " broadcasting entirely. Any input coordinates along these dimensions\n", - " will be dropped. Each excluded dimension must also appear in\n", - " ``input_core_dims`` for at least one argument. 
Only dimensions listed\n", - " here are allowed to change size between input and output objects.\n", - "```\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " air.lat,\n", - " air,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": { - "tags": [] - }, - "source": [ - "## Returning multiple variables\n", - "\n", - "Another common, but more complex, case is to handle multiple outputs returned by the function.\n", - "\n", - "As an example we will write a function that returns the minimum and maximum value along the last axis of the array.\n", - "\n", - "We will work with a 2D array, and apply the function `minmax` along the `\"lat\"` dimension:\n", - "```python\n", - "def minmax(array):\n", - " return array.min(axis=-1), array.max(axis=-1)\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def minmax(array):\n", - " return array.min(axis=-1), array.max(axis=-1)\n", - "\n", - "\n", - "air2d = xr.tutorial.load_dataset(\"air_temperature\").air.isel(time=0)\n", - "air2d" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": { - "tags": [], - "user_expressions": [] - }, - "source": [ - "By default, Xarray assumes one array is returned by the applied function.\n", - "\n", - "Here we have two returned arrays, and the input core dimension `\"lat\"` is removed (or reduced over).\n", - "\n", - "So we provide `output_core_dims=[[], []]` i.e. an empty list of core dimensions for each of the two returned arrays." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "minda, maxda = xr.apply_ufunc(\n", - " minmax,\n", - " air2d,\n", - " input_core_dims=[[\"lat\"]],\n", - " output_core_dims=[[], []],\n", - ")\n", - "minda" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "We presented the concept of \"core dimensions\" as the \"smallest unit of data the function could handle.\" Do you understand how the above use of `apply_ufunc` generalizes to an array with more than one dimension? \n", - "\n", - "Try applying the minmax function to a 3d air temperature dataset \n", - "```python\n", - "air3d = xr.tutorial.load_dataset(\"air_temperature\").air\n", - "``` \n", - "Your goal is to have a minimum and maximum value of temperature across all latitudes for a given time and longitude.\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "We want to use `minmax` to compute the minimum and maximum along the \"lat\" dimension always, regardless of how many dimensions are on the input. So we specify `input_core_dims=[[\"lat\"]]`. The output does not contain the \"lat\" dimension, but we expect two returned variables. 
So we pass an empty list `[]` for each returned array, so `output_core_dims=[[], []]` just as before.\n", - "\n", - "\n", - "```python\n", - "minda, maxda = xr.apply_ufunc(\n", - " minmax,\n", - " air3d,\n", - " input_core_dims=[[\"lat\"]],\n", - " output_core_dims=[[],[]],\n", - ")\n", - ":::\n", - "::::" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/apply_ufunc/core-dimensions.ipynb b/advanced/apply_ufunc/core-dimensions.ipynb deleted file mode 100644 index 7a009782..00000000 --- a/advanced/apply_ufunc/core-dimensions.ipynb +++ /dev/null @@ -1,372 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "# Core dimensions\n", - "\n", - "[Previously](gentle-intro) we learned to use `apply_ufunc` on simple functions that acted element by element. \n", - "\n", - "Here we move on to slightly more complex functions like `np.mean` that can act along a subset of an input array's dimensions.\n", - "\n", - "Such operations involve the concept of \"core dimensions\". \n", - "\n", - "Our learning goals are:\n", - "- Learn how to identify \"core dimensions\" for the function you're applying.\n", - "- Learn that \"core dimensions\" are automatically moved or transposed to the end of the array.\n", - "\n", - "\n", - "## Introduction\n", - "\n", - "For using more complex operations that consider some array values collectively,\n", - "it’s important to understand the idea of **core dimensions**. \n", - "Usually, they correspond to the fundamental dimensions over\n", - "which an operation is defined, e.g., the summed axis in `np.sum`. One way to think about core dimensions \n", - "is to consider the smallest dimensionality of data that the function acts on.\n", - "\n", - "```{important}\n", - "\n", - "A good clue that core dimensions are needed is the presence of an `axis` argument on the\n", - "corresponding NumPy function.\n", - "\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "## Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "# limit the amount of information printed to screen\n", - "xr.set_options(display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "Let's load a dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Reducing with `np.mean`\n", - "\n", - "Let's write a function that computes the mean along `time` for a provided xarray object. \n", - "\n", - "This function requires one core dimension `time`. For `ds.air` note that `time` is the 0th axis." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.air.dims" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": { - "tags": [] - }, - "source": [ - "`get_axis_num` is a useful method." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.air.get_axis_num(\"time\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "np.mean(ds.air, axis=ds.air.get_axis_num(\"time\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "np.mean(ds.air.data, axis=0)" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "Let's try to use `apply_ufunc` to replicate `np.mean(ds.air.data, axis=0)`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " # function to apply\n", - " np.mean,\n", - " # object with data to pass to function\n", - " ds,\n", - " # keyword arguments to pass to np.mean\n", - " kwargs={\"axis\": 0},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": { - "tags": [] - }, - "source": [ - "The error here\n", - "```\n", - "applied function returned data with unexpected number of dimensions. \n", - "Received 2 dimension(s) but expected 3 dimensions with names: ('time', 'lat', 'lon')\n", - "```\n", - "\n", - "means that while `np.mean` did indeed reduce one dimension, we did not tell `apply_ufunc` that this would happen. That is, we need to specify the core dimensions on the input.\n", - "\n", - "Do that by passing a list of dimension names for each input object. For this function we have one input : `ds` and with a single core dimension `\"time\"` so we have `input_core_dims=[[\"time\"]]`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " np.mean,\n", - " ds,\n", - " # specify core dimensions as a list of lists\n", - " # here 'time' is the core dimension on `ds`\n", - " input_core_dims=[\n", - " [\"time\"], # core dimension for ds\n", - " ],\n", - " kwargs={\"axis\": 0},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": { - "tags": [] - }, - "source": [ - "This next error is a little confusing.\n", - "\n", - "```\n", - "size of dimension 'lat' on inputs was unexpectedly changed by applied function from 25 to 53. 
\n", - "Only dimensions specified in ``exclude_dims`` with xarray.apply_ufunc are allowed to change size.\n", - "```\n", - "\n", - "\n", - "A good trick here is to pass a little wrapper function to `apply_ufunc` instead and inspect the shapes of data received by the wrapper.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "def wrapper(array, **kwargs):\n", - " print(f\"received {type(array)} shape: {array.shape}, kwargs: {kwargs}\")\n", - " result = np.mean(array, **kwargs)\n", - " print(f\"result.shape: {result.shape}\")\n", - " return result\n", - "\n", - "\n", - "xr.apply_ufunc(\n", - " wrapper,\n", - " ds,\n", - " # specify core dimensions as a list of lists\n", - " # here 'time' is the core dimension on `ds`\n", - " input_core_dims=[[\"time\"]],\n", - " kwargs={\"axis\": 0},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "Now we see the issue:\n", - "\n", - " received shape: (25, 53, 2920), kwargs: {'axis': 0}\n", - " result.shape: (53, 2920)\n", - " \n", - "The `time` dimension is of size `2920` and is now the last axis of the array but was initially the first axis" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.get_axis_num(\"time\")" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": { - "tags": [] - }, - "source": [ - "```{important}\n", - "This illustrates an important concept. Arrays are transposed so that core dimensions are at the end.\n", - "```\n", - "\n", - "With `apply_ufunc`, core dimensions are recognized by name, and then moved to\n", - "the last dimension of any input arguments before applying the given function.\n", - "This means that for functions that accept an `axis` argument, you usually need\n", - "to set `axis=-1`\n", - "\n", - "Such behaviour means that our functions (like `wrapper` or `np.mean`) do not need to know the exact order of dimensions. They can rely on the core dimensions being at the end allowing us to write very general code! \n", - "\n", - "We can fix our `apply_ufunc` call by specifying `axis=-1` instead." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "def wrapper(array, **kwargs):\n", - " print(f\"received {type(array)} shape: {array.shape}, kwargs: {kwargs}\")\n", - " result = np.mean(array, **kwargs)\n", - " print(f\"result.shape: {result.shape}\")\n", - " return result\n", - "\n", - "\n", - "xr.apply_ufunc(\n", - " wrapper,\n", - " ds,\n", - " input_core_dims=[[\"time\"]],\n", - " kwargs={\"axis\": -1},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Use `apply_ufunc` to apply `scipy.integrate.trapezoid` along the `time` axis.\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "import scipy as sp\n", - "import scipy.integrate\n", - "\n", - "xr.apply_ufunc(scipy.integrate.trapezoid, ds, input_core_dims=[[\"time\"]], kwargs={\"axis\": -1})\n", - "```\n", - ":::\n", - "::::" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/apply_ufunc/dask_apply_ufunc.ipynb b/advanced/apply_ufunc/dask_apply_ufunc.ipynb deleted file mode 100644 index 768674c5..00000000 --- a/advanced/apply_ufunc/dask_apply_ufunc.ipynb +++ /dev/null @@ -1,933 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "# Handling dask arrays\n", - "\n", - "We have previously worked over applying functions to NumPy arrays contained in Xarray objects.\n", - "`apply_ufunc` also lets you easily perform many of the steps involving in applying \n", - "functions that expect and return Dask arrays.\n", - "\n", - "Learning goals:\n", - "- Learn that `apply_ufunc` can automate aspects of applying computation functions on dask arrays\n", - "- It is possible to automatically parallelize certain operations by providing `dask=\"parallelized\"`\n", - "- In some cases, extra information needs to be provided such as sizes of any new dimensions added, or data types for output variables.\n", - "- Learn that all the concepts from the numpy lessons carry over: like [automatic vectorization](vectorize) and specifying input and\n", - " output core dimensions.\n", - "\n", - "\n", - "```{tip}\n", - "We'll reduce the length of error messages using `%xmode minimal` See the [ipython documentation](https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-xmode) for details.\n", - "```\n", - "\n", - "## Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import dask\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "# limit the amount of information printed to screen\n", - "xr.set_options(display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "First lets set up a `LocalCluster` using [dask.distributed](https://distributed.dask.org/).\n", - "\n", - "You can use any kind of dask cluster. This step is completely independent of\n", - "xarray. 
While not strictly necessary, the dashboard provides a nice learning\n", - "tool.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "from dask.distributed import Client\n", - "\n", - "client = Client()\n", - "client" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "
👆
Click the Dashboard link above. Or click the \"Search\" button in the dashboard.\n", - "\n", - "Let's test that the dashboard is working..\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "import dask.array\n", - "\n", - "dask.array.ones((1000, 4), chunks=(2, 1)).compute() # should see activity in dashboard" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "Let's open a dataset. We specify `chunks` so that we create a dask arrays for the DataArrays" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature\", chunks={\"time\": 100})\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": { - "tags": [] - }, - "source": [ - "## A simple example\n", - "\n", - "All the concepts from applying numpy functions carry over.\n", - "\n", - "However the handling of dask arrays needs to be explicitly activated.\n", - "\n", - "There are three options for the `dask` kwarg.\n", - "\n", - "```\n", - " dask : {\"forbidden\", \"allowed\", \"parallelized\"}, default: \"forbidden\"\n", - " How to handle applying to objects containing lazy data in the form of\n", - " dask arrays:\n", - "\n", - " - 'forbidden' (default): raise an error if a dask array is encountered.\n", - " - 'allowed': pass dask arrays directly on to ``func``. Prefer this option if\n", - " ``func`` natively supports dask arrays.\n", - " - 'parallelized': automatically parallelize ``func`` if any of the\n", - " inputs are a dask array by using :py:func:`dask.array.apply_gufunc`. Multiple output\n", - " arguments are supported. Only use this option if ``func`` does not natively\n", - " support dask arrays (e.g. converts them to numpy arrays).\n", - "```\n", - "\n", - "We will work through the following two:\n", - "\n", - "1. `dask=\"allowed\"` Dask arrays are passed to the user function. This is a good\n", - " choice if your function can handle dask arrays and won't compute the result unless \n", - " explicitly requested.\n", - "2. `dask=\"parallelized\"`. This applies the user function over blocks of the dask\n", - " array using `dask.array.apply_gufunc`. This is useful when your function cannot\n", - " handle dask arrays natively (e.g. scipy API)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "# Expect an error here\n", - "def squared_error(x, y):\n", - " return (x - y) ** 2\n", - "\n", - "\n", - "xr.apply_ufunc(squared_error, ds.air, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": { - "tags": [] - }, - "source": [ - " \n", - "A good thing to check is whether the applied function (here `squared_error`) can handle pure dask arrays. \n", - "To do this call `squared_error(ds.air.data, 1)` and make sure of the following:\n", - "1. That you don't see any activity on the dask dashboard\n", - "2. That the returned result is a dask array." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "squared_error(ds.air.data, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": { - "tags": [] - }, - "source": [ - "Since `squared_error` can handle dask arrays without computing them, we specify\n", - "`dask=\"allowed\"`." 
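For scripts, this dask-friendliness check can also be made programmatic rather than relying on the dashboard. A sketch, using the `squared_error` function and `ds` dataset from above:

```python
import dask.array

result = squared_error(ds.air.data, 1)
print(isinstance(result, dask.array.Array))  # True -> safe to use dask="allowed"
```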
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "sqer = xr.apply_ufunc(\n", - " squared_error,\n", - " ds.air,\n", - " 1,\n", - " dask=\"allowed\",\n", - ")\n", - "sqer # dask-backed DataArray! with nice metadata!" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": { - "tags": [] - }, - "source": [ - "### Understanding what's happening\n", - "\n", - "Let's again use the wrapper trick to understand what `squared_error` receives.\n", - "\n", - "We see that it receives a dask array (analogous to the numpy array in the previous example)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "def wrapper(x, y):\n", - " print(f\"received x of type {type(x)}, shape {x.shape}\")\n", - " print(f\"received y of type {type(y)}\")\n", - " return squared_error(x, y)\n", - "\n", - "\n", - "xr.apply_ufunc(wrapper, ds.air, 1, dask=\"allowed\")" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": { - "tags": [] - }, - "source": [ - "## Core dimensions\n", - "\n", - "`squared_error` operated on a per-element basis. How about a reduction like `np.mean`?\n", - "\n", - "Such functions involve the concept of \"core dimensions\". This concept is independent of the underlying array type, and is a property of the applied function. See the [core dimensions with NumPy](core-dimensions) tutorial for more.\n", - "\n", - "\n", - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Use `dask.array.mean` as an example of a function that can handle dask\n", - "arrays and uses an `axis` kwarg. \n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "def time_mean(da):\n", - " return xr.apply_ufunc(\n", - " dask.array.mean,\n", - " da,\n", - " input_core_dims=[[\"time\"]],\n", - " dask=\"allowed\",\n", - " kwargs={\"axis\": -1}, # core dimensions are moved to the end\n", - " )\n", - " \n", - "time_mean(ds.air)\n", - "```\n", - ":::\n", - "::::\n" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": { - "tags": [] - }, - "source": [ - "Again, this is identical to the built-in `mean`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "def time_mean(da):\n", - " return xr.apply_ufunc(\n", - " dask.array.mean,\n", - " da,\n", - " input_core_dims=[[\"time\"]],\n", - " dask=\"allowed\",\n", - " kwargs={\"axis\": -1}, # core dimensions are moved to the end\n", - " )\n", - "\n", - "\n", - "ds.air.mean(\"time\").identical(time_mean(ds.air))" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": { - "tags": [] - }, - "source": [ - "## Automatically parallelizing dask-unaware functions\n", - "\n", - "### Basics\n", - "\n", - "Not all functions can handle dask arrays appropriately by default.\n", - "\n", - "A very useful `apply_ufunc` feature is the ability to apply arbitrary functions\n", - "in parallel to each block. This ability can be activated using\n", - "`dask=\"parallelized\"`. \n", - "\n", - "We will use `scipy.integrate.trapezoid` as an example of a function that cannot\n", - "handle dask arrays and requires a core dimension. 
If we call `trapezoid` with a dask\n", - "array, we get a numpy array back that is, the values have been eagerly computed.\n", - "This is undesirable behaviour\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "import scipy as sp\n", - "import scipy.integrate\n", - "\n", - "sp.integrate.trapezoid(\n", - " ds.air.data, axis=ds.air.get_axis_num(\"lon\")\n", - ") # does NOT return a dask array, you should see activity on the dashboard" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": { - "tags": [] - }, - "source": [ - "Let's activate automatic parallelization by using `apply_ufunc` with `dask=\"parallelized\"`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "integrated = xr.apply_ufunc(\n", - " sp.integrate.trapezoid,\n", - " ds,\n", - " input_core_dims=[[\"lon\"]],\n", - " kwargs={\"axis\": -1},\n", - " dask=\"parallelized\",\n", - ")\n", - "integrated" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": { - "tags": [] - }, - "source": [ - "And make sure the returned data is a dask array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "integrated.air.data" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "Now you have control over executing this parallel computation." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "# Dask -> Numpy array of integrated values\n", - "parallelized_results = integrated.compute()\n", - "parallelized_results" - ] - }, - { - "cell_type": "markdown", - "id": "27", - "metadata": { - "tags": [] - }, - "source": [ - "### Understanding `dask=\"parallelized\"`\n", - "\n", - "It is very important to understand what `dask=\"parallelized\"` does. To fully understand it, requires understanding some core concepts.\n", - "\n", - "```{seealso}\n", - "For `dask=\"parallelized\"` `apply_ufunc` will call `dask.array.apply_gufunc`. See the dask documentation on [generalized ufuncs](https://docs.dask.org/en/stable/array-gufunc.html) and [`apply_gufunc`](https://docs.dask.org/en/stable/generated/dask.array.gufunc.apply_gufunc.html) for more.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": { - "tags": [] - }, - "source": [ - "#### Embarrassingly parallel or blockwise operations\n", - "\n", - "`dask=\"parallelized\"` works well for \"blockwise\" or \"embarrassingly parallel\" operations ([Wikipedia](https://en.wikipedia.org/wiki/Embarrassingly_parallel)).\n", - "\n", - "These are operations where one block or chunk of the output array corresponds to one block or chunk of the input array. Specifically, the blocks or chunks of the _core dimension_ is what matters. Importantly, no communication between blocks is necessary to create the output, which makes parallelization quite simple or \"embarrassing\".\n", - "\n", - "Let's look at the dask repr for `ds` and note chunksizes are (100,25,53) for a array with shape (2920, 25, 53). This means that each block or chunk of the array contains all `lat`, `lon` points and a subset of `time` points." 
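A quick way to confirm this chunk structure programmatically (a sketch; the exact chunk sizes depend on how `ds` was opened above):

```python
print(ds.air.data.chunks)     # e.g. ((100, 100, ..., 20), (25,), (53,))
print(ds.air.data.numblocks)  # e.g. (30, 1, 1): a single chunk along lat and lon
```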
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.air.data" - ] - }, - { - "cell_type": "markdown", - "id": "30", - "metadata": { - "tags": [] - }, - "source": [ - "The core dimension for `trapezoid` is `lon`, and there is only one chunk along `lon`. This means that integrating along `lon` is a \"blockwise\" or \"embarrassingly parallel\" operation and `dask=\"parallelized\"` works quite well. \n", - "\n", - "```{caution} Question\n", - "Do you understand why `integrate(ds)` when `ds` has a single chunk along `lon` is a \"embarrassingly parallel\" operation?\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "31", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "Apply the integrate function to `ds` after rechunking to have a different chunksize along `lon` using `ds.chunk(lon=4)` (for example). What happens?\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "`apply_ufunc` complains that it cannot automatically parallelize because the dataset `ds` is now chunked along the core dimension `lon`. You should see the following error:\n", - "\n", - " ValueError: dimension lon on 0th function argument to apply_ufunc with dask='parallelized' \n", - " consists of multiple chunks, but is also a core dimension. To fix, either rechunk \n", - " into a single array chunk along this dimension, i.e., \n", - " ``.chunk(dict(lon=-1))``, or pass ``allow_rechunk=True`` in ``dask_gufunc_kwargs`` \n", - " but beware that this may significantly increase memory usage.\n", - "\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": { - "tags": [] - }, - "source": [ - "#### Understanding execution\n", - "\n", - "We are layering many concepts together there so it is important to understand how the function is executed, and what input it will receive. Again we will use our wrapper trick." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def integrate_wrapper(array, **kwargs):\n", - " print(f\"received array of type {type(array)}, shape {array.shape}\")\n", - " result = sp.integrate.trapezoid(array, **kwargs)\n", - " print(f\"received array of type {type(result)}, shape {result.shape}\")\n", - " return result\n", - "\n", - "\n", - "integrated = xr.apply_ufunc(\n", - " integrate_wrapper,\n", - " ds,\n", - " input_core_dims=[[\"lon\"]],\n", - " kwargs={\"axis\": -1},\n", - " dask=\"parallelized\",\n", - ")\n", - "integrated" - ] - }, - { - "cell_type": "markdown", - "id": "34", - "metadata": {}, - "source": [ - "Note that we received an Xarray object back (`integrated`) but our wrapper function was called with a numpy array of shape `(1,1,1)`.\n", - "\n", - "```{important}\n", - "the full 3D array has **not yet been** passed to `integrate_wrapper`. Yet dask needs to know the shape and dtype of the result. This is key. 
\n", - "```\n", - "\n", - "The `integrate_wrapper` function is treated like a black box, and its effect on the inputs has to either be described through additional keyword arguments, or inferred by passing dummy inputs.\n", - "\n", - "To do so, `dask.array.apply_gufunc` calls the user function with dummy inputs (here a numpy array of shape `(1,1,1)`), and inspects the returned value to understand that one dimension was removed (returned a numpy array of shape `(1,1)`.\n", - "\n", - "````{caution}\n", - ":class: dropdown\n", - "\n", - "Some functions can have trouble handling such dummy inputs. Alternatively you can pass `meta = np.ones((1,1))` in `dask_gufunc_kwargs` to prevent dask from providing dummy inputs to the array.\n", - "```python\n", - "xr.apply_ufunc(\n", - " integrate_wrapper,\n", - " ds,\n", - " input_core_dims=[[\"lon\"]],\n", - " kwargs={\"axis\": -1},\n", - " dask=\"parallelized\",\n", - " dask_gufunc_kwargs={\"meta\": np.ones((1,1))},\n", - ")\n", - "```\n", - "````\n", - "\n", - "Since no errors were raised we proceed as-is.\n", - "\n", - "Let's compute the array to get real values." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": { - "tags": [ - "output-scroll" - ] - }, - "outputs": [], - "source": [ - "integrated.compute()" - ] - }, - { - "cell_type": "markdown", - "id": "36", - "metadata": {}, - "source": [ - "We see that `integrate_wrapper` is called many times! As many times as there are blocks in the array in fact, which is 30 here (`ds.air.data.numblocks`).\n", - "\n", - "Our function is independently executed on each block of the array, and then the results are concatenated to form the final result.\n", - "\n", - "Conceptually, there is a two-way flow of information between various packages when executing `integrated.compute()`:\n", - "\n", - "`xarray.apply_ufunc` ↔ `dask.array.apply_gufunc` ↔ `integrate_wrapper` ↔ `scipy.integrate.trapezoid` ↔ `ds.air.data`\n", - "\n", - "\n", - "When executed\n", - "\n", - "1. Xarray loops over all data variables.\n", - "1. Xarray unwraps the underlying dask array (e.g. `ds.air`) and passes that to dask's `apply_gufunc`.\n", - "1. `apply_gufunc` calls `integrate_wrapper` on each block of the array.\n", - "1. For each block, `integrate_wrapper` calls `scipy.integrate.trapezoid` and returns one block of the output array.\n", - "1. dask stitches all the output blocks to form the output array.\n", - "1. `xarray.apply_ufunc` wraps the output array with Xarray metadata to give the final result.\n", - "\n", - "Phew!\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "37", - "metadata": { - "tags": [] - }, - "source": [ - "## More complex situations\n", - "\n", - "Here we quickly demonstrate that all the concepts from the numpy material earlier carry over.\n", - "\n", - "Xarray needs a lot of extra metadata, so depending\n", - "on the function, extra arguments such as `output_dtypes` and `output_sizes` may\n", - "be necessary for supporting dask arrays. We demonstrate this below." - ] - }, - { - "cell_type": "markdown", - "id": "38", - "metadata": { - "tags": [] - }, - "source": [ - "### Adding new dimensions\n", - "\n", - "We use the `np.expand_dims` to change the size of the input along a single dimension.\n", - "\n", - "```python\n", - "def add_new_dim(array):\n", - " return np.expand_dims(array, axis=0)\n", - "```\n", - "\n", - "When automatically parallelizing with `dask`, we need to provide some more information about the outputs.\n", - "1. 
When adding a new dimensions, we need to provide the size in `dask_gufunc_kwargs` using the key `output_sizes`\n", - "2. Usually we need provide the datatype or `dtype` of the returned array. Usually the dtype of the input is a good guess." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "39", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "def add_new_dim(array):\n", - " return np.expand_dims(array, axis=-1)\n", - "\n", - "\n", - "xr.apply_ufunc(\n", - " add_new_dim, # first the function\n", - " ds.air.chunk({\"time\": 2, \"lon\": 2}),\n", - " output_core_dims=[[\"newdim\"]],\n", - " dask=\"parallelized\",\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "40", - "metadata": {}, - "source": [ - "Provide the size of the newly added dimension `newdim` in `output_sizes` as part of the `dask_gufunc_kwargs` keyword argument:\n", - "\n", - " dask_gufunc_kwargs (dict, optional) – Optional keyword arguments passed to dask.array.apply_gufunc() \n", - " if dask=’parallelized’. Possible keywords are output_sizes, allow_rechunk and meta.\n", - " \n", - "The syntax is \n", - "```python\n", - "dask_gufunc_kwargs={\n", - " \"output_sizes\": {\"newdim\": 1}\n", - "}\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "41", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " add_new_dim, # first the function\n", - " ds.air.chunk({\"time\": 2, \"lon\": 2}),\n", - " output_core_dims=[[\"newdim\"]],\n", - " dask=\"parallelized\",\n", - " dask_gufunc_kwargs={\"output_sizes\": {\"newdim\": 1}},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "42", - "metadata": {}, - "source": [ - "### Dimensions that change size" - ] - }, - { - "cell_type": "markdown", - "id": "43", - "metadata": {}, - "source": [ - "We will now repeat the [interpolation example from earlier](interp-add-new-dim) with `\"lat\"` as the output core dimension. See the numpy notebook on [complex output](complex-output) for more.\n", - "\n", - "```python\n", - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp,\n", - " newlat,\n", - " ds.air.lat,\n", - " ds.air.chunk({\"time\": 2, \"lon\": 2}),\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"},\n", - ")\n", - "```\n", - "\n", - "We will first add `dask=\"parallelized\"` and provide `output_sizes` in `dask_gufunc_kwargs`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "44", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " ds.air.lat,\n", - " ds.air.chunk({\"time\": 2, \"lon\": 2}),\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"},\n", - " # The following are dask-specific\n", - " dask=\"parallelized\",\n", - " dask_gufunc_kwargs=dict(output_sizes={\"lat\": len(newlat)}),\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "45", - "metadata": {}, - "source": [ - "This error means that we need to provide `output_dtypes`\n", - "\n", - " output_dtypes (list of dtype, optional) – Optional list of output dtypes. \n", - " Only used if dask='parallelized' or vectorize=True." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "\n", - "xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " ds.air.lat,\n", - " ds.air.chunk({\"time\": 100, \"lon\": -1}),\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"},\n", - " # The following are dask-specific\n", - " dask=\"parallelized\",\n", - " dask_gufunc_kwargs=dict(output_sizes={\"lat\": len(newlat)}),\n", - " output_dtypes=[ds.air.dtype],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "47", - "metadata": {}, - "source": [ - "```{tip}\n", - "Dask can sometimes figure out the output sizes and dtypes. The usual workflow is to read the error messages and iteratively pass more information to `apply_ufunc`.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "48", - "metadata": {}, - "source": [ - "### Automatic Vectorizing\n", - "\n", - "[Automatic vectorizing](vectorize) with `vectorize=True` also carries over!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "49", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "interped = xr.apply_ufunc(\n", - " np.interp, # first the function\n", - " newlat,\n", - " ds.air.lat,\n", - " ds.chunk({\"time\": 100, \"lon\": -1}),\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"lat\"]],\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims={\"lat\"}, # dimensions allowed to change size. Must be set!\n", - " dask=\"parallelized\",\n", - " dask_gufunc_kwargs=dict(output_sizes={\"lat\": len(newlat)}),\n", - " output_dtypes=[ds.air.dtype],\n", - " vectorize=True,\n", - ")\n", - "interped" - ] - }, - { - "cell_type": "markdown", - "id": "50", - "metadata": {}, - "source": [ - "Again, it is important to understand the conceptual flow of information between the variuus packages when executing `interped.compute()` which looks ilke\n", - "\n", - "`xarray.apply_ufunc` ↔ `dask.array.apply_gufunc` ↔ `numpy.vectorize` ↔ `numpy.interp`\n", - "\n", - "\n", - "When executed\n", - "\n", - "1. Xarray loops over all data variables.\n", - "1. Xarray unwraps the underlying dask array (e.g. `ds.air`) and passes that to dask's `apply_gufunc`.\n", - "1. `apply_gufunc` calls the vectorized function on each block of the array.\n", - "1. For each block, `numpy.vectorize` handles looping over the loop dimensions \n", - " and passes 1D vectors along the core dimension to `numpy.interp`\n", - "1. The 1D results for each block are concatenated by `numpy.vectorize` to create one output block.\n", - "1. dask stitches all the output blocks to form the output array.\n", - "1. 
`xarray.apply_ufunc` wraps the output array with Xarray metadata to give the final result.\n", - "\n", - "Phew!\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "51", - "metadata": { - "tags": [] - }, - "source": [ - "## Clean up the cluster" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "52", - "metadata": { - "tags": [ - "remove-output" - ] - }, - "outputs": [], - "source": [ - "client.close();" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/apply_ufunc/example-interp.ipynb b/advanced/apply_ufunc/example-interp.ipynb deleted file mode 100644 index d895b9aa..00000000 --- a/advanced/apply_ufunc/example-interp.ipynb +++ /dev/null @@ -1,731 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "# np.interp : An end-to-end example\n", - "\n", - "**Author** [Deepak Cherian (NCAR)](https://cherian.net)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "This example will illustrate how to conveniently apply an unvectorized function `func` to xarray objects using `apply_ufunc`. `func` expects 1D numpy arrays and returns a 1D numpy array. Our goal is to conveniently apply this function along a dimension of xarray objects that may or may not wrap dask arrays with a signature.\n", - "\n", - "We will illustrate this using [`np.interp`](https://numpy.org/doc/stable/reference/generated/numpy.interp.html): \n", - "\n", - " Signature: np.interp(x, xp, fp, left=None, right=None, period=None)\n", - " Docstring:\n", - " One-dimensional linear interpolation.\n", - "\n", - " Returns the one-dimensional piecewise linear interpolant to a function\n", - " with given discrete data points (`xp`, `fp`), evaluated at `x`.\n", - "\n", - "and write an `xr_interp` function with signature\n", - "\n", - " xr_interp(xarray_object, dimension_name, new_coordinate_to_interpolate_to)\n", - " \n", - " \n", - "## Learning goals \n", - "\n", - "Our goal is to use `apply_ufunc` with a general function so that we can reuse our code to apply to different xarray datasets or along different dimensions. Specifically, this example will illustrate \n", - "1. Specifying core dimensions with `input_core_dims`\n", - "1. Handling core dimensions of the output with `output_core_dims`\n", - "1. Handling core dimensions that change size using `exclude_dims`\n", - "1. Automatic vectorizing or looping over dimensions that are not core dimensions using `vectorize=True`\n", - "1. Automatically parallelization with dask arrays using `dask=\"parallelized\"`\n", - "1. 
High-performance vectorization with numba and `vectorize=False`.\n", - "\n", - "It puts together all the concepts covered earlier.\n", - "\n", - "\n", - "```{tip}\n", - "We'll reduce the length of error messages using in this tutorial using `%xmode minimal` See the [ipython documentation](https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-xmode) for details.\n", - "```\n", - "\n", - "## Load data\n", - "\n", - "First lets load an example dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import xarray as xr\n", - "import numpy as np\n", - "\n", - "# limit the amount of information printed to screen\n", - "xr.set_options(display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)\n", - "\n", - "air = (\n", - " xr.tutorial.load_dataset(\"air_temperature\")\n", - " .air.sortby(\"lat\") # np.interp needs coordinate in ascending order\n", - " .isel(time=slice(4), lon=slice(3))\n", - ") # choose a small subset for convenience\n", - "air" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "The function we will apply is `np.interp` which expects 1D numpy arrays. This functionality is already implemented in xarray so we use that capability to make sure we are not making mistakes." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "newlat = np.linspace(15, 75, 100)\n", - "air.interp(lat=newlat)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's define a function that works with one vector of data along `lat` at a time." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def interp1d_np(data, x, xi):\n", - " return np.interp(xi, x, data)\n", - "\n", - "\n", - "interped = interp1d_np(air.isel(time=0, lon=0), air.lat, newlat)\n", - "expected = air.interp(lat=newlat)\n", - "\n", - "# no errors are raised if values are equal to within floating point precision\n", - "np.testing.assert_allclose(expected.isel(time=0, lon=0).values, interped)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "No errors are raised so our interpolation is working.\n", - "\n", - "This function consumes and returns numpy arrays, which means we need to do a lot of work to convert the result back to an xarray object with meaningful metadata. This is where `apply_ufunc` is very useful.\n", - "\n", - "## `apply_ufunc`\n", - "\n", - " Apply a vectorized function for unlabeled arrays on xarray objects.\n", - "\n", - " The function will be mapped over the data variable(s) of the input arguments using \n", - " xarray’s standard rules for labeled computation, including alignment, broadcasting, \n", - " looping over GroupBy/Dataset variables, and merging of coordinates.\n", - " \n", - "`apply_ufunc` has many capabilities but for simplicity this example will focus on the common task of vectorizing 1D functions over nD xarray objects. We will iteratively build up the right set of arguments to `apply_ufunc` and read through many error messages in doing so." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.isel(time=0, lon=0), # now arguments in the order expected by 'interp1_np'\n", - " air.lat,\n", - " newlat,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`apply_ufunc` needs to know a lot of information about what our function does so that it can reconstruct the outputs. In this case, the size of dimension lat has changed and we need to explicitly specify that this will happen. xarray helpfully tells us that we need to specify the kwarg `exclude_dims`." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## `exclude_dims`\n", - "\n", - "\n", - "```\n", - "exclude_dims : set, optional\n", - " Core dimensions on the inputs to exclude from alignment and\n", - " broadcasting entirely. Any input coordinates along these dimensions\n", - " will be dropped. Each excluded dimension must also appear in\n", - " ``input_core_dims`` for at least one argument. Only dimensions listed\n", - " here are allowed to change size between input and output objects.\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.isel(time=0, lon=0), # now arguments in the order expected by 'interp1_np'\n", - " air.lat,\n", - " newlat,\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be set!\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Core dimensions\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Core dimensions are central to using `apply_ufunc`. In our case, our function expects to receive a 1D vector along `lat` — this is the dimension that is \"core\" to the function's functionality. Multiple core dimensions are possible. `apply_ufunc` needs to know which dimensions of each variable are core dimensions.\n", - "\n", - " input_core_dims : Sequence[Sequence], optional\n", - " List of the same length as ``args`` giving the list of core dimensions\n", - " on each input argument that should not be broadcast. By default, we\n", - " assume there are no core dimensions on any input arguments.\n", - "\n", - " For example, ``input_core_dims=[[], ['time']]`` indicates that all\n", - " dimensions on the first argument and all dimensions other than 'time'\n", - " on the second argument should be broadcast.\n", - "\n", - " Core dimensions are automatically moved to the last axes of input\n", - " variables before applying ``func``, which facilitates using NumPy style\n", - " generalized ufuncs [2]_.\n", - " \n", - " output_core_dims : List[tuple], optional\n", - " List of the same length as the number of output arguments from\n", - " ``func``, giving the list of core dimensions on each output that were\n", - " not broadcast on the inputs. 
By default, we assume that ``func``\n", - " outputs exactly one array, with axes corresponding to each broadcast\n", - " dimension.\n", - "\n", - " Core dimensions are assumed to appear as the last dimensions of each\n", - " output in the provided order.\n", - " \n", - "Next we specify `\"lat\"` as `input_core_dims` on both `air` and `air.lat`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.isel(time=0, lon=0), # now arguments in the order expected by 'interp1_np'\n", - " air.lat,\n", - " newlat,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], []],\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be set!\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "xarray is telling us that it expected to receive back a numpy array with 0 dimensions but instead received an array with 1 dimension corresponding to `newlat`. We can fix this by specifying `output_core_dims`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.isel(time=0, lon=0), # now arguments in the order expected by 'interp1_np'\n", - " air.lat,\n", - " newlat,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], []], # list with one entry per arg\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be set!\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally we get some output! Let's check that this is right\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "interped = xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.isel(time=0, lon=0), # now arguments in the order expected by 'interp1_np'\n", - " air.lat,\n", - " newlat,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], []], # list with one entry per arg\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be set!\n", - ")\n", - "interped[\"lat\"] = newlat # need to add this manually\n", - "xr.testing.assert_allclose(expected.isel(time=0, lon=0), interped)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "No errors are raised so it is right!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Automatic vectorization with `np.vectorize`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now our function currently only works on one vector of data which is not so useful given our 3D dataset.\n", - "Let's try passing the whole dataset. We add a `print` statement so we can see what our function receives." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "def interp1d_np(data, x, xi):\n", - " print(f\"data: {data.shape} | x: {x.shape} | xi: {xi.shape}\")\n", - " return np.interp(xi, x, data)\n", - "\n", - "\n", - "interped = xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.isel(lon=slice(3), time=slice(4)), # now arguments in the order expected by 'interp1_np'\n", - " air.lat,\n", - " newlat,\n", - " input_core_dims=[[\"lat\"], [\"lat\"], []], # list with one entry per arg\n", - " output_core_dims=[[\"lat\"]],\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be set!\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "That's a hard-to-interpret error but our `print` call helpfully printed the shapes of the input data: \n", - "\n", - " data: (10, 53, 25) | x: (25,) | xi: (100,)\n", - "\n", - "We see that `air` has been passed as a 3D numpy array which is not what `np.interp` expects. Instead we want loop over all combinations of `lon` and `time`; and apply our function to each corresponding vector of data along `lat`.\n", - "\n", - "\n", - "`apply_ufunc` makes this easy by specifying `vectorize=True`:\n", - "\n", - " vectorize : bool, optional\n", - " If True, then assume ``func`` only takes arrays defined over core\n", - " dimensions as input and vectorize it automatically with\n", - " :py:func:`numpy.vectorize`. This option exists for convenience, but is\n", - " almost always slower than supplying a pre-vectorized function.\n", - " Using this option requires NumPy version 1.12 or newer.\n", - " \n", - "```{caution}\n", - "The documentation for [`np.vectorize`](https://numpy.org/doc/stable/reference/generated/numpy.vectorize.html) points out that\n", - "\"The vectorize function is provided primarily for convenience, not for performance. The implementation is essentially a for loop.\"\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "def interp1d_np(data, x, xi):\n", - " print(f\"data: {data.shape} | x: {x.shape} | xi: {xi.shape}\")\n", - " return np.interp(xi, x, data)\n", - "\n", - "\n", - "interped = xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air, # now arguments in the order expected by 'interp1_np'\n", - " air.lat, # as above\n", - " newlat, # as above\n", - " input_core_dims=[[\"lat\"], [\"lat\"], []], # list with one entry per arg\n", - " output_core_dims=[[\"lat\"]], # returned data has one dimension\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be set!\n", - " vectorize=True, # loop over non-core dims\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This unfortunately is another cryptic error from numpy. \n", - "\n", - "Notice that `newlat` is not an xarray object. Let's add a dimension name `new_lat` and modify the call. Note this cannot be `lat` because xarray expects dimensions to be the same size (or broadcastable) among all inputs. `output_core_dims` needs to be modified appropriately. We'll manually rename `new_lat` back to `lat` for easy checking." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def interp1d_np(data, x, xi):\n", - " print(f\"data: {data.shape} | x: {x.shape} | xi: {xi.shape}\")\n", - " return np.interp(xi, x, data)\n", - "\n", - "\n", - "interped = xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air, # now arguments in the order expected by 'interp1_np'\n", - " air.lat, # as above\n", - " newlat, # as above\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]], # list with one entry per arg\n", - " output_core_dims=[[\"new_lat\"]], # returned data has one dimension\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be a set!\n", - " vectorize=True, # loop over non-core dims\n", - ")\n", - "interped = interped.rename({\"new_lat\": \"lat\"})\n", - "interped[\"lat\"] = newlat # need to add this manually\n", - "xr.testing.assert_allclose(\n", - " expected.transpose(*interped.dims), interped\n", - ") # order of dims is different\n", - "interped" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice that the printed input shapes are all 1D and correspond to one vector along the `lat` dimension.\n", - "\n", - "The result is now an xarray object with coordinate values copied over from `data`. This is why `apply_ufunc` is so convenient; it takes care of a lot of boilerplate necessary to apply functions that consume and produce numpy arrays to xarray objects.\n", - "\n", - "One final point: `lat` is now the *last* dimension in `interped`. This is a \"property\" of core dimensions: they are moved to the end before being sent to `interp1d_np` as was noted in the docstring for `input_core_dims`\n", - "\n", - " Core dimensions are automatically moved to the last axes of input\n", - " variables before applying ``func``, which facilitates using NumPy style\n", - " generalized ufuncs [2]_." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Parallelization with dask\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "So far our function can only handle numpy arrays. A real benefit of `apply_ufunc` is the ability to easily parallelize over dask chunks _when needed_. \n", - "\n", - "We want to apply this function in a vectorized fashion over each chunk of the dask array. This is possible using dask's `blockwise`, `map_blocks`, or `apply_gufunc`. Xarray's `apply_ufunc` wraps dask's `apply_gufunc` and asking it to map the function over chunks using `apply_gufunc` is as simple as specifying `dask=\"parallelized\"`. With this level of flexibility we need to provide dask with some extra information: \n", - " 1. `output_dtypes`: dtypes of all returned objects, and \n", - " 2. `output_sizes`: lengths of any new dimensions. \n", - " \n", - "Here we need to specify `output_dtypes` since `apply_ufunc` can infer the size of the new dimension `new_lat` from the argument corresponding to the third element in `input_core_dims`. \n", - "\n", - "Here I choose the chunk sizes to illustrate that `np.vectorize` is still applied so that our function receives 1D vectors even though the blocks are 3D." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def interp1d_np(data, x, xi):\n", - " print(f\"data: {data.shape} | x: {x.shape} | xi: {xi.shape}\")\n", - " return np.interp(xi, x, data)\n", - "\n", - "\n", - "interped = xr.apply_ufunc(\n", - " interp1d_np, # first the function\n", - " air.chunk({\"time\": 2, \"lon\": 2}), # now arguments in the order expected by 'interp1_np'\n", - " air.lat, # as above\n", - " newlat, # as above\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]], # list with one entry per arg\n", - " output_core_dims=[[\"new_lat\"]], # returned data has one dimension\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be a set!\n", - " vectorize=True, # loop over non-core dims\n", - " dask=\"parallelized\",\n", - " output_dtypes=[air.dtype], # one per output\n", - ").rename({\"new_lat\": \"lat\"})\n", - "interped[\"lat\"] = newlat # need to add this manually\n", - "xr.testing.assert_allclose(expected.transpose(*interped.dims), interped)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Yay! our function is receiving 1D vectors, so we've successfully parallelized applying a 1D function over a block. If you have a distributed dashboard up, you should see computes happening as equality is checked.\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## High performance vectorization: gufuncs, numba & guvectorize\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "`np.vectorize` is a very convenient function but is unfortunately slow. It is only marginally faster than writing a for loop in Python and looping. A common way to get around this is to write a base interpolation function that can handle nD arrays in a compiled language like Fortran and then pass that to `apply_ufunc`.\n", - "\n", - "Another option is to use the numba package which provides a very [convenient `guvectorize` decorator](https://numba.readthedocs.io/en/stable/user/vectorize.html#the-guvectorize-decorator). Any decorated function gets compiled and will loop over any non-core dimension in parallel when necessary. \n", - "\n", - "We need to specify some extra information:\n", - "\n", - " 1. Our function cannot return a variable any more. Instead it must receive a variable (the last argument) whose contents the function will modify. So we change from `def interp1d_np(data, x, xi)` to `def interp1d_np_gufunc(data, x, xi, out)`. Our computed results must be assigned to `out`. All values of `out` must be assigned explicitly.\n", - " \n", - " 2. `guvectorize` needs to know the dtypes of the input and output. This is specified in string form as the first argument. Each element of the tuple corresponds to each argument of the function. In this case, we specify `float64` for all inputs and outputs: `\"(float64[:], float64[:], float64[:], float64[:])\"` corresponding to `data, x, xi, out`\n", - " \n", - " 3. Now we need to tell numba the size of the dimensions the function takes as inputs and returns as output i.e. _core dimensions_. This is done in symbolic form i.e. `data` and `x` are vectors of the same length, say `n`; `xi` and the output `out` have a different length, say `m`. 
So the second argument is (again as a string)\n", - " `\"(n), (n), (m) -> (m).\"` corresponding again to `data, x, xi, out`\n", - " \n", - "```{seealso}\n", - "\n", - "Read the [numba documentation](https://numba.readthedocs.io/en/stable/user/vectorize.html#the-guvectorize-decorator) for more details.\n", - "```\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from numba import float64, guvectorize\n", - "\n", - "\n", - "@guvectorize(\"(float64[:], float64[:], float64[:], float64[:])\", \"(n), (n), (m) -> (m)\")\n", - "def interp1d_np_gufunc(data, x, xi, out):\n", - " # numba doesn't really like this.\n", - " print(\"data: \" + str(data.shape) + \" | x:\" + str(x.shape) + \" | xi: \" + str(xi.shape))\n", - " out[:] = np.interp(xi, x, data)\n", - " # gufuncs don't return data\n", - " # instead you assign to a the last arg\n", - " # return np.interp(xi, x, data)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "The warnings are about [object-mode compilation](https://numba.readthedocs.io/en/stable/user/performance-tips.html) relating to the `print` statement. This means we don't get much speed up. We'll keep the `print` statement temporarily to make sure that `guvectorize` acts like we want it to." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "interped = xr.apply_ufunc(\n", - " interp1d_np_gufunc, # first the function\n", - " air.chunk({\"time\": 2, \"lon\": 2}), # now arguments in the order expected by 'interp1_np'\n", - " air.lat, # as above\n", - " newlat, # as above\n", - " input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]], # list with one entry per arg\n", - " output_core_dims=[[\"new_lat\"]], # returned data has one dimension\n", - " exclude_dims=set((\"lat\",)), # dimensions allowed to change size. Must be a set!\n", - " # vectorize=True, # not needed since numba takes care of vectorizing\n", - " dask=\"parallelized\",\n", - " output_dtypes=[air.dtype], # one per output\n", - ").rename({\"new_lat\": \"lat\"})\n", - "interped[\"lat\"] = newlat # need to add this manually\n", - "xr.testing.assert_allclose(expected.transpose(*interped.dims), interped)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Yay! Our function is receiving 1D vectors and is working automatically with dask arrays. \n", - "\n", - "Finally let's comment out the print line and wrap everything up in a nice reusable function" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from numba import float64, guvectorize\n", - "\n", - "\n", - "@guvectorize(\n", - " \"(float64[:], float64[:], float64[:], float64[:])\",\n", - " \"(n), (n), (m) -> (m)\",\n", - " nopython=True,\n", - ")\n", - "def interp1d_np_gufunc(data, x, xi, out):\n", - " out[:] = np.interp(xi, x, data)\n", - "\n", - "\n", - "def xr_interp(data, dim, newdim):\n", - " interped = xr.apply_ufunc(\n", - " interp1d_np_gufunc, # first the function\n", - " data, # now arguments in the order expected by 'interp1_np'\n", - " data[dim], # as above\n", - " newdim, # as above\n", - " input_core_dims=[[dim], [dim], [\"__newdim__\"]], # list with one entry per arg\n", - " output_core_dims=[[\"__newdim__\"]], # returned data has one dimension\n", - " exclude_dims=set((dim,)), # dimensions allowed to change size. 
Must be a set!\n", - " # vectorize=True, # not needed since numba takes care of vectorizing\n", - " dask=\"parallelized\",\n", - " output_dtypes=[data.dtype], # one per output; could also be float or np.dtype(\"float64\")\n", - " ).rename({\"__newdim__\": dim})\n", - " interped[dim] = newdim # need to add this manually\n", - "\n", - " return interped\n", - "\n", - "\n", - "xr.testing.assert_allclose(\n", - " expected.transpose(*interped.dims),\n", - " xr_interp(air.chunk({\"time\": 2, \"lon\": 2}), \"lat\", newlat),\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Summary\n", - "\n", - "This technique is generalizable to any 1D function that [can be compiled](https://numba.readthedocs.io/en/stable/reference/pysupported.html#pysupported) by Numba." - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "nbsphinx": { - "allow_errors": true - }, - "org": null, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": false, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/advanced/apply_ufunc/numba-vectorization.ipynb b/advanced/apply_ufunc/numba-vectorization.ipynb deleted file mode 100644 index 5cff94b1..00000000 --- a/advanced/apply_ufunc/numba-vectorization.ipynb +++ /dev/null @@ -1,292 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "# Fast vectorization with Numba\n", - "\n", - "" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": { - "tags": [] - }, - "source": [ - "`np.vectorize` is a very convenient function but is unfortunately slow. It is only marginally faster than writing a for loop in Python and looping. \n", - "\n", - "A common way to get around this is to write a base interpolation function that can handle nD arrays in a compiled language like C or Fortran and then pass that to `apply_ufunc`.\n", - "\n", - "Another option is to use the [numba package](https://numba.pydata.org/) which provides two very convenient decorators to build [numpy universal functions or ufuncs](https://numba.readthedocs.io/en/stable/user/vectorize.html):\n", - "1. [`vectorize`](https://numba.readthedocs.io/en/stable/user/vectorize.html#the-vectorize-decorator) for functions that act on scalars, and \n", - "2. [`guvectorize`](https://numba.readthedocs.io/en/stable/user/vectorize.html#the-guvectorize-decorator) for functions that operates on subsets of the array along core-dimensions. Any decorated function gets compiled and will loop over the loop dimensions in parallel when necessary. \n", - "\n", - "For `apply_ufunc` the key concept is that we must provide `vectorize=False` (the default) when using Numba vectorized functions. \n", - "Numba handles the vectorization (or looping) and `apply_ufunc` handles converting Xarray objects to bare arrays and handling metadata." 
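To make the division of labour concrete, here is a minimal sketch (not from the original notebooks; `scaled_diff` is purely illustrative and assumes `numba` is installed) showing that a `@vectorize`-decorated function is itself a NumPy ufunc, which is why `apply_ufunc` does not need `vectorize=True` on top of it:

```python
import numpy as np
from numba import float64, vectorize


@vectorize([float64(float64, float64)])
def scaled_diff(x, y):
    # Illustrative element-wise function; numba compiles it into a true ufunc.
    return 2.0 * (x - y)


# Broadcasts like any NumPy ufunc -> [-2.  0.  2.  4.]
print(scaled_diff(np.arange(4.0), 1.0))
```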
- ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "## Load data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "da = xr.DataArray(\n", - " np.arange(12).reshape(3, 4),\n", - " dims=(\"x\", \"y\"),\n", - " coords={\"x\": [12, 13, 14]},\n", - " attrs={\"foo\": \"bar\"},\n", - ")\n", - "da" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "## `vectorize`\n", - "\n", - "Our `squared_error` example from earlier works element-by-element, and is a great example for `vectorize`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from numba import vectorize, float64\n", - "\n", - "\n", - "@vectorize([float64(float64, float64)])\n", - "def squared_error(x, y):\n", - " return (x - y) ** 2" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "See the numba documentation to understand `@vectorize([float64(float64, float64)])`\n", - "\n", - "Now use `apply_ufunc` to apply it." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(squared_error, da, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## `guvectorize`\n", - "\n", - "`guvectorize` is for functions that work on small subsets of the data. Quoting the Numba documentation\n", - "> While `vectorize()` allows you to write ufuncs that work on one element at a time, the `guvectorize()` decorator takes the concept one step further and allows you to write ufuncs that will work on an arbitrary number of elements of input arrays, and take and return arrays of differing dimensions. The typical example is a running median or a convolution filter.\n", - "\n", - "This description should remind you of `apply_ufunc`!\n", - "\n", - "We will use the example function `g` from the [numba docs](https://numba.readthedocs.io/en/stable/user/vectorize.html#the-guvectorize-decorator), which adds a scalar `y` to a 1D vector `x`. The `res` argument here will contain the output (this is a Numba detail).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from numba import guvectorize, int64\n", - "\n", - "\n", - "@guvectorize([(int64[:], int64, int64[:])], '(n),()->(n)')\n", - "def g(x, y, res):\n", - " for i in range(x.shape[0]):\n", - " res[i] = x[i] + y\n", - "\n", - "\n", - "a = np.arange(5)\n", - "g(a, 2)" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "Unlike `squared_error` we cannot pass an Xarray object to `g` directly." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "g(da, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "Now use `apply_ufunc`." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(\n", - " g,\n", - " da,\n", - " 1,\n", - " input_core_dims=[[\"x\"], []],\n", - " output_core_dims=[[\"x\"]],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "Notice the following:\n", - "1. The `guvectorize` decorator includes the concept of \"core dimensions\": `'(n),()->(n)'`. This string means that the `g` takes a 1D vector of size `n`, a scalar, and returns a 1D vector of size `n`. There is one core dimension for the input, and one core dimension for the output. Both core dimensions have the same size.\n", - "2. That string translates to `input_core_dims=[[\"x\"], []], output_core_dims=[[\"x\"]]` in `apply_ufunc`.\n", - "3. We don't provide `vectorize=True` to `apply_ufunc` since `numba` will handle the vectorization in compiled code automatically." - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "## With dask\n" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "Use the chunked DataArray" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da_dask = da.chunk({\"y\": 1})\n", - "da_dask" - ] - }, - { - "cell_type": "markdown", - "id": "18", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Apply `g` to `da_dask`\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "xr.apply_ufunc(\n", - " g,\n", - " da_dask, \n", - " 1, \n", - " input_core_dims=[[\"x\"], []], \n", - " output_core_dims=[[\"x\"]],\n", - " dask=\"parallelized\",\n", - ")\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "## Next\n", - "\n", - "For more, see the numpy.interp end-to-end example in the left sidebar." - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/apply_ufunc/simple_numpy_apply_ufunc.ipynb b/advanced/apply_ufunc/simple_numpy_apply_ufunc.ipynb deleted file mode 100644 index 4305f936..00000000 --- a/advanced/apply_ufunc/simple_numpy_apply_ufunc.ipynb +++ /dev/null @@ -1,432 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "(gentle-intro)=\n", - "# A gentle introduction\n", - "\n", - "Many, but not all, useful array methods are wrapped by Xarray and accessible\n", - "as methods on Xarray objects. For example `DataArray.mean` calls `numpy.nanmean`.\n", - "A very common use-case is to apply functions that expect and return NumPy \n", - "(or other array types) on Xarray objects. For example, this would include all of SciPy's API. 
\n", - "Applying many of these functions to Xarray object involves a series of repeated steps.\n", - "`apply_ufunc` provides a convenient wrapper function that generalizes the steps\n", - "involved in applying such functions to Xarray objects.\n", - "\n", - "```{tip}\n", - "Xarray uses `apply_ufunc` internally to implement much of its API, meaning that it is quite powerful!\n", - "```\n", - "\n", - "Our goals are to learn that `apply_ufunc` automates aspects of applying computation functions that are designed for pure arrays (like numpy arrays) on xarray objects including\n", - "- Propagating dimension names, coordinate variables, and (optionally) attributes.\n", - "- Handle Dataset input by looping over data variables.\n", - "- Allow passing arbitrary positional and keyword arguments\n", - "\n", - "\n", - "```{tip}\n", - "We'll reduce the length of error messages using `%xmode minimal` See the [ipython documentation](https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-xmode) for details.\n", - "```\n", - "\n", - "\n", - "## Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "# limit the amount of information printed to screen\n", - "xr.set_options(display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "Let's load a dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": { - "tags": [] - }, - "source": [ - "## A simple example: pure numpy\n", - "\n", - "Simple functions that act independently on each value should work without any\n", - "additional arguments. \n", - "\n", - "Consider the following `squared_error` function" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "def squared_error(x, y):\n", - " return (x - y) ** 2" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "````{tip}\n", - "\n", - "This function uses only arithmetic operations. 
For such simple functions, you can pass Xarray objects directly and receive Xarray objects back.\n", - "Try\n", - "```python\n", - "squared_error(ds.air, 1)\n", - "```\n", - "\n", - "We use it here as a very simple example\n", - "````" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "We can apply `squared_error` manually by extracting the underlying numpy array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "numpy_result = squared_error(ds.air.data, 1)\n", - "numpy_result" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "To convert this result to a DataArray, we could do it manually" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "xr.DataArray(\n", - " data=numpy_result,\n", - " # propagate all the Xarray metadata manually\n", - " dims=ds.air.dims,\n", - " coords=ds.air.coords,\n", - " attrs=ds.air.attrs,\n", - " name=ds.air.name,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "A shorter version uses [DataArray.copy](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.copy.html)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.copy(data=numpy_result)" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": { - "tags": [] - }, - "source": [ - "```{caution}\n", - "Using `DataArray.copy` works for such simple cases but doesn't generalize that well. \n", - "\n", - "For example, consider a function that removed one dimension and added a new dimension.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": { - "tags": [] - }, - "source": [ - "## apply_ufunc\n", - "\n", - "`apply_ufunc` can handle more complicated functions. Here's how to use it with `squared_error`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "xr.apply_ufunc(squared_error, ds.air, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": { - "tags": [] - }, - "source": [ - "## How does apply_ufunc work?\n", - "\n", - "\n", - "This line\n", - "```python\n", - "xr.apply_ufunc(squared_error, ds.air, 1)\n", - "```\n", - "is equivalent to `squared_error(ds.air.data, 1)` with automatic propagation of xarray metadata like dimension names, coordinate values etc.\n", - "\n", - "\n", - "To illustrate how `apply_ufunc` works, let us write a small wrapper function. This will let us examine what data is received and returned from the applied function. \n", - "\n", - "```{tip}\n", - "This trick is very useful for debugging\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "def wrapper(x, y):\n", - " print(f\"received x of type {type(x)}, shape {x.shape}\")\n", - " print(f\"received y of type {type(y)}\")\n", - " return squared_error(x, y)\n", - "\n", - "\n", - "xr.apply_ufunc(wrapper, ds.air, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "18", - "metadata": { - "tags": [] - }, - "source": [ - "We see that `wrapper` receives the underlying numpy array (`ds.air.data`), and the integer `1`. \n", - "\n", - "Essentially, `apply_ufunc` does the following:\n", - "1. extracts the underlying array data (`.data`), \n", - "2. 
passes it to the user function, \n", - "3. receives the returned values, and \n", - "4. then wraps that back up as a DataArray\n", - "\n", - "```{tip}\n", - "`apply_ufunc` always takes in at least one DataArray or Dataset and returns one DataArray or Dataset\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "## Handling attributes\n", - "\n", - "By default, attributes are omitted since they may now be inaccurate" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "result = xr.apply_ufunc(wrapper, ds.air, 1)\n", - "result.attrs" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "To propagate attributes, pass `keep_attrs=True`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "result = xr.apply_ufunc(wrapper, ds.air, 1, keep_attrs=True)\n", - "result.attrs" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": { - "tags": [] - }, - "source": [ - "## Handling datasets\n", - "\n", - "`apply_ufunc` easily handles both DataArrays and Datasets. \n", - "\n", - "When passed a Dataset, `apply_ufunc` will loop over the data variables and sequentially pass those to `squared_error`.\n", - "\n", - "So `squared_error` always receives a _single_ numpy array.\n", - "\n", - "To illustrate that lets create a new `Dataset` with two arrays. We'll create a new array `air2` that is 2D `time, lat`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds2 = ds.copy()\n", - "ds2[\"air2\"] = ds2.air.isel(lon=0) ** 2" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "We see that `wrapper` is called twice" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "xr.apply_ufunc(wrapper, ds2, 1)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "xr.apply_ufunc(squared_error, ds2, 1)" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": { - "tags": [] - }, - "source": [ - "## Passing positional and keyword arguments\n", - "\n", - "```{seealso}\n", - "See the Python tutorial on [defining functions](https://docs.python.org/3/tutorial/controlflow.html#defining-functions) for more on positional and keyword arguments.\n", - "```\n", - "\n", - "`squared_error` takes two arguments named `x` and `y`.\n", - "\n", - "In `xr.apply_ufunc(squared_error, ds.air, 1)`, the value of `1` for `y` was passed positionally. \n", - "\n", - "to use the keyword argument form, pass it using the `kwargs` keyword argument to `apply_ufunc`\n", - "> kwargs (dict, optional) – Optional keyword arguments passed directly on to call func." 
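As a complementary sketch (assuming the `ds` dataset and imports from the setup above; `weighted_error` is a hypothetical helper, not part of the original material), extra positional arguments and the `kwargs` dict can be combined in a single `apply_ufunc` call:

```python
def weighted_error(x, y, weight=1.0):
    # Hypothetical variant of squared_error with an extra keyword parameter.
    return weight * (x - y) ** 2


# `1` is forwarded positionally as `y`; `weight` travels through the kwargs dict.
xr.apply_ufunc(weighted_error, ds.air, 1, kwargs={"weight": 0.5})
```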
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "xr.apply_ufunc(squared_error, ds.air, kwargs={\"y\": 1})" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/backends/1.Backend_without_Lazy_Loading.ipynb b/advanced/backends/1.Backend_without_Lazy_Loading.ipynb deleted file mode 100644 index e396d869..00000000 --- a/advanced/backends/1.Backend_without_Lazy_Loading.ipynb +++ /dev/null @@ -1,180 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Binary data without lazy loading\n", - "\n", - "\n", - "**Author**: Aureliana Barghini ([B-Open](https://www.bopen.eu/))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## BackendEntrypoint\n", - "Implement a subclass of `BackendEntrypoint` that expose a method `open_dataset`:\n", - "\n", - "```python\n", - "from xarray.backends import BackendEntrypoint\n", - "\n", - "class MyBackendEntrypoint(BackendEntrypoint):\n", - " def open_dataset(\n", - " self,\n", - " filename_or_obj,\n", - " *,\n", - " drop_variables=None,\n", - " ):\n", - "\n", - " return my_open_dataset(filename_or_obj, drop_variables=drop_variables)\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## BackendEntrypoint integration\n", - "Declare this class as an external plugin in your `setup.py`:\n", - "\n", - "```python\n", - "setuptools.setup(\n", - " entry_points={\n", - " 'xarray.backends': ['engine_name=package.module:my_backendentrypoint'],\n", - " },\n", - ")\n", - "```\n", - "or pass it in `xr.open_dataset`:\n", - "\n", - "```python\n", - "xr.open_dataset(filename, engine=MyBackendEntrypoint)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example backend for binary files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Create sample files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = np.arange(30000000, dtype=np.int64)\n", - "with open(\"foo.bin\", \"w\") as f:\n", - " arr.tofile(f)\n", - "\n", - "arr = np.arange(30000000, dtype=np.float64)\n", - "with open(\"foo_float.bin\", \"w\") as f:\n", - " arr.tofile(f)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Define the entrypoint\n", - "Example of backend to open binary files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "class BinaryBackend(xr.backends.BackendEntrypoint):\n", - " def open_dataset(\n", - " self,\n", - " filename_or_obj,\n", - " *,\n", - " drop_variables=None,\n", - " # backend specific parameter\n", - " dtype=np.int64,\n", - " ):\n", - " with open(filename_or_obj) as f:\n", - " arr = np.fromfile(f, dtype)\n", - "\n", - " var = xr.Variable(dims=(\"x\"), data=arr)\n", - " coords = {\"x\": np.arange(arr.size) * 10}\n", - " return xr.Dataset({\"foo\": var}, coords=coords)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - 
"source": [ - "### It Works! \n", - "But it may be memory demanding" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = xr.open_dataarray(\"foo.bin\", engine=BinaryBackend)\n", - "arr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = xr.open_dataarray(\"foo_float.bin\", engine=BinaryBackend, dtype=np.float64)\n", - "arr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr.sel(x=slice(0, 100))" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/advanced/backends/2.Backend_with_Lazy_Loading.ipynb b/advanced/backends/2.Backend_with_Lazy_Loading.ipynb deleted file mode 100644 index ce2fe9f8..00000000 --- a/advanced/backends/2.Backend_with_Lazy_Loading.ipynb +++ /dev/null @@ -1,235 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Binary data with lazy loading\n", - "\n", - "**Author**: Aureliana Barghini ([B-Open](https://www.bopen.eu/))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If you want to make your backend effective with big datasets, then you should\n", - "support lazy loading.
\n", - "For doing that you need:\n", - "- Implement `_raw_indexing_method` for reading blocks form disk

\n", - "- Implement some glue code to make it work with Xarray:

\n", - " - put your `_raw_indexing_method` in a `BackendArray` subclass

\n", - " - replace the `numpy.ndarray` inside your **dataset** with your subclass of `BackendArray`\n", - " \n", - " \n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Create sample files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "import dask\n", - "import numpy as np\n", - "import xarray as xr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = np.arange(30000000, dtype=np.int64)\n", - "with open(\"foo.bin\", \"w\") as f:\n", - " arr.tofile(f)\n", - "\n", - "arr = np.arange(30000000, dtype=np.float64)\n", - "with open(\"foo_float.bin\", \"w\") as f:\n", - " arr.tofile(f)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## BinaryBackendArray" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The BackendArray subclass shall implement the following method and attributes:\n", - "\n", - "- `_raw_indexing_method` method, supporting **item selection** and **slicing**\n", - "\n", - "- `__getitem__` that wraps `_raw_indexing_method` with an xarray helper function `explicit_indexing_adapter` (threadsafe)\n", - "\n", - "- `shape` attribute\n", - "\n", - "- `dtype` attribute.\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "class BinaryBackendArray(xr.backends.BackendArray):\n", - " def __init__(\n", - " self,\n", - " filename_or_obj,\n", - " shape,\n", - " dtype,\n", - " lock,\n", - " ):\n", - " self.filename_or_obj = filename_or_obj\n", - " self.shape = shape\n", - " self.dtype = dtype\n", - " self.lock = lock\n", - "\n", - " def __getitem__(self, key: tuple):\n", - " return xr.core.indexing.explicit_indexing_adapter(\n", - " key,\n", - " self.shape,\n", - " xr.core.indexing.IndexingSupport.BASIC,\n", - " self._raw_indexing_method,\n", - " )\n", - "\n", - " def _raw_indexing_method(self, key: tuple):\n", - " key0 = key[0]\n", - " size = np.dtype(self.dtype).itemsize\n", - "\n", - " if isinstance(key0, slice):\n", - " start = key0.start or 0\n", - " stop = key0.stop or self.shape[0]\n", - " offset = size * start\n", - " count = stop - start\n", - " else:\n", - " offset = size * key0\n", - " count = 1\n", - "\n", - " with self.lock, open(self.filename_or_obj) as f:\n", - " arr = np.fromfile(f, np.int64, offset=offset, count=count)\n", - "\n", - " if isinstance(key, int):\n", - " arr = arr.squeeze()\n", - "\n", - " return arr" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## BinaryBackend Entrypoint" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "class BinaryBackend(xr.backends.BackendEntrypoint):\n", - " def open_dataset(self, filename_or_obj, *, drop_variables=None, dtype=np.int64):\n", - " size = np.dtype(dtype).itemsize\n", - " shape = os.stat(filename_or_obj).st_size // size\n", - "\n", - " backend_array = BinaryBackendArray(\n", - " filename_or_obj=filename_or_obj,\n", - " shape=(shape,),\n", - " dtype=dtype,\n", - " lock=dask.utils.SerializableLock(),\n", - " )\n", - " data = xr.core.indexing.LazilyIndexedArray(backend_array)\n", - "\n", - " var = xr.Variable(dims=(\"x\"), data=data)\n", - " return xr.Dataset({\"foo\": var})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "## Reduced memory usage with dask" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = xr.open_dataarray(\"foo.bin\", 
engine=BinaryBackend, chunks=10000)\n", - "arr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr.mean()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr.sel(x=slice(0, 10))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr.sel(x=slice(0, 10)).compute()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr.load()" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/advanced/backends/backends.md b/advanced/backends/backends.md deleted file mode 100644 index 04f81265..00000000 --- a/advanced/backends/backends.md +++ /dev/null @@ -1,49 +0,0 @@ -# Reading data using backends - -## Introduction - -You can [read different types of files](https://docs.xarray.dev/en/stable/user-guide/io.html) in `xr.open_dataset` by specifying the engine to be used: - -```python -import xarray as xr -xr.open_dataset("my_file.grib" , engine="cfgrib") -``` - -Navigating Xarray backends can be confusing, -so we recommend checking out [this flow chart](https://docs.xarray.dev/en/stable/user-guide/io.html) -to help you figure out which engine you need and how to use it. - -You can see what backends are currently available in your working environment -with `xarray.backends.list_engines()`. - -## Why use the Xarray backend API to write your own backend? - -- Your users don't need to learn a new interface; they can use `xr.open_dataset` with the `engine` kwarg. -- With little extra effort you can have lazy loading with Dask. Simply implement a function for reading blocks and Xarray will manage lazy loading with Dask for you. -- It's easy to implement: using the backend API (introduced in v0.18.0), you don't need to integrate any code in Xarray. - -## More Information - -See the [documentation](https://docs.xarray.dev/en/stable/internals/how-to-add-new-backend.html) for more details on adding and registering a new backend. - -Follow the tutorials on creating a new backend for binary files. - -```{tableofcontents} - -``` - -### Links to internal backends - -- [netcdf4](https://pypi.org/project/netCDF4/) - netCDF4 -- [scipy](https://scipy.org/) - netCDF3 -- [zarr](https://pypi.org/project/zarr/) - Zarr -- [pydap](https://pydap.github.io/pydap/) - Data Access Protocol (DAP/DODS/OPeNDAP) -- [h5netcdf](https://h5netcdf.org/) - hdf5 - -### Links to external backends (not comprehensive) - -- [cfgrib](https://github.com/ecmwf/cfgrib) - GRIB -- [tiledb](https://github.com/TileDB-Inc/TileDB-CF-Py) - TileDB -- [rioxarray](https://corteva.github.io/rioxarray/stable/) - GeoTIFF, JPEG-2000, ESRI-hdr, etc (via GDAL) -- [xarray-sentinel](https://github.com/bopen/xarray-sentinel) - Sentinel-1 SAFE -- ... 
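To recap the two registration paths described above, here is a minimal usage sketch (file names are placeholders; `cfgrib` must be installed for the named-engine call, and `BinaryBackend` is the entrypoint class defined in the preceding notebooks):

```python
import xarray as xr

# Engines discovered via the `xarray.backends` entry point, plus the built-in ones.
print(xr.backends.list_engines())

# Select a registered engine by name ...
ds = xr.open_dataset("my_file.grib", engine="cfgrib")

# ... or pass an entrypoint class directly, with no registration step at all.
ds_bin = xr.open_dataset("foo.bin", engine=BinaryBackend)
```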
diff --git a/advanced/map_blocks/map_blocks.md b/advanced/map_blocks/map_blocks.md deleted file mode 100644 index 6ad6d333..00000000 --- a/advanced/map_blocks/map_blocks.md +++ /dev/null @@ -1,5 +0,0 @@ -# map_blocks - -```{tableofcontents} - -``` diff --git a/advanced/map_blocks/simple_map_blocks.ipynb b/advanced/map_blocks/simple_map_blocks.ipynb deleted file mode 100644 index 0fe06b16..00000000 --- a/advanced/map_blocks/simple_map_blocks.ipynb +++ /dev/null @@ -1,203 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "# A gentle introduction\n", - "\n", - "`map_blocks` is inspired by the `dask.array` function of the same name and lets\n", - "you map a function on blocks of the xarray object (including Datasets!).\n", - "\n", - "At _compute_ time, your function will receive an xarray object with concrete\n", - "(computed) values along with appropriate metadata. This function should return\n", - "an xarray object.\n" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "## Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import dask\n", - "import numpy as np\n", - "import xarray as xr" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "First lets set up a `LocalCluster` using [dask.distributed](https://distributed.dask.org/).\n", - "\n", - "You can use any kind of dask cluster. This step is completely independent of\n", - "xarray. While not strictly necessary, the dashboard provides a nice learning\n", - "tool.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "from dask.distributed import Client\n", - "\n", - "client = Client()\n", - "client" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "
\n", - "👆\n", - "\n", - "
Click the Dashboard link above. Or click the \"Search\" button in the dashboard.\n", - "\n", - "Let's test that the dashboard is working..\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "import dask.array\n", - "\n", - "dask.array.ones((1000, 4), chunks=(2, 1)).compute() # should see activity in dashboard" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "Let's open a dataset. We specify `chunks` so that we create a dask arrays for the DataArrays" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature\", chunks={\"time\": 100})\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "## Simple example\n", - "\n", - "Here is an example" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "def time_mean(obj):\n", - " # use xarray's convenient API here\n", - " # you could convert to a pandas dataframe and use pandas' extensive API\n", - " # or use .plot() and plt.savefig to save visualizations to disk in parallel.\n", - " return obj.mean(\"lat\")\n", - "\n", - "\n", - "ds.map_blocks(time_mean) # this is lazy!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# this will calculate values and will return True if the computation works as expected\n", - "ds.map_blocks(time_mean).identical(ds.mean(\"lat\"))" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "Try applying the following function with `map_blocks`. Specify `scale` as an\n", - "argument and `offset` as a kwarg.\n", - "\n", - "The docstring should help:\n", - "https://docs.xarray.dev/en/stable/generated/xarray.map_blocks.html\n", - "\n", - "```\n", - "def time_mean_scaled(obj, scale, offset):\n", - " return obj.mean(\"lat\") * scale + offset\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "### More advanced functions\n", - "\n", - "`map_blocks` needs to know what the returned object looks like _exactly_. It\n", - "does so by passing a 0-shaped xarray object to the function and examining the\n", - "result. This approach cannot work in all cases For such advanced use cases,\n", - "`map_blocks` allows a `template` kwarg. See\n", - "https://docs.xarray.dev/en/stable/user-guide/dask.html#map-blocks for more details\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "client.close()" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/advanced/parallel-intro.md b/advanced/parallel-intro.md deleted file mode 100644 index 7d20b5c9..00000000 --- a/advanced/parallel-intro.md +++ /dev/null @@ -1,19 +0,0 @@ -# Parallelizing custom functions - -Almost all of xarray’s built-in operations work on Dask arrays. - -Sometimes analysis calls for functions that aren't in xarray's API (e.g. scipy). 
-There are three ways to apply these functions in parallel on each block of your -xarray object: - -1. Extract Dask arrays from xarray objects ([.data](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.data.html)) and use Dask directly e.g. - ([apply_gufunc](https://docs.dask.org/en/latest/generated/dask.array.gufunc.apply_gufunc.html), [map_blocks](https://docs.dask.org/en/latest/generated/dask.array.map_blocks.html), [map_overlap](https://docs.dask.org/en/latest/generated/dask.array.map_overlap.html), [blockwise](https://docs.dask.org/en/latest/generated/dask.array.core.blockwise.html), [reduction](https://docs.dask.org/en/latest/generated/dask.array.reduction.html)). Then wrap the result as an Xarray object. - -2. Use [apply_ufunc](https://docs.xarray.dev/en/stable/generated/xarray.apply_ufunc.html) to apply functions that consume and return duck arrays. This automates extracting the data from Xarray objects, applying a function, and then converting the bare array result back to a Xarray object. - -3. Use [map_blocks](https://docs.xarray.dev/en/stable/generated/xarray.map_blocks.html), [Dataset.map_blocks](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.map_blocks.html) or [DataArray.map_blocks](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.map_blocks.html) - to apply functions that consume and return xarray objects. - -Which method you use ultimately depends on the type of input objects expected by -the function you're wrapping, and the level of performance or convenience you -desire. diff --git a/data/sst.mnmean.nc b/data/sst.mnmean.nc deleted file mode 100644 index 8b9be28a..00000000 Binary files a/data/sst.mnmean.nc and /dev/null differ diff --git a/fundamentals/01.1_creating_data_structures.ipynb b/fundamentals/01.1_creating_data_structures.ipynb deleted file mode 100644 index 826b9c35..00000000 --- a/fundamentals/01.1_creating_data_structures.ipynb +++ /dev/null @@ -1,641 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Creating Data Structures" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import xarray as xr\n", - "\n", - "xr.set_options(display_expand_data=False)\n", - "\n", - "rng = np.random.default_rng(seed=0) # we'll use this later" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "In the last lecture, we looked at the following example Dataset. In most cases Xarray Datasets are created by reading a file. We'll address this in the next lecture. 
Here we'll learn how to create Xarray objects from scratch" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "## DataArray\n", - "\n", - "The `DataArray` class is used to attach a name, dimension names, labels, and\n", - "attributes to an array.\n", - "\n", - "Our goal will be to recreate the `ds.air` DataArray starting with the underlying numpy data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "ds.air" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "array = ds.air.data" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "We do this using the [DataArray](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.html) _constructor_." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "xr.DataArray(array)" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "This works. Notice that the default dimension names are not so useful: `dim_0`, `dim_1`, `dim_2`\n", - "\n", - "\n", - "### Dimension Names\n", - "\n", - "We can change this by specifying dimension names in the appropriate order using the `dims` kwarg " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "xr.DataArray(array, dims=(\"time\", \"lat\", \"lon\"))" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "Much better! But notice we have no entries under \"Coordinates\".\n", - "\n", - "### Coordinates\n", - "\n", - "While associating names with dimensions (or axes) of an array is quite useful, attaching coordinate labels to DataArrays makes a lot of analysis quite convenient.\n", - "\n", - "First we'll simply add values for `lon` using the `coords` kwarg. For this datasets, longitudes are regularly spaced at 2.5° intervals between 200°E and 330°E.\n", - "\n", - "`coords` takes a dictionary that maps the name of a dimension to one of\n", - "- another `DataArray` object\n", - "- a tuple of the form `(dims, data, attrs)` where `attrs` is optional. 
This is\n", - " roughly equivalent to creating a new `DataArray` object with\n", - " `DataArray(dims=dims, data=data, attrs=attrs)`\n", - "- a `numpy` array (or anything that can be coerced to one using `numpy.array`).\n", - "\n", - "We'll start with the last one" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "lon_values = np.arange(200, 331, 2.5)\n", - "xr.DataArray(array, dims=(\"time\", \"lat\", \"lon\"), coords={\"lon\": lon_values})" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "Assigning a plain numpy array is equivalent to creating a DataArray with those values and the same dimension name" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "lon_da = xr.DataArray(lon_values, dims=\"lon\")\n", - "da = xr.DataArray(array, dims=(\"time\", \"lat\", \"lon\"), coords={\"lon\": lon_da})\n", - "da" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "We can also assign coordinates after a DataArray has been created." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "da.coords[\"lat\"] = np.arange(75, 14.9, -2.5)\n", - "da" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "### Attributes \n", - "\n", - "Arbitrary attributes can be assigned using the `.attrs` property" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "da.attrs[\"attribute\"] = \"hello\"\n", - "da" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "or specified in the constructor" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "da2 = xr.DataArray(\n", - " array, dims=(\"time\", \"lat\", \"lon\"), coords={\"lon\": lon_da}, attrs={\"attribute\": \"hello\"}\n", - ")\n", - "da2" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "### Non-dimension coordinates\n", - "\n", - "Sometimes we want to attach coordinate variables along an existing dimension. Notice that \n", - "1. `itime` is not bolded and \n", - "2. has a name \"time\" that is different from the dimension name \"time\"\n", - "\n", - "`itime` is an example of a non-dimension coordinate variable i.e. it is a coordinate variable that does not match a dimension name. Here we demonstrate the \"tuple\" form of assigninment: `(dims, data, attrs)`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "da.coords[\"itime\"] = (\"time\", np.arange(2920), {\"name\": \"value\"})\n", - "da" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "### Exercises\n", - "\n", - "create a `DataArray` named \"height\" from random data `rng.random((180, 360)) * 400`\n", - "\n", - "1. with dimensions named \"latitude\" and \"longitude\"\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": { - "tags": [ - "hide-input", - "hide-output" - ] - }, - "outputs": [], - "source": [ - "xr.DataArray(rng.random((180, 360)) * 400, dims=(\"latitude\", \"longitude\"), name=\"height\")" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "2. 
with dimension coordinates:\n", - "\n", - "- \"latitude\": -90 to 89 with step size 1\n", - "- \"longitude\": -180 to 179 with step size 1\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": { - "tags": [ - "hide-input", - "hide-output" - ] - }, - "outputs": [], - "source": [ - "xr.DataArray(\n", - " rng.random((180, 360)) * 400,\n", - " dims=(\"latitude\", \"longitude\"),\n", - " coords={\"latitude\": np.arange(-90, 90, 1), \"longitude\": np.arange(-180, 180, 1)},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "27", - "metadata": {}, - "source": [ - "3. with metadata for both data and coordinates:\n", - "\n", - "- height: \"type\": \"ellipsoid\"\n", - "- latitude: \"type\": \"geodetic\"\n", - "- longitude: \"prime_meridian\": \"greenwich\"\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "xr.DataArray(\n", - " rng.random((180, 360)) * 400,\n", - " dims=(\"latitude\", \"longitude\"),\n", - " coords={\n", - " \"latitude\": (\"latitude\", np.arange(-90, 90, 1), {\"type\": \"geodetic\"}),\n", - " \"longitude\": (\n", - " \"longitude\",\n", - " np.arange(-180, 180, 1),\n", - " {\"prime_meridian\": \"greenwich\"},\n", - " ),\n", - " },\n", - " attrs={\"type\": \"ellipsoid\"},\n", - " name=\"height\",\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "29", - "metadata": {}, - "source": [ - "## Dataset\n", - "\n", - "`Dataset` objects collect multiple data variables, each with possibly different\n", - "dimensions.\n", - "\n", - "The constructor of `Dataset` takes three parameters:\n", - "\n", - "- `data_vars`: dict-like mapping names to values. Values are either `DataArray` objects\n", - " or defined with tuples consisting of of dimension names and arrays.\n", - "- `coords`: same as for `DataArray`\n", - "- `attrs`: same as for `Dataset`" - ] - }, - { - "cell_type": "markdown", - "id": "30", - "metadata": {}, - "source": [ - "Creating an empty Dataset is easy!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [ - "xr.Dataset()" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, - "source": [ - "### Data Variables\n", - "\n", - "Let's create a `Dataset` with two data variables: `da` and `da2`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.Dataset({\"air\": da, \"air2\": da2})\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "34", - "metadata": {}, - "source": [ - "You can directly assign a new data variables" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [ - "ds[\"air3\"] = da\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "36", - "metadata": {}, - "source": [ - "### Coordinates\n", - "\n", - "Coordinate variables can be assigned using the `coords` kwarg to `xr.Dataset`. Here we use `date_range` from pandas to create a time vector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [ - "xr.Dataset(\n", - " {\"air\": da, \"air2\": da2},\n", - " coords={\"time\": pd.date_range(\"2013-01-01\", \"2014-12-31 18:00\", freq=\"6H\")},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "38", - "metadata": {}, - "source": [ - "Again we can assign coordinate variables after a Dataset has been created." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "39", - "metadata": {}, - "outputs": [], - "source": [ - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "40", - "metadata": {}, - "outputs": [], - "source": [ - "ds.coords[\"time\"] = pd.date_range(\"2013-01-01\", \"2014-12-31 18:00\", freq=\"6H\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "41", - "metadata": {}, - "source": [ - "### Attributes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "42", - "metadata": {}, - "outputs": [], - "source": [ - "xr.Dataset(\n", - " {\"air\": da, \"air2\": da2},\n", - " coords={\"time\": pd.date_range(\"2013-01-01\", \"2014-12-31 18:00\", freq=\"6H\")},\n", - " attrs={\"key0\": \"value0\"},\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "43", - "metadata": {}, - "outputs": [], - "source": [ - "ds.attrs[\"key\"] = \"value\"" - ] - }, - { - "cell_type": "markdown", - "id": "44", - "metadata": {}, - "source": [ - "### Exercises\n", - "\n", - "1. create a Dataset with two variables along `latitude` and `longitude`:\n", - " `height` and `gravity_anomaly`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "45", - "metadata": {}, - "outputs": [], - "source": [ - "height = rng.random((180, 360)) * 400\n", - "gravity_anomaly = rng.random((180, 360)) * 400 - 200" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46", - "metadata": { - "tags": [ - "hide-input", - "hide-output" - ] - }, - "outputs": [], - "source": [ - "xr.Dataset(\n", - " {\n", - " \"height\": ((\"latitude\", \"longitude\"), height),\n", - " \"gravity_anomaly\": ((\"latitude\", \"longitude\"), gravity_anomaly),\n", - " }\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "47", - "metadata": {}, - "source": [ - "2. add coordinates to `latitude` and `longitude`:\n", - "\n", - "- `latitude`: from -90 to 90 with step size 1\n", - "- `longitude`: from -180 to 180 with step size 1\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "48", - "metadata": {}, - "outputs": [], - "source": [ - "xr.Dataset(\n", - " {\n", - " \"height\": ((\"latitude\", \"longitude\"), height),\n", - " \"gravity_anomaly\": ((\"latitude\", \"longitude\"), gravity_anomaly),\n", - " },\n", - " coords={\n", - " \"latitude\": (\"latitude\", np.arange(-90, 90, 1)),\n", - " \"longitude\": (\"longitude\", np.arange(-180, 180, 1)),\n", - " },\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "49", - "metadata": {}, - "source": [ - "3. 
add metadata to coordinates and variables:\n", - "\n", - "- `latitude`: \"type\": \"geodetic\"\n", - "- `longitude`: \"prime_meridian\": \"greenwich\"\n", - "- `height`: \"ellipsoid\": \"wgs84\"\n", - "- `gravity_anomaly`: \"ellipsoid\": \"grs80\"\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "50", - "metadata": { - "tags": [ - "hide-input", - "hide-output" - ] - }, - "outputs": [], - "source": [ - "xr.Dataset(\n", - " {\n", - " \"height\": ((\"latitude\", \"longitude\"), height, {\"ellipsoid\": \"wgs84\"}),\n", - " \"gravity_anomaly\": ((\"latitude\", \"longitude\"), gravity_anomaly, {\"ellipsoid\": \"grs80\"}),\n", - " },\n", - " coords={\n", - " \"latitude\": (\"latitude\", np.arange(-90, 90, 1), {\"type\": \"geodetic\"}),\n", - " \"longitude\": (\n", - " \"longitude\",\n", - " np.arange(-180, 180, 1),\n", - " {\"prime_meridian\": \"greenwich\"},\n", - " ),\n", - " },\n", - ")" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/fundamentals/01.1_io.ipynb b/fundamentals/01.1_io.ipynb deleted file mode 100644 index f5328c4c..00000000 --- a/fundamentals/01.1_io.ipynb +++ /dev/null @@ -1,291 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Reading and writing files\n", - "\n", - "One of Xarray's most widely used features is its ability to [read from and write\n", - "to a variety of data formats](https://docs.xarray.dev/en/stable/user-guide/io.html). \n", - "For example, Xarray can read the following formats using `open_dataset`/`open_mfdataset`:\n", - "\n", - "- [NetCDF](https://www.unidata.ucar.edu/software/netcdf/)\n", - "- [Zarr](https://zarr.readthedocs.io/en/stable/)\n", - "\n", - "Support for additional formats is possible using external packages\n", - "- [GRIB](https://en.wikipedia.org/wiki/GRIB) using the [cfgrib](https://github.com/ecmwf/cfgrib) package\n", - "- [GeoTIFF](https://gdal.org/drivers/raster/gtiff.html) /\n", - " [GDAL rasters](https://svn.osgeo.org/gdal/tags/gdal_1_2_5/frmts/formats_list.html)\n", - " using the [rioxarray package](https://corteva.github.io/rioxarray/stable/)\n", - "\n", - "\n", - "\n", - "## NetCDF\n", - "\n", - "The recommended way to store xarray data structures is NetCDF, which is a binary\n", - "file format for self-described datasets that originated in the geosciences.\n", - "Xarray is based on the netCDF data model, so netCDF files on disk directly\n", - "correspond to Dataset objects.\n", - "\n", - "Xarray reads and writes to NetCDF files using the `open_dataset` /\n", - "`open_dataarray` functions and the `to_netcdf` method.\n", - "\n", - "Let's first create some datasets and write them to disk using `to_netcdf`, which\n", - "takes the path we want to write to:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "# Ensure random arrays are the same each time\n", - "np.random.seed(0)" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "The constructor of `Dataset` takes three parameters:\n", - "\n", - "- `data_vars`: dict-like mapping names to values. 
Values are either `DataArray` objects\n", - " or defined with tuples consisting of of dimension names and arrays.\n", - "- `coords`: same as for `DataArray`\n", - "- `attrs`: same as for `DataArray`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "ds1 = xr.Dataset(\n", - " data_vars={\n", - " \"a\": ((\"x\", \"y\"), np.random.randn(4, 2)),\n", - " \"b\": ((\"z\", \"x\"), np.random.randn(6, 4)),\n", - " },\n", - " coords={\n", - " \"x\": np.arange(4),\n", - " \"y\": np.arange(-2, 0),\n", - " \"z\": np.arange(-3, 3),\n", - " },\n", - ")\n", - "ds2 = xr.Dataset(\n", - " data_vars={\n", - " \"a\": ((\"x\", \"y\"), np.random.randn(7, 3)),\n", - " \"b\": ((\"z\", \"x\"), np.random.randn(2, 7)),\n", - " },\n", - " coords={\n", - " \"x\": np.arange(6, 13),\n", - " \"y\": np.arange(3),\n", - " \"z\": np.arange(3, 5),\n", - " },\n", - ")\n", - "\n", - "# write datasets\n", - "ds1.to_netcdf(\"ds1.nc\")\n", - "ds2.to_netcdf(\"ds2.nc\")\n", - "\n", - "# write dataarray\n", - "ds1.a.to_netcdf(\"da1.nc\")" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "Reading those files is just as simple:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "xr.open_dataset(\"ds1.nc\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "xr.open_dataarray(\"da1.nc\")" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "\n", - "\n", - "\n", - "## Zarr\n", - "\n", - "[Zarr](https://zarr.readthedocs.io/en/stable/) is a Python package and data\n", - "format providing an implementation of chunked, compressed, N-dimensional arrays.\n", - "Zarr has the ability to store arrays in a range of ways, including in memory, in\n", - "files, and in cloud-based object storage such as Amazon S3 and Google Cloud\n", - "Storage. 
Xarray’s Zarr backend allows xarray to leverage these capabilities.\n", - "\n", - "Zarr files can be written with:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "ds1.to_zarr(\"ds1.zarr\", mode=\"w\")" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "We can then read the created file with:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "xr.open_zarr(\"ds1.zarr\", chunks=None)" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "setting the `chunks` parameter to `None` avoids `dask` (more on that in a later\n", - "session)\n" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "**tip:** You can write to any dictionary-like (`MutableMapping`) interface:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "mystore = {}\n", - "\n", - "ds1.to_zarr(store=mystore)" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "## Raster files using rioxarray\n", - "\n", - "[rioxarray](https://corteva.github.io/rioxarray/) is an *Xarray extension* that allows reading and writing a wide variety of geospatial image formats compatible with Geographic Information Systems (GIS), for example GeoTIFF.\n", - "\n", - "If rioxarray is installed your environment it will be automatically detected and give you access to the `.rio` accessor:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "da = xr.DataArray(\n", - " data=ds1.a.data,\n", - " coords={\n", - " \"y\": np.linspace(47.5, 47.8, 4),\n", - " \"x\": np.linspace(-122.9, -122.7, 2),\n", - " },\n", - ")\n", - "\n", - "# Add Geospatial Coordinate Reference https://epsg.io/4326\n", - "# this is stored as a 'spatial_ref' coordinate\n", - "da.rio.write_crs(\"epsg:4326\", inplace=True)\n", - "da" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "da.rio.to_raster('ds1_a.tiff')" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "NOTE: you can now load this file into GIS tools like [QGIS](https://www.qgis.org)! Or open back into Xarray:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "DA = xr.open_dataarray('ds1_a.tiff', engine='rasterio')\n", - "DA.rio.crs" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "vscode": { - "interpreter": { - "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/fundamentals/01_data_structures.md b/fundamentals/01_data_structures.md deleted file mode 100644 index 389f1f4d..00000000 --- a/fundamentals/01_data_structures.md +++ /dev/null @@ -1,69 +0,0 @@ -# Data Structures - -Multi-dimensional (a.k.a. N-dimensional, ND) arrays (sometimes called “tensors”) -are an essential part of computational science. 
They are encountered in a wide -range of fields, including physics, astronomy, geoscience, bioinformatics, -engineering, finance, and deep learning. In Python, [NumPy](https://numpy.org/) -provides the fundamental data structure and API for working with raw ND arrays. -However, real-world datasets are usually more than just raw numbers; they have -labels which encode information about how the array values map to locations in -space, time, etc. - -The N-dimensional nature of Xarray’s data structures makes it suitable for -dealing with multi-dimensional scientific data, and its use of dimension names -instead of axis labels (`dim='time'` instead of `axis=0`) makes such arrays much -more manageable than the raw NumPy ndarray: with Xarray, you don’t need to keep -track of the order of an array’s dimensions or insert dummy dimensions of size 1 -to align arrays (e.g., using np.newaxis). - -The immediate payoff of using Xarray is that you’ll write less code. The -long-term payoff is that you’ll understand what you were thinking when you come -back to look at it weeks or months later. - -## Example: Weather forecast - -Here is an example of how we might structure a dataset for a weather forecast: - - - -You'll notice multiple data variables (temperature, precipitation), coordinate -variables (latitude, longitude), and dimensions (x, y, t). We'll cover how these -fit into Xarray's data structures below. - -Xarray doesn’t just keep track of labels on arrays – it uses them to provide a -powerful and concise interface. For example: - -- Apply operations over dimensions by name: `x.sum('time')`. - -- Select values by label (or logical location) instead of integer location: - `x.loc['2014-01-01']` or `x.sel(time='2014-01-01')`. - -- Mathematical operations (e.g., `x - y`) vectorize across multiple dimensions - (array broadcasting) based on dimension names, not shape. - -- Easily use the split-apply-combine paradigm with groupby: - `x.groupby('time.dayofyear').mean()`. - -- Database-like alignment based on coordinate labels that smoothly handles - missing values: `x, y = xr.align(x, y, join='outer')`. - -- Keep track of arbitrary metadata in the form of a Python dictionary: - `x.attrs`. - -## Example: Mosquito genetics - -Although the Xarray library was originally developed with Earth Science datasets in mind, the datastructures work well across many other domains! For example, below is a side-by-side view of a data schematic on the left and Xarray Dataset representation on the right taken from a mosquito genetics analysis: - -![malaria_dataset](../images/malaria_dataset.png) - -The data can be stored as a 3-dimensional array, where one dimension of the array corresponds to positions (**variants**) within a reference genome, another dimension corresponds to the individual mosquitoes that were sequenced (**samples**), and a third dimension corresponds to the number of genomes within each individual (**ploidy**)." - -You can explore this dataset in detail via the [training course in data analysis for genomic surveillance of African malaria vectors](https://anopheles-genomic-surveillance.github.io/workshop-5/module-1-xarray.html)! - -## Explore on your own - -The following collection of notebooks provide interactive code examples for working with example datasets and constructing Xarray data structures manually. 
- -```{tableofcontents} - -``` diff --git a/fundamentals/01_datastructures.ipynb b/fundamentals/01_datastructures.ipynb deleted file mode 100644 index fa3875de..00000000 --- a/fundamentals/01_datastructures.ipynb +++ /dev/null @@ -1,426 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Xarray's Data structures\n", - "\n", - "In this lesson, we cover the basics of Xarray data structures. By the end of the lesson, we will be able to:\n", - "\n", - ":::{admonition} Learning Goals\n", - "- Understand the basic Xarray data structures `DataArray` and `Dataset` \n", - "- Customize the display of Xarray data structures\n", - "- The connection between Pandas and Xarray data structures\n", - ":::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Data structures\n", - "\n", - "Xarray provides two data structures: the `DataArray` and `Dataset`. The\n", - "`DataArray` class attaches dimension names, coordinates and attributes to\n", - "multi-dimensional arrays while `Dataset` combines multiple DataArrays.\n", - "\n", - "Both classes are most commonly created by reading data.\n", - "To learn how to create a DataArray or Dataset manually, see the [Creating Data Structures](01.1_creating_data_structures.ipynb) tutorial." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "import pandas as pd\n", - "\n", - "# When working in a Jupyter Notebook you might want to customize Xarray display settings to your liking\n", - "# The following settings reduce the amount of data displayed out by default\n", - "xr.set_options(display_expand_attrs=False, display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Dataset\n", - "\n", - "`Dataset` objects are dictionary-like containers of DataArrays, mapping a variable name to each DataArray.\n", - "\n", - "Xarray has a few small real-world tutorial datasets hosted in this GitHub repository https://github.com/pydata/xarray-data.\n", - "We'll use the [xarray.tutorial.load_dataset](https://docs.xarray.dev/en/stable/generated/xarray.tutorial.open_dataset.html#xarray.tutorial.open_dataset) convenience function to download and open the `air_temperature` (National Centers for Environmental Prediction) Dataset by name." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can access \"layers\" of the Dataset (individual DataArrays) with dictionary syntax" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds[\"air\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can save some typing by using the \"attribute\" or \"dot\" notation. This won't work for variable names that clash with built-in\n", - "method names (for example, `mean`)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### HTML vs text representations\n", - "\n", - "Xarray has two representation types: `\"html\"` (which is only available in\n", - "notebooks) and `\"text\"`. 
To choose between them, use the `display_style` option.\n", - "\n", - "So far, our notebook has automatically displayed the `\"html\"` representation (which we will continue using).\n", - "The `\"html\"` representation is interactive, allowing you to collapse sections (▶) and\n", - "view attributes and values for each value (📄 and ≡)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with xr.set_options(display_style=\"html\"):\n", - " display(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "☝️ From top to bottom the output consists of:\n", - "\n", - "- **Dimensions**: summary of all *dimensions* of the `Dataset` `(lat: 25, time: 2920, lon: 53)`: this tells us that the first dimension is named `lat` and has a size of `25`, the second dimension is named `time` and has a size of `2920`, and the third dimension is named `lon` and has a size of `53`. Because we will access the dimensions by name, the order doesn't matter.\n", - "- **Coordinates**: an unordered list of *coordinates* or dimensions with coordinates with one item per line. Each item has a name, one or more dimensions in parentheses, a dtype and a preview of the values. Also, if it is a dimension coordinate, it will be printed in **bold** font. *dimensions without coordinates* appear in plain font (there are none in this example, but you might imagine a 'mask' coordinate that has a value assigned at every point).\n", - "- **Data variables**: names of each nD *measurement* in the dataset, followed by its dimensions `(time, lat, lon)`, dtype, and a preview of values.\n", - "- **Indexes**: Each dimension with coordinates is backed by an \"Index\". In this example, each dimension is backed by a `PandasIndex`\n", - "- **Attributes**: an unordered list of metadata (for example, a paragraph describing the dataset)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Compare that with the string representation, which is very similar except the dimensions are given a `*` prefix instead of bold and you cannot collapse or expand the outputs." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with xr.set_options(display_style=\"text\"):\n", - " display(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To understand each of the components better, we'll explore the \"air\" variable of our Dataset." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### DataArray\n", - "\n", - "The `DataArray` class consists of an array (data) and its associated dimension names, labels, and attributes (metadata).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da = ds[\"air\"]\n", - "da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### String representations\n", - "\n", - "We can use the same two representations (`\"html\"`, which is only available in\n", - "notebooks, and `\"text\"`) to display our `DataArray`." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with xr.set_options(display_style=\"html\"):\n", - " display(da)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with xr.set_options(display_style=\"text\"):\n", - " display(da)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the string representation of a `DataArray` (versus a `Dataset`), we also see:\n", - "- the `DataArray` name ('air')\n", - "- a preview of the array data (collapsible in the `\"html\"` representation)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can also access the data array directly:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.data # (or equivalently, `da.data`)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Named dimensions \n", - "\n", - "`.dims` are the named axes of your data. They may (dimension coordinates) or may not (dimensions without coordinates) have associated values. Names can be anything that fits into a Python `set` (i.e. calling `hash()` on it doesn't raise an error), but to be\n", - "useful they should be strings.\n", - "\n", - "In this case we have 2 spatial dimensions (`latitude` and `longitude` are stored with shorthand names `lat` and `lon`) and one temporal dimension (`time`)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.dims" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Coordinates\n", - "\n", - "\n", - "`.coords` is a simple [dict-like](https://docs.python.org/3/glossary.html#term-mapping) [data container](https://docs.xarray.dev/en/stable/user-guide/data-structures.html#coordinates)\n", - "for mapping coordinate names to values. These values can be:\n", - "- another `DataArray` object\n", - "- a tuple of the form `(dims, data, attrs)` where `attrs` is optional. This is\n", - " roughly equivalent to creating a new `DataArray` object with\n", - " `DataArray(dims=dims, data=data, attrs=attrs)`\n", - "- a 1-dimensional `numpy` array (or anything that can be coerced to one using [`numpy.array`](https://numpy.org/doc/stable/reference/generated/numpy.array.html), such as a `list`) containing numbers, datetime objects, strings, etc. to label each point.\n", - "\n", - "Here we see the actual timestamps and spatial positions of our air temperature data:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.coords" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The difference between the dimension labels (dimension coordinates) and normal\n", - "coordinates is that for now it only is possible to use indexing operations\n", - "(`sel`, `reindex`, etc.) with dimension coordinates. Also, while coordinates can\n", - "have arbitrary dimensions, dimension coordinates have to be one-dimensional." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Attributes \n", - "\n", - "`.attrs` is a dictionary that can contain arbitrary Python objects (strings, lists, integers, dictionaries, etc.) containing information about your data. Your only\n", - "limitation is that some attributes may not be writeable to certain file formats." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.attrs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## To Pandas and back\n", - "\n", - "`DataArray` and `Dataset` objects are frequently created by converting from\n", - "other libraries such as [pandas](https://pandas.pydata.org/) or by reading from\n", - "data storage formats such as\n", - "[NetCDF](https://www.unidata.ucar.edu/software/netcdf/) or\n", - "[zarr](https://zarr.readthedocs.io/en/stable/).\n", - "\n", - "To convert from / to `pandas`, we can use the\n", - "[to_xarray](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_xarray.html)\n", - "methods on [pandas](https://zarr.readthedocs.io/en/stable/) objects or the\n", - "[to_pandas](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.to_pandas.html)\n", - "methods on `xarray` objects:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "series = pd.Series(np.ones((10,)), index=list(\"abcdefghij\"))\n", - "series" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = series.to_xarray()\n", - "arr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr.to_pandas()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can also control what `pandas` object is used by calling `to_series` /\n", - "`to_dataframe`:\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### to_series\n", - "This will always convert `DataArray` objects to `pandas.Series`, using a `MultiIndex` for higher dimensions\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.to_series()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### to_dataframe\n", - "\n", - "This will always convert `DataArray` or `Dataset` objects to a `pandas.DataFrame`. Note that `DataArray` objects have to be named for this. Since columns in a `DataFrame` need to have the same index, they are\n", - "broadcasted." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.to_dataframe()" - ] - } - ], - "metadata": { - "interpreter": { - "hash": "e1fd91e1433c68a24bbc2950cbc84f731f846faaebb12a808b0de0faaa910846" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/02.1_indexing_Basic.ipynb b/fundamentals/02.1_indexing_Basic.ipynb deleted file mode 100644 index ef4f4d39..00000000 --- a/fundamentals/02.1_indexing_Basic.ipynb +++ /dev/null @@ -1,787 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Indexing and Selecting Data\n", - "\n", - "## Learning Objectives\n", - "\n", - "- Understanding the difference between position and label-based indexing\n", - "- Select data by position using `.isel` with values or slices\n", - "- Select data by label using `.sel` with values or slices\n", - "- Use nearest-neighbor lookups with `.sel`\n", - "- Select timeseries data by date/time with values or slices\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Introduction\n", - "\n", - "Xarray offers extremely flexible indexing routines that combine the best features of NumPy and Pandas for data selection.\n", - "\n", - "The most basic way to access elements of a `DataArray` object is to use Python’s `[]` syntax, such as `array[i, j]`, where `i` and `j` are both integers.\n", - "\n", - "As xarray objects can store coordinates corresponding to each dimension of an array, label-based indexing is also possible (e.g. `.sel(latitude=0)`, similar to `pandas.DataFrame.loc`). In label-based indexing, the element position `i` is automatically looked-up from the coordinate values.\n", - "\n", - "By leveraging the labeled dimensions and coordinates provided by Xarray, users can effortlessly access, subset, and manipulate data along multiple axes, enabling complex operations such as slicing, masking, and aggregating data based on specific criteria. \n", - "\n", - "This indexing and selection capability of Xarray not only enhances data exploration and analysis workflows but also promotes reproducibility and efficiency by providing a convenient interface for working with multi-dimensional data structures.\n", - "\n", - "## Quick Overview \n", - "\n", - "In total, xarray supports four different kinds of indexing, as described below and summarized in this table:\n", - "\n", - "| Dimension lookup | Index lookup | `DataArray` syntax | `Dataset` syntax |\n", - "| ---------------- | ------------ | ---------------------| ---------------------|\n", - "| Positional | By integer | `da[:,0]` | *not available* |\n", - "| Positional | By label | `da.loc[:,'IA']` | *not available* |\n", - "| By name | By integer | `da.isel(space=0)` or `da[dict(space=0)]` | `ds.isel(space=0)` or `ds[dict(space=0)]` |\n", - "| By name | By label | `da.sel(space='IA')` or `da.loc[dict(space='IA')]` | `ds.sel(space='IA')` or `ds.loc[dict(space='IA')]` |\n", - "\n", - "\n", - "----------\n", - "\n", - "In this tutorial, first we cover the positional indexing and label-based indexing, next we will cover more advanced techniques such as nearest neighbor lookups. 
\n", - "\n", - "First, let's import packages: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import xarray as xr\n", - "\n", - "xr.set_options(display_expand_attrs=False, display_expand_data=False);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we’ll use air temperature tutorial dataset from the [National Center for Environmental Prediction](https://www.weather.gov/ncep/). " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da = ds[\"air\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Position-based Indexing\n", - "\n", - "Indexing a `DataArray` directly works (mostly) just like it does for numpy `ndarrays`, except that the returned object is always another `DataArray`:\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### NumPy Positional Indexing\n", - "\n", - "When working with numpy, indexing is done by position (slices/ranges/scalars).\n", - "\n", - "For example:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np_array = ds[\"air\"].data # numpy array\n", - "np_array.shape" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Indexing is 0-based in NumPy:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np_array[1, 0, 0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Similarly, we can select a range in NumPy:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# extract a time-series for one spatial location\n", - "np_array[:, 20, 40]" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Positional Indexing with Xarray" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Xarray offers extremely flexible indexing routines that combine the best\n", - "features of NumPy and pandas for data selection." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "#### NumPy style indexing with Xarray\n", - "\n", - "NumPy style indexing works exactly the same with Xarray but it also preserves labels and metadata. \n", - "\n", - "This approach however does not take advantage of the dimension names and coordinate location information that is present in a Xarray object." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da[:, 20, 40]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Positional Indexing Using Dimension Names" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Remembering the axis order can be challenging even with 2D arrays:\n", - "- is `np_array[0,3]` the first row and third column or first column and third row? \n", - "- or did I store these samples by row or by column when I saved the data?!. \n", - "\n", - "The difficulty is compounded with added dimensions. 
\n", - "\n", - "Xarray objects eliminate much of the mental overhead by allowing indexing using dimension names instead of axes numbers:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da.isel(lat=20, lon=40).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Slicing is also possible similarly:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da.isel(time=slice(0, 20), lat=20, lon=40).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{note}\n", - "Using the `isel` method, the user can choose/slice the specific elements from a Dataset or DataArray.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Indexing a DataArray directly works (mostly) just like it does for numpy arrays, except that the returned object is always another DataArray; however,when indexing with multiple arrays, positional indexing in Xarray behaves differently compared to NumPy.\n", - "\n", - "```{caution}\n", - "Positional indexing deviates from the NumPy behavior when indexing with multiple arrays. \n", - "```\n", - "We can show this with an example: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np_array[:, [0, 1], [0, 1]].shape" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da[:, [0, 1], [0, 1]].shape" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Please note how the dimension of the `DataArray()` object is different from the `numpy.ndarray`.\n", - "\n", - "```{tip}\n", - "However, users can still achieve NumPy-like pointwise indexing across multiple labeled dimensions by using Xarray vectorized indexing techniques. We will delve further into this topic in the advanced indexing notebook.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "So far, we have explored positional indexing, which relies on knowing the exact indices. But, what if you wanted to select data specifically for a particular latitude? It becomes challenging to determine the corresponding indices in such cases. Xarray reduce this complexity by introducing label-based indexing. 
\n", - "\n", - "## Label-based Indexing\n", - "\n", - "To select data by coordinate labels instead of integer indices we can use the same syntax, using `sel` instead of `isel`:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For example, let's select all data for Lat 25 °N and Lon 210 °E using `sel` :" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "da.sel(lat=25, lon=210).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Similarly we can do slicing or filter a range using the `.slice` function: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# demonstrate slicing\n", - "da.sel(lon=slice(210, 215))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# demonstrate slicing\n", - "da.sel(lat=slice(50, 25), lon=slice(210, 215))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Dropping using `drop_sel`\n", - "\n", - "If instead of selecting data we want to drop it, we can use `drop_sel` method with syntax similar to `sel`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da.drop_sel(lat=50.0, lon=200.0)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "So far, all the above will require us to specify exact coordinate values, but what if we don't have the exact values? We can use nearest neighbor lookups to address this issue:\n", - "\n", - "## Nearest Neighbor Lookups\n", - "\n", - "The label based selection methods `sel()` support `method` and `tolerance` keyword argument. The `method` parameter allows for enabling nearest neighbor (inexact) lookups by use of the methods `pad`, `backfill` or `nearest`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da.sel(lat=52.25, lon=251.8998, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "`tolerance` argument limits the maximum distance for valid matches with an inexact lookup:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da.sel(lat=52.25, lon=251.8998, method=\"nearest\", tolerance=2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "All of these indexing methods work on the dataset too!\n", - "```\n", - "\n", - "We can also use these methods to index all variables in a dataset simultaneously, returning a new dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(lat=52.25, lon=251.8998, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Datetime Indexing\n", - "\n", - "\n", - "Datetime indexing is a critical feature when working with time series data, which is a common occurrence in many fields, including finance, economics, and environmental sciences. Essentially, datetime indexing allows you to select data points or a series of data points that correspond to certain date or time criteria. 
This becomes essential for time-series analysis where the date or time information associated with each data point can be as critical as the data point itself.\n", - "\n", - "Let's see some of the techniques to perform datetime indexing in Xarray:\n", - "\n", - "### Selecting data based on single datetime\n", - "\n", - "Let's say we have a Dataset ds and we want to select data at a particular date and time, for instance, '2013-01-01' at 6AM. We can do this by using the `sel` (select) method, like so:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(time='2013-01-01 06:00')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "By default, datetime selection will return a range of values that match the provided string. For e.g. `time=\"2013-01-01\"` will return all timestamps for that day (4 of them here):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(time='2013-01-01')" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "We can use this feature to select all points in a year:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(time=\"2014\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "or a month:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(time=\"2014-May\")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Selecting data for a range of dates\n", - "\n", - "Now, let's say we want to select data between a certain range of dates. We can still use the `sel` method, but this time we will combine it with slice:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# This will return a subset of the dataset corresponding to the entire year of 2013.\n", - "ds.sel(time=slice('2013-01-01', '2013-12-31'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "```{note}\n", - "\n", - "The slice function takes two arguments, start and stop, to make a slice that includes these endpoints. When we use `slice` with the `sel` method, it provides an efficient way to select a range of dates. The above example shows the usage of slice for datetime indexing.\n", - "\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Indexing with a DatetimeIndex or date string list\n", - "\n", - "Another technique is to use a list of datetime objects or date strings for indexing. 
For example, you could select data for specific, non-contiguous dates like this:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "dates = ['2013-07-09', '2013-10-11', '2013-12-24']\n", - "ds.sel(time=dates)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Fancy indexing based on year, month, day, or other datetime components\n", - "\n", - "In addition to the basic datetime indexing techniques, Xarray also supports \"fancy\" indexing options, which can provide more flexibility and efficiency in your data analysis tasks. You can directly access datetime components such as year, month, day, hour, etc. using the `.dt` accessor. Here is an example of selecting all data points from July across all years:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(time=ds.time.dt.month == 7)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Or, if you wanted to select data from a specific day of each month, you could use:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.sel(time=ds.time.dt.day == 15)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Exercises\n", - "\n", - "Practice the syntax you’ve learned so far:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Select the first 30 entries of `latitude` and 30th to 40th entries of `longitude`:\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "ds.isel(lat=slice(None, 30), lon=slice(30, 40))\n", - "```\n", - "\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Select all data at 75 degree north and between Jan 1, 2013 and Oct 15, 2013\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "ds.sel(lat=75, time=slice(\"2013-01-01\", \"2013-10-15\"))\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Remove all entries at 260 and 270 degrees\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "ds.drop_sel(lon=[260, 270])\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Summary\n", - "\n", - "In total, Xarray supports four different kinds of indexing, as described below and summarized in this table:\n", - "\n", - "\n", - "| Dimension lookup | Index lookup | `DataArray` syntax | `Dataset` syntax |\n", - "| ---------------- | ------------ | ---------------------| ---------------------|\n", - "| Positional | By integer | `da[:,0]` | *not available* |\n", - "| Positional | By label | `da.loc[:,'IA']` | *not available* |\n", - "| By name | By integer | `da.isel(space=0)` or `da[dict(space=0)]` | `ds.isel(space=0)` or `ds[dict(space=0)]` 
|\n", - "| By name | By label | `da.sel(space='IA')` or `da.loc[dict(space='IA')]` | `ds.sel(space='IA')` or `ds.loc[dict(space='IA')]` |\n", - "\n", - "\n", - "For enhanced indexing capabilities across all methods, you can utilize DataArray objects as an indexer. For more detailed information, please see the Advanced Indexing notebook.\n", - "\n", - "\n", - "## More Resources\n", - "\n", - "- [Xarray Docs - Indexing and Selecting Data](https://docs.xarray.dev/en/stable/indexing.html)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/02.2_manipulating_dimensions.ipynb b/fundamentals/02.2_manipulating_dimensions.ipynb deleted file mode 100644 index 9cf4bd15..00000000 --- a/fundamentals/02.2_manipulating_dimensions.ipynb +++ /dev/null @@ -1,117 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Manipulating Dimensions (Data Resolution)\n", - "\n", - "Sometimes we need to change the resolution of our data. We might need to look at inferred values between dimension (grid) spaces\n", - "or change the dimension spacing completely (for instance to add another variable). Learning goals:\n", - "\n", - "- Interpolate data to new coordinates" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import pandas as pd\n", - "import xarray as xr\n", - "\n", - "np.random.seed(0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr = xr.DataArray(\n", - " np.random.randn(4, 6),\n", - " dims=(\"x\", \"y\"),\n", - " coords={\n", - " \"x\": [-3.2, 2.1, 5.3, 6.5],\n", - " \"y\": pd.date_range(\"2009-01-05\", periods=6, freq=\"M\"),\n", - " },\n", - ")\n", - "arr" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Interpolation\n", - "\n", - "If we want to look at values between the current grid cells (interpolation), we\n", - "can do that with `interp` (requires `scipy`):\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "arr.interp(\n", - " x=np.linspace(2, 6, 10),\n", - " y=pd.date_range(\"2009-04-01\", \"2009-04-30\", freq=\"D\"),\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "when trying to extrapolate, the resulting values will be `nan`.\n", - "\n", - "If we already have a object with the desired coordinates, we can use\n", - "`interp_like`:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "other = xr.DataArray(\n", - " dims=(\"x\", \"y\"),\n", - " coords={\n", - " \"x\": np.linspace(2, 4, 10),\n", - " \"y\": pd.date_range(\"2009-04-01\", \"2009-04-30\", freq=\"D\"),\n", - " },\n", - ")\n", - "arr.interp_like(other)" - ] - } - ], - "metadata": { - "language_info": { - 
"codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/02.3_aligning_data_objects.ipynb b/fundamentals/02.3_aligning_data_objects.ipynb deleted file mode 100644 index 490fbc88..00000000 --- a/fundamentals/02.3_aligning_data_objects.ipynb +++ /dev/null @@ -1,570 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Computing with Multiple Objects\n", - "\n", - "Learning goals:\n", - "\n", - "- Perform operations across multiple datasets\n", - "- Understand two important concepts: broadcasting and alignment." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "%config InlineBackend.figure_format='retina'\n", - "\n", - "plt.style.use(\"bmh\")\n", - "\n", - "np.random.seed(0)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here is a motivating calculation where we subtract two DataArrays with data available at different locations in the (space, time) plane." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr1 = xr.DataArray(\n", - " np.arange(12).reshape(3, 4),\n", - " dims=(\"space\", \"time\"),\n", - " coords={\"space\": [\"a\", \"b\", \"c\"], \"time\": [0, 1, 2, 3]},\n", - ")\n", - "arr1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr2 = xr.DataArray(\n", - " [0, 1],\n", - " dims=\"space\",\n", - " coords={\"space\": [\"b\", \"d\"]},\n", - ")\n", - "arr2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that `arr1` is 2D; while `arr2` is 1D along `space` and has values at two locations only.\n", - "\n", - "Now subtract the two." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr1 - arr2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To understand this output, we must understand two fundamental concepts underlying computation with Xarray objects\n", - "\n", - "1. Broadcasting: The objects need to have compatible shapes.\n", - "2. Alignment: The objects need to have values at the same coordinate labels\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Broadcasting: adjusting arrays to the same shape\n", - "\n", - "**Broadcasting** allows an operator or a function to act on two or more arrays\n", - "to operate even if these arrays do not have the same shape. That said, not all\n", - "the dimensions can be subjected to broadcasting; they must meet certain rules.\n", - "The image below illustrates how an operation on arrays with\n", - "different coordinates will result in automatic broadcasting\n", - "\n", - "![](../images/broadcasting_schematic.png)\n", - "\n", - "Credit: Stephan Hoyer --\n", - "[xarray ECMWF Python workshop](https://docs.google.com/presentation/d/16CMY3g_OYr6fQplUZIDqVtG-SKZqsG8Ckwoj2oOqepU/)\n", - "\n", - "Numpy's broadcasting rules, based on array shape, can sometimes be\n", - "difficult to understand and remember. Xarray does broadcasting by dimension name,\n", - "rather than array shape. 
This is a huge convenience.\n", - "\n", - "Here are two 1D arrays" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "array1 = xr.DataArray(\n", - " np.arange(3),\n", - " dims=\"space\",\n", - " coords={\"space\": [\"a\", \"b\", \"c\"]},\n", - " name=\"array1\",\n", - ")\n", - "array2 = xr.DataArray(\n", - " np.arange(4),\n", - " dims=\"time\",\n", - " coords={\"time\": [0, 1, 2, 3]},\n", - " name=\"array2\",\n", - ")\n", - "display(array1)\n", - "display(array2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's subtract the two:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "array1 - array2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We see that the result is a 2D array. \n", - "\n", - "When subtracting, Xarray first realizes that `array1` is missing the dimension `time` and `array2` is missing the dimension `space`. Xarray then broadcasts or \"expands\" both arrays to 2D with dimensions `space`, `time`. Here is an illustration:\n", - "\n", - "![](../images/broadcasting_schematic.png)\n", - "\n", - "While this detail is hidden, we can explicitly broadcast any number of arrays against each other using [xr.broadcast](https://docs.xarray.dev/en/stable/generated/xarray.broadcast.html)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "array1_broadcasted, array2_broadcasted = xr.broadcast(array1, array2)\n", - "display(array1_broadcasted.dims)\n", - "display(array2_broadcasted.dims)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To get the final anomaly, Xarray calculates" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# identical to array1 - array2\n", - "array1_broadcasted - array2_broadcasted" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Broadcasting in numpy\n", - "\n", - "For contrast let us examine the pure numpy version of this calculation. We use [.data](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.data.html) to extract the underlying numpy array object.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "array1.data - array2.data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To get this calculation to work, we need to insert new axes manually using [np.newaxis](https://numpy.org/doc/stable/reference/constants.html?highlight=newaxis)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "array1.data[:, np.newaxis] - array2.data[np.newaxis, :]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "Because xarray knows about dimension names we avoid having to create unnecessary\n", - "size-1 dimensions using `np.newaxis` or `.reshape`. 
This is yet another example where the _metadata_ (dimension names) reduces the mental overhead associated with coding a calculation\n", - "\n", - "For more, see the [Xarray documentation](https://docs.xarray.dev/en/stable/user-guide/computation.html#broadcasting-by-dimension-name) and the [numpy documentation](https://numpy.org/doc/stable/user/basics.broadcasting.html) on broadcasting.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Consider the following 2D array. What are the dimensions of `array - array.mean(\"time\")`?\n", - "```python\n", - "array = xr.DataArray(\n", - " np.arange(12).reshape(3, 4),\n", - " dims=(\"space\", \"time\"),\n", - " coords={\"space\": [\"a\", \"b\", \"c\"], \"time\": [0, 1, 2, 3]},\n", - " name=\"array\",\n", - ")\n", - "```\n", - "\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "(array - array.mean(\"time\")).dims\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Alignment: putting data on the same grid\n", - "\n", - "\n", - "When combining two input arrays using an arithmetic operation, both arrays must first be converted to the same coordinate system. This is \"alignment\".\n", - "\n", - "![](../images/alignment_schematic.png)\n", - "\n", - "\n", - "Here are two 2D DataArrays with different shapes." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr1 = xr.DataArray(\n", - " np.arange(12).reshape(3, 4),\n", - " dims=(\"space\", \"time\"),\n", - " coords={\"space\": [\"a\", \"b\", \"c\"], \"time\": [0, 1, 2, 3]},\n", - ")\n", - "arr1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr2 = xr.DataArray(\n", - " np.arange(14).reshape(2, 7),\n", - " dims=(\"space\", \"time\"),\n", - " coords={\"space\": [\"b\", \"d\"], \"time\": [-2, -1, 0, 1, 2, 3, 4]},\n", - ")\n", - "arr2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`arr1` and `arr2` have the same dimensions (space, time) but have values at different locations in the (space, time) plane with some locations in common.\n", - "\n", - ":::{note}\n", - "xarray assumes coordinate labels are in the same coordinate system such that space='b' in arr1 is the same as space='b' in arr2. 
For more sophisticated handling of coordinate systems see [rioxarray](https://corteva.github.io/rioxarray/stable/)\n", - ":::" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-input" - ] - }, - "outputs": [], - "source": [ - "def visualize_mesh(array, *args, ax=None, **kwargs):\n", - " \"\"\"Visualizes array's mesh ((points at which values are present).\"\"\"\n", - "\n", - " # Use broadcast to generate 2D x_, y_ arrays from the 1D x,y arrays\n", - " space_, time_ = xr.broadcast(array.space, array.time)\n", - " if ax is None:\n", - " ax = plt.gca()\n", - " kwargs.setdefault(\"fillstyle\", \"none\")\n", - " kwargs.setdefault(\"markersize\", 10)\n", - " ax.plot(space_.data.ravel(), time_.data.ravel(), *args, **kwargs)\n", - " ax.set_xlabel(\"space\")\n", - " ax.set_ylabel(\"time\")\n", - "\n", - "\n", - "visualize_mesh(arr1, \"<\")\n", - "visualize_mesh(arr2, \">\")\n", - "plt.ylim([-3, 6])\n", - "plt.legend([\"arr1\", \"arr2\"]);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We see that both arrays only have values in common at `x=\"b\"` and `y=[0, 1, 2, 3]`. Before applying an arithmetic operation we must first modify each DataArray so that they have values at the *same* points. This is \"alignment\".\n", - "\n", - "### Controlling alignment\n", - "\n", - "We can explicitly align objects using [xr.align](https://docs.xarray.dev/en/stable/generated/xarray.align.html). The key decision to make is how to decide which points must be kept. The other way to think of alignment is that objects must be converted to a common grid prior to any operation combining multiiple objects. This decision is controlled by the `\"join\"` keyword argument. Xarray provides 5 ways to convert the coordinate labels of multiple Datasets to a common grid. This [terminology](https://en.wikipedia.org/wiki/Join_(SQL)) originates in the database community.\n", - "\n", - "1. `join=\"inner\"` or reindex to the \"intersection set\" of coordinate labels\n", - "2. `join=\"outer\"` or reindex to the \"union set\" of coordinate labels\n", - "3. `join=\"left\"` or reindex to the coordinate labels of the leftmost object\n", - "4. `join=\"right\"` or reindex to the coordinate labels of the rightmost object\n", - "5. `join=\"exact\"` checks for exact equality of coordinate labels before the operation.\n", - "\n", - "First lets try an inner join. This is the default for arithmetic operations in Xarray. We see that the result has values for locations that `arr1` and `arr2` have in common: `x=\"b\"` and `y=[0, 1, 2, 3]`. 
Here is an illustration\n", - "\n", - "![](../images/alignment_schematic.png)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "a1_aligned, a2_aligned = xr.align(arr1, arr2, join=\"inner\")\n", - "a1_aligned" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "a2_aligned" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here's a visual depiction of all the `join` options" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-input" - ] - }, - "outputs": [], - "source": [ - "def visualize_join(a1, a2, join, ax=None):\n", - " a1_aligned, a2_aligned = xr.align(arr1, arr2, join=join)\n", - "\n", - " visualize_mesh(a1, \"<\", ax=ax)\n", - " visualize_mesh(a2, \">\", ax=ax)\n", - " visualize_mesh(a1_aligned, \".\", markersize=32, color=\"C3\", ax=ax)\n", - "\n", - " ax.set_ylim([-3, 6])\n", - " ax.set_title(f\"join={join!r}\")\n", - "\n", - "\n", - "f, ax = plt.subplots(1, 4, sharex=True, sharey=True)\n", - "for axx, join in zip(ax, [\"inner\", \"outer\", \"left\", \"right\"]):\n", - " visualize_join(arr1, arr2, join, ax=axx)\n", - "ax[-1].legend([\"arr1\", \"arr2\", \"after align\"], bbox_to_anchor=(1, 1))\n", - "f.set_size_inches(10, 4);" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [ - "hide-input" - ] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Consider the following two arrays. Write down the `x` and `y` coordinate locations for `da1 - da2`\n", - "```python\n", - "da1 = xr.DataArray(\n", - " np.arange(12).reshape(3, 4),\n", - " dims=(\"space\", \"time\"),\n", - " coords={\"space\": [\"a\", \"b\", \"c\"], \"time\": [0, 1, 2, 3]},\n", - ")\n", - "da2 = xr.DataArray(\n", - " [0, 1],\n", - " dims=\"space\",\n", - " coords={\"space\": [\"b\", \"d\"]},\n", - ")\n", - "```\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "`x = [\"b\"], y=[0, 1, 2, 3]` . `da2` has been broadcasted to 2D (so dimension \"y\" has been inserted) and the two arrays are aligned using `join=\"inner\"` prior to subtraction.\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Further control over alignment\n", - "\n", - "#### Controlling the fill value\n", - "\n", - "For all join options other than `\"inner\"` Xarray will insert a `fill_value` at locations not present in the original dataset. 
By default this is NaN" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr1_aligned, _ = xr.align(arr1, arr2, join=\"outer\")\n", - "arr1_aligned" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Control the \"fill value\" by specifying the `fill_value` keyword argument" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "arr1_aligned, _ = xr.align(arr1, arr2, join=\"outer\", fill_value=0)\n", - "arr1_aligned" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Checking that objects are aligned\n", - "\n", - "`join=\"exact\"` is special in that it checks to make sure that the objects _are_ aligned.\n", - "\n", - "For `arr1` and `arr2` this will raise an error since `arr1.x` is not identical to `arr2.x` (and similarly for `y`)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.align(arr1, arr2, join=\"exact\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Controlling automatic alignment\n", - "\n", - "Xarray's default for arithmetic operations is `join=\"inner\"`. This is controllable using the [xr.set_options](https://docs.xarray.dev/en/stable/generated/xarray.set_options.html) context manager" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with xr.set_options(arithmetic_join=\"outer\"):\n", - " result = arr1 - arr2\n", - "result" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/02_labeled_data.md b/fundamentals/02_labeled_data.md deleted file mode 100644 index 380cf91c..00000000 --- a/fundamentals/02_labeled_data.md +++ /dev/null @@ -1,5 +0,0 @@ -# Labeled data - -```{tableofcontents} - -``` diff --git a/fundamentals/03.1_computation_with_xarray.ipynb b/fundamentals/03.1_computation_with_xarray.ipynb deleted file mode 100644 index b72d93e9..00000000 --- a/fundamentals/03.1_computation_with_xarray.ipynb +++ /dev/null @@ -1,298 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Basic Computation\n", - "\n", - "In this lesson, we discuss how to do scientific computations with xarray\n", - "objects. Our learning goals are as follows. By the end of the lesson, we will be\n", - "able to:\n", - "\n", - "- Apply basic arithmetic and numpy functions to xarray DataArrays / Dataset.\n", - "- Use Xarray's label-aware reduction operations (e.g. `mean`, `sum`) weighted\n", - " reductions.\n", - "- Apply arbitrary functions to Xarray data via `apply_ufunc`.\n", - "- Use Xarray's broadcasting to compute on arrays of different dimensionality." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "\n", - "# Ask Xarray to not show data values by default\n", - "xr.set_options(display_expand_data=False)\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example Dataset\n", - "\n", - "First we load a dataset. We will use the\n", - "[NOAA Extended Reconstructed Sea Surface Temperature (ERSST) v5](https://www.ncei.noaa.gov/products/extended-reconstructed-sst)\n", - "product, a widely used and trusted gridded compilation of of historical data going back to 1854." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"ersstv5\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's do some basic visualizations of the data, just to make sure it looks\n", - "reasonable.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.sst.isel(time=0).plot(vmin=-2, vmax=30);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Arithmetic\n", - "\n", - "Xarray dataarrays and datasets work seamlessly with arithmetic operators and\n", - "numpy array functions.\n", - "\n", - "For example, imagine we want to convert the temperature (given in Celsius) to\n", - "Kelvin:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "sst_kelvin = ds.sst + 273.15\n", - "sst_kelvin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The dimensions and coordinates were preserved following the operation.\n", - "\n", - "
\n", - " Warning: Although many xarray datasets have a units attribute, which is used in plotting,\n", - " Xarray does not inherently understand units. However, xarray can integrate with pint, which provides full unit-aware operations. See pint-xarray for more.\n", - "
\n", - "\n", - "\n", - "## Applying functions\n", - "\n", - "We can apply more complex functions to Xarray objects.\n", - "Imagine we wanted to compute the following expression as a function of SST\n", - "($\\Theta$) in Kelvin:\n", - "\n", - "$$ f(\\Theta) = 0.5 \\ln(\\Theta^2) $$\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "f = 0.5 * np.log(sst_kelvin**2)\n", - "f" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Applying Arbitrary Functions\n", - "\n", - "It's awesome that we can call `np.log(ds)` and have it \"just work\". However, not\n", - "all third party libraries work this way.\n", - "\n", - "numpy's [nan_to_num](https://numpy.org/devdocs/reference/generated/numpy.nan_to_num.html) for example will return a numpy array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "output_scroll" - ] - }, - "outputs": [], - "source": [ - "np.nan_to_num(ds.sst, nan=0)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "It would be nice to keep our dimensions and coordinates. \n", - "\n", - "We can accomplish this with [xr.apply_ufunc](https://docs.xarray.dev/en/stable/generated/xarray.apply_ufunc.html#xarray.apply_ufunc)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "xr.apply_ufunc(np.nan_to_num, ds.sst, kwargs={\"nan\": 0})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "`apply_ufunc` is a powerful function. It has many options for doing more complicated things. Unfortunately, we don't have time to go into more depth here. See the [`apply_ufunc` tutorial material](https://tutorial.xarray.dev/advanced/apply_ufunc/apply_ufunc.html) for more.\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Reductions\n", - "\n", - "Reductions are functions that reduce the dimensionlity of our dataset. For example taking the mean sea surface temperature along `time` of our 3D data, we \"reduce\" the `time` dimension and are left with a 2D array.\n", - "\n", - "Just like in numpy, we can reduce xarray DataArrays along any number of axes." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sst = ds.sst\n", - "sst.mean(axis=0)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "However, rather than performing reductions by specifying `axis` (as in numpy), we can instead perform\n", - "them using _dimension names_. This turns out to be a huge convenience, particularly in\n", - "complex calculations it can be hard to remember which axis corresponds to \n", - "which dimension name:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sst.mean(dim=\"time\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can reduce over multiple dimensions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sst.mean([\"lat\", \"time\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If no dimension is specified, the reduction is applied across all dimensions." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sst.mean()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "All of the standard numpy reductions (e.g. `min`, `max`, `sum`, `std`, etc.) are\n", - "available on both [Datasets](https://docs.xarray.dev/en/stable/api.html#aggregation) and [DataArrays](https://docs.xarray.dev/en/stable/api.html#id6).\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "Take the mean of `sst` in both longitude and latitude. Make a simple timeseries plot.\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "sst.mean([\"lat\", \"lon\"]).plot();\n", - "```\n", - ":::\n", - "::::\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/03.2_groupby_with_xarray.ipynb b/fundamentals/03.2_groupby_with_xarray.ipynb deleted file mode 100644 index cb1ba252..00000000 --- a/fundamentals/03.2_groupby_with_xarray.ipynb +++ /dev/null @@ -1,674 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Grouped Computations\n", - "\n", - "In this lesson, we discuss how to do scientific computations with defined \"groups\" of data\n", - "within our xarray objects. Our learning goals are as follows:\n", - "\n", - "- Perform \"split / apply / combine\" workflows in Xarray using `groupby`,\n", - " including\n", - " - reductions within groups\n", - " - transformations on groups\n", - "- Use `resample` to change the time frequency of the data\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "\n", - "# don't expand data by default\n", - "xr.set_options(display_expand_data=False, display_expand_attrs=False)\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example Dataset\n", - "\n", - "First we load a dataset. We will use the\n", - "[NOAA Extended Reconstructed Sea Surface Temperature (ERSST) v5](https://www.ncei.noaa.gov/products/extended-reconstructed-sst)\n", - "product, a widely used and trusted gridded compilation of of historical data\n", - "going back to 1854." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"ersstv5\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Groupby\n", - "\n", - "Xarray copies Pandas' very useful groupby functionality, enabling the \"split /\n", - "apply / combine\" workflow on xarray DataArrays and Datasets.\n", - "\n", - "Let's examine a timeseries of SST at\n", - "a single point.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.sst.sel(lon=300, lat=50).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As we can see from the plot, the timeseries at any one point is totally\n", - "dominated by the seasonal cycle. 
We would like to remove this seasonal cycle\n", - "(called the \"climatology\") in order to better see the long-term variaitions in\n", - "temperature. We can accomplish this using **groupby**.\n", - "\n", - "Before moving forward, we note that xarray correctly parsed the time index,\n", - "resulting in a Pandas datetime index on the time dimension.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.time" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The syntax of Xarray's groupby is almost identical to Pandas.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "output_scroll" - ] - }, - "outputs": [], - "source": [ - "?ds.groupby" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Identifying groups" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The most important argument is `group`: this defines the unique values or labels we will\n", - "us to \"split\" the data for grouped analysis. We can pass either a DataArray or a\n", - "name of a variable in the dataset. Let's first use a DataArray. \n", - "\n", - "Just like with\n", - "Pandas, we can use the time index to extract specific components of dates and\n", - "times. Xarray uses a special syntax for this `.dt`, called the\n", - "[DatetimeAccessor](https://docs.xarray.dev/en/stable/generated/xarray.core.accessor_dt.DatetimeAccessor.html?highlight=DatetimeAccessor). See the [documentation](https://docs.xarray.dev/en/stable/user-guide/time-series.html#datetime-components) for more\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.time.dt" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.time.dt.month" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.time.dt.year" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Split step" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can use these arrays in a groupby operation:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gb = ds.groupby(ds.time.dt.month)\n", - "gb" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Xarray also offers a more concise syntax when the variable you're grouping on is\n", - "already present in the dataset. This is identical to the previous line:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gb = ds.groupby(\"time.month\")\n", - "gb" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`gb` is a DatasetGroupBy object. It represents a GroupBy operation and helpfully tells us the unique \"groups\" or labels found during the split step.\n", - "\n", - "\n", - "```{tip}\n", - "\n", - "Xarrays' computation methods (`groupby`, `groupby_bins`, `rolling`, `coarsen`, `weighted`) all return special objects that represent the basic underlying computation pattern. For e.g. `gb` above is a `DatasetGroupBy` object that represents monthly groupings of the data in `ds` . It is usually helpful to save and reuse these objects for multiple operations (e.g. 
a mean and standard deviation calculation).\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Apply & Combine\n", - "\n", - "Now that we have groups defined, it's time to \"apply\" a calculation to the\n", - "group. Like in Pandas, these calculations can either be:\n", - "\n", - "- _aggregation_ or _reduction_: reduces the size of the group\n", - "- _transformation_: preserves the group's full size\n", - "\n", - "At then end of the apply step, xarray will automatically combine the aggregated\n", - "/ transformed groups back into a single object." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Aggregations or Reductions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Most commonly, we want to perform a reduction operation like `sum` or `mean` on our groups. Xarray conveniently provides these reduction methods on Groupby objects for both [DataArrays and Datasets](https://docs.xarray.dev/en/stable/api.html#groupby-objects).\n", - "\n", - "Here we calculate the monthly mean." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_mm = gb.mean()\n", - "ds_mm" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "So we did what we wanted to do: calculate the climatology at every point in the\n", - "dataset. Let's look at the data a bit.\n", - "\n", - "_Climatology at a specific point in the North Atlantic_\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_mm.sst.sel(lon=300, lat=50).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "_Zonal Mean Climatology_\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_mm.sst.mean(dim=\"lon\").plot.contourf(x=\"month\", levels=12, vmin=-2, vmax=30);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "_Difference between January and July Climatology_\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "(ds_mm.sst.sel(month=1) - ds_mm.sst.sel(month=7)).plot(vmax=10);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Custom Aggregations\n", - "\n", - "The most fundamental way to apply a function and combine the results together to use the `.map` method." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "output_scroll" - ] - }, - "outputs": [], - "source": [ - "?gb.map" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`.map` accepts as its argument a function that expects and returns xarray\n", - "objects. We define a custom function. This function takes a single argument--the\n", - "group dataset--and returns a new dataset to be combined:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def time_mean(a):\n", - " return a.mean(dim=\"time\")\n", - "\n", - "\n", - "gb.map(time_mean)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This is identical to `gb.mean()`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Apply by iteration\n", - "\n", - "We can manually iterate over the group. 
The\n", - "iterator returns the key (group name) and the value (the actual dataset\n", - "corresponding to that group) for each group.\n", - "\n", - "\n", - "You could apply any function you want in the loop but you would have to manually [combine](https://docs.xarray.dev/en/stable/user-guide/combining.html) the results together." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "for group_name, group_ds in gb:\n", - " # stop iterating after the first loop\n", - " break\n", - "print(group_name)\n", - "group_ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Transformations\n", - "\n", - "Now we want to _remove_ this climatology from the dataset, to examine the\n", - "residual, called the _anomaly_, which is the interesting part from a climate\n", - "perspective. Removing the seasonal climatology is a perfect example of a\n", - "transformation: it operates over a group, but doesn't change the size of the\n", - "dataset. Here is one way to code it\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def remove_time_mean(x):\n", - " return x - x.mean(dim=\"time\")\n", - "\n", - "\n", - "ds_anom = ds.groupby(\"time.month\").map(remove_time_mean)\n", - "ds_anom" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Xarray makes these sorts of transformations easy by supporting _groupby\n", - "arithmetic_. This concept is easiest explained with an example:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gb = ds.groupby(\"time.month\")\n", - "ds_anom = gb - gb.mean()\n", - "ds_anom" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we can view the climate signal without the overwhelming influence of the\n", - "seasonal cycle.\n", - "\n", - "_Timeseries at a single point in the North Atlantic_\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_anom.sst.sel(lon=300, lat=50).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "_Difference between Jan. 1 2018 and Jan. 1 1970_\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "(ds_anom.sel(time=\"2018-01-01\") - ds_anom.sel(time=\"1970-01-01\")).sst.plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Using `groupby`, plot the annual mean time series of SST at 300°E, 50°N\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "ds.groupby(\"time.year\").mean().sst.sel(lon=300, lat=50).plot();\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Resample\n", - "\n", - "Resampling means changing the time frequency of data, usually reducing to a coarser frequency: e.g. converting daily frequency data to monthly frequency data using `mean` to reduce the values. This operation can be thought of as a groupby operation where each group is a single month of data. Resampling can be applied only to time-index dimensions. \n", - "\n", - "First note that `ds_anom` has data at monthly frequency (i.e. one point every month)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_anom.time" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we compute the five-year mean along the `time` dimension by passing `time='5Y'`. `'5Y'` is a special frequency string. Xarray uses pandas to convert such a frequency string to a groupby operation. See the [pandas documentation](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases) for how to specify a different frequency." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "resample_obj = ds_anom.resample(time=\"5Y\")\n", - "resample_obj" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{note}\n", - "`resample` only works with proper datetime64 coordinate labels. Note the `dtype` of `time` in the repr above.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Resampling objects are exactly like groupby objects and allow reductions, iteration, etc." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_anom_resample = resample_obj.mean()\n", - "ds_anom_resample" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "for label, group in resample_obj:\n", - " break\n", - "print(label, \"\\n\\n\", group)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds_anom.sst.sel(lon=300, lat=50).plot()\n", - "ds_anom_resample.sst.sel(lon=300, lat=50).plot(marker=\"o\");" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Using `resample`, plot the annual mean time series of SST at 300°E, 50°N.\n", - "\n", - "Compare this output to the groupby output. What differences do you see?\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "resampled = ds.resample(time='Y').mean().sst.sel(lon=300, lat=50)\n", - "resampled.plot();\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [ - "hide-input" - ] - }, - "source": [ - "## GroupBy vs Resample \n", - "\n", - "Let's compare the grouped and resampled outputs.\n", - "\n", - "\n", - "1. Note the different dimension names: when grouped, `time` is renamed to `year`. When resampled, the `time` dimension name is preserved\n", - "2. The values for `year` are integers, while those for `resampled.time` are timestamps, similar to the input dataset\n", - "3. But all values are equal" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from IPython.display import display_html\n", - "\n", - "grouped = ds.groupby(\"time.year\").mean().sst.sel(lon=300, lat=50)\n", - "resampled = ds.resample(time='Y').mean().sst.sel(lon=300, lat=50)\n", - "display_html(grouped)\n", - "display_html(resampled)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.array_equal(grouped.data, resampled.data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Going further\n", - "\n", - "1. 
See the documentation on [groupby](https://docs.xarray.dev/en/stable/user-guide/groupby.html) and [resample](https://docs.xarray.dev/en/stable/user-guide/time-series.html#resampling-and-grouped-operations)\n", - "2. Follow the tutorial on [high-level computation patterns](https://tutorial.xarray.dev/intermediate/01-high-level-computation-patterns.html)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/03.3_windowed.ipynb b/fundamentals/03.3_windowed.ipynb deleted file mode 100644 index 4454d50e..00000000 --- a/fundamentals/03.3_windowed.ipynb +++ /dev/null @@ -1,401 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Windowed Computations\n", - "\n", - "Xarray has built-in support for windowed operations:\n", - "1. [rolling](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations) - Sliding windows of fixed length.\n", - "2. [coarsen](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays) - block windows of fixed length.\n", - "\n", - "\n", - "In this notebook, we'll learn to \n", - "1. Compute rolling, or sliding window, means along one or more dimensions.\n", - "2. Compute block averages along a dimension.\n", - "3. Use `construct` to reshape arrays so that a new dimension provides windowed views to the data.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "\n", - "np.set_printoptions(threshold=10, edgeitems=2)\n", - "xr.set_options(display_expand_data=False)\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"ersstv5\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Rolling or moving windows" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "Rolling window operations \n", - "1. can be applied along any dimension, or along multiple dimensions.\n", - "2. returns object of same shape as input\n", - "3. pads with NaNs to make (3) possible\n", - "\n", - "Again, all common reduction operations are [available](https://docs.xarray.dev/en/stable/api.html#rolling-objects)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "rolling = ds.rolling(time=12, center=True)\n", - "rolling" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "```{tip}\n", - "Xarrays' computation methods (`groupby`, `groupby_bins`, `rolling`, `coarsen`, `weighted`) all return special objects that represent the basic underlying computation pattern. For e.g. `rolling` above is a `DatasetRolling` object that represents 12-point rolling windows of the data in `ds` . It is usually helpful to save and reuse these objects for multiple operations (e.g. 
a mean and standard deviation calculation).\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "ds_rolling = rolling.mean()\n", - "ds_rolling" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "ds.sst.sel(lon=300, lat=50).plot(label=\"monthly anom\")\n", - "ds_rolling.sst.sel(lon=300, lat=50).plot(label=\"12 month rolling mean\")\n", - "plt.legend()" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "We can apply rolling mean along multiple dimensions as a 2D smoother in (lat, lon). Here is an example of a 5-point running mean applied along both the `lat` and `lon` dimensions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "extract = ds.sst.isel(time=0)\n", - "smoothed = extract.rolling(lon=5, lat=5, center=True).mean()\n", - "\n", - "f, ax = plt.subplots(2, 1, sharex=True, sharey=True)\n", - "extract.plot(ax=ax[0], robust=True)\n", - "smoothed.plot(ax=ax[1], robust=True)\n", - "f.set_size_inches((10, 7))\n", - "plt.tight_layout()" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "Note the addition of NaNs at the data boundaries and near continental boundaries." - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "### Custom reductions\n", - "\n", - "While common reductions are implemented by default, sometimes it is useful to apply our own windowed operations. For these uses, Xarray provides the `construct` methods for [DataArray.rolling](https://docs.xarray.dev/en/stable/generated/xarray.core.rolling.DataArrayRolling.construct.html) and [Dataset.rolling](https://docs.xarray.dev/en/stable/generated/xarray.core.rolling.DatasetRolling.construct.html).\n", - "\n", - "For rolling over a dimension `time` with a window size `N`, `construct` adds a new dimension (with user-provided name) of size `N`. 
\n", - "\n", - "We illustrate with a simple example array:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "simple = xr.DataArray(np.arange(10), dims=\"time\", coords={\"time\": np.arange(10)})\n", - "simple" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "We call `construct` and provide a name for the new dimension: `window`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# adds a new dimension \"window\"\n", - "simple.rolling(time=5, center=True).construct(\"window\")" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - ":::{admonition} Exercise\n", - ":class: tip\n", - "Illustrate the difference between `center=True` and `center=False` for rolling by looking at the `construct`-ed array.\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "display(\"center=True\")\n", - "display(simple.rolling(time=5, center=True).construct(\"window\"))\n", - "\n", - "display(\"center=False\")\n", - "display(simple.rolling(time=5, center=False).construct(\"window\"))\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "## Coarsening\n", - "\n", - "`coarsen` does something similar to `rolling`, but allows us to work with discrete *non-overlapping* blocks of data.\n", - "\n", - "You will need to specify `boundary` if the length of the dimension is not a multiple of the window size (\"block size\"). You can choose to\n", - "1. `trim` the excess values\n", - "2. `pad` with NaNs\n", - "\n", - "Again, all standard reductions are [implemented](https://docs.xarray.dev/en/stable/api.html#coarsen-objects)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "coarse = ds.coarsen(lon=5, lat=5)\n", - "coarse" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "Xarrays' computation methods (groupby, groupby_bins, rolling, coarsen, weighted) all return special objects that represent the basic underlying computation pattern. For e.g. `coarse` above is a `DatasetCoarsen` object that represents 5-point windows along lat, lon of the data in `ds`. It is usually helpful to save and reuse these objects for multiple operations (e.g. a mean and standard deviation calculation)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": { - "tags": [ - "raises-exception", - "output-scroll" - ] - }, - "outputs": [], - "source": [ - "# we expect an error here because lat has size 89, which is not divisible by block size 5\n", - "coarse.mean()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "coarse = ds.coarsen(lat=5, lon=5, boundary=\"trim\").mean()\n", - "coarse" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "coarse.sst.isel(time=0).plot();" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "### Custom reductions\n", - "\n", - "Like `rolling`, `coarsen` also provides a `construct` method for custom block operations. 
\n", - "\n", - "```{tip} \n", - "`coarsen.construct` is a handy way to reshape Xarray objects.\n", - "```\n", - "\n", - "Consider a \"monthly\" 1D timeseries. This simple example has one value per month for 2 years" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "months = xr.DataArray(\n", - " np.tile(np.arange(1, 13), reps=2),\n", - " dims=\"time\",\n", - " coords={\"time\": np.arange(1, 25)},\n", - ")\n", - "months" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "Now we reshape to get one new dimension `year` of size 12." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "# break \"time\" into two new dimensions: \"year\", \"month\"\n", - "months.coarsen(time=12).construct(time=(\"year\", \"month\"))" - ] - }, - { - "cell_type": "markdown", - "id": "27", - "metadata": {}, - "source": [ - ":::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Imagine the array `months` was one element shorter. Use `boundary=\"pad\"` and the `side` kwarg to reshape `months.isel(time=slice(1, None))` to a 2D DataArray with the following values:\n", - "\n", - "```python\n", - "array([[nan, 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12.],\n", - " [ 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12.]])\n", - "```\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "months.isel(time=slice(1, None)).coarsen({\"time\": 12}, boundary=\"pad\", side=\"right\").construct(\n", - " time=(\"year\", \"month\")\n", - ")\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": {}, - "source": [ - "Note that `coarsen` pads with NaNs. For more control over padding, use\n", - "[DataArray.pad](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.pad.html) explicitly." - ] - }, - { - "cell_type": "markdown", - "id": "29", - "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] - }, - "source": [ - "## Going further\n", - "\n", - "1. See the documentation on [rolling](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations) and [coarsen](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays).\n", - "2. 
Follow the tutorial on [high-level computational patterns](https://tutorial.xarray.dev/intermediate/01-high-level-computation-patterns.html#)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/fundamentals/03.4_weighted.ipynb b/fundamentals/03.4_weighted.ipynb deleted file mode 100644 index 8f6807e1..00000000 --- a/fundamentals/03.4_weighted.ipynb +++ /dev/null @@ -1,172 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "# Weighted Reductions\n", - "\n", - "Xarray supports [weighted reductions](https://docs.xarray.dev/en/stable/user-guide/computation.html#weighted-array-reductions).\n", - "\n", - "For demonstration, we will create a “weights” array proportional to cosine of latitude.\n", - "Modulo a normalization, this is the correct area-weighting factor for data on a regular lat-lon grid.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"ersstv5\")\n", - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "weights = np.cos(np.deg2rad(ds.lat))\n", - "display(weights.dims)\n", - "\n", - "weights.plot()" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "## Manual weighting\n", - "\n", - "Thanks to the automatic broadcasting and alignment discussed earlier, if we multiply this by SST, it “just works,” and the arrays are broadcasted properly:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "(ds.sst * weights).dims" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "We could imagine computing the weighted spatial mean of SST manually." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "sst_mean = (ds.sst * weights).sum(dim=(\"lon\", \"lat\")) / weights.sum(dim=\"lat\")\n", - "sst_mean.plot()\n", - "plt.title(\"This is wrong!\")" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "That would be wrong, however, because the denominator (`weights.sum(dim='lat')`)\n", - "needs to be expanded to include the `lon` dimension and modified to account for\n", - "the missing values (land points).\n", - "\n", - "\n", - "## The `weighted` method\n", - "In general, weighted reductions on multidimensional arrays are complicated. To\n", - "make it a bit easier, Xarray provides a mechanism for weighted reductions. 
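To see what that bookkeeping involves, here is a hedged sketch of a corrected manual version: broadcast the weights against SST and mask them over land before summing.

```python
# broadcast the 1D weights against SST and zero them where SST is missing (land),
# so the denominator only counts valid ocean points
masked_weights = weights.where(ds.sst.notnull(), 0)
sst_mean_manual = (ds.sst * weights).sum(dim=("lon", "lat")) / masked_weights.sum(dim=("lon", "lat"))
sst_mean_manual.plot()
```

Xarray's `weighted` machinery is designed to take care of this kind of bookkeeping for us.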
\n", - "\n", - "It does this by creating a special intermediate `DataArrayWeighted` object, to\n", - "which different reduction operations can applied.\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "sst_weighted = ds.sst.weighted(weights)\n", - "sst_weighted" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "\n", - "
\n", - " Xarrays' computation methods (groupby, groupby_bins, rolling, coarsen, weighted) all return special objects that represent the basic underlying computation pattern. For e.g. `sst_weighted` above is a `DatasetWeighted` object that represents the weighting by `weights` of the data in `ds.sst`. It is usually helpful to save and reuse these objects for multiple operations (e.g. a mean and standard deviation calculation).\n", - "
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "sst_weighted.mean(dim=(\"lon\", \"lat\")).plot()\n", - "plt.title(\"Correct Global Mean SST\");" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "A handful of reductions have been implemented: [mean, sum, std, var](https://docs.xarray.dev/en/stable/api.html#weighted-objects)." - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/fundamentals/03_computation.md b/fundamentals/03_computation.md deleted file mode 100644 index c5dcdc3d..00000000 --- a/fundamentals/03_computation.md +++ /dev/null @@ -1,5 +0,0 @@ -# Computation - -```{tableofcontents} - -``` diff --git a/fundamentals/04.0_plotting.md b/fundamentals/04.0_plotting.md deleted file mode 100644 index 98a76a5b..00000000 --- a/fundamentals/04.0_plotting.md +++ /dev/null @@ -1,5 +0,0 @@ -# Plotting and Visualization - -```{tableofcontents} - -``` diff --git a/fundamentals/04.1_basic_plotting.ipynb b/fundamentals/04.1_basic_plotting.ipynb deleted file mode 100644 index 034a8168..00000000 --- a/fundamentals/04.1_basic_plotting.ipynb +++ /dev/null @@ -1,377 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "toc": true - }, - "source": [ - "# Basic Visualization\n", - "\n", - "At the end of this lesson you will learn:\n", - "\n", - "1. how to use xarray's convenient [matplotlib-backed](https://matplotlib.org/) plotting interface to\n", - " visualize your datasets.\n", - "2. that `hvplot` provides an equally convenient interface for bokeh-backed plots" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Load data\n", - "\n", - "First let's load up a tutorial dataset to visualize.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature_gradient\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This dataset has three \"data variables\", `Tair` is air temperature and `dTdx`\n", - "and `dTdy` are horizontal gradients of this temperature field. All three \"data\n", - "variables\" are three-dimensional with dimensions `(time, lat, lon)`.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n", - "## Basic plotting: .plot()\n", - "\n", - "DataArray objects have a [plot](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.plot.html) method. This method creates plots using\n", - "`matplotlib` so all of your existing matplotlib knowledge carries over!\n", - "\n", - "By default `.plot()` makes\n", - "\n", - "1. a line plot for 1-D arrays using `plt.plot()`\n", - "2. a `pcolormesh` plot for 2-D arrays using `plt.pcolormesh()`\n", - "3. 
a histogram for everything else using `plt.hist()`\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n", - "## Histograms\n", - "\n", - "`Tair` is three-dimensional, so we got a histogram of temperature values. Notice\n", - "the label on the x-axis. One of xarray's convenient plotting features is that it\n", - "uses the `attrs` of `Tair` to nicely label axes and colorbars.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.plot()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can pass extra arguments to the underlying `hist()` call. See the [matplotlib\n", - "docs](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html) for\n", - "all possible keyword arguments.\n", - "\n", - "**Tip:** Note that the returned values are exactly what matplotlib would return\n", - "\n", - "### Exercise\n", - "\n", - "Update the above plot to show 50 bins with unfilled steps instead of filled\n", - "bars.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n", - "## 2D plots\n", - "\n", - "Now we will explore 2D plots. Let's select a single timestep of `Tair` to\n", - "visualize.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(time=1).plot()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This is identical to `.plot.pcolormesh` which is more explicit" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(time=1).plot.pcolormesh()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice how much information is on that plot!\n", - "\n", - "1. The x- and y-axes are labeled with full names — \"Latitude\", \"Longitude\" — along with units. \n", - "2. The colorbar has a nice label, again with units. \n", - "3. 
And the title tells us the timestamp of the data presented.\n", - "\n", - "`plot.pcolormesh` takes many [keyword arguments](https://docs.xarray.dev/en/stable/generated/xarray.plot.pcolormesh.html) and is quite sophisticated.\n", - "\n", - "Here is a more complicated figure that explicitly sets `time` as the x-axis,\n", - "customizes the colorbar, and overlays two contours at specific levels.\n", - "\n", - "**Tip:** Other options for 2D plots include [.plot.contour](https://docs.xarray.dev/en/stable/generated/xarray.plot.contour.html), [.plot.contourf](https://docs.xarray.dev/en/stable/generated/xarray.plot.pcolormesh.html),\n", - "[.plot.imshow](https://docs.xarray.dev/en/stable/generated/xarray.plot.imshow.html)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(lon=1).plot(\n", - " x=\"time\", # coordinate to plot on the x-axis of the plot\n", - " robust=True, # set colorbar limits to 2nd and 98th percentile of data\n", - " cbar_kwargs={ # passed to plt.colorbar\n", - " \"orientation\": \"horizontal\",\n", - " \"label\": \"custom label\",\n", - " \"pad\": 0.3,\n", - " },\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "Update the above plot to use a different matplotlib colormap.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-input" - ] - }, - "outputs": [], - "source": [ - "ds.Tair.isel(lon=1).plot(\n", - " x=\"time\", # coordinate to plot on the x-axis of the plot\n", - " robust=True, # set colorbar limits to 2nd and 98th percentile of data\n", - " cmap=mpl.cm.RdYlBu_r,\n", - " cbar_kwargs={ # passed to plt.colorbar\n", - " \"orientation\": \"horizontal\",\n", - " \"label\": \"custom label\",\n", - " \"pad\": 0.3,\n", - " },\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "Now overlay a [contour plot](https://docs.xarray.dev/en/stable/generated/xarray.plot.contour.html) on top of the previous plot\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-input" - ] - }, - "outputs": [], - "source": [ - "ds.Tair.isel(lon=1).plot.pcolormesh(\n", - " x=\"time\", # coordinate to plot on the x-axis of the plot\n", - " robust=True, # set colorbar limits to 2nd and 98th percentile of data\n", - " cmap=mpl.cm.RdYlBu_r,\n", - " cbar_kwargs={ # passed to plt.colorbar\n", - " \"orientation\": \"horizontal\",\n", - " \"label\": \"custom label\",\n", - " \"pad\": 0.3,\n", - " },\n", - ")\n", - "ds.Tair.isel(lon=1).plot.contour(\n", - " x=\"time\", # coordinate to plot on the x-axis of the plot\n", - " levels=5, # autoselect 5 levels between max, min\n", - " # The following are passed to plt.contour\n", - " colors=\"k\",\n", - " linewidths=0.5,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n", - "## 1D line plots\n", - "\n", - "xarray is also able to plot lines by wrapping `plt.plot()`. 
As in the earlier\n", - "examples, the axes are labelled and keyword arguments can be passed to the\n", - "underlying `matplotlib` call.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(time=1, lon=10).plot(marker=\"o\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Again, this is equivalent to the more explicit [plot.line](https://docs.xarray.dev/en/stable/generated/xarray.plot.line.html)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(time=1, lon=10).plot.line(marker=\"o\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Multiple lines with `hue` \n", - "\n", - "Lets say we want to compare line plots of temperature at three different\n", - "latitudes. We can use the `hue` kwarg to do this.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(time=1).sel(lat=[40, 50, 60], method=\"nearest\").plot(x=\"lon\", hue=\"lat\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Customization\n", - "\n", - "All of xarray's plotting functions take an large list kwargs that customize\n", - "behaviour. A full list can be seen [here](https://docs.xarray.dev/en/stable/generated/xarray.plot.pcolormesh.html). That\n", - "said xarray does not wrap all matplotlib functionality.\n", - "\n", - "The general strategy for making plots that are more complicated that the\n", - "examples above is\n", - "\n", - "1. Create a matplotlib axis `ax`\n", - "2. Use xarray to make a close approximation of the final plot specifying\n", - " `ax=ax`.\n", - "3. Use `ax` methods to fully customize the plot\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] - }, - "source": [ - "## More resources\n", - "\n", - "1. [Xarray's visualization gallery](https://docs.xarray.dev/en/stable/examples/visualization_gallery.html)\n", - "2. [Xarray's plotting documentation](https://docs.xarray.dev/en/stable/plotting.html)\n", - "\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/04.2_faceting.ipynb b/fundamentals/04.2_faceting.ipynb deleted file mode 100644 index 9e10f902..00000000 --- a/fundamentals/04.2_faceting.ipynb +++ /dev/null @@ -1,245 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Faceting\n", - "\n", - "Faceting is the art of presenting \"small multiples\" of the data. It is an\n", - "effective way of visualizing variations of 3D data where 2D slices are\n", - "visualized in a panel (subplot) and the third dimensions is varied between\n", - "panels (subplots).\n", - "\n", - "Here is where xarray really augments matplotlib's functionality. 
We will use\n", - "monthly means to illustrate\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature_gradient\")\n", - "monthly_means = ds.groupby(\"time.month\").mean()\n", - "# xarray's groupby reductions drop attributes. Let's assign them back so we get nice labels.\n", - "monthly_means.Tair.attrs = ds.Tair.attrs" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "Note that the dimensions are now `lat, lon, month`.\n", - "\n", - "## Basic faceting\n", - "\n", - "We want to visualize how the monthly mean air temperature varies with month of\n", - "the year.\n", - "\n", - "The simplest way to facet is to specify the `row` or `col` kwargs which are\n", - "expected to be a dimension name. Here we use `month` so that each panel or\n", - "\"facet\" of the plot presents the mean temperature field in a given month. Since\n", - "a 12 column plot would be too small to interpret, we can \"wrap\" the facets into\n", - "multiple rows using `col_wrap`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "fg = monthly_means.Tair.plot(\n", - " col=\"month\",\n", - " col_wrap=4, # each row has a maximum of 4 columns\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Customizing\n", - "\n", - "All the usual customizations are possible\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "fg = monthly_means.Tair.plot(\n", - " col=\"month\",\n", - " col_wrap=4,\n", - " # The remaining kwargs customize the plot just as for not-faceted plots\n", - " robust=True,\n", - " cmap=mpl.cm.RdYlBu_r,\n", - " cbar_kwargs={\n", - " \"orientation\": \"horizontal\",\n", - " \"shrink\": 0.8,\n", - " \"aspect\": 40,\n", - " \"pad\": 0.1,\n", - " },\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "The returned FacetGrid object `fg` has many useful properties and methods e.g.\n", - "\n", - "1. `fg.fig` provides a handle to the figure\n", - "2. `fg.axes` is a numpy object array with handles to each individual axes\n", - "3. `fg.set_xlabels` and `fg.set_ylabels` can be used to change axes labels.\n", - "\n", - "See the [documentation](https://docs.xarray.dev/en/stable/api.html#faceting) for a full list." - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "Use these methods to set a title for the figure using `suptitle`, as well as\n", - "change the x- and y-labels." 
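One possible solution is sketched below; the title and label strings are placeholders, not part of the original exercise:

```python
fg = monthly_means.Tair.plot(col="month", col_wrap=4)
fg.fig.suptitle("Monthly mean air temperature", y=1.02)  # figure-level title
fg.set_xlabels("Longitude [°E]")
fg.set_ylabels("Latitude [°N]")
```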
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "fg" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "## Modifying all facets\n", - "\n", - "The FacetGrid object has some more advanced methods that let you customize the\n", - "plot further.\n", - "\n", - "Here we illustrate the use of `map` and `map_dataarray` that let you map custom\n", - "plotting functions to an existing `FacetGrid`. The functions passed to `map` and\n", - "`map_dataarray` must have a particular signature. See the docstring for more\n", - "details.\n", - "\n", - "Alternatively one can loop over `fg.axes` and modify each individual subplot as\n", - "needed\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "fg = monthly_means.Tair.plot(col=\"month\", col_wrap=4)\n", - "\n", - "# Use this to plot contours on each panel\n", - "# Note that this plotting call uses the original DataArray gradients\n", - "fg.map_dataarray(xr.plot.contour, x=\"lon\", y=\"lat\", colors=\"k\", levels=13, add_colorbar=False)\n", - "\n", - "# Add a point (or anything else!)\n", - "fg.map(lambda: plt.plot(250, 40, markersize=20, marker=\".\", color=\"w\"))" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "## Faceting multiple DataArrays\n", - "\n", - "Faceting can be used to plot multiple DataArrays in a Dataset. The trick is to\n", - "use `to_array()` to convert a Dataset to a DataArray and then facet that.\n", - "\n", - "This trick only works when it is sensible to use the same colormap and color\n", - "scale for all DataArrays like with `dTdx` and `dTdy`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "gradients = monthly_means[[\"dTdx\", \"dTdy\"]].to_array(\"gradient\")\n", - "gradients" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "fg = gradients.isel(month=slice(None, None, 3)).plot.contourf(\n", - " levels=13,\n", - " col=\"month\",\n", - " row=\"gradient\",\n", - " robust=True,\n", - " cmap=mpl.cm.coolwarm,\n", - " cbar_kwargs={\n", - " \"orientation\": \"horizontal\",\n", - " \"shrink\": 0.8,\n", - " \"aspect\": 40,\n", - " \"label\": \"Gradient [°C/m]\",\n", - " },\n", - ")" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/fundamentals/04.3_geographic_plotting.ipynb b/fundamentals/04.3_geographic_plotting.ipynb deleted file mode 100644 index fc3620cd..00000000 --- a/fundamentals/04.3_geographic_plotting.ipynb +++ /dev/null @@ -1,123 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Geography with Cartopy\n", - "\n", - "Since xarray's default plotting functionality builds on matplotlib, we can\n", - "seamlessly use cartopy to make nice maps:\n", - "\n", - "1. Specify a `projection` for the plot when creating a new figure `fig` with\n", - " axis `axis`.\n", - "2. Explicitly ask xarray to plot to axis `axis` by passing the kwarg `ax=axis`.\n", - "3. 
Specify the projection of the data using `transform` (`PlateCarree` here) in\n", - " `.plot()`.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import cartopy.crs as ccrs\n", - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "import xarray as xr\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature_gradient\")\n", - "monthly_means = ds.groupby(\"time.month\").mean()\n", - "# xarray's groupby reductions drop attributes. Let's assign them back so we get nice labels.\n", - "monthly_means.Tair.attrs = ds.Tair.attrs" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Basic plot" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "fig, axis = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.Orthographic(-90, 30)))\n", - "\n", - "ds.Tair.isel(time=1).plot(\n", - " ax=axis,\n", - " transform=ccrs.PlateCarree(), # this is important!\n", - " # usual xarray stuff\n", - " cbar_kwargs={\"orientation\": \"horizontal\", \"shrink\": 0.7},\n", - " robust=True,\n", - ")\n", - "axis.coastlines() # cartopy function" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Faceting maps\n", - "\n", - "We can make faceted maps. Since `FacetGrid` creates the axes it plots to, we\n", - "need to pass the `projection` kwarg in `subplot_kws`. This makes sure that the\n", - "subplots are set up properly for cartopy.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "fg = monthly_means.Tair.isel(month=[1, 2, 3]).plot(\n", - " col=\"month\",\n", - " transform=ccrs.PlateCarree(), # remember to provide this!\n", - " subplot_kws={\"projection\": ccrs.LambertConformal(central_longitude=-95, central_latitude=45)},\n", - " cbar_kwargs={\"orientation\": \"horizontal\", \"shrink\": 0.8, \"aspect\": 40},\n", - " robust=True,\n", - ")\n", - "\n", - "# lets add a coastline to each axis\n", - "# great reason to use FacetGrid.map\n", - "fg.map(lambda: plt.gca().coastlines())" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/fundamentals/05_intro_to_dask.ipynb b/fundamentals/05_intro_to_dask.ipynb deleted file mode 100644 index d6877ad2..00000000 --- a/fundamentals/05_intro_to_dask.ipynb +++ /dev/null @@ -1,567 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction to Dask\n", - "\n", - "In this lesson, we discuss cover the basics of Dask. Our learning goals are as\n", - "follows. 
By the end of the lesson, we will be able to:\n", - "\n", - "- Identify and describe Dask Collections (Array, DataFrame) and Schedulers\n", - "- Work with Dask Array's in much the same way you would work with a NumPy array\n", - "- Understand some of the tradeoffs surrounding chunk size, chunk shape, and\n", - " computational overhead\n", - "- Deploy a local Dask Distributed Cluster and access the diagnostics dashboard\n", - "\n", - "## Table of contents\n", - "\n", - "1. [**What-is-Dask?**](#What-is-Dask?)\n", - "1. [**Dask Collections**](#Dask-Collections)\n", - "1. [**Parallelism using the dask.distributed scheduler**](#Parallelism-using-the-dask.distributed-scheduler)\n", - "1. [**Profiling & Diagnostics using the Dask Dashboard**](#Profiling-&-Diagnostics-using-the-Dask-Dashboard)\n", - "1. [**Distributed Dask clusters for HPC and Cloud environments**](#Distributed-Dask-clusters-for-HPC-and-Cloud-environments)\n", - "\n", - "\"Dask\n", - "\n", - "## What is Dask?\n", - "\n", - "Dask is a flexible parallel computing library for analytic computing. Dask\n", - "provides dynamic parallel task scheduling and high-level big-data collections\n", - "like `dask.array` and `dask.dataframe`, and an extensive suite of deployment\n", - "options. Dask's documentation can be found here:\n", - "https://docs.dask.org/en/latest/\n", - "\n", - "\"Dask\n", - "\n", - "## Quick setup\n", - "\n", - "For the purposes of this notebook, we'll use a Dask Cluster to manage\n", - "computations. The next cell sets up a simple LocalCluster. We'll cover Dask\n", - "schedulers and clusters later on in this notebook.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from dask.distributed import Client\n", - "\n", - "client = Client()\n", - "client" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "
👆
Click the Dashboard link above.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Dask Collections\n", - "\n", - "Dask includes 3 main collections:\n", - "\n", - "- [Dask Array](https://docs.dask.org/en/latest/array.html): Parallel NumPy\n", - " arrays\n", - "- [Dask DataFrame](https://docs.dask.org/en/latest/dataframe.html): Parallel\n", - " Pandas DataFrames\n", - "- [Dask Bag](https://docs.dask.org/en/latest/bag.html): Parallel Python Lists\n", - "\n", - "Xarray primarily interfaces with the Dask Array collection so we'll skip the\n", - "others for now. You can find out more about Dask's user interfaces\n", - "[here](https://docs.dask.org/en/latest/user-interfaces.html).\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Dask Arrays\n", - "\n", - "Dask Array implements a subset of the NumPy ndarray interface using blocked\n", - "algorithms, cutting up the large array into many small arrays. This lets us\n", - "compute on arrays larger than memory using multiple cores. We coordinate these\n", - "blocked algorithms using Dask graphs. Dask Array's are also _lazy_, meaning that\n", - "they do not evaluate until you explicitly ask for a result using the `compute`\n", - "method.\n", - "\n", - "If we want to create a NumPy array of all ones, we do it like this:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "shape = (1000, 4000)\n", - "ones_np = np.ones(shape)\n", - "ones_np" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This array contains exactly 32 MB of data:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(\"%.1f MB\" % (ones_np.nbytes / 1e6))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now let's create the same array using Dask's array interface.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import dask.array as da\n", - "\n", - "ones = da.ones(shape)\n", - "ones" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This works, but we didn't tell Dask how to split up (or chunk) the array, so it\n", - "is not optimized for parallel computation.\n", - "\n", - "A crucal difference with Dask is that we must specify the `chunks` argument.\n", - "\"Chunks\" describes how the array is split up over many sub-arrays.\n", - "\n", - "![Dask Arrays](http://docs.dask.org/en/latest/_images/dask-array-black-text.svg)\n", - "_source:\n", - "[Dask Array Documentation](http://docs.dask.org/en/latest/array-overview.html)_\n", - "\n", - "There are\n", - "[several ways to specify chunks](http://docs.dask.org/en/latest/array-creation.html#chunks).\n", - "In this lecture, we will use a block shape.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "chunk_shape = (1000, 1000)\n", - "ones = da.ones(shape, chunks=chunk_shape)\n", - "ones" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Notice that we just see a symbolic represetnation of the array, including its\n", - "shape, dtype, and chunksize. No data has been generated yet. 
When we call\n", - "`.compute()` on a Dask array, the computation is trigger and the dask array\n", - "becomes a numpy array.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ones.compute()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In order to understand what happened when we called `.compute()`, we can\n", - "visualize the Dask _graph_, the symbolic operations that make up the array\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ones.visualize()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Our array has four chunks. To generate it, Dask calls `np.ones` four times and\n", - "then concatenates this together into one array.\n", - "\n", - "Rather than immediately loading a Dask array (which puts all the data into RAM),\n", - "it is more common to reduce the data somehow. For example:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sum_of_ones = ones.sum()\n", - "sum_of_ones.visualize()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "Modify the chunk size (or shape) in the `ones` array and visualize how the task\n", - "graph changes.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# your code here" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we see Dask's strategy for finding the sum. This simple example illustrates\n", - "the beauty of Dask: it automatically designs an algorithm appropriate for custom\n", - "operations with big data.\n", - "\n", - "If we make our operation more complex, the graph gets more complex.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fancy_calculation = (ones * ones[::-1, ::-1]).mean()\n", - "fancy_calculation.visualize()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### A Bigger Calculation\n", - "\n", - "The examples above were toy examples; the data (32 MB) is probably not big\n", - "enough to warrant the use of Dask.\n", - "\n", - "We can make it a lot bigger!\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "bigshape = (200000, 4000)\n", - "big_ones = da.ones(bigshape, chunks=chunk_shape)\n", - "big_ones" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(\"%.1f MB\" % (big_ones.nbytes / 1e6))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This dataset is 6.4 GB, rather than 32 MB! This is probably close to or greater\n", - "than the amount of available RAM than you have in your computer. Nevertheless,\n", - "Dask has no problem working on it.\n", - "\n", - "_Do not try to `.visualize()` this array!_\n", - "\n", - "When doing a big calculation, dask also has some tools to help us understand\n", - "what is happening under the hood. 
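Before turning to the dashboard, a few attributes of the array itself are cheap to inspect and trigger no computation (a sketch, assuming `big_ones` from above):

```python
# metadata-only checks: nothing is loaded or computed here
print(f"{big_ones.nbytes / 1e9:.1f} GB in total")
print(big_ones.chunksize)   # shape of a single chunk
print(big_ones.numblocks)   # number of chunks along each dimension
```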
Let's watch the dashboard again as we do a\n", - "bigger computation.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "big_calc = (big_ones * big_ones[::-1, ::-1]).mean()\n", - "\n", - "result = big_calc.compute()\n", - "result" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Reduction\n", - "\n", - "All the usual numpy methods work on dask arrays. You can also apply numpy\n", - "function directly to a dask array, and it will stay lazy.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "big_ones_reduce = (np.cos(big_ones) ** 2).mean(axis=1)\n", - "big_ones_reduce" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Plotting also triggers computation, since we need the actual values\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%matplotlib inline\n", - "from matplotlib import pyplot as plt" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "plt.plot(big_ones_reduce)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Parallelism using the dask.distributed scheduler\n", - "\n", - "In the [first cell](#Quick-setup) of this notebook, we started a local Dask\n", - "Cluster and Client. We skipped past some important details there that we'll\n", - "unpack now.\n", - "\n", - "### Dask Schedulers\n", - "\n", - "The Dask _Schedulers_ orchestrate the tasks in the Task Graphs so that they can\n", - "be run in parallel. _How_ they run in parallel, though, is determined by which\n", - "_Scheduler_ you choose.\n", - "\n", - "There are 3 _local_ schedulers:\n", - "\n", - "- **Single-Thread Local:** For debugging, profiling, and diagnosing issues\n", - "- **Multi-threaded:** Using the Python built-in `threading` package (the default\n", - " for all Dask operations except `Bags`)\n", - "- **Multi-process:** Using the Python built-in `multiprocessing` package (the\n", - " default for Dask `Bags`)\n", - "\n", - "and 1 _distributed_ scheduler, which we will talk about later:\n", - "\n", - "- **Distributed:** Using the `dask.distributed` module (which uses `tornado` for\n", - " communication over TCP). The distributed scheduler uses a `Cluster` to manage\n", - " communication between the scheduler and the \"workers\". This is described in\n", - " the next section.\n", - "\n", - "### Distributed Clusters (http://distributed.dask.org/)\n", - "\n", - "- `LocalCluster` - Creates a `Cluster` that can be executed locally. Each\n", - " `Cluster` includes a `Scheduler` and `Worker`s.\n", - "- `Client` - Connects to and drives computation on a distributed `Cluster`\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Profiling & Diagnostics using the Dask Dashboard\n", - "\n", - "You'll recall from above, that we opened a url to the Dask Dashboard:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The dashboard the Dask distributed scheduler provides a an incredibly valuable\n", - "tool for gaining insights into the performance of your computation and the\n", - "cluster as a whole. 
In the dashboard, you'll see a number of tags:\n", - "\n", - "- _Status_: Overview of the current state of the scheduler, including the active\n", - " task stream, progress, memory per worker, and the number of tasks per worker.\n", - "- _Workers_: The workers tab allows you to track cpu and memory use per worker.\n", - "- _System_: Live tracking of system resources like cpu, memory, bandwidth, and\n", - " open file descriptors\n", - "- _Profile_: Fine-grained statistical profiling\n", - "- _Info_: Worker status and logs.\n", - "\n", - "Another useful diagnostic tool is Dask's static performance report. This allows\n", - "you to save a report, including the task stream, worker profiles, etc. for all\n", - "or a specific part of a workflow. Below is an example of how you would create\n", - "such a report:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from dask.distributed import performance_report\n", - "\n", - "with performance_report(filename=\"dask-report.html\"):\n", - " big_calc.compute()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "Again, let's modify the chunk size in `big_ones` (aim for ~100mb). How does the\n", - "_Performance Report_ change with a larger chunk size?\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# your code here\n", - "\n", - "with performance_report(filename=\"dask-report-large-chunk.html\"):\n", - " big_calc.compute()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Distributed Dask clusters for HPC and Cloud environments\n", - "\n", - "Dask can be deployed on distributed infrastructure, such as a an HPC system or a\n", - "cloud computing system. 
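As one concrete (hypothetical) flavor of this, workers on a Slurm-based HPC system might be requested through dask-jobqueue; every setting below is a placeholder that would need to match the actual site:

```python
from dask.distributed import Client
from dask_jobqueue import SLURMCluster

# hypothetical, site-specific resource requests
cluster = SLURMCluster(cores=4, memory="16GB", walltime="01:00:00")
cluster.scale(jobs=4)   # ask the batch system for 4 worker jobs
client = Client(cluster)
```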
There is a growing ecosystem of Dask deployment projects\n", - "that facilitate easy deployment and scaling of Dask clusters on a wide variety of\n", - "computing systems.\n", - "\n", - "### HPC\n", - "\n", - "#### Dask Jobqueue (https://jobqueue.dask.org/)\n", - "\n", - "- `dask_jobqueue.PBSCluster`\n", - "- `dask_jobqueue.SlurmCluster`\n", - "- `dask_jobqueue.LSFCluster`\n", - "- etc.\n", - "\n", - "#### Dask MPI (https://mpi.dask.org/)\n", - "\n", - "- `dask_mpi.initialize`\n", - "\n", - "### Cloud\n", - "\n", - "#### Dask Kubernetes (https://kubernetes.dask.org/)\n", - "\n", - "- `dask_kubernetes.KubeCluster`\n", - "\n", - "#### Dask Cloud Provider (https://cloudprovider.dask.org)\n", - "\n", - "- `dask_cloudprovider.FargateCluster`\n", - "- `dask_cloudprovider.ECSCluster`\n", - "- `dask_cloudprovider.ECSCluster`\n", - "\n", - "#### Dask Gateway (https://gateway.dask.org/)\n", - "\n", - "- `dask_gateway.GatewayCluster`\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "_Note: Pieces of this notebook comes from the following sources:_\n", - "\n", - "- https://github.com/pangeo-data/pangeo-tutorial\n", - "- https://github.com/rabernat/research_computing\n", - "- https://github.com/dask/dask-examples\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/fundamentals/README.md b/fundamentals/README.md deleted file mode 100644 index e69de29b..00000000 diff --git a/images/alignment_schematic.png b/images/alignment_schematic.png deleted file mode 100644 index b9c91245..00000000 Binary files a/images/alignment_schematic.png and /dev/null differ diff --git a/images/broadcasting.png b/images/broadcasting.png deleted file mode 100644 index 4bdd88ad..00000000 Binary files a/images/broadcasting.png and /dev/null differ diff --git a/images/broadcasting_schematic.png b/images/broadcasting_schematic.png deleted file mode 100644 index 034950ab..00000000 Binary files a/images/broadcasting_schematic.png and /dev/null differ diff --git a/images/logo.png b/images/logo.png deleted file mode 100644 index 8fa32d2a..00000000 Binary files a/images/logo.png and /dev/null differ diff --git a/images/malaria_dataset.png b/images/malaria_dataset.png deleted file mode 100644 index b360a198..00000000 Binary files a/images/malaria_dataset.png and /dev/null differ diff --git a/images/orthogonal_vs_vectorized.png b/images/orthogonal_vs_vectorized.png deleted file mode 100644 index e2033b87..00000000 Binary files a/images/orthogonal_vs_vectorized.png and /dev/null differ diff --git a/images/scipy2024.png b/images/scipy2024.png deleted file mode 100644 index b9fdb87c..00000000 Binary files a/images/scipy2024.png and /dev/null differ diff --git a/images/scipylogo.png b/images/scipylogo.png deleted file mode 100644 index 59c34332..00000000 Binary files a/images/scipylogo.png and /dev/null differ diff --git a/images/scipylogo2023.png b/images/scipylogo2023.png deleted file mode 100644 index 78791383..00000000 Binary files a/images/scipylogo2023.png and /dev/null differ diff --git a/images/xarray-data-structures.png b/images/xarray-data-structures.png deleted file mode 100644 index 8f4d4ae7..00000000 Binary files a/images/xarray-data-structures.png and /dev/null differ diff --git 
a/intermediate/01-high-level-computation-patterns.ipynb b/intermediate/01-high-level-computation-patterns.ipynb deleted file mode 100644 index 13c5f261..00000000 --- a/intermediate/01-high-level-computation-patterns.ipynb +++ /dev/null @@ -1,1487 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "slideshow": { - "slide_type": "slide" - }, - "tags": [] - }, - "source": [ - "# Computational Patterns\n", - "\n", - "Often when writing code we repeat certain patterns, whether we realize it or not.\n", - "If you have learned to write list comprehensions, you are taking advantage of a \"control pattern\".\n", - "Often, these patterns are so common that many packages have built in functions to implement them.\n", - "\n", - "Quoting the [toolz documentation](https://toolz.readthedocs.io/en/latest/control.html):\n", - "\n", - "> The Toolz library contains dozens of patterns like map and groupby. Learning a\n", - "> core set (maybe a dozen) covers the vast majority of common programming tasks\n", - "> often done by hand. A rich vocabulary of core control functions conveys the\n", - "> following benefits:\n", - ">\n", - "> - You identify new patterns\n", - "> - You make fewer errors in rote coding\n", - "> - You can depend on well tested and benchmarked implementations\n", - "\n", - "The same is true for xarray.\n", - "\n", - "\n", - "```{seealso}\n", - "\n", - "The concepts covered here, particularly the emphasis on deleting for loops and focusing on large elements of the computation, are very related to the [array programming style](https://en.wikipedia.org/wiki/Array_programming) or paradigm. See this SciPy 2023 tutorial on [\"Thinking Like Arrays\"](https://github.com/jpivarski-talks/2023-07-11-scipy2023-tutorial-thinking-in-arrays) by Jim Pivarski if you are interested in these ideas.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": { - "slideshow": { - "slide_type": "slide" - }, - "tags": [] - }, - "source": [ - "## Motivation / Learning goals\n", - "\n", - "- Learn what high-level computational patterns are available in Xarray.\n", - "- Learn that these patterns replace common uses of the `for` loop.\n", - "- Identify when you are re-implementing an existing computational pattern.\n", - "- Implement that pattern using built-in Xarray functionality.\n", - "- Understand the difference between `map` and `reduce`." - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "## Xarray's high-level patterns\n", - "\n", - "Xarray allows you to leverage dataset metadata to write more readable analysis\n", - "code. The metadata is stored with the data; not in your head.\n", - "\n", - "1. Dimension names: `dim=\"latitude\"` instead of `axis=0`\n", - "2. Coordinate \"labels\": or axis tick labels. `data.sel(latitude=45)` instead of\n", - " `data[10]`\n", - "\n", - "Xarray also provides computational patterns that cover many data\n", - "analysis tasks.\n", - "\n", - "Xarray provides methods for high-level analysis patterns:\n", - "\n", - "1. `rolling` :\n", - " [Operate on rolling or sliding (fixed length, overlapping) windows of your data e.g. running mean.](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations)\n", - "1. `coarsen` :\n", - " [Operate on blocks (fixed length) of your data (downsample).](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays)\n", - "1. 
`groupby` :\n", - " [Parse data into groups (using an exact value) and operate on each one (reduce data).](https://docs.xarray.dev/en/stable/groupby.html)\n", - "1. `groupby_bins`: [GroupBy after discretizing a numeric (non-exact, e.g. float) variable.](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.groupby_bins.html)\n", - "1. `resample` :\n", - " [Groupby specialized for time axes. Either downsample or upsample your data.](https://docs.xarray.dev/en/stable/user-guide/time-series.html#resampling-and-grouped-operations)\n", - "1. `weighted`: [Weight your data before reducing.](https://docs.xarray.dev/en/stable/user-guide/computation.html#weighted-array-reductions) \n", - "\n", - "\n", - "```{note}\n", - "The documentation links in this tutorial point to the DataArray implementations of each function, but they are also available for DataSet objects.\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": { - "slideshow": { - "slide_type": "slide" - }, - "tags": [] - }, - "source": [ - "### Load example dataset\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "\n", - "# reduce figure size\n", - "plt.rcParams[\"figure.dpi\"] = 90\n", - "xr.set_options(keep_attrs=True, display_expand_data=False)\n", - "\n", - "da = xr.tutorial.load_dataset(\"air_temperature\", engine=\"netcdf4\").air\n", - "monthly = da.resample(time=\"ME\").mean()\n", - "data = da.isel(time=0)\n", - "data.plot();" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "***\n", - "\n", - "### Identifying high-level computation patterns\n", - "\n", - "*or, when should I use these functions?*\n", - "\n", - "Consider a common use case. We want to complete some \"task\" for each of \"something\". The \"task\" might be a computation (e.g. mean, median, plot). The \"something\" could be a group of array values (e.g. pixels) or segments of time (e.g. monthly or seasonally).\n", - "\n", - "Often, our solution to this type of problem is to write a for loop. Say we want the average air temperature for each month across the entire domain (all lat and lon values):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]\n", - "avg_temps = []\n", - "\n", - "# for loop\n", - "for mon in months:\n", - " # filter data, split out in to groups\n", - " subset = da[da[\"time.month\"] == mon]\n", - " # do some computation\n", - " avg = subset.mean()\n", - " # append to existing results\n", - " avg_temps.append(avg.item())\n", - "\n", - "print(avg_temps)" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "This pattern is the GroupBy pattern.\n", - "\n", - "\n", - "An easy conceptual next step for this example (but still using our for loop) would be to use Xarray's `groupby` function to create an iterator that does the work of grouping our data by month and looping over each month." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "avg_temps = []\n", - "\n", - "for label, group in da.groupby(\"time.month\"):\n", - " avg_temps.append(float(group.mean().data))\n", - "\n", - "print(avg_temps)" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "Writing a for-loop here is not wrong, but it can quickly become cumbersome if you have a complex function to apply and it will take a while to compute on a large dataset (you may even run out of memory). Parallelizing the computation would take a lot of additional work.\n", - "\n", - "Xarray's functionality instead allows us to do the same computation in one line of code (plus, the computation is optimized and ready to take advantage of parallel compute resources)!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# note the use of the ellipses here\n", - "# for easy comparison to the for loop above\n", - "avg_temps = da.groupby(\"time.month\").mean(...)\n", - "print(avg_temps.data)" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "```{note}\n", - ":class: dropdown\n", - "By default, `da.mean()` (and `df.mean()`) will calculate the mean by reducing your data over all dimensions (unless you specify otherwise using the `dim` kwarg). The default behavior of `.mean()` on a groupby is to calculate the mean over all dimensions of the variable you are grouping by - but not all the dimensions of the object you are operating on. To compute the mean across all dimensions of a groupby, we must specify `...` for all dimensions (or use the `dim` kwarg to specify which dimensions to reduce by).\n", - "\n", - "```\n", - "\n", - "Here we showed an example for computing a mean over a certain period of time (months), which ultimately uses the `GroupBy` function to group together observations with similar characteristics that are scattered through the dataset. The transition from loops to a built-in function is similar for `rolling` and `coarsen` over sequential windows of values (e.g. pixels) instead of \"groups\" of time.\n", - "\n", - "Read on through this tutorial to learn some of the incredible ways to use Xarray to avoid writing long for-loops and efficiently complete computational analyses on your data.\n", - "\n", - "```{seealso}\n", - ":class: dropdown\n", - "For a more complex example (identifying flood events - including their start and end date - from rainfall data) illustrating the transition from for loops to high level computation tools, see [this discussion](https://github.com/pydata/xarray/discussions/7641). 
The [original 40 lines of code](https://github.com/pydata/xarray/discussions/7641#discussion-4976005), including nested for loops, was streamlined into a ~15 line workflow without any loops.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": { - "slideshow": { - "slide_type": "slide" - }, - "tags": [] - }, - "source": [ - "***\n", - "\n", - "### Concept refresher: \"index space\" vs \"label space\"\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# index space\n", - "data[10, :] # 10th element along the first axis; ¯\\_(ツ)_/¯" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# slightly better index space\n", - "data.isel(lat=10) # slightly better, 10th element in latitude" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# \"label\" space\n", - "data.sel(lat=50) # much better! lat=50°N" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "outputs": [], - "source": [ - "# What I wanted to do\n", - "data.sel(lat=50)\n", - "\n", - "# What I had to do (if I wasn't using xarray)\n", - "data[10, :]" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "***\n", - "\n", - "## Xarray provides patterns in both \"index space\" and \"label space\"\n", - "\n", - "### Index space\n", - "\n", - "These are sequential windowed operations with a window of a fixed size.\n", - "\n", - "1. `rolling` :\n", - " [Operate on rolling or sliding (fixed length, overlapping) windows of your data e.g. running mean.](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations)\n", - "1. `coarsen` :\n", - " [Operate on blocks (fixed length) of your data (downsample).](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays)\n", - "\n", - "\n", - "### Label space\n", - "\n", - "These are windowed operations with irregular windows based on your data. Members of a single group may be non-sequential and scattered through the dataset.\n", - "\n", - "1. `groupby` :\n", - " [Parse data into groups (using an exact value) and operate on each one (reduce data).](https://docs.xarray.dev/en/stable/groupby.html)\n", - "1. `groupby_bins`: [GroupBy after discretizing a numeric (non-exact, e.g. float) variable.](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.groupby_bins.html)\n", - "1. `resample` :\n", - " [Groupby specialized for time axes. 
Either downsample or upsample your data.](https://docs.xarray.dev/en/stable/user-guide/time-series.html#resampling-and-grouped-operations)\n" - ] - }, - { - "cell_type": "markdown", - "id": "20", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "add some \"loop\" versions to show what a user might come up with that could be turned into one of these pattern operations\n", - "\n", - "---\n", - "\n", - "## Index space: windows of fixed width\n", - "\n", - "### Sliding windows of fixed length: [`rolling`](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.rolling.html)\n", - "\n", - "- Supports common reductions : `sum`, `mean`, `count`, `std`, `var` etc.\n", - "- Returns object of same shape as input\n", - "- Pads with NaNs to make this happen\n", - "- Supports multiple dimensions\n", - "\n", - "Here's the dataset\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "data.plot();" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": {}, - "source": [ - "And now smoothed 5 point running mean in lat and lon\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "data.rolling(lat=5, lon=5, center=True).mean().plot();" - ] - }, - { - "cell_type": "markdown", - "id": "24", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "#### Apply an existing numpy-only function with `reduce`\n", - "\n", - "In some cases, we may want to apply a sliding window function using rolling that is not built in to Xarray. In these cases we can still leverage the sliding windows of rolling and apply our own function with [`reduce`](https://docs.xarray.dev/en/stable/generated/xarray.core.rolling.DataArrayRolling.reduce.html).\n", - "\n", - "The `reduce` method on Xarray objects (e.g. DataArray, Dataset) expects a function that can *receive and return plain arrays (e.g. numpy)*, as in each of the \"windows\" provided by the rolling iterator. This is in contrast to the `map` method on DataArray and Dataset objects, which expects a function that can receive and return Xarray objects.\n", - "\n", - "Here's an example function: [`np.ptp`](https://numpy.org/doc/stable/reference/generated/numpy.ptp.html).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "data.rolling(lat=5, lon=5, center=True).reduce(np.ptp).plot();" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Calculate the rolling mean in 5 point bins along both latitude and longitude using\n", - "[`rolling(...).reduce`](https://docs.xarray.dev/en/stable/generated/xarray.core.rolling.DataArrayRolling.reduce.html)\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "# exactly equivalent to data.rolling(...).mean()\n", - "data.rolling(lat=5, lon=5, center=True).reduce(np.mean).plot();\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "27", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "#### View the `rolling` operation as a Xarray object with `construct`\n", - "\n", - "In the above examples, we plotted the outputs of our rolling operations. 
Xarray makes it easy to integrate the outputs from `rolling` directly into the DataArray using the [`construct`](https://docs.xarray.dev/en/stable/generated/xarray.core.rolling.DataArrayRolling.construct.html#xarray.core.rolling.DataArrayRolling.construct) method." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "simple = xr.DataArray(np.arange(10), dims=\"time\", coords={\"time\": np.arange(10)})\n", - "simple" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "# adds a new dimension \"window\"\n", - "simple.rolling(time=5, center=True).construct(\"window\")" - ] - }, - { - "cell_type": "markdown", - "id": "30", - "metadata": {}, - "source": [ - "```{note}\n", - "Because `.construct()` only returns a \"view\" (not a copy) of the original data object (i.e. it is not operating \"in-place\"), in order to \"save\" the results you would need to rewrite the original object: `simple = simple.rolling(time=5, center=True).construct(\"window\")`.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "31", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "Calculate the 5 point running mean in time using `rolling.construct`\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "simple.rolling(time=5, center=True).construct(\"window\").mean(\"window\")\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, - "source": [ - "`construct` is clever.\n", - "\n", - "1. It constructs a [**view**](https://numpy.org/doc/stable/user/basics.copies.html) of the original array, so it is memory-efficient.\n", - "1. It does something sensible for dask arrays (though generally you want big chunksizes for the dimension you're sliding along).\n", - "1. It also works with rolling along multiple dimensions!\n" - ] - }, - { - "cell_type": "markdown", - "id": "33", - "metadata": { - "tags": [] - }, - "source": [ - "#### Advanced: Another `construct` example\n", - "\n", - "This is a 2D rolling example; we need to provide two new dimension names.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [ - "data.rolling(lat=5, lon=5, center=True).construct(lat=\"lat_roll\", lon=\"lon_roll\")" - ] - }, - { - "cell_type": "markdown", - "id": "35", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "***\n", - "\n", - "### Block windows of fixed length: `coarsen`\n", - "\n", - "For non-overlapping windows or \"blocks\" use [`coarsen`](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.coarsen.html). The syntax is very similar to `rolling`. 
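- "\n",
- "As a quick side-by-side sketch (using the same `data` DataArray as above), the two calls differ only in the windowing method and its keyword arguments:\n",
- "\n",
- "```python\n",
- "# overlapping 5x5 windows; output has the same shape as the input\n",
- "data.rolling(lat=5, lon=5, center=True).mean()\n",
- "\n",
- "# non-overlapping 5x5 blocks; output is roughly 5x smaller along each dimension\n",
- "data.coarsen(lat=5, lon=5, boundary=\"trim\").mean()\n",
- "```\n",
- "\n",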
You will need to specify how you want Xarray to handle the `boundary` if the length of the dimension is not a multiple of the block size.\n", - "\n", - "\n", - "- Supports common reductions : `sum`, `mean`, `count`, `std`, `var` etc.\n", - "- Does **not** return an object of same shape as input\n", - "- Allows controls over behaviour at boundaries\n", - "- Supports multiple dimensions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36", - "metadata": {}, - "outputs": [], - "source": [ - "data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [ - "data.plot();" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38", - "metadata": {}, - "outputs": [], - "source": [ - "data.coarsen(lat=5, lon=5, boundary=\"trim\").mean()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "39", - "metadata": {}, - "outputs": [], - "source": [ - "(data.coarsen(lat=5, lon=5, boundary=\"trim\").mean().plot())" - ] - }, - { - "cell_type": "markdown", - "id": "40", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "#### Coarsen supports `reduce` for custom reductions\n", - "\n", - "::::{admonition} Exercise\n", - ":class: tip\n", - "Use `coarsen.reduce` to apply `np.ptp` in 5x5 (lat x lon) point blocks to `data`\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "data.coarsen(lat=5, lon=5, boundary=\"trim\").reduce(np.ptp).plot();\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "41", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "#### Coarsen supports `construct` for block reshaping and storing outputs\n", - "\n", - "```{tip}\n", - "`coarsen.construct` is usually a good alternative to `np.reshape`\n", - "```\n", - "\n", - "A simple example splits a 2-year long monthly 1D time series into a 2D array shaped (year x month)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "42", - "metadata": {}, - "outputs": [], - "source": [ - "months = xr.DataArray(\n", - " np.tile(np.arange(1, 13), reps=2),\n", - " dims=\"time\",\n", - " coords={\"time\": np.arange(1, 25)},\n", - ")\n", - "months" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "43", - "metadata": {}, - "outputs": [], - "source": [ - "# break \"time\" into two new dimensions: \"year\", \"month\"\n", - "months.coarsen(time=12).construct(time=(\"year\", \"month\"))" - ] - }, - { - "cell_type": "markdown", - "id": "44", - "metadata": {}, - "source": [ - "Note two things:\n", - "\n", - "1. The `time` dimension was also reshaped.\n", - "1. The new dimensions `year` and `month` don't have any coordinate labels\n", - " associated with them.\n", - "\n", - "What if the data had say 23 instead of 24 values (`months.isel(time=slice(1, None)`)? 
In that case we specify a different `boundary` (the default `boundary=\"exact\"` worked above); here we pad to 24 values.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "45", - "metadata": {}, - "outputs": [], - "source": [ - "months.isel(time=slice(1, None)).coarsen(time=12, boundary=\"pad\").construct(time=(\"year\", \"month\"))" - ] - }, - { - "cell_type": "markdown", - "id": "46", - "metadata": {}, - "source": [ - "This adds values at the end of the array (see the 'nan' at the end of the time coordinate?), which is not so sensible for this\n", - "problem. We have some control of the padding through the `side` kwarg to `coarsen`. For `side=\"right\"` we get more sensible output." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "47", - "metadata": {}, - "outputs": [], - "source": [ - "months.isel(time=slice(1, None)).coarsen(time=12, boundary=\"pad\", side=\"right\").construct(\n", - " time=(\"year\", \"month\")\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "48", - "metadata": {}, - "source": [ - "Note that `coarsen` pads with NaNs. For more control over padding, use\n", - "[DataArray.pad](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.pad.html) explicitly." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "49", - "metadata": {}, - "outputs": [], - "source": [ - "(\n", - " months.isel(time=slice(1, None))\n", - " .pad(time=(1, 0), constant_values=-1)\n", - " .coarsen(time=12)\n", - " .construct(time=(\"year\", \"month\"))\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "50", - "metadata": {}, - "source": [ - "```{note}\n", - "The value specified in `.pad` only applies the `fill_value` to the array, not to coordinate variables.\n", - "This is why the first value of time in the above example is NaN and not -1.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "51", - "metadata": {}, - "source": [ - ":::{admonition} Exercise\n", - ":class: tip\n", - "Reshape the `time` dimension of the DataArray `monthly` to year x\n", - "month and visualize the seasonal cycle for two years at 250°E\n", - "\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "# splits time dimension into year x month\n", - "year_month = monthly.coarsen(time=12).construct(time=(\"year\", \"month\"))\n", - "\n", - "# assign a nice coordinate value for month\n", - "year_month[\"month\"] = [\n", - " \"jan\",\n", - " \"feb\",\n", - " \"mar\",\n", - " \"apr\",\n", - " \"may\",\n", - " \"jun\",\n", - " \"jul\",\n", - " \"aug\",\n", - " \"sep\",\n", - " \"oct\",\n", - " \"nov\",\n", - " \"dec\",\n", - "]\n", - "\n", - "# assign a nice coordinate value for year\n", - "year_month[\"year\"] = [2013, 2014]\n", - "\n", - "# seasonal cycle for two years\n", - "year_month.sel(lon=250).plot.contourf(col=\"year\", x=\"month\", y=\"lat\")\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "52", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "Calculate the rolling 4 month average, averaged across years. (This exercise came up during a live lecture).\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "1. We first reshape using `coarsen.construct` to add `year` as a new dimension.\n", - "2. Apply `rolling` on the month dimension.\n", - "3. It turns out that `roll.mean([\"year\", \"month\"])` doesn't work. 
So we use `roll.construct` to get a DataArray with a new dimension `window` and then take the mean over `window` and `year`\n", - "\n", - "```python\n", - "reshaped = months.coarsen(time=12).construct(time=(\"year\", \"month\"))\n", - "roll = reshaped.rolling(month=4, center=True)\n", - "roll.construct(\"window\").mean([\"window\", \"year\"])\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "id": "53", - "metadata": {}, - "source": [ - "### Summary\n", - "\n", - "Delete your `for` loops. Use `rolling` and `coarsen` for fixed size windowing operations.\n", - "\n", - "1. `rolling` for overlapping windows\n", - "1. `coarsen` for non-overlapping windows.\n", - "\n", - "Both provide the usual reductions as methods (`.mean()` and friends), and also\n", - "`reduce` and `construct` for custom operations.\n" - ] - }, - { - "cell_type": "markdown", - "id": "54", - "metadata": { - "slideshow": { - "slide_type": "slide" - }, - "tags": [] - }, - "source": [ - "***\n", - "\n", - "## Label space \"windows\" or bins : GroupBy\n", - "\n", - "Sometimes the windows you want are not regularly spaced or even defined by a grid.\n", - "For instance, grouping data by month (which have varying numbers of days) or the results of an image classification.\n", - "The GroupBy functions are essentially a generalization of `coarsen`: \n", - "\n", - "- `groupby`: divide data into distinct groups, e.g. climatologies, composites. Works best when the \"group identifiers\" or \"labels\" are exact and can be determined using equality (`==`), e.g. characters or integers. Remember that floats are not exact values.\n", - "- `groupby_bins`: Use binning operations, e.g. histograms, to group your data.\n", - "- `resample`: Specialized implementation of GroupBy specifically for time grouping (so far), allows you to change sampling frequency of dataset.\n", - "\n", - "\n", - "```{note}\n", - " Both `groupby_bins` and `resample` are implemented as `groupby` with a specific way of constructing group labels. The GroupBy pattern is very flexible!\n", - "```\n", - "\n", - "\n", - "### Deconstructing GroupBy\n", - "\n", - "The GroupBy workflow is commonly called \"split-apply-combine\".\n", - "\n", - "1. *Split* : break dataset into groups\n", - "1. *Apply* : apply an operation, for instance a reduction like `mean`\n", - "1. *Combine* : concatenate results from apply step along a new \"group\" dimension\n", - "\n", - "illustrated in this neat schematic from [Project Pythia](https://foundations.projectpythia.org/core/xarray/computation-masking.html#groupby-split-apply-combine):\n", - "\n", - "\n", - "\n", - "But really there is a \"hidden\" first step: *identifying* groups (also called \"factorization\" or sometimes \"binning\"). 
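- "\n",
- "As a rough illustration of what \"identifying groups\" means, here is a sketch of roughly what happens under the hood (not Xarray's actual implementation):\n",
- "\n",
- "```python\n",
- "import pandas as pd\n",
- "\n",
- "# integer group codes plus the distinct group labels (here: the months 1-12)\n",
- "codes, uniques = pd.factorize(da[\"time.month\"].data)\n",
- "```\n",
- "\n",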
Usually this is the hard part.\n", - "\n", - "In reality the workflow is: \"identify groups\" → \"split into groups\" → \"apply function\" → \"combine results\".\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "55", - "metadata": {}, - "outputs": [], - "source": [ - "# recall our earlier DataArray\n", - "da" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "56", - "metadata": {}, - "outputs": [], - "source": [ - "# GroupBy returns an iterator that traverses the specified groups, here by month.\n", - "# Notice that groupby is clever enough for us to leave out the `.dt` before `.month`\n", - "# we would need to specify to access the month data directly, as in `da.time.dt.month`.\n", - "da.groupby(\"time.month\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57", - "metadata": {}, - "outputs": [], - "source": [ - "# for each group (e.g. the air temperature in a given month for all the years),\n", - "# compute the mean\n", - "da.groupby(\"time.month\").mean()" - ] - }, - { - "cell_type": "markdown", - "id": "58", - "metadata": {}, - "source": [ - "Notice that since we have averaged over all the years for each month, our resulting DataArray no longer has a \"year\" coordinate.\n", - "\n", - "If we want to see how Xarray identifies \"groups\" for the monthly climatology computation, we can plot our input to `groupby`. GroupBy is clever enough to figure out how many values there are an thus how many groups to make.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "59", - "metadata": {}, - "outputs": [], - "source": [ - "da[\"time.month\"].plot();" - ] - }, - { - "cell_type": "markdown", - "id": "60", - "metadata": {}, - "source": [ - "Similarly for binning (remember this is useful when the parameter you are binning over is not \"exact\", like a float),\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "61", - "metadata": {}, - "outputs": [], - "source": [ - "data.groupby_bins(\"lat\", bins=[20, 35, 40, 45, 50])" - ] - }, - { - "cell_type": "markdown", - "id": "62", - "metadata": {}, - "source": [ - "and resampling...\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "63", - "metadata": {}, - "outputs": [], - "source": [ - "da.resample(time=\"ME\")" - ] - }, - { - "cell_type": "markdown", - "id": "64", - "metadata": {}, - "source": [ - "```{note}\n", - "\n", - "Resampling is changing the frequency of our data to monthly (for two years), so we have 24 bins. GroupBy is taking the average across all data in the same month for two years, so we have 12 bins.\n", - "\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "65", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "### Constructing group labels\n", - "\n", - "If the automatic group detection doesn't work for your problem then these functions are useful for constructing specific \"group labels\" in many cases\n", - "\n", - "1. [numpy.digitize](https://numpy.org/doc/stable/reference/generated/numpy.digitize.html)\n", - " for binning\n", - "1. [numpy.searchsorted](https://numpy.org/doc/stable/reference/generated/numpy.searchsorted.html)\n", - " supports many other data types\n", - "1. [pandas.factorize](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.factorize.html)\n", - " supports characters, strings etc.\n", - "1. 
[pandas.cut](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.cut.html)\n", - " for binning\n", - "1. [\"Datetime components\"](https://docs.xarray.dev/en/stable/user-guide/time-series.html#datetime-components) of Xarray DataArrays\n", - "1. [DataArray.isin](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.isin.html)\n", - "1. [scipy.ndimage.label](https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.label.html)\n", - "1. Do you know of any more? (Send in a pull request to update this list!)\n", - "\n", - "\n", - "\n", - "```{tip}\n", - "Xarray uses [`pandas.factorize`](https://pandas.pydata.org/docs/reference/api/pandas.factorize.html) for `groupby` and [`pandas.cut`](https://pandas.pydata.org/docs/reference/api/pandas.cut.html) for `groupby_bins`.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "66", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "#### [\"Datetime components\"](https://docs.xarray.dev/en/stable/user-guide/time-series.html#datetime-components) for creating groups\n", - "\n", - "See a full list\n", - "[here](https://docs.xarray.dev/en/stable/generated/xarray.core.accessor_dt.DatetimeAccessor.html?highlight=DatetimeAccessor)\n", - "\n", - "These can be accessed in a few different ways as illustrated below.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "67", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da.time" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "68", - "metadata": {}, - "outputs": [], - "source": [ - "da.time.dt.day" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "69", - "metadata": {}, - "outputs": [], - "source": [ - "da[\"time.day\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "70", - "metadata": {}, - "outputs": [], - "source": [ - "da.time.dt.season" - ] - }, - { - "cell_type": "markdown", - "id": "71", - "metadata": {}, - "source": [ - "#### Construct and use custom labels\n", - "\n", - "##### Custom seasons with `numpy.isin`.\n", - "\n", - "We want to group over four seasons: `DJF`, `MAM`, `JJAS`, `ON` - this makes physical sense in the Indian Ocean basin.\n", - "\n", - "Start by extracting months.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "72", - "metadata": {}, - "outputs": [], - "source": [ - "month = da.time.dt.month.data\n", - "month" - ] - }, - { - "cell_type": "markdown", - "id": "73", - "metadata": {}, - "source": [ - "Create a new empty array\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "74", - "metadata": {}, - "outputs": [], - "source": [ - "myseason = np.full(month.shape, \" \")\n", - "myseason" - ] - }, - { - "cell_type": "markdown", - "id": "75", - "metadata": {}, - "source": [ - "Use `isin` to assign custom seasons,\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "76", - "metadata": {}, - "outputs": [], - "source": [ - "myseason[np.isin(month, [12, 1, 2])] = \"DJF\"\n", - "myseason[np.isin(month, [3, 4, 5])] = \"MAM\"\n", - "myseason[np.isin(month, [6, 7, 8, 9])] = \"JJAS\"\n", - "myseason[np.isin(month, [10, 11])] = \"ON\"" - ] - }, - { - "cell_type": "markdown", - "id": "77", - "metadata": {}, - "source": [ - "Turn our new seasonal group array into a DataArray." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "78", - "metadata": {}, - "outputs": [], - "source": [ - "myseason_da = da.time.copy(data=myseason)\n", - "myseason_da" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "79", - "metadata": {}, - "outputs": [], - "source": [ - "(\n", - " # Calculate climatology\n", - " da.groupby(myseason_da)\n", - " .mean()\n", - " # reindex to get seasons in logical order (not alphabetical order)\n", - " .reindex(time=[\"DJF\", \"MAM\", \"JJAS\", \"ON\"])\n", - " .plot(col=\"time\")\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "80", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "##### `floor`, `ceil` and `round` on time\n", - "\n", - "Additional functionality in the [datetime accessor](https://docs.xarray.dev/en/stable/generated/xarray.core.accessor_dt.DatetimeAccessor.html) allows us to effectively \"resample\" our time data to remove roundoff errors in timestamps.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "81", - "metadata": {}, - "outputs": [], - "source": [ - "da.time" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "82", - "metadata": {}, - "outputs": [], - "source": [ - "# remove roundoff error in timestamps\n", - "# floor to daily frequency\n", - "da.time.dt.floor(\"D\")" - ] - }, - { - "cell_type": "markdown", - "id": "83", - "metadata": { - "slideshow": { - "slide_type": "subslide" - }, - "tags": [] - }, - "source": [ - "##### `strftime` is another powerful option\n", - "\n", - "So useful and so unintuitive that it has its own website: https://strftime.org/\n", - "\n", - "This is useful to avoid merging \"Feb-29\" and \"Mar-01\" for a daily climatology\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "84", - "metadata": {}, - "outputs": [], - "source": [ - "da.time.dt.strftime(\"%b-%d\")" - ] - }, - { - "cell_type": "markdown", - "id": "85", - "metadata": { - "tags": [] - }, - "source": [ - "### Custom reductions with `GroupBy.reduce`\n", - "\n", - "Analogous to `rolling`, `reduce` and `map` apply custom reductions to `groupby_bins` and `resample`.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "86", - "metadata": {}, - "outputs": [], - "source": [ - "(da.groupby(\"time.month\").reduce(np.ptp).plot(col=\"month\", col_wrap=4))" - ] - }, - { - "cell_type": "markdown", - "id": "87", - "metadata": {}, - "source": [ - "```{tip}\n", - " `map` is for functions that expect and return xarray objects (see also [`Dataset.map`](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.map.html)). `reduce` is for functions that expect and return plain arrays (like Numpy or SciPy functions).\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "id": "88", - "metadata": { - "tags": [] - }, - "source": [ - "### Viewing the GroupBy operation on your DataArray or DataSet\n", - "\n", - "GroupBy does *not* provide a `construct` method, because all the groups need not be the same \"length\" (e.g. months can have 28, 29, 30, or 31 days).\n", - "\n", - "#### Instead looping over groupby objects is possible\n", - "\n", - "Because `groupby` returns an iterator that loops over each group, it is easy to loop over groupby objects. 
You can also iterate over `rolling` and `coarsen` objects, however this approach is usually quite slow.\n", - "\n", - "Maybe you want to plot data in each group separately:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "89", - "metadata": {}, - "outputs": [], - "source": [ - "for label, group in da.groupby(\"time.month\"):\n", - " print(label)" - ] - }, - { - "cell_type": "markdown", - "id": "90", - "metadata": {}, - "source": [ - "`group` is a DataArray containing data for all December days (because the last printed `label` value is `12`, so the last `group` value is for December)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "91", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "group" - ] - }, - { - "cell_type": "markdown", - "id": "92", - "metadata": {}, - "source": [ - "Maybe you want a histogram of December temperatures?\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "93", - "metadata": {}, - "outputs": [], - "source": [ - "group.plot.hist()" - ] - }, - { - "cell_type": "markdown", - "id": "94", - "metadata": {}, - "source": [ - "Remember, this example is just to show how you could operate on each group object in a groupby operation. If we wanted to just explore the December (or March) data, we should just filter for it directly:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "95", - "metadata": {}, - "outputs": [], - "source": [ - "da[da[\"time.month\"] == 12].plot.hist()" - ] - }, - { - "cell_type": "markdown", - "id": "96", - "metadata": { - "tags": [] - }, - "source": [ - "#### In most cases, avoid a for loop using `map`\n", - "\n", - "`map` enables us to apply functions that expect xarray Datasets or DataArrays. This makes it easy to perform calculations on the grouped data, add the results from each group back to the original object, and avoid having to manually combine results (using concat).\n", - "\n", - "\n", - "```{tip}\n", - "The implementation of `map` *is* a `for` loop. We like `map` because it's cleaner.\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "97", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def iqr(gb_da, dim):\n", - " \"\"\"Calculates interquartile range\"\"\"\n", - " return (gb_da.quantile(q=0.75, dim=dim) - gb_da.quantile(q=0.25, dim=dim)).rename(\"iqr\")\n", - "\n", - "\n", - "da.groupby(\"time.month\").map(iqr, dim=\"time\")" - ] - }, - { - "cell_type": "markdown", - "id": "98", - "metadata": {}, - "source": [ - "***" - ] - }, - { - "cell_type": "markdown", - "id": "99", - "metadata": {}, - "source": [ - "## Summary\n", - "\n", - "Xarray provides methods for high-level analysis patterns:\n", - "\n", - "1. `rolling` :\n", - " [Operate on rolling (fixed length, overlapping) windows of your data e.g. running mean.](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations)\n", - "1. `coarsen` :\n", - " [Operate on blocks (fixed length) of your data (downsample).](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays)\n", - "1. `groupby` :\n", - " [Parse data into groups (using an exact value) and operate on each one (reduce data).](https://docs.xarray.dev/en/stable/groupby.html)\n", - "1. `groupby_bins`: [GroupBy after discretizing a numeric (non-exact, e.g. float) variable.](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.groupby_bins.html)\n", - "1. 
`resample` :\n", - " [Groupby specialized for time axes. Either downsample or upsample your data.](https://docs.xarray.dev/en/stable/user-guide/time-series.html#resampling-and-grouped-operations)\n", - "1. `weighted`: [Weight your data before reducing.](https://docs.xarray.dev/en/stable/user-guide/computation.html#weighted-array-reductions)\n", - "\n", - "Xarray also provides a consistent interface to make using those patterns easy:\n", - "\n", - "1. Iterate over the operators (`rolling`, `coarsen`, `groupby`, `groupby_bins`, `resample`).\n", - "1. Apply functions that accept numpy-like arrays with `reduce`.\n", - "1. Reshape to a new xarray object with `.construct` (`rolling`, `coarsen` only).\n", - "1. Apply functions that accept xarray objects with `map` (`groupby`, `groupby_bins`, `resample` only).\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/intermediate/data_cleaning/05.1_intro.md b/intermediate/data_cleaning/05.1_intro.md deleted file mode 100644 index c3ccfd43..00000000 --- a/intermediate/data_cleaning/05.1_intro.md +++ /dev/null @@ -1,83 +0,0 @@ -# Data Tidying - -Array data that are represented by Xarray objects are often multivariate, multi-dimensional, and very complex. Part of the beauty of Xarray is that it is adaptable and scalable to represent a large number of data structures. However, this can also introduce difficulty (especially for learning users) in arriving at a workable structure that will best suit one's analytical needs. - -```{seealso} -Look for examples [here](05.2_examples.md) -``` - -This project is motivated by community sentiment and experiences that often, the hardest part of learning and teaching Xarray is teaching users how best to use Xarray conceptually. We hope to leverage the experiences of Xarray and geospatial data users to arrive at a unifying definition of 'tidy' data in this context and best practices for 'tidying' geospatial raster data represented by Xarray objects. - -This page discusses common data ‘tidying’ steps and presents principles to keep in mind when organizing data in Xarray. We also point out helpful extensions to simplify and automate this process for specific dataset types like satellite imagery. - -A great first step is familiarizing yourself with the [terminology](https://docs.xarray.dev/en/stable/user-guide/terminology.html) used in the Xarray ecosystem. - -## A brief primer on tidy data - -Tidy data was developed by Hadley Wickham for tabular datasets in the R programming language. Many resources comprehensively explain this concept and the ecosystem of tools built upon it. Below is a very brief explanation: - -**Data tidying** is the process of structuring datasets to facilitate analysis. Wickham writes: "...tidy datasets are all alike, but every messy dataset is messy in its own way. Tidy datasets provide a standardized way to link the structure of a dataset (its physical layout) with its semantics (its meaning)" (Wickham, 2014). - -### Tidy data principles for tabular datasets - -The concept of [tidy data](https://vita.had.co.nz/papers/tidy-data.pdf) was developed by Hadley Wickham in the R programming language, and is a set of principles to guide facilitating tabular data for analysis. 
- -{attribution="Wickham, 2014"} - -> "Tidy datasets are all alike, but every messy dataset is messy in its own way." - -Wickham defines three core principles of tidy data for tabular principles. They are: - -1. Each variable forms an observation -2. Each observation forms a row -3. Each type of observational unit forms a table - -## Imagining a 'tidy data' framework for gridded datasets - -### Common use-case: Manipulating individual observations to an x-y-time datacube - -Data downloaded or accessed from DAACs and other providers is often (for good reason) separated into temporal observations or spatial subsets. This minimizes the services that must be provided for different datasets and allows the user to access just the material that they need. However, most workflows will involve some sort of spatial and/or temporal investigation of an observable, which will usually require the analyst to arrange individual files into spatial mosaics and/or temporal cubes. In addition to being a source of duplicated effort and work, these steps also introduce decision-points that can be stumbling blocks for newer users. We hope a tidy framework for xarray will streamline the process of preparing data for analysis by providing specific expectations of what 'tidied' datasets look like as well as common patterns and tools to use to arrive at a tidy state. - -## Tidy data principles for Xarray data structures - -These are guidelines to keep in mind while you are organizing your data. For detailed definitions of the terms mentioned below (and more), check out Xarray's [Terminology page](https://docs.xarray.dev/en/stable/user-guide/terminology.html). - -**1. Dimensions** - -- Minimize the number of dimensional coordinates - -**2. Coordinates** - -- Non-dimensional coordinates can be numerous. Each should exist along one or multiple dimensions - -**3. Data Variables** - -- Data variables should be observables rather than contextual. Each should exist along one or multiple dimensions. - -**4. Contextual information (metadata)** - -- Metadata should only be stored as an attribute if it is static along the dimensions to which it is applied. -- If metadata is dynamic, it should be stored as a coordinate variable. -- Metadata `attrs` should be added such that dataset is self-describing (following CF-conventions) - -**5. Variable, attribute naming** - -- **Wherever possible, use cf-conventions for naming** -- Variable names should be descriptive -- Variable names should not contain information that belongs in a dimension or coordinate (ie. information stored in a variable name should be reduced to only the observable the variable describes. - -**6. Make us of & work within the framework of other tools** - -- Specification systems such as [CF](https://cfconventions.org/) and [STAC](https://stacspec.org/en), and related tools such as [Open Data Cube](https://www.opendatacube.org/), [PySTAC](https://pystac.readthedocs.io/en/stable/), [cf_xarray](https://cf-xarray.readthedocs.io/en/latest/),[stackstac](https://stackstac.readthedocs.io/en/latest/) and more make tidying possible and smoother, especially with large, cloud-optimized datasets. 
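-
-For example, principles 4 and 5 above might look like the following in practice (a minimal, hypothetical sketch; the dataset, variable names, and attribute values are made up purely for illustration):
-
-```python
-import numpy as np
-import xarray as xr
-
-ds = xr.Dataset(
-    # a descriptive variable name, with no year/sensor/etc. embedded in it
-    {"velocity": (("time", "y", "x"), np.zeros((2, 3, 4)))},
-    # metadata that changes along a dimension is stored as a coordinate, not an attribute
-    coords={"time": [2013, 2014], "sensor": ("time", ["sensor_a", "sensor_b"])},
-)
-
-# static metadata goes in attrs so the dataset is self-describing (CF-style names where they exist)
-ds["velocity"].attrs = {"long_name": "ice surface velocity magnitude", "units": "m year-1"}
-```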
-- - -## Other guidelines and rules of thumb - -- Avoid storing important data in filenames -- Non-descriptive variable names can create + perpetuate confusion -- Missing coordinate information makes datasets harder to use -- Elements of a dataset's 'shape'/structure can sometimes be embedded in variable names; this will complicate subsequent analysis - -## Contributing - -We would love your help and engagement on this project! If you have a dataset that you've worked with that felt particularly messy, or one with steps you find yourself thinking back to as you work with new datasets, consider submitting it as an example! If you have input on tidy principles, please feel free to raise an issue. diff --git a/intermediate/data_cleaning/05.2_examples.md b/intermediate/data_cleaning/05.2_examples.md deleted file mode 100644 index 3abdb943..00000000 --- a/intermediate/data_cleaning/05.2_examples.md +++ /dev/null @@ -1,16 +0,0 @@ -# Examples - -This page contains examples of 'tidying' datasets. If you have an example you'd like to submit, or an example of an anti-pattern, please raise an issue ! - -## 1. Aquarius - -This is an example of tidying a dataset comprised of locally downloaded files. Aquarius is a sea surface salinity dataset produced by NASA and accessed as network Common Data Form (NetCDF) files. -You can find this example [here](https://gist.github.com/dcherian/66269bc2b36c2bc427897590d08472d7). This example focuses on data access steps and organizing data into a workable data cube. - -## 2. ASE Ice Velocity - -Already integrated into the Xarray tutorial, this examples uses an ice velocity dataset derived from synthetic aperture radar imagery. You can find it [here](05.3_ice_velocity.ipynb). This example focuses on data access steps and organizing data into a workable data cube. - -## 3. Harmonized Landsat-Sentinel - -This [example](https://nbviewer.org/gist/scottyhq/efd583d66999ce8f6e8bcefa81545b8d) features cloud-optimized data that does not need to be downloaded locally. Here, package such as [`odc-stac`](https://github.com/opendatacube/odc-stac) are used to accomplish much of the initial tidying (assembling an x,y,time cube). However, this example shows that there is frequently additional formatting required to make a dataset analysis ready. diff --git a/intermediate/data_cleaning/05.3_ice_velocity.ipynb b/intermediate/data_cleaning/05.3_ice_velocity.ipynb deleted file mode 100644 index e54ff918..00000000 --- a/intermediate/data_cleaning/05.3_ice_velocity.ipynb +++ /dev/null @@ -1,490 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Re-organize InSAR ice velocity data\n", - "\n", - "This is an example of cleaning data accessed in netcdf format and preparing it for analysis. \n", - "\n", - "The dataset we will use contains InSAR-derived ice velocity for 10 years over the Amundsen Sea Embayment in Antarctica. The data is downloaded from: https://nsidc.org/data/NSIDC-0545/versions/1 but this example uses only a subset of the full dataset.\n", - "\n", - "Downloaded data is `.hdr` and `.dat` files for each year, and a `.nc` for all of the years together. \n", - "\n", - "The `.nc` object is a dataset with dimensions x,y and data vars for each year. So for each year there are `vx`,`vy`,`err` vars. 
We'd like to re-organize this so that there are 3 variables (`vx`, `vy` and `err`) that exist along a time dimension.\n", - "\n", - "```{note}\n", - "These steps were turned into a accessor/extension example, which can be viewed [here](/advanced/accessors/01_accessor_examples).\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "import pandas as pd\n", - "import os\n", - "import numpy as np" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset('ASE_ice_velocity.nc')\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "Take a look at the dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "Check the projection:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "ds.attrs['Projection']" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "Currently the dimensions on the object are `ny` and `nx` but the object has no coordinates. If we look in the `data_vars` we can see there are two variables named `xaxis` and `yaxis`. It seems like these are the coordinate values that should exist along the `nx` and `ny` dimensions, respectively. Let's confirm that they match the dimensions `nx` and `ny` in length and then assign them as coordinates:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "print(ds.dims['ny'])\n", - "print(ds.dims['nx'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "print(len(ds.yaxis.data))\n", - "print(len(ds.xaxis.data))" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "We'll assign the `xaxis` and `yaxis` vars to be coordinates, and drop them from the `data_vars`. We'll first use `swap_dims()` to swap `ny` for `yaxis` and `nx` for `xaxis`. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "ds = ds.swap_dims({'ny': 'yaxis', 'nx': 'xaxis'})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "Rename `yaxis` and `xaxis` and drop the `nx` and `ny` coordinates: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "ds = ds.rename({'xaxis': 'x', 'yaxis': 'y'})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "Now we have x and y coordinates and 30 data variables. However, the `data_vars` are really only 3 unique variables that exist along a time dimension (with a length of 10). \n", - "We want to add a time dimension to the dataset and concatenate the data variables in each of the three groups together." 
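- "\n",
- "\n",
- "In outline, the re-organization we are about to build up step by step could be sketched like this (`stack_years` is just an illustrative helper name, not something defined by this dataset or by Xarray; each step is unpacked one at a time in the cells below):\n",
- "\n",
- "```python\n",
- "def stack_years(prefix):\n",
- "    # collect every variable whose name starts with `prefix` (e.g. 'vx1996', 'vx2000', ...),\n",
- "    # give each one a time dimension labelled with the year parsed from its name,\n",
- "    # and concatenate them along that new dimension\n",
- "    per_year = [ds[v].expand_dims(time=[int(str(v)[-4:])]).rename(prefix) for v in ds if str(v).startswith(prefix)]\n",
- "    return xr.concat(per_year, dim=\"time\")\n",
- "\n",
- "\n",
- "tidy = xr.merge([stack_years(\"vx\"), stack_years(\"vy\"), stack_years(\"err\")])\n",
- "```\n",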
- ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "Start by making a few objects that we'll use while we're re-organizing. These are: a list of all the variables in the dataset (`var_ls`), a list of the years covered by the dataset that are currently stored in variable names (`yr_ls`) and then finally lists for each variable (`vx_ls`,`vy_ls` and `err_ls`). These are all of the variables in the original dataset that correspond with that main variable group (`vx`, `vy` or `err`)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "var_ls = list(ds)\n", - "\n", - "vx_ls = [var for var in var_ls if 'vx' in var]\n", - "vy_ls = [var for var in var_ls if 'vy' in var]\n", - "err_ls = [var for var in var_ls if 'err' in var]\n", - "\n", - "yr_ls = [int(var[-4:]) for var in vx_ls]" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "Now we are going to group the `dataset.data_vars` into `vx`,`vy`, and `err` and prepare to concatenate them along the time dimension. We will perform the same operations for all three variables but we will demonstrate the process for the first variable in several steps, before showing the operation wrapped into one command for the other two variables. There is a great explanation of this kind of step [here](https://towardsdatascience.com/pythonic-way-to-perform-statistics-across-multiple-variables-with-xarray-d0221c78e34a). At the end of this step, for `vx`, `vy` and `err` we will have a list of `xr.DataArrays` that all have a time dimension on the 0-axis. \n", - "\n", - "In the cell below, we make a list of the `xr.DataArrays` in the original `xr.Dataset` that correspond to that variable. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "da_vx_ls = [ds[var] for var in vx_ls]" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "You can see that `da_vx_ls` is a `list` object with a length of 10, and each element of the list is a `xr.DataArray` corresponding to `vx` vars in the original `xr.Dataset`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "print('Object type: ', type(da_vx_ls))\n", - "print('Object length: ', len(da_vx_ls))\n", - "da_vx_ls[0]" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "next, we will add a time dimension to every element of `da_vx_ls`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "da_vx_ls = [da_vx_ls[var].expand_dims('time') for var in range(len(da_vx_ls))]" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "Now you can see that each list element is an `xr.DataArray` as before, but that there is now a time dimension." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "da_vx_ls[0]" - ] - }, - { - "cell_type": "markdown", - "id": "27", - "metadata": {}, - "source": [ - "Assign time as a coordinate to each `xr.DataArray` in the list: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "da_vx_ls = [da_vx_ls[var].assign_coords(time=[yr_ls[var]]) for var in range(len(da_vx_ls))]\n", - "da_vx_ls[0]" - ] - }, - { - "cell_type": "markdown", - "id": "29", - "metadata": {}, - "source": [ - "Time is now a coordinate as well as a dimension and the coordinate value corresponds to the element-order of the list, ie. the first (0-place) element of `da_vx_ls_test` is the `xr.DataArray` containing the `vx1996` variable, and the `time` coord is 0. In the second (1-place) element, the `xr.DataArray` is called `vx2000` and the `time` coord is 1. \n", - "\n", - "Finally, we will rename the `xr.DataArrays` to reflect just the variable name, rather than the year, because that is now referenced in the time coordinate. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "da_vx_ls = [da_vx_ls[var].rename('vx') for var in range(len(da_vx_ls))]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [ - "da_vx_ls[2]" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, - "source": [ - "Now we have a list of `xr.DataArrays` for the `vx` data variable where each `xr.DataArray` has a time dimension and coordinates along the time dimension. This list is ready to be concatenated along the time dimension. \n", - "\n", - "First, we will perform the same steps for the other two data variables (`vy` and `err`) before concatenating all three along the time dimension and merging into one `xr.Dataset`. For `vy` and `err`, we will combine the steps followed for `vx` into one operation. Note one other difference between the workflow for `vx` and the workflow for `vy` and `err`: rather than assigning coordinate values using the `assign_coords()` function, we do this within the `expand_dims()` function, where a `time` dimension is added as well as coordinate values for the dimension (`[int(var[-4:])]`)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "da_vy_ls = [ds[var].expand_dims(time=[int(var[-4:])]).rename('vy') for var in vy_ls]\n", - "\n", - "da_err_ls = [ds[var].expand_dims(time=[int(var[-4:])]).rename('err') for var in err_ls]" - ] - }, - { - "cell_type": "markdown", - "id": "34", - "metadata": {}, - "source": [ - "Once we have these lists, we will concatenate them together to a single `xr.DataArray` with `x`,`y` and `time` dimensions. In the above step, when we create the time dimension we assign a stand-in for the time coordinate. In the cell below, we'll use the `yr_ls` object that we created that is a list whose elements are time-aware objects corresponding to the time coordinates (originally in the variable names). The final line in the cell below merges the three `xr.DataArray`s on the common `time` dimension that they now share, so we have a `xr.Dataset` with `x`,`y` and `time` dimensions and `vx`, `vy` and `err` variables." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [ - "vx_concat = xr.concat(da_vx_ls, dim='time')\n", - "vy_concat = xr.concat(da_vy_ls, dim='time')\n", - "err_concat = xr.concat(da_err_ls, dim='time')\n", - "\n", - "ds_merge = xr.merge([vx_concat, vy_concat, err_concat])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36", - "metadata": {}, - "outputs": [], - "source": [ - "ds_merge" - ] - }, - { - "cell_type": "markdown", - "id": "37", - "metadata": {}, - "source": [ - "We'll add a variable that is magnitude of velocity as well" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38", - "metadata": {}, - "outputs": [], - "source": [ - "ds_merge['vv'] = np.sqrt((ds_merge.vx**2) + (ds_merge.vy**2))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "39", - "metadata": {}, - "outputs": [], - "source": [ - "ds_merge.vv.isel(time=0).plot(vmax=1000)" - ] - }, - { - "cell_type": "markdown", - "id": "40", - "metadata": {}, - "source": [ - "and add the `attrs` of the original object to our new object, `ds_full`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "41", - "metadata": {}, - "outputs": [], - "source": [ - "ds_merge.attrs = ds.attrs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "42", - "metadata": {}, - "outputs": [], - "source": [ - "ds_merge" - ] - }, - { - "cell_type": "markdown", - "id": "43", - "metadata": {}, - "source": [ - "Checking against original version to make sure it's the same:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "44", - "metadata": {}, - "outputs": [], - "source": [ - "np.sqrt((ds.vx1996**2) + (ds.vy1996**2)).plot(vmax=1000)" - ] - }, - { - "cell_type": "markdown", - "id": "45", - "metadata": {}, - "source": [ - "We can also use `xr.DataArray.equals` function to test if two `xr.DataArrays` are identical to one another. More information [here](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.equals.html). " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46", - "metadata": {}, - "outputs": [], - "source": [ - "ds_merge['vx'].sel(time=1996, drop=True).equals(ds.vx1996)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/intermediate/data_cleaning/05.4_contributing.md b/intermediate/data_cleaning/05.4_contributing.md deleted file mode 100644 index aa47b7d9..00000000 --- a/intermediate/data_cleaning/05.4_contributing.md +++ /dev/null @@ -1,5 +0,0 @@ -# Contributing - -This project is an evolving community effort. **We want to hear from you!**. Many workflows involve some version of the examples discussed here. The solutions you've developed in your work could help future users and help the community move toward more established norms around tidy data. Please consider submitting any examples you may have. You can create an issue [here](https://github.com/e-marshall/tidy-xarray/issues/new?assignees=&labels=&projects=&template=data-tidying-example-template.md&title=).If you have any questions or topics you'd like to discuss, please don't hesitate to create an issue on github. 
- -_note: issue template has some errors currently, need to fix_ diff --git a/intermediate/data_cleaning/05.5_scipy_talk.md b/intermediate/data_cleaning/05.5_scipy_talk.md deleted file mode 100644 index d1cfb076..00000000 --- a/intermediate/data_cleaning/05.5_scipy_talk.md +++ /dev/null @@ -1,13 +0,0 @@ -# Presentations - -## SciPy 2023 - -This project was initially presented at the 2023 SciPy conference in Austin, TX. You can check out the slides and a recording of the presentation below. - -### Slides - -The presentation slides are available through the [2023 SciPy Conference Proceedings](https://conference.scipy.org/proceedings/scipy2023/slides.html) and can be downloaded [here](https://zenodo.org/records/8221167). - -### Recording - -A recording of the presentation is available [here](https://www.youtube.com/watch?v=KZlG1im088s). diff --git a/intermediate/data_cleaning/05_data_cleaning.md b/intermediate/data_cleaning/05_data_cleaning.md deleted file mode 100644 index 154064a1..00000000 --- a/intermediate/data_cleaning/05_data_cleaning.md +++ /dev/null @@ -1,5 +0,0 @@ -# Data Tidying - -```{tableofcontents} - -``` diff --git a/intermediate/hvplot.ipynb b/intermediate/hvplot.ipynb deleted file mode 100644 index fa458c7c..00000000 --- a/intermediate/hvplot.ipynb +++ /dev/null @@ -1,175 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Interactive plots using hvplot\n", - "\n", - "Xarray's builtin plotting functionality wraps matplotlib.\n", - "\n", - "The `holoviews` [ecosystem](https://hvplot.holoviz.org/) provides the `hvplot` package to allow easy\n", - "visualization of xarray (and other) objects. These\n", - "plots build on [Bokeh](https://bokeh.org/).\n", - "\n", - "`hvplot` makes uses of xarray's [accessor interface](https://docs.xarray.dev/en/stable/internals/extending-xarray.html). This means that all xarray\n", - "objects gain a `.hvplot` attribute that lets you access `.hvplot` functionality\n", - "as easily as you would use `.plot`. All you need to do is `import hvplot.xarray`\n", - "\n", - "For more, see hvplot's [documentation](https://hvplot.holoviz.org/user_guide/Gridded_Data.html)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import cartopy.crs as ccrs\n", - "import hvplot.xarray\n", - "import xarray as xr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature.nc\").rename({\"air\": \"Tair\"})" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Basics\n", - "\n", - "`hvplot` makes the same default choices as `DataArray.plot`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.hvplot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# 2D array yields a quadmesh plot\n", - "ds.Tair.isel(time=1).hvplot(cmap=\"fire\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# 1D array yields a line plot\n", - "ds.Tair.isel(time=1, lon=1).hvplot()" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "## Interactivity\n", - "\n", - "But `hvplot` shines when interactivity is used. 
Here we can give it _all_ the\n", - "data and ask it to create a nice slider to control the time slice using the\n", - "`groupby` kwarg.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.hvplot(\n", - " groupby=\"time\",\n", - " clim=(250, 295), # adds a widget for time # sets colorbar limits\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "## Animations\n", - "\n", - "are easy.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# set constant colorbar limits\n", - "ds.Tair.hvplot(\n", - " groupby=\"time\", # adds a widget for time\n", - " clim=(250, 295), # sets colormap limits\n", - " widget_type=\"scrubber\",\n", - " widget_location=\"bottom\",\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Geography\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "ds.Tair.isel(time=1).hvplot(\n", - " projection=ccrs.Orthographic(-90, 30),\n", - " coastline=True,\n", - ")" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/intermediate/indexing/advanced-indexing.ipynb b/intermediate/indexing/advanced-indexing.ipynb deleted file mode 100644 index a5538151..00000000 --- a/intermediate/indexing/advanced-indexing.ipynb +++ /dev/null @@ -1,432 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Advanced Indexing\n", - "\n", - "## Learning Objectives\n", - "\n", - "* Orthogonal vs. Pointwise (Vectorized) Indexing.\n", - "* Pointwise indexing in Xarray to extract data at a collection of points.\n", - "* Understand the difference between NumPy and Xarray indexing behavior." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "In the previous notebooks, we learned basic forms of indexing with Xarray, including positional and label-based indexing, datetime indexing, and nearest neighbor lookups. 
We also learned that indexing an Xarray DataArray directly works (mostly) like it does for NumPy arrays; however, Xarray indexing behavior deviates from NumPy when using multiple arrays for indexing, like `arr[[0, 1], [0, 1]]`.\n", - "\n", - "To better understand this difference, let's take a look at an example of 2D 5x5 array:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "# Create a 5x5 array with values from 1 to 25\n", - "np_array = np.arange(1, 26).reshape(5, 5)\n", - "np_array" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now create a Xarray DataArray from this NumPy array: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "\n", - "da = xr.DataArray(np_array, dims=[\"x\", \"y\"])\n", - "da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, let's see how the indexing behavior is different between NumPy array and Xarray DataArray when indexing with multiple arrays:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "np_array[[0, 2, 4], [0, 2, 4]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da[[0, 2, 4], [0, 2, 4]]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The image below summarizes the difference between vectorized and orthogonal indexing for a 2D 5x5 NumPy array and Xarray DataArray:\n", - "\n", - "\n", - "\n", - "![Orthogonal vs. Vectorized Indexing](../../images/orthogonal_vs_vectorized.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**Pointwise** or **Vectorized indexing**, shown on the left, selects specific elements at given coordinates, resulting in an array of those individual elements. In the example shown, the indices `[0, 2, 4]`, `[0, 2, 4]` select the elements at positions `(0, 0)`, `(2, 2)`, and `(4, 4)`, resulting in the values `[1, 13, 25]`. This is the default behavior of NumPy arrays.\n", - " \n", - "In contrast, **orthogonal indexing** uses the same indices to select entire rows and columns, forming a cross-product of the specified indices. This method results in sub-arrays that include all combinations of the selected rows and columns. The example demonstrates this by selecting rows 0, 2, and 4 and columns 0, 2, and 4, resulting in a subarray containing `[[1, 3, 5], [11, 13, 15], [21, 23, 25]]`. This is Xarray DataArray's default behavior.\n", - " \n", - "The output of vectorized indexing is a `1D array`, while the output of orthogonal indexing is a `3x3` array. \n", - "\n", - "\n", - ":::{tip} To Summarize: \n", - "\n", - "- *Pointwise* or *vectorized* indexing is a more general form of indexing that allows for arbitrary combinations of indexing arrays. This method of indexing is analogous to the broadcasting rules in NumPy, where the dimensions of the indexers are aligned and the result is determined by the shape of the indexers. This is the default behavior in NumPy.\n", - "\n", - "- *Orthogonal* or *outer* indexing allows for indexing along each dimension independently, treating the indexers as one-dimensional arrays. 
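As a compact sketch of the difference summarized above, the snippet below repeats the 5x5 example and prints both results; `np.ix_` is used here only to emulate the orthogonal behavior in plain NumPy:

```python
import numpy as np

np_array = np.arange(1, 26).reshape(5, 5)

# Pointwise (vectorized) indexing: elements (0, 0), (2, 2), (4, 4) -> 1-D result
print(np_array[[0, 2, 4], [0, 2, 4]])  # [ 1 13 25]

# Orthogonal (outer) indexing: cross-product of rows 0, 2, 4 and columns 0, 2, 4 -> 3x3 result
print(np_array[np.ix_([0, 2, 4], [0, 2, 4])])
# [[ 1  3  5]
#  [11 13 15]
#  [21 23 25]]
```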
The principle of outer or orthogonal indexing is that the result mirrors the effect of independently indexing along each dimension with integer or boolean arrays, treating both the indexed and indexing arrays as one-dimensional. This method of indexing is analogous to vector indexing in programming languages like MATLAB, Fortran, and R, where each indexer component independently selects along its corresponding dimension. This is the default behavior in Xarray.\n", - "\n", - "\n", - ":::\n", - "\n", - ":::{note} Orthogonal indexing with NumPy\n", - ":class: dropdown\n", - "\n", - "While pointwise indexing is the default behavior in NumPy, you can achieve orthogonal indexing by using the [`np.ix_` function](https://numpy.org/doc/stable/reference/generated/numpy.ix_.html). This function constructs an open mesh from multiple arrays, allowing you to index along each dimension independently similar to Xarray indexing behavior. For example: \n", - "\n", - "```python\n", - "ixgrid = np.ix_([0, 2, 4], [0, 2, 4])\n", - "np_array[ixgrid]\n", - "```\n", - "\n", - ":::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Orthogonal Indexing in Xarray\n", - "\n", - "As explained earlier, when you use only integers, slices, or unlabeled arrays (arrays without dimension names, such as `np.ndarray` or `list`, but not `DataArray`) to index an `Xarray DataArray`, Xarray interprets these indexers orthogonally. This means it indexes along independent axes, rather than using NumPy's broadcasting rules to vectorize the indexers. \n", - "\n", - "In the example above we saw this behavior, but let's see this behavior in action with a real dataset. Here we’ll use `air temperature` data from the National Center for Environmental Prediction:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "\n", - "xr.set_options(display_expand_attrs=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)\n", - "%config InlineBackend.figure_format='retina'\n", - "\n", - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "da_air = ds.air\n", - "da_air" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "selected_da = da_air.isel(time=0, lat=[2, 4, 10, 13], lon=[1, 6, 7]) # -- orthogonal indexing\n", - "selected_da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "👆 Please note that the output shape in the example above is `4x3` because the latitude indexer selects 4 rows, and the longitude indexer selects 3 columns." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For more flexibility, you can supply `DataArray()` objects as indexers. Dimensions on resultant arrays are given by the ordered union of the indexers’ dimensions.\n", - "\n", - "For example, in the example below we do orthogonal indexing using `DataArray()` objects. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "target_lat = xr.DataArray([31, 41, 42, 42], dims=\"degrees_north\")\n", - "target_lon = xr.DataArray([200, 201, 202, 205], dims=\"degrees_east\")\n", - "\n", - "da_air.sel(lat=target_lat, lon=target_lon, method=\"nearest\") # -- orthogonal indexing" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the above example, you can see how the output shape is `time` x `lats` x `lons`. Please note that there are no shared dimensions between the indexers, so the output shape is the union of the dimensions of the indexers.\n", - "\n", - "```{attention}\n", - "Please note that slices or sequences/arrays without named-dimensions are treated as if they have the same dimension which is indexed along.\n", - "```\n", - "\n", - "For example:\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_air.sel(lat=[20, 30, 40], lon=target_lon, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "\n", - "But what if we'd like to find the nearest climate model grid cell to a collection of specified points (for example observation sites, or weather stations)?\n", - "\n", - "## Vectorized or Pointwise Indexing in Xarray\n", - "\n", - "Like NumPy and pandas, Xarray supports indexing many array elements at once in a *vectorized* manner. \n", - "\n", - "**Vectorized indexing** or **Pointwise Indexing** using `DataArrays()` can be used to extract information from the nearest grid cells of interest, for example, the nearest climate model grid cells to a collection of specified observation tower data latitudes and longitudes.\n", - "\n", - "```{hint}\n", - "To trigger vectorized indexing behavior, you will need to provide the selection dimensions with a new **shared** output dimension name. This means that the dimensions of both indexers must be the same, and the output will have the same dimension name as the indexers.\n", - "```\n", - "\n", - "Let's see how this works with an example:\n", - "\n", - "A researcher wants to find the nearest climate model grid cell to a collection of observation sites. They have the latitude and longitude of the observation sites as following:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "obs_lats = [31.81, 41.26, 22.59, 44.47, 28.57]\n", - "\n", - "obs_lons = [200.16, 201.57, 305.54, 210.56, 226.59]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If the researcher use the lists to index the DataArray, they will get the orthogonal indexing behavior, which is not what they want." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_air.sel(lat=obs_lats, lon=obs_lats, method=\"nearest\") # -- orthogonal indexing" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To trigger the pointwise indexing, they need to create DataArray objects with the same dimension name, and then use them to index the DataArray. 
\n", - "For example, the code below first create DataArray objects for the latitude and longitude of the observation sites using a shared dimension name `points`, and then use them to index the DataArray `air_temperature`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "## latitudes of weather stations with a dimension of \"points\"\n", - "lat_points = xr.DataArray(obs_lats, dims=\"points\")\n", - "lat_points" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "## longitudes of weather stations with a dimension of \"points\"\n", - "lon_points = xr.DataArray(obs_lons, dims=\"points\")\n", - "lon_points" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, retrieve data at the grid cells nearest to the target latitudes and longitudes (weather stations):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da_air.sel(lat=lat_points, lon=lon_points, method=\"nearest\") # -- pointwise indexing" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "👆 Please notice how the shape of our `DataArray` is `time` x `points`, extracting time series for each weather stations. \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "da_air.sel(lat=lat_points, lon=lon_points, method=\"nearest\").dims" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, let's plot the data for all stations." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_air.sel(lat=lat_points, lon=lon_points, method=\"nearest\").plot(x='time', hue='points');" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Exercises\n", - "\n", - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "In the simple 2D 5x5 Xarray data array above, select the sub-array containing (0,0),(2,2),(4,4):\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "```python\n", - "\n", - "indices = np.array([0, 2, 4])\n", - "\n", - "xs_da = xr.DataArray(indices, dims=\"points\")\n", - "ys_da = xr.DataArray(indices, dims=\"points\")\n", - "\n", - "subset_da = da.sel(x=xs_da, y=xs_da)\n", - "subset_da\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Additional Resources\n", - "\n", - "- [Xarray Docs - Indexing and Selecting Data](https://docs.xarray.dev/en/stable/indexing.html)\n", - "\n", - "\n", - ":::{seealso}\n", - "- [Introductions to Fancy Indexing](https://jakevdp.github.io/PythonDataScienceHandbook/02.07-fancy-indexing.html)\n", - "- [NumPy Docs - Advanced Indexing](https://numpy.org/doc/stable/user/basics.indexing.html#advanced-indexing)\n", - "\n", - ":::\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - 
"toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/intermediate/indexing/boolean-masking-indexing.ipynb b/intermediate/indexing/boolean-masking-indexing.ipynb deleted file mode 100644 index 67aeced7..00000000 --- a/intermediate/indexing/boolean-masking-indexing.ipynb +++ /dev/null @@ -1,496 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Boolean Indexing & Masking\n", - "\n", - "## Learning Objectives\n", - "\n", - "* The concept of boolean masks\n", - "* Dropping/Masking data using `where`\n", - "* Using `isin` for creating a boolean mask" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Overview\n", - "\n", - "*Boolean masking*, known as *boolean indexing*, is a functionality in Python that enables the filtering of values based on a specific condition.\n", - "\n", - "A boolean mask refers to a binary array or a boolean-valued (`True`/`False`) array that is used as a *filter* to select specific elements from another array. The boolean mask acts as a criterion or condition, where each element in the mask corresponds to an element in the target array. An element in the target array is selected when the corresponding `mask` value is `True`. \n", - "\n", - "Xarray provides different capabilities to allow filtering and boolean indexing. In this notebook, we will learn more about it.\n", - "\n", - "First, let's import the packages needed for this notebook: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import cartopy.crs as ccrs\n", - "import numpy as np\n", - "import xarray as xr\n", - "from matplotlib import pyplot as plt\n", - "import matplotlib as mpl\n", - "\n", - "xr.set_options(display_expand_attrs=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this tutorial, we’ll use the Regional Arctic System Mode (RASM) example dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"rasm\").isel(time=0)\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this dataset, the logical coordinates are `x` and `y`, while the physical coordinates are `xc` and `yc`, which represent the latitudes and longitude of the data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(ds.xc.attrs)\n", - "print(ds.yc.attrs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da = ds.Tair\n", - "da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Masking with `where()`\n", - "\n", - "Indexing methods on Xarray objects generally return a subset of the original data. However, it is sometimes useful to select an object with the same shape as the original data, but with some elements masked. \n", - "\n", - "By applying `.where()`, the original data's shape is maintained, with values masked based on a Boolean condition. 
Values that satisfy the condition (`True`) are returned unchanged, while values that do not meet the condition (`False`) are replaced with a predefined value.\n", - "\n", - "In the example below, we replace all `nan` values with `-9999`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Let's replace the missing values (nan) with some placeholder\n", - "ds.Tair.where(ds.Tair.notnull(), -9999)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As you can see, in the example above `.where()` preserved the **shape** of the original data by masking the values with a boolean condition. \n", - "\n", - "Most uses of `.where()` check whether or not specific data values are less than or greater than a constant value. \n", - "\n", - "The data values specified in the boolean condition of `.where()` can be any of the following:\n", - "\n", - "* a `DataArray`\n", - "* a `Dataset`\n", - "* a function\n", - "\n", - "In the following example, we make use of `.where()` to mask all temperature below 0°C.\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_masked = da.where(da >= 0)\n", - "\n", - "# -- making both plots for comparison:\n", - "fig, axes = plt.subplots(ncols=2, figsize=(15, 5))\n", - "\n", - "# -- for reference (without masking):\n", - "da.plot(ax=axes[0], vmin=-30, vmax=30, cmap=mpl.cm.RdBu_r)\n", - "\n", - "# -- masked DataArray\n", - "da_masked.plot(ax=axes[1], vmin=-30, vmax=30, cmap=mpl.cm.RdBu_r);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "By default Xarray set the masked values to `nan`. But as we saw in the first example, we can set it to other values too. \n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Using the syntax you’ve learned so far, mask all the points with latitudes above 60° N.\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "da_masked = da.where(da.yc >= 60)\n", - "da_masked[:, :].plot();\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# write your answer here!" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As mentioned above, by default `where` maintains the original size of the data. You can use the option `drop=True` to clip coordinate elements that are fully masked:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_masked = da.where(da.yc > 60, drop=True)\n", - "da_masked.plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Please note that in this dataset, the variables `xc` (longitude) and `yc` (latitude) are two-dimensional scalar fields.\n", - "\n", - "When we plotted the data variable `Tair`, by default we get the logical coordinates (i.e. `x` and `y`) as we show in the example above. \n", - "\n", - "In order to visualize the data on a conventional latitude-longitude grid, we can take advantage of Xarray’s ability to apply `cartopy` map projections." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "plt.figure(figsize=(14, 6))\n", - "ax = plt.axes(projection=ccrs.PlateCarree())\n", - "ax.set_global()\n", - "ds.Tair.plot.pcolormesh(ax=ax, transform=ccrs.PlateCarree(), x=\"xc\", y=\"yc\", add_colorbar=False)\n", - "ax.coastlines()\n", - "ax.set_ylim([20, 90]);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Using `where` with Multiple Conditions\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In Xarray's `.where()` function, boolean conditions can be combined using logical operators. The bitwise `and` operator (`&`) and the bitwise `or` operator (`|`) are relevant in this case. This allows for specifying multiple masking conditions within a single `.where()` statement.\n", - "\n", - "We can select data for one specific region using bound boxes. For example, here we want to access data over a region over Alaska :" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# -- define a region\n", - "min_lon = 190\n", - "min_lat = 55\n", - "max_lon = 230\n", - "max_lat = 85" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "First we have to create our boolean masks:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "mask_lon = (ds.xc >= min_lon) & (ds.xc <= max_lon)\n", - "mask_lat = (ds.yc >= min_lat) & (ds.yc <= max_lat)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, we can use the boolean masks for filtering data for that region: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_masked = da.where(mask_lon & mask_lat, drop=True)\n", - "\n", - "da_masked.plot();" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "plt.figure(figsize=(5, 5))\n", - "ax = plt.axes(projection=ccrs.PlateCarree())\n", - "ax.set_global()\n", - "da_masked.plot.pcolormesh(ax=ax, transform=ccrs.PlateCarree(), x=\"xc\", y=\"yc\", add_colorbar=False)\n", - "ax.coastlines()\n", - "ax.set_ylim([50, 80])\n", - "ax.set_xlim([-180, -120]);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Exercise\n", - "\n", - "If we load air temperature dataset from NCEP, we could use `sel` method for selecting a region:\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "If we load air temperature dataset from NCEP, we could use `sel` method for selecting a region:\n", - "\n", - "```python\n", - "ds = xr.tutorial.open_dataset(\"air_temperature\")\n", - "ds_region = ds.sel(lat=slice(75,50), lon=slice(250,300))\n", - "\n", - "ds_region.air.plot();\n", - "```\n", - "Can you use a similar method as above using `sel` to crop a region using the RASM dataset? Why?\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "This method will not work here as the dimensions are different from coordinates here. 
Specifically, the variables xc (longitude) and yc (latitude) are two-dimensional scalar fields, which differ from the logical coordinates represented by x and y.\n", - "\n", - "So the code below will not give the correct answer!\n", - "```python\n", - "cropped_ds = ds.sel(x=slice(min_lat,max_lat), y=slice(min_lon,max_lon))\n", - "cropped_ds.Tair.plot()\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Using `xr.where` with a Function\n", - "\n", - "We can use `xr.where` with a function as a condition too. For example, here we want to convert temperature to Kelvin and find if temperature is greater than 280 K:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Define a function to use as a condition\n", - "def is_greater_than_threshold(x, threshold=300):\n", - " # function to convert temp to K\n", - " # and compare with threshold\n", - " x = x + 273.15\n", - " return x > threshold\n", - "\n", - "\n", - "# Apply the condition using xarray.where()\n", - "masked_data = xr.where(is_greater_than_threshold(da, 280), da, 0)\n", - "\n", - "masked_data.plot()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Selecting Values with `isin`\n", - "\n", - "To check whether elements of an xarray object contain a single object, you can compare with the equality operator `==` (e.g., `arr == 3`). \n", - "\n", - "To check multiple values, we use `isin()`:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here is a simple example: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "x_da = xr.DataArray([1, 2, 3, 4, 5], dims=[\"x\"])\n", - "\n", - "# -- select points with values equal to 2 and 4:\n", - "x_da.isin([2, 4])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "`isin()` works particularly well with `where()` to support indexing by arrays that are not already labels of an array. \n", - "```\n", - "\n", - "For example, we have another `DataArray` that displays the status flags of the data-collecting device for our data. \n", - "\n", - "Here, flags with value 0 and -1 signifies the device was functioning correctly, while 0 indicates a malfunction, implying that the resulting data collected may not be accurate." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "flags = xr.DataArray(np.random.randint(-1, 5, da.shape), dims=da.dims, coords=da.coords)\n", - "flags" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, we want to only see the data for points where out measurement device is working correctly: " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_masked = da.where(flags.isin([1, 2, 3, 4, 5]), drop=True)\n", - "da_masked.plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{warning}\n", - "Please note that when done repeatedly, this type of indexing is significantly slower than using `sel()`. 
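As a rough sketch of the difference (using the NCEP air temperature tutorial dataset, where the latitudes of interest exist as exact coordinate labels), both lines below pull out the same latitudes, but the label-based `sel` avoids building and evaluating a full-size mask:

```python
import xarray as xr

air = xr.tutorial.load_dataset("air_temperature").air

# Boolean masking: construct a mask over the whole array, then drop fully-masked slices
subset_where = air.where(air.lat.isin([50.0, 55.0, 60.0]), drop=True)

# Label-based selection: typically much cheaper, especially when repeated
subset_sel = air.sel(lat=[50.0, 55.0, 60.0])
```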
\n", - "\n", - "Use `sel` instead of `where` as much as possible.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Additional Resources\n", - "\n", - "- [Xarray Docs - Indexing and Selecting Data](https://docs.xarray.dev/en/stable/indexing.html)\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/intermediate/indexing/indexing.md b/intermediate/indexing/indexing.md deleted file mode 100644 index 72dae2a4..00000000 --- a/intermediate/indexing/indexing.md +++ /dev/null @@ -1,5 +0,0 @@ -# Indexing - -```{tableofcontents} - -``` diff --git a/intermediate/remote_data/cmip6-cloud.ipynb b/intermediate/remote_data/cmip6-cloud.ipynb deleted file mode 100644 index e93eabf6..00000000 --- a/intermediate/remote_data/cmip6-cloud.ipynb +++ /dev/null @@ -1,290 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Zarr in Cloud Object Storage\n", - "\n", - "In this tutorial, we'll cover the following:\n", - "- Finding a cloud hosted Zarr archive of CMIP6 dataset(s)\n", - "- Remote data access to a single CMIP6 dataset (sea surface height)\n", - "- Calculate future predicted sea level change in 2100 compared to 2015" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import pandas as pd\n", - "import xarray as xr" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "## Finding cloud native data\n", - "\n", - "Cloud-native data means data that is structured for efficient querying across the network.\n", - "Typically, this means having metadata that describes the entire file in the header of the\n", - "file, or having a a separate pointer file (so that there is no need to download everything first).\n", - "\n", - "Quite commonly, you'll see cloud-native datasets stored on these\n", - "three object storage providers, though there are many other ones too.\n", - "\n", - "- [Amazon Simple Storage Service (S3)](https://aws.amazon.com/s3)\n", - "- [Azure Blob Storage](https://azure.microsoft.com/en-us/services/storage/blobs)\n", - "- [Google Cloud Storage](https://cloud.google.com/storage)" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "### Getting cloud hosted CMIP6 data\n", - "\n", - "The [Coupled Model Intercomparison Project Phase 6 (CMIP6)](https://en.wikipedia.org/wiki/CMIP6#CMIP_Phase_6)\n", - "dataset is a rich archive of modelling experiments carried out to predict the climate change impacts.\n", - "The datasets are stored using the [Zarr](https://zarr.dev) format, and we'll go over how to access it.\n", - "\n", - "Sources:\n", - "- https://esgf-node.llnl.gov/search/cmip6/\n", - "- CMIP6 data hosted on Google Cloud - https://console.cloud.google.com/marketplace/details/noaa-public/cmip6\n", - "- Pangeo/ESGF Cloud Data Access tutorial - https://pangeo-data.github.io/pangeo-cmip6-cloud/accessing_data.html" - 
] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "First, let's open a CSV containing the list of CMIP6 datasets available" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "df = pd.read_csv(\"https://cmip6.storage.googleapis.com/pangeo-cmip6.csv\")\n", - "print(f\"Number of rows: {len(df)}\")\n", - "df.head()" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "Over 5 million rows! Let's filter it down to the variable and experiment\n", - "we're interested in, e.g. sea surface height.\n", - "\n", - "For the `variable_id`, you can look it up given some keyword at\n", - "https://docs.google.com/spreadsheets/d/1UUtoz6Ofyjlpx5LdqhKcwHFz2SGoTQV2_yekHyMfL9Y\n", - "\n", - "For the `experiment_id`, download the spreadsheet from\n", - "https://github.com/ES-DOC/esdoc-docs/blob/master/cmip6/experiments/spreadsheet/experiments.xlsx,\n", - "go to the 'experiment' tab, and find the one you're interested in.\n", - "\n", - "Another good place to find the right model runs is https://esgf-node.llnl.gov/search/cmip6\n", - "(once you get your head around the acronyms and short names)." - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "Below, we'll filter to CMIP6 experiments matching:\n", - "- Sea Surface Height Above Geoid [m] (variable_id: `zos`)\n", - "- Shared Socioeconomic Pathway 5 (experiment_id: `ssp585`)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "df_zos = df.query(\"variable_id == 'zos' & experiment_id == 'ssp585'\")\n", - "df_zos" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "There's 272 modelled scenarios for SSP5.\n", - "Let's just get the URL to the first one in the list for now." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "print(df_zos.zstore.iloc[0])" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Reading from the remote Zarr storage" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "If Zarr-python is version 3 or greater, we can simply pass URIs to Zarr Stores to `xr.open_zarr`. Note that anonymous access is used by default." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.open_zarr(\n", - " \"gs://cmip6/CMIP6/ScenarioMIP/NOAA-GFDL/GFDL-ESM4/ssp585/r1i1p1f1/Omon/zos/gn/v20180701/\",\n", - " consolidated=True,\n", - ")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "### Selecting time slices\n", - "\n", - "Let's say we want to calculate sea level change between\n", - "2015 and 2100. We can access just the specific time points\n", - "needed using [`xr.Dataset.sel`](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.sel.html)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "zos_2015jan = ds.zos.sel(time=\"2015-01-16\").squeeze()\n", - "zos_2100dec = ds.zos.sel(time=\"2100-12-16\").squeeze()" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "Sea level change would just be 2100 minus 2015." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "sealevelchange = zos_2100dec - zos_2015jan" - ] - }, - { - "cell_type": "markdown", - "id": "18", - "metadata": {}, - "source": [ - "Note that up to this point, we have not actually downloaded any\n", - "(big) data yet from the cloud. This is all working based on\n", - "metadata only.\n", - "\n", - "To bring the data from the cloud to your local computer, call `.compute`.\n", - "This will take a while depending on your connection speed." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "sealevelchange = sealevelchange.compute()" - ] - }, - { - "cell_type": "markdown", - "id": "20", - "metadata": {}, - "source": [ - "We can do a quick plot to show how Sea Level is predicted to change\n", - "between 2015-2100 (from one modelled experiment)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "sealevelchange.plot.imshow()" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": {}, - "source": [ - "Notice the blue parts between -40 and -60 South where sea level has dropped?\n", - "That's to do with the Antarctic ice sheet losing mass and resulting in a lower\n", - "gravitational pull, resulting in a relative decrease in sea level. Over most\n", - "of the Northern Hemisphere though, sea level rise has increased between 2015 and 2100." - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "That's all! Hopefully this will get you started on accessing more cloud-native datasets!" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/intermediate/remote_data/index.md b/intermediate/remote_data/index.md deleted file mode 100644 index a6110848..00000000 --- a/intermediate/remote_data/index.md +++ /dev/null @@ -1,5 +0,0 @@ -# Remote Data - -```{tableofcontents} - -``` diff --git a/intermediate/remote_data/remote-data.ipynb b/intermediate/remote_data/remote-data.ipynb deleted file mode 100644 index c2b54ba5..00000000 --- a/intermediate/remote_data/remote-data.ipynb +++ /dev/null @@ -1,377 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Access Patterns to Remote Data with *fsspec*\n", - "\n", - "Accessing remote data with xarray usually means working with cloud-optimized formats like Zarr or COGs, the [CMIP6 tutorial](remote-data.ipynb) shows this pattern in detail. These formats were designed to be efficiently accessed over the internet, however in many cases we might need to access data that is not available in such formats.\n", - "\n", - "This notebook will explore how we can leverage xarray's backends to access remote files. For this we will make use of [`fsspec`](https://github.com/fsspec/filesystem_spec), a powerful Python library that abstracts the internal implementation of remote storage systems into a uniform API that can be used by many file-format specific libraries.\n", - "\n", - "Before starting with remote data, it may be helpful to understand how xarray handles local files and how xarray backends work. 
The following diagram shows the different components involved in accessing data either locally or remote using the `h5netcdf` backend which uses a format specific library to access HDF5 files.\n", - "\n", - "![xarray-access(3)](https://gist.github.com/assets/717735/3c3c6801-11ed-43a4-98ea-636b7dd612d8)\n", - "\n", - "Let's consider a scenario where we have a local NetCDF4 file containing gridded data. NetCDF is a common file format used in scientific research for storing array-like data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "\n", - "localPath = \"../../data/sst.mnmean.nc\"\n", - "\n", - "ds = xr.open_dataset(localPath)\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "## xarray backends under the hood\n", - "\n", - "* What happened when we ran `xr.open_dataset(\"path-to-file\")`?\n", - "\n", - "As we know xarray is a very flexible and modular library. When we open a file, we are asking xarray to use one of its format specific engines to get the actual array data from the file into memory. File formats come in different flavors, from general purpose HDF5 to the very domain-specific ones like GRIB2. When we call `open_dataset()` the first thing xarray does is try to guess which of the preinstalled backends can handle this file, in this case we pass a string with a valid local path.\n", - "\n", - "We'll use a helper function to print a simplified call stack and see what's going on under the hood.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "from IPython.display import Code\n", - "\n", - "\n", - "tracing_output = []\n", - "_match_pattern = \"xarray\"\n", - "\n", - "\n", - "def trace_calls(frame, event, arg):\n", - " if event == 'call':\n", - " code = frame.f_code\n", - " func_name = code.co_name\n", - " func_file = code.co_filename.split(\"/site-packages/\")[-1]\n", - " func_line = code.co_firstlineno\n", - " if not func_name.startswith(\"_\") and _match_pattern in func_file:\n", - " tracing_output.append(f\"def {func_name}() at {func_file}:{func_line}\")\n", - " return trace_calls\n", - "\n", - "\n", - "# we enable tracing and call open_dataset()\n", - "sys.settrace(trace_calls)\n", - "ds = xr.open_dataset(localPath)\n", - "sys.settrace(None)\n", - "\n", - "# Print the trace with some syntax highlighting\n", - "Code(\" \\n\".join(tracing_output[0:10]), language='python')" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### **What are we seeing?** \n", - "\n", - "* xarray uses `guess_engine()` to identify which backend can open the file.\n", - "* `guess_engine()` will loop through the preinstalled backends and will run `guess_can_open()`.\n", - "* if an engine can handle the file type it will verify that we are working with a local file.\n", - "* Once that we know which backend we'll use we invoke that backend implementation of `open_dataset()`.\n", - "\n", - "Let's tell xarray which backend we need for our local file. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "tracing_output = []\n", - "\n", - "sys.settrace(trace_calls)\n", - "ds = xr.open_dataset(localPath, engine=\"h5netcdf\")\n", - "sys.settrace(None)\n", - "\n", - "# Print the top 10 calls to public methods\n", - "Code(\" \\n\".join(tracing_output[0:10]), language='python')" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "> It is important to note that there are overlaps between the pre-installed backends in xarray. Many of these backends support the same formats (e.g., NetCDF-4), and xarray uses them in a specific order unless a particular backend is specified. For example, when we request the h5netcdf engine, xarray will not attempt to guess the backend. However, it will still check if the URI is remote, which will involve some calls to a context manager. By examining the call stack, we can observe the use of a file handler and a cache, which are crucial for efficiently accessing remote files." - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "### Supported file formats by backend\n", - "\n", - "The `open_dataset()` method is our entry point to n-dimensional data with xarray, the first argument we pass indicates what we want to open and is used by xarray to get the right backend and in turn is used by the backend to open the file locally or remote. The accepted types by xarray are:\n", - "\n", - "\n", - "* **str**: `my-file.nc` or `s3:://my-zarr-store/data.zarr`\n", - "* **os.PathLike**: Posix compatible path, most of the times is a Pathlib cross-OS compatible path.\n", - "* **BufferedIOBase**: some xarray backends can read data from a buffer, this is key for remote access.\n", - "* **AbstractDataStore**: This one is the generic store and backends should subclass it, if we do we can pass a \"store\" to xarray like in the case of Opendap/Pydap\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# Listing which backends we have available, if we install more they should show up here.\n", - "xr.backends.list_engines()" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "### Trying to access a file on cloud storage (AWS S3)\n", - "\n", - "Now let's try to open a file on a remote file system, this will fail and we'll take a look into why it failed and how we'll use fsspec to overcome this." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "try:\n", - " ds = xr.open_dataset(\"s3://its-live-data/test-space/sample-data/sst.mnmean.nc\")\n", - "except Exception as e:\n", - " print(e)" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "xarray iterated through the registered backends and netcdf4 returned a `\"yes, I can open that extension\"` see: [netCDF4_.py#L618](https://github.com/pydata/xarray/blob/6c2d8c3389afe049ccbfd1393e9a81dd5c759f78/xarray/backends/netCDF4_.py#L618). 
However, **the backend doesn't know how to \"talk\" to a remote store** and thus it fails to open our file.\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "## Supported format + Read from Buffers = Remote access \n", - "\n", - "Some of xarray's backends can read and write data to memory, this coupled with fsspec's ability to abstract remote files allows us to **access remote files as if they were local**. The following table helps us to identify if a backend can be used to access remote files with fsspec.\n", - "\n", - "\n", - "| Backend | HDF/NetCDF Support | Can Read from Buffer | Handles Own I/O |\n", - "|-----------------|--------------------|----------------------|-----------------|\n", - "| netCDF4 | Yes | No | Yes |\n", - "| scipy | Limited | Yes | Yes |\n", - "| pydap | Yes | No | No |\n", - "| h5netcdf | Yes | Yes | Yes |\n", - "| zarr | No | Yes | Yes |\n", - "| cfgrib | Yes | No | Yes |\n", - "| rasterio | Partial | Yes | No |\n", - "\n", - "\n", - "\n", - "**Can Read from Buffer**: Libraries that can read from buffers do not need to open a file using the operating system machinery and they allow the use of memory to open our files in whatever way we want as long as we have a seekable buffer (random access). \n", - "\n", - "**Handles Own I/O**: Some libraries have self contained code that can handle I/O, compression, codecs and data access. Some engines task their I/O to lower level libraries. This is the case with rasterio that uses GDAL to access raster files. If a Library is in control of its own I/O operations can be easily adapted to read from buffers.\n", - "\n", - "```{mermaid}\n", - "graph TD\n", - " A[\"netCDF-4 (.nc, .nc4) and most HDF5 files\"] -->|netcdf4| B[\"Remote Access: No\"]\n", - " A -->|h5netcdf| C[\"Remote Access: Yes\"]\n", - " \n", - " D[\"netCDF files (.nc, .cdf, .gz)\"] -->|scipy| E[\"Remote Access: Yes\"]\n", - " \n", - " F[\"zarr files (.zarr)\"] -->|zarr| G[\"Remote Access: Yes\"]\n", - "\n", - " H[\"OpenDAP\"] -->|pydap| I[\"Remote Access: Yes\"]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "## Remote Access and File Caching\n", - "\n", - "When we use fsspec to abstract a remote file we are in essence translating byte requests to HTTP range requests over the internet. An HTTP request is a costly I/O operation compared to accessing a local file. Because of this, it's common that libraries that handle over the network data transfers implement a cache to avoid requesting the same data over and over. In the case of fsspec there are different ways to ask the library to handle this **caching and this is one of the most relevant performance considerations** when we work with xarray and remote data.\n", - "\n", - "fsspec default cache is called `read-ahead` and as its name suggests it will read ahead of our request a fixed amount of bytes, this is good when we are working with text or tabular data but it's really an anti pattern when we work with scientific data formats. Benchmarks show that any of the caching schemas will perform better than using the default `read-ahead`.\n", - "\n", - "### fsspec caching implementations.\n", - "\n", - "#### simple cache + `open_local()`\n", - "\n", - "The simplest way to use fsspec is to cache remote files locally. Since we are using a local storage for our cache, backends like `netcdf4` will be reading from disk avoiding the issue of not being able to read directly from buffers. 
This pattern can be applied to different backends that don't support buffers with the disadvantage that we'll be caching whole files and using disk space.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "import fsspec\n", - "\n", - "uri = \"https://its-live-data.s3-us-west-2.amazonaws.com/test-space/sample-data/sst.mnmean.nc\"\n", - "# we prepend the cache type to the URI, this is called protocol chaining in fsspec-speak\n", - "file = fsspec.open_local(f\"simplecache::{uri}\", simplecache={'cache_storage': '/tmp/fsspec_cache'})\n", - "\n", - "ds = xr.open_dataset(file, engine=\"netcdf4\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "#### block cache + `open()`\n", - "\n", - "If our backend support reading from a buffer we can cache only the parts of the file that we are reading, this is useful but tricky. As we mentioned before fsspec default cache will request an overhead of 5MB ahead of the byte offset we request, and if we are reading small chunks from our file it will be really slow and incur in unnecessary transfers.\n", - "\n", - "Let's open the same file but using the `h5netcdf` engine and we'll use a block cache strategy that stores predefined block sizes from our remote file.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "%%time\n", - "uri = \"https://its-live-data.s3-us-west-2.amazonaws.com/test-space/sample-data/sst.mnmean.nc\"\n", - "\n", - "fs = fsspec.filesystem('http')\n", - "\n", - "fsspec_caching = {\n", - " \"cache_type\": \"blockcache\", # block cache stores blocks of fixed size and uses eviction using a LRU strategy.\n", - " \"block_size\": 8\n", - " * 1024\n", - " * 1024, # size in bytes per block, adjust depends on the file size but the recommended size is in the MB\n", - "}\n", - "\n", - "# Note that if we use a context, we'll close the file after the block so operations on xarray may fail if we don't load our data arrays.\n", - "with fs.open(uri, **fsspec_caching) as file:\n", - " ds = xr.open_dataset(file, engine=\"h5netcdf\")\n", - " mean = ds.sst.mean()\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "### Reading data from cloud storage\n", - "\n", - "So far we have only used HTTP to access a remote file, however the commercial cloud has their own implementations with specific features. fsspec allows us to talk to different cloud storage implementations hiding these details from us and the libraries we use. Now we are going to access the same file using the S3 protocol. 
\n", - "\n", - "> Note: S3, Azure blob, etc all have their names and prefixes but under the hood they still work with the HTTP protocol.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "%%time\n", - "uri = \"s3://its-live-data/test-space/sample-data/sst.mnmean.nc\"\n", - "\n", - "# If we need to pass credentials to our remote storage we can do it here, in this case this is a public bucket\n", - "fs = fsspec.filesystem('s3', anon=True)\n", - "\n", - "fsspec_caching = {\n", - " \"cache_type\": \"blockcache\", # block cache stores blocks of fixed size and uses eviction using a LRU strategy.\n", - " \"block_size\": 8\n", - " * 1024\n", - " * 1024, # size in bytes per block, adjust depends on the file size but the recommended size is in the MB\n", - "}\n", - "\n", - "# we are not using a context, we can use ds until we manually close it.\n", - "ds = xr.open_dataset(fs.open(uri, **fsspec_caching), engine=\"h5netcdf\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "## Key Takeaways\n", - "\n", - "1. **fsspec and remote access.**\n", - "\n", - ">fsspec is a Python library that provides a unified interface to various filesystems, enabling access to local, remote, and cloud storage systems.\n", - "It supports a wide range of protocols such as http, https, s3, gcs, ftp, and many more.\n", - "One of the key features of fsspec is its ability to cache remote files locally, improving performance by reducing latency and bandwidth usage.\n", - "\n", - "2. **xarray Backends.**\n", - "\n", - ">xarray backends offers flexible support for opening datasets stored in different formats and locations.\n", - "By leveraging various backends along with fsspec we can open, read, and analyze complex datasets efficiently, without worrying about the underlying file format or storage mechanism.\n", - "\n", - "3. **Combining fsspec with xarray**\n", - "\n", - "> xarray can work with fsspec filesystems to open and cache remote files and use caching strategies to optimize its data transfer.\n", - "\n", - "\n", - "\n", - "By leveraging these tools and techniques, you can efficiently manage and process large, remote datasets in a way that optimizes performance and accessibility." - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/intermediate/xarray_and_dask.ipynb b/intermediate/xarray_and_dask.ipynb deleted file mode 100644 index b2f5c775..00000000 --- a/intermediate/xarray_and_dask.ipynb +++ /dev/null @@ -1,616 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Parallel computing with Dask\n", - "\n", - "This notebook demonstrates one of Xarray's most powerful features: the ability\n", - "to wrap [dask arrays](https://docs.dask.org/en/stable/array.html) and allow users to seamlessly execute analysis code in\n", - "parallel.\n", - "\n", - "By the end of this notebook, you will:\n", - "\n", - "1. Xarray DataArrays and Datasets are \"dask collections\" i.e. you can execute\n", - " top-level dask functions such as `dask.visualize(xarray_object)`\n", - "2. 
Learn that all xarray built-in operations can transparently use dask\n", - "\n", - "\n", - "```{important}\n", - "Using Dask does not always make your computations run faster!* \n", - "```\n", - "\n", - "Performance will depend on the computational infrastructure you're using (for example, how many CPU cores), how the data you're working with is structured and stored, and the algorithms and code you're running. Be sure to review the [Dask best-practices](https://docs.dask.org/en/stable/best-practices.html) if you're new to Dask!" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## What is Dask\n", - "\n", - "When we talk about Xarray + Dask, we are *usually* talking about two things:\n", - "1. `dask.array` as a drop-in replacement for numpy arrays\n", - "2. A \"scheduler\" that actually runs computations on dask arrays (commonly [distributed](https://docs.dask.org/en/stable/deploying.html))\n", - "\n", - "## Introduction to dask.array\n", - "\n", - "> Dask Array implements a subset of the NumPy ndarray interface using blocked algorithms, cutting up the large array into many small arrays (*blocks* or *chunks*). This lets us compute on arrays larger than memory using all of our cores. We coordinate these blocked algorithms using Dask graphs.\n", - "\n", - "" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import dask\n", - "import dask.array\n", - "\n", - "dasky = dask.array.ones((10, 5), chunks=(2, 2))\n", - "dasky" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Why dask.array\n", - "\n", - "1. Use parallel resources to speed up computation\n", - "2. Work with datasets bigger than RAM (\"out-of-core\")\n", - " > \"dask lets you scale from memory-sized datasets to disk-sized datasets\"\n", - "\n", - "### dask is lazy\n", - "\n", - "Operations are not computed until you explicitly request them. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "dasky.mean(axis=-1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "So what did dask do when you called `.mean`? It added that operation to the \"graph\" or a blueprint of operations to execute later." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "dask.visualize(dasky.mean(axis=-1))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "dasky.mean(axis=-1).compute()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### More\n", - "\n", - "See the [dask.array tutorial](https://tutorial.dask.org/02_array.html)\n", - "\n", - "\n", - "### Dask + Xarray\n", - "\n", - "Remember that Xarray can wrap many different array types. So Xarray can wrap dask arrays too. \n", - "\n", - "We use Xarray to enable using our metadata to express our analysis." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n", - "## Creating dask-backed Xarray objects\n", - "\n", - "The `chunks` argument to both `open_dataset` and `open_mfdataset` allow you to\n", - "read datasets as dask arrays. 
\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%xmode minimal\n", - "\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "# limit the amount of information printed to screen\n", - "xr.set_options(display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\"air_temperature\")\n", - "ds.air" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.open_dataset(\n", - " \"air_temperature\",\n", - " chunks={ # this tells xarray to open the dataset as a dask array\n", - " \"lat\": \"auto\",\n", - " \"lon\": 25,\n", - " \"time\": -1,\n", - " },\n", - ")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The representation (\"repr\" in Python parlance) for the `air` DataArray shows the very nice HTML dask array repr. You can access the underlying chunk sizes using `.chunks`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.chunks" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "All variables in a `Dataset` need _not_ have the same chunk size along\n", - "common dimensions.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Extracting underlying data\n", - "\n", - "There are two ways to pull out the underlying array object in an xarray object.\n", - "\n", - "1. `.to_numpy` or `.values` will always return a NumPy array. For dask-backed xarray objects,\n", - " this means that compute will always be called\n", - "2. `.data` will return a Dask array\n", - "\n", - "```{tip}\n", - "Use `to_numpy` or `as_numpy` instead of `.values` so that your code generalizes to other array types (like CuPy arrays, sparse arrays)\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.data # dask array, not numpy" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.air.as_numpy().data ## numpy array" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "Try calling `ds.air.values` and `ds.air.data`. Do you understand the difference?\n", - "::::" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# use this cell for the exercise" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ds.air.to_numpy()" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "\n", - "\n", - "## Lazy computation \n", - "\n", - "Xarray seamlessly wraps dask so all computation is deferred until explicitly\n", - "requested." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "mean = ds.air.mean(\"time\")\n", - "mean" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Dask actually constructs a graph of the required computation. 
Here it's pretty simple: the full array is subdivided into 3 arrays. Dask will load each of these subarrays in a separate thread using the default [single-machine scheduling](https://docs.dask.org/en/stable/scheduling.html). You can visualize dask 'task graphs' which represent the requested computation:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "mean.data # dask array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# visualize the graph for the underlying dask array\n", - "# we ask it to visualize the graph from left to right because it looks nicer\n", - "dask.visualize(mean.data, rankdir=\"LR\")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [], - "toc-hr-collapsed": true - }, - "source": [ - "## Getting concrete values\n", - "\n", - "At some point, you will want to actually get concrete values (_usually_ a numpy array) from dask.\n", - "\n", - "There are two ways to compute values on dask arrays.\n", - "\n", - "1. `.compute()` returns a new xarray object backed by numpy, leaving the original unchanged (just like calling `.compute()` on a dask array)\n", - "2. `.load()` replaces the dask array in the xarray object with a numpy array.\n", - " This is equivalent to `ds = ds.compute()`\n", - " \n", - "```{tip}\n", - "There is a third option: \"persisting\". `.persist()` loads the values into distributed RAM. The values are computed but remain distributed across workers. So `ds.air.persist()` still returns a dask array. This is useful if you will be repeatedly using a dataset for computation but it is too large to load into local memory. You will see a persistent task on the dashboard. See the [dask user guide](https://docs.dask.org/en/latest/api.html#dask.persist) for more on persisting\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Try running `mean.compute` and then examine `mean` after that. Is it still a dask array?\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "Computing returns a new object backed by numpy values but does not modify `mean` in place. So `mean` still contains a dask array.\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "\n", - "Now repeat that exercise with `mean.load`.\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "`load` modifies an Xarray object in-place so `mean` now contains a numpy array.\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Distributed Clusters" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As your data volumes grow and algorithms get more complex it can be hard to print out task graph representations and understand what Dask is doing behind the scenes. Luckily, you can use Dask's 'Distributed' scheduler to get very useful diagnostic information.\n", - "\n", - "First let's set up a `LocalCluster` using [dask.distributed](https://distributed.dask.org/).\n", - "\n", - "You can use any kind of Dask cluster. This step is completely independent of\n", - "xarray. While not strictly necessary, the dashboard provides a nice learning\n", - "tool.\n", - "\n", - "By default, Dask uses the current working directory for writing temporary files.\n", - "You can instead point Dask at a temporary scratch folder by passing `local_directory='/tmp'` when creating the cluster (see the sketch below)."
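The following cell is a supplementary sketch, not part of the original notebook: it shows one way to pass the `local_directory` option mentioned above when building the cluster explicitly, and contrasts `.compute()`, `.load()`, and `.persist()` from the previous section. The worker counts, chunk sizes, and the `ds_sketch` name are arbitrary illustrative choices.

```python
from dask.distributed import Client, LocalCluster

import xarray as xr

# Build the cluster explicitly so we can pass local_directory;
# 2 workers x 2 threads is an arbitrary choice for a laptop.
cluster = LocalCluster(
    n_workers=2,
    threads_per_worker=2,
    local_directory="/tmp",  # write scratch/spill files to /tmp instead of the current directory
)
client = Client(cluster)

# a chunked (dask-backed) copy of the tutorial dataset
ds_sketch = xr.tutorial.open_dataset("air_temperature", chunks={"time": 500})

mean = ds_sketch.air.mean("time")    # lazy: only builds a task graph
computed = mean.compute()            # new object backed by numpy; `mean` is untouched
mean.load()                          # computes in place; `mean` itself is now numpy-backed
persisted = ds_sketch.air.persist()  # computation starts, result stays in cluster memory as a dask array

client.close()
cluster.close()
```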
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from dask.distributed import Client\n", - "\n", - "# This piece of code is just for a correct dashboard link mybinder.org or other JupyterHub demos\n", - "import dask\n", - "import os\n", - "\n", - "# if os.environ.get('JUPYTERHUB_USER'):\n", - "# dask.config.set(**{\"distributed.dashboard.link\": \"/user/{JUPYTERHUB_USER}/proxy/{port}/status\"})\n", - "\n", - "client = Client()\n", - "client" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "☝️ Click the Dashboard link above. \n", - "\n", - "👈 Or click the \"Search\" 🔍 button in the [dask-labextension](https://github.com/dask/dask-labextension) dashboard.\n", - "\n", - "```{note}\n", - "if using the dask-labextension, you should disable the 'Simple' JupyterLab interface (`View -> Simple Interface`), so that you can drag and rearrange whichever dashboards you want. The `Workers` and `Task Stream` are good to make sure the dashboard is working!\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import dask.array\n", - "\n", - "dask.array.ones((1000, 4), chunks=(2, 1)).compute() # should see activity in dashboard" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Computation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's go back to our xarray DataSet, in addition to computing the mean, other operations such as indexing will automatically use whichever Dask Cluster we are connected to!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.isel(lon=1, lat=20)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "and more complicated operations...\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "rolling_mean = ds.air.rolling(time=5).mean() # no activity on dashboard\n", - "rolling_mean # contains dask array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "timeseries = rolling_mean.isel(lon=1, lat=20) # no activity on dashboard\n", - "timeseries # contains dask array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "computed = rolling_mean.compute() # activity on dashboard\n", - "computed # has real numpy values" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that `mean` still contains a dask array\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "rolling_mean" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "While these operations all work, not all of them are necessarily the optimal implementation for parallelism. Usually analysis pipelines need some tinkering and tweaking to get things to work. 
In particular read the user guide recommendations for [chunking](https://docs.xarray.dev/en/stable/user-guide/dask.html#chunking-and-performance).\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Xarray data structures are first-class dask collections.\n", - "\n", - "This means you can do things like `dask.compute(xarray_object)`,\n", - "`dask.visualize(xarray_object)`, `dask.persist(xarray_object)`. This works for\n", - "both DataArrays and Datasets." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] - }, - "source": [ - "::::{admonition} Exercise\n", - "Visualize the task graph for a few different computations on `ds.air`!\n", - "::::" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Use this cell for the exercise" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Finish up\n", - "Gracefully shut down our connection to the Dask cluster. This becomes more important when you are running on large HPC or Cloud servers rather than a laptop!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client.close()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Next\n", - "\n", - "\n", - "See the [Xarray user guide on dask](https://docs.xarray.dev/en/stable/user-guide/dask.html). " - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/intermediate/xarray_ecosystem.ipynb b/intermediate/xarray_ecosystem.ipynb deleted file mode 100644 index c5d0d6c9..00000000 --- a/intermediate/xarray_ecosystem.ipynb +++ /dev/null @@ -1,684 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# A Tour of Xarray Customizations\n", - "\n", - "Xarray is easily extensible.\n", - "This means it is easy to build custom packages on top of it that tackle particular computational problems or supply domain-specific functionality.\n", - "\n", - "These packages can plug into xarray in various ways. They may build directly on top of xarray, or they may take advantage of some of xarray's dedicated interfacing features:\n", - "- Accessors (extensions, sketched briefly below)\n", - "- Backend (filetype) entrypoint\n", - "- Metadata attributes\n", - "- Duck-array wrapping interface\n", - "- Flexible indexes (coming soon!)" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we introduce several popular or interesting extensions that are installable as their own packages (via conda and pip). These packages integrate with xarray using one or more of the features mentioned above.\n", - "\n", - "- [hvplot](https://hvplot.holoviz.org/), a powerful interactive plotting library\n", - "- [rioxarray](https://corteva.github.io/rioxarray/stable/index.html), for working with geospatial raster data using rasterio\n", - "- [cf-xarray](https://cf-xarray.readthedocs.io/en/latest/), for interpreting CF-compliant data\n", - "- [pint-xarray](https://pint-xarray.readthedocs.io/en/latest/), for unit-aware computations using pint."
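Before touring the packages, here is a minimal sketch of the accessor mechanism that several of them rely on (and which the next section describes). The `geo` name and its methods are hypothetical, invented purely for illustration; only `xr.register_dataarray_accessor` comes from xarray itself.

```python
import numpy as np
import xarray as xr


@xr.register_dataarray_accessor("geo")
class GeoAccessor:
    """Toy accessor: adds a hypothetical `.geo` namespace to every DataArray."""

    def __init__(self, da):
        self._da = da

    @property
    def domain(self):
        """Return the (lat_min, lat_max, lon_min, lon_max) bounding box."""
        return (
            float(self._da.lat.min()), float(self._da.lat.max()),
            float(self._da.lon.min()), float(self._da.lon.max()),
        )

    def normalize(self):
        """Return the data rescaled to [0, 1]."""
        da = self._da
        return (da - da.min()) / (da.max() - da.min())


da = xr.DataArray(
    np.random.rand(4, 5),
    coords={"lat": [10.0, 20.0, 30.0, 40.0], "lon": [0.0, 1.0, 2.0, 3.0, 4.0]},
    dims=("lat", "lon"),
)
da.geo.domain       # (10.0, 40.0, 0.0, 4.0)
da.geo.normalize()  # values scaled to [0, 1]
```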
- ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Specific examples for implementing your own xarray customizations using these interfacing features are available in the ADVANCED section of this book." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Quick note on accessors" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Before we look at the packages we need to briefly introduce a feature they commonly use: [\"xarray accessors\"](https://docs.xarray.dev/en/stable/internals/extending-xarray.html).\n", - "\n", - "The accessor-style syntax is used heavily by the other libraries we are about to cover.\n", - "\n", - "For users, accessors just allow us to have a familiar dot (method-like) syntax on xarray objects, for example `da.hvplot()`, `da.pint.quantify()`, or `da.cf.describe()`.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## hvplot via accessors" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The [HoloViews library](https://holoviews.org/) makes great use of accessors to allow seamless plotting of xarray data using a completely different plotting backend (by default, xarray uses [matplotlib](https://matplotlib.org/))." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We first need to import the code that registers the hvplot accessor" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import hvplot.xarray" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "And now we can call the `.hvplot` method to plot using holoviews in the same way that we would have used `.plot` to plot using matplotlib." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds['air'].isel(time=1).hvplot(cmap=\"fire\")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For some more examples of how powerful HoloViews is [see here](https://tutorial.xarray.dev/intermediate/hvplot.html)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Rioxarray via the backend entrypoint\n", - "\n", - "[Rioxarray](https://corteva.github.io/rioxarray/stable/index.html) is a Python library that enhances Xarray's ability to work with geospatial data and coordinate reference systems. Geographic information systems use GeoTIFF and [many other formats](https://gdal.org/drivers/raster/index.html) to organize and store gridded, or *raster*, datasets. \n", - "\n", - "The Geospatial Data Abstraction Library ([GDAL](https://gdal.org)) provides foundational drivers and geospatial algorithms, and the [rasterio](https://rasterio.readthedocs.io/en/latest) library provides a Pythonic interface to GDAL. `Rioxarray` brings key features of rasterio to Xarray:\n", - "\n", - "1. A backend *engine* to read any format recognized by *GDAL*\n", - "1. 
A `.rio` *accessor* for *rasterio's* geospatial algorithms such as reprojection\n", - "\n", - "Below are a couple of brief examples illustrating these features:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import rioxarray # ensure you have rioxarray installed in your environment" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can explicitly use rioxarray's 'rasterio' engine to load myriad geospatial raster formats. Below is a [Cloud-Optimized Geotiff](https://www.cogeo.org) from an AWS [public dataset](https://registry.opendata.aws/sentinel-1-rtc-indigo/) of synthetic aperture radar data over Washington State, USA. The `overview_level` argument (set to 2 in the cell below) is specific to the `rasterio` engine and opens a pre-computed lower-resolution \"overview\" of the data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "url = 'https://sentinel-s1-rtc-indigo.s3.us-west-2.amazonaws.com/tiles/RTC/1/IW/10/U/CU/2017/S1A_20170101_10UCU_ASC/Gamma0_VV.tif'\n", - "da = xr.open_dataarray(url, engine='rasterio', open_kwargs={\"overview_level\": 2})\n", - "da" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `spatial_ref` coordinate is added by rioxarray to store standardized geospatial Coordinate Reference System (CRS) information. We can access that information and additional methods via the `.rio` accessor:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.rio.crs" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "EPSG refers to the 'European Petroleum Survey Group', a database of the many CRS definitions for our planet used over the years! EPSG=32610 is the [\"UTM 10N\" CRS](https://epsg.io/32610), with coordinate units in meters. Let's say you want longitude, latitude coordinate points in degrees instead. You'd have to *reproject* this data:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_lonlat = da.rio.reproject('epsg:4326')\n", - "da_lonlat" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that the size of the data has changed as well as the coordinate values. This is typical of reprojection, as your data must be resampled and often interpolated to match the new CRS grid! A quick plot compares the original and reprojected data:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import panel as pn\n", - "\n", - "img1 = da.sel(band=1).hvplot.image(\n", - " x='x', y='y', rasterize=True, cmap='gray', clim=(0, 0.5), title='UTM'\n", - ")\n", - "img2 = da_lonlat.sel(band=1).hvplot.image(\n", - " rasterize=True, cmap='gray', clim=(0, 0.5), title='LON/LAT'\n", - ")\n", - "\n", - "pn.Column(img1, img2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## cf-xarray via metadata attributes" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Xarray objects can store [arbitrary metadata](https://docs.xarray.dev/en/stable/getting-started-guide/faq.html#what-is-your-approach-to-metadata) in the form of a `dict` attached to each `DataArray` and `Dataset` object, accessible via the `.attrs` property."
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "xr.DataArray(name=\"Hitchhiker\", data=0, attrs={\"life\": 42, \"name\": \"Arthur Dent\"})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Normally xarray operations ignore this metadata, simply carting it around until you explicitly choose to use it. However, sometimes we might want to write custom code which makes use of the metadata." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[cf_xarray](https://cf-xarray.readthedocs.io/) is a project that tries to\n", - "let you make use of other [Climate and Forecast metadata convention attributes](http://cfconventions.org/) (or \"CF attributes\") that xarray ignores. It attaches itself\n", - "to all xarray objects under the `.cf` namespace.\n", - "\n", - "Where xarray allows you to specify dimension names for analysis, `cf_xarray`\n", - "lets you specify logical names like `\"latitude\"` or `\"longitude\"` instead as\n", - "long as the appropriate CF attributes are set.\n", - "\n", - "For example, the `\"longitude\"` dimension in different files might be labelled as: (lon, LON, long, x…), but cf_xarray lets you always refer to the logical name `\"longitude\"` in your code:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import cf_xarray" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# describe cf attributes in dataset\n", - "ds.air.cf.describe()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The following `mean` operation will work with any dataset that has appropriate\n", - "attributes set that allow detection of the \"latitude\" variable (e.g.\n", - "`units: \"degrees_north\"` or `standard_name: \"latitude\"`)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate equivalent of .mean(\"lat\")\n", - "ds.air.cf.mean(\"latitude\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate indexing\n", - "ds.air.cf.sel(longitude=242.5, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Pint via duck array wrapping" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Why use pint" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[Pint](https://pint.readthedocs.io/en/stable/) defines physical units, allowing you to work with numpy-like arrays which track the units of your values through computations." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Pint defines a numpy-like array class called `pint.Quantity`, which is made up of a numpy array and a `pint.Unit` instance."
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from pint import Unit\n", - "\n", - "# you can create a pint.Quantity by multiplying a value by a pint.Unit\n", - "d = np.array(10) * Unit(\"metres\")\n", - "d" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "These units are automatically propagated through operations" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "t = 1 * Unit(\"seconds\")\n", - "v = d / t\n", - "v" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Or if the operation involves inconsistent units, a `pint.DimensionalityError` is raised." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "d + t" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### pint inside xarray objects" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We have already seen that xarray can wrap numpy arrays or dask arrays, but in fact xarray can wrap any type of array that behaves similarly to a numpy array.\n", - "Using this feature we can store a `pint.Quantity` array inside an xarray DataArray" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da = xr.DataArray(d)\n", - "da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can see that the data type stored within the DataArray is a `Quantity` object, rather than just a `np.ndarray` object, and the units of the data are displayed in the repr." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The reason this works is that a `pint.Quantity` array is what we call a \"duck array\", in that it behaves so similarly to a `numpy.ndarray` that xarray can treat them the same way. (See [python duck typing](https://realpython.com/duck-typing-python/))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### pint-xarray" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The convenience package [pint-xarray](https://pint-xarray.readthedocs.io/en/latest/index.html) makes it easier to get the benefits of pint whilst working with xarray objects.\n", - "\n", - "It provides utility accessor methods for promoting xarray data to pint quantities (which we call \"quantifying\") in various ways and for manipulating the resulting objects." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# to be able to read unit attributes following the CF conventions\n", - "import cf_xarray.units\n", - "import pint_xarray\n", - "\n", - "xr.set_options(display_expand_data=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "pint-xarray provides the `.pint` accessor, which firstly allows us to easily extract the units of our data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.pint.units" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `.pint.quantify()` accessor gives us various ways to convert normal xarray data to be unit-aware." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{note}\n", - "It is preferred to use `.pint.quantify()` to convert xarray data to use pint rather than explicitly creating a `pint.Quantity` array and placing it inside the xarray object, because pint-xarray will handle various subtleties involving dask etc.\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can explicitly specify the units we want" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da = xr.DataArray([4.5, 6.7, 3.8], dims=\"time\")\n", - "da.pint.quantify(\"V\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Or if the data has a \"units\" entry in its `.attrs` metadata dictionary, we can automatically use that to convert each variable.\n", - "\n", - "For example, the xarray tutorial dataset we opened earlier has units in its attributes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.attrs['units']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "which we can automatically read with `.pint.quantify()`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "quantified_air = ds.pint.quantify()\n", - "quantified_air" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we have actually gone even further, and used cf-xarray to automatically interpret cf-compliant units in the `.attrs` to valid pint units.\n", - "This automatically happened just as a result of importing cf-xarray above." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "When we plot quantified data with xarray the correct units will automatically appear on the plot" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "quantified_air[\"air\"].isel(time=500).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If we want to cast the pint arrays back to numpy arrays, we can use `.pint.dequantify()`, which will also write the current units back out to the `.attrs[\"units\"]` field" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "quantified_air.pint.dequantify()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - ":class: tip\n", - "Write a function which will raise a `pint.DimensionalityError` if supplied with Xarray DataArray with the wrong units.\n", - "\n", - "\n", - ":::{admonition} Solution\n", - ":class: dropdown\n", - "\n", - "```python\n", - "from pint import DimensionalityError\n", - "\n", - "\n", - "def special_science_function(distance):\n", - " if distance.pint.units != \"miles\":\n", - " raise DimensionalityError(\n", - " \"this function will only give the correct answer if the input is in units of miles\"\n", - " )\n", - "```\n", - ":::\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "::::{admonition} Exercise\n", - "Try this on some of your data!\n", - "\n", - "After you have imported pint-xarray (and maybe cf-xarray) as above, start with something like\n", - "\n", - "`ds = xr.open_dataset(my_data).pint.quantify()`\n", - "::::" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Take a look at the [pint-xarray 
documentation](https://pint-xarray.readthedocs.io/en/latest/) or the [pint documentation](https://pint.readthedocs.io/en/stable/) if you get stuck." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## The wider world..." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "There are many other libraries in the wider xarray ecosystem. For a list of a few packages we particularly like for geoscience work [see here](https://tutorial.xarray.dev/overview/xarray-in-45-min.html#other-cool-packages), and for a [more exhaustive list see here](https://docs.xarray.dev/en/stable/ecosystem.html)." - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/intro.md b/intro.md deleted file mode 100644 index 2f0d5cb2..00000000 --- a/intro.md +++ /dev/null @@ -1,50 +0,0 @@ -# Welcome to the Xarray Tutorial! - -**[`Xarray`](https://xarray.dev) is an open source project and Python package that makes working with labelled multi-dimensional arrays simple, efficient, and fun!** - -📖 On this Jupyter Book website you'll find easy-to-run tutorial notebooks for Xarray. Whether you're new to Xarray or a seasoned user we hope you'll learn something new and get a head start on your own projects by exploring this material! - -````{grid} -:class-container: text-center -:gutter: 3 - -```{grid-item-card} -:link: overview/get-started -:link-type: doc -:class-header: bg-light -Get started! 🚀 -^^^ -Learn how to navigate the tutorials and run code examples interactively -``` - -```{grid-item-card} -:link: https://www.youtube.com/channel/UCBlxVSA6xQXeb-i4GgTlO7g -:class-header: bg-light -Watch videos 🎬 -^^^ -Live walk-throughs of this material and more on Xarray's YouTube Channel -``` - -```{grid-item-card} -:link: https://docs.xarray.dev/en/latest/help-diagram.html -:class-header: bg-light -Get involved 🙋 -^^^ -Ask questions and participate in the Xarray community! -``` - -```` - -## Citation - -✏️ To cite this material, you can use the recommended [Xarray citation](https://docs.xarray.dev/en/stable/getting-started-guide/faq.html#how-should-i-cite-xarray): {cite:p}`hoyerhamman2017`, {cite:p}`xarray_v202230`. - -## Acknowledgements - -This website is the result of many contributions from the Xarray community! We're very grateful for everyone's volunteered effort as well as [sponsored development](https://xarray.dev/#sponsors). Funding for SciPy 2022, SciPy 2023 tutorial material development specifically was supported by NASA's Open Source Tools, Frameworks, and Libraries Program (award 80NSSC22K0345). - -## Bibliography - -```{bibliography} -:style: plain -``` diff --git a/overview/fundamental-path/README.md b/overview/fundamental-path/README.md deleted file mode 100644 index c5435f9e..00000000 --- a/overview/fundamental-path/README.md +++ /dev/null @@ -1,57 +0,0 @@ -(fundamental-path)= - -# Fundamental Path - -This syllabus comes from Scipy 2022 tutorial workshop. -This path covers fundamental Xarray concepts with -increasingly complex real-world data analysis tasks. - -The tutorial was organized by: - -- Scott Henderson (Univ. Washington) -- Deepak Cherian (National Center for Atmospheric Research) -- Jessica Scheick (Univ. New Hampshire) -- Emma Marshall (Univ. 
Utah) -- Anderson Banihirwe (CarbonPlan) -- Tom Nicholas (Lamont-Doherty Earth Observatory) - -Launch cloud-hosted Jupyter Lab environment: -[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/xarray-contrib/xarray-tutorial/HEAD?labpath=overview/fundamental-path/index.ipynb) - -## Topics - -_Below are links to sections of this website that are part of this journey_: - -```{dropdown} Introduction -{doc}`../../overview/get-started` -``` - -```{dropdown} Introduction to Xarray -{doc}`../../fundamentals/01_datastructures` - -{doc}`../../fundamentals/01.1_io` -``` - -```{dropdown} Working with Labeled Data -{doc}`../../fundamentals/02.1_indexing_Basic` -``` - -```{dropdown} Computation -{doc}`../../fundamentals/03.1_computation_with_xarray` - -{doc}`../../fundamentals/02.3_aligning_data_objects` - -{doc}`../../fundamentals/03.2_groupby_with_xarray` -``` - -```{dropdown} Plotting and Visualization -{doc}`../../fundamentals/04.1_basic_plotting` - -{doc}`../../fundamentals/04.2_faceting` - -{doc}`../../fundamentals/04.3_geographic_plotting` -``` - -```{dropdown} Xarray Ecosystem -{doc}`../../intermediate/xarray_ecosystem` -``` diff --git a/overview/fundamental-path/index.ipynb b/overview/fundamental-path/index.ipynb deleted file mode 100644 index 48c3f5fb..00000000 --- a/overview/fundamental-path/index.ipynb +++ /dev/null @@ -1,62 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "\n", - "\n", - "# Welcome to the Xarray SciPy 2022 Tutorial! \n", - "\n", - "**Xarray**: *Friendly, Interactive, and Scalable Scientific Data Analysis*\n", - "\n", - "Learning goals:\n", - "- understand the Xarray data model\n", - "- effectively use Xarray’s multidimensional labeled indexing\n", - "- utilize Xarray’s powerful built-in computational methods\n", - "- learn about the ecosystem of software extending Xarray\n", - "\n", - "This *4-hour* workshop will explore content from https://tutorial.xarray.dev, which contains a comprehensive collection of hands-on tutorial Jupyter Notebooks. We won't cover it all today, but instead will review a curated set of examples that will prepare you for increasingly complex real-world data analysis tasks!\n", - "\n", - "\n", - "## Schedule \n", - "*Times in Central Daylight (Austin, TX)\n", - "\n", - "| Topic | Time | Notebook Links | \n", - "| :- | - | - | \n", - "| Introduction | 1:30 (20 min) | [getting started](../../overview/get-started.md) | \n", - "| Data Structures | 1:50 (30 min) | [datastructures](../../fundamentals/01_datastructures.ipynb)
[input/output](../../fundamentals/01.1_io.ipynb) | \n", - "| *Break* \n", - "| Labeled Data | 2:30 (20 min) | [working_with_labeled_data](../../fundamentals/02.1_working_with_labeled_data.ipynb) | \n", - "| Visualizations | 2:50 (30 min) | [basic_plotting](../../fundamentals/04.1_basic_plotting.ipynb)
[facet plots](../../fundamentals/04.2_faceting.ipynb) | \n", - "| *Break* | \n", - "| Computation | 3:30 (50 min) | [computation with xarray](../../fundamentals/03.1_computation_with_xarray.ipynb)
[aligning data objects](../../fundamentals/02.3_aligning_data_objects.ipynb)
[groupby with xarray](../../fundamentals/03.2_groupby_with_xarray.ipynb) | \n", - "| *Break* | \n", - "| Xarray ecosystem | 4:30 (30 min) | [xarray ecosystem](../../intermediate/xarray_ecosystem.ipynb) | \n", - "| Scaling Xarray | 5:00 (20 min) | | \n", - "| | **End 5:30** | |\n", - "\n", - "**Thanks for attending!**\n", - "\n", - "Please continue to explore the subfolders in the JupyterLab File Browser for additional tutorial notebooks to run, or read the rendered notebooks at https://tutorial.xarray.dev" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/overview/get-started.md b/overview/get-started.md deleted file mode 100644 index 1ee0299f..00000000 --- a/overview/get-started.md +++ /dev/null @@ -1,60 +0,0 @@ - - -(get-started)= - -# Get Started - -## Organization - -Tutorials are approximately divided into sections with increasing levels of complexity: `Fundamentals`, `Intermediate`, `Advanced`. You'll also find content specific to various `Workshops` hosted over the years, often with accompanying video recordings of instructors going over content and answering questions that come up. - -Most of the tutorial content is written as Jupyter Notebooks that mix -code, text, visualization, and exercises. You can either browse rendered versions of these notebooks on this website, or _execute_ the code examples interactively. - -Many notebooks use special formatting ([Myst Markdown](https://mystmd.org/guide/quickstart-jupyter-lab-myst)) that renders best in a JupyterLab web interface. If you are new to JupyterLab, spend some time reviewing the [documentation and videos](https://jupyterlab.readthedocs.io/en/stable/getting_started/overview.html). - -## Run code interactively - -### On the Cloud - -The easiest way to start modifying and experimenting with tutorial content is to launch a pre-configured server on the Cloud. This is easy thanks to several free resources which offer ephemeral computing instances (be aware you may loose your connection or work at any time) - -#### Mybinder.org - -Clicking [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/xarray-contrib/xarray-tutorial/HEAD) will load a pre-configured Jupyter Lab interface with _all_ tutorial notebooks for you to run. _You have minimal computing resources and any changes you make will not be saved._ Any page with executable content also has a {octicon}`rocket;2em` icon in the upper right that will launch an interactive session for that particular page. - -```{warning} -Be patient, it can take a few minutes for a server to become available on the Cloud (Mybinder.org)! -``` - -#### GitHub Codespaces - -This tutorial is available to run within [GitHub Codespaces](https://github.com/features/codespaces) - a preconfigured development environment running in Microsoft Azure. - -[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://github.com/codespaces/new/xarray-contrib/xarray-tutorial) - -☝️ Click the button above to go to options window to launch a GitHub codespace. - -You can choose from a selection of virtual machine types: 2 cores - 8 GB RAM should be sufficient for all code examples in this repository. -Additionally, you are able to chose from various configurations for specific workshops (such as Scipy2024). 
-GitHub currently gives every user [120 vCPU hours per month for free](https://docs.github.com/en/billing/managing-billing-for-github-codespaces/about-billing-for-github-codespaces#monthly-included-storage-and-core-hours-for-personal-accounts), beyond that you must pay. **So be sure to explicitly stop your codespace when you are done by going to this page (https://github.com/codespaces).** You can also chose to fully delete your codespace when you're done exploring tutorial content. - -### On your computer - -Running tutorials on your computer requires some setup: - -We recommend using [`pixi`](https://pixi.sh/latest/) to ensure a fully reproducible Python environment - -``` -git clone https://github.com/xarray-contrib/xarray-tutorial.git -cd xarray-tutorial -pixi run tutorial -``` - -If you prefer to use conda/mamba: - -``` -mamba env create -f .binder/environment.yml -n xarray-tutorial -conda activate xarray-tutorial -jupyter lab -``` diff --git a/overview/intermediate-path/README.md b/overview/intermediate-path/README.md deleted file mode 100644 index 3fb0fc81..00000000 --- a/overview/intermediate-path/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Intermediate Path - -This learning path was presented at SciPy 2023. It covers intermediate -and more advanced topics and is a good follow-on from the [Fundamental Path](fundamental-path) - -The tutorial was organized by: - -- Deepak Cherian (National Center for Atmospheric Research) -- Scott Henderson (Univ. Washington) -- Jessica Scheick (Univ. New Hampshire) -- Negin Sobhani (National Center for Atmospheric Research) -- Tom Nicholas (Lamont-Doherty Earth Observatory) -- Anderson Banihirwe (CarbonPlan) -- Don Setiawan (Univ. Washington) - -## Outline - -```{dropdown} Introduction -{doc}`../../overview/get-started` -``` - -```{dropdown} Indexing -{doc}`../../intermediate/indexing/advanced-indexing` -``` - -```{dropdown} Computational Patterns -{doc}`../../intermediate/01-high-level-computation-patterns` -``` - -```{dropdown} Wrapping other arrays: dask -{doc}`../../intermediate/xarray_and_dask` -``` - -```{dropdown} Wrapping custom computation -{doc}`../../advanced/apply_ufunc/simple_numpy_apply_ufunc` - -{doc}`../../advanced/apply_ufunc/core-dimensions` - -{doc}`../../advanced/apply_ufunc/complex-output-numpy` - -{doc}`Explore the remaining material <../../advanced/apply_ufunc/apply_ufunc>` -``` diff --git a/overview/intermediate-path/index.ipynb b/overview/intermediate-path/index.ipynb deleted file mode 100644 index 33a6bcd9..00000000 --- a/overview/intermediate-path/index.ipynb +++ /dev/null @@ -1,61 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "\n", - "\n", - "\n", - "# Welcome to the Xarray SciPy 2023 Tutorial! \n", - "\n", - "**Xarray**: *Friendly, Interactive, and Scalable Scientific Data Analysis*\n", - "\n", - "\n", - "This *4-hour* workshop will explore content from [the Xarray tutorial](https://tutorial.xarray.dev), which contains a comprehensive collection of hands-on tutorial Jupyter Notebooks. 
We won't cover it all today, but instead will review a curated set of examples that will prepare you for increasingly complex real-world data analysis tasks!\n", - "\n", - "\n", - "## Schedule \n", - "*Times in Central Daylight (Austin, TX)\n", - "\n", - "Use the links to navigate to the right notebooks.\n", - "\n", - "**Remember to select `global-global-xarray environment` for the notebooks when prompted.**\n", - "\n", - "| Topic | Time | Notebook Links | \n", - "| :- | - | - | \n", - "| Introduction and Setup | 1:30 (20 min) | --- | \n", - "| Indexing | 1:50 (30 min) | [Advanced and Vectorized Indexing](../../intermediate/indexing/advanced-indexing.ipynb) | \n", - "| *10 minute Break* \n", - "| Computational Patterns | 2:30 (50 min) | [Computation Patterns](../../intermediate/01-high-level-computation-patterns.ipynb) | \n", - "| *10 minute Break* | \n", - "| Wrapping other arrays | 3:30 (50 min) | [Xarray and Dask](../../intermediate/xarray_and_dask.ipynb) | \n", - "| *10 minute Break* | \n", - "| Wrapping custom computation (apply_ufunc) | 4:30 (30 min)


5:00 (30 min) | [A gentle introduction](../../advanced/apply_ufunc/simple_numpy_apply_ufunc.ipynb)
[Core dimensions](../../advanced/apply_ufunc/core-dimensions.ipynb)
[Handling complex output](../../advanced/apply_ufunc/complex-output-numpy.ipynb)
Explore the rest of the material |\n", - "| | **End 5:30** | |\n", - "\n", - "\n", - "\n", - "## Thanks for attending!\n", - "\n", - "Please continue to explore the subfolders in the JupyterLab File Browser for additional tutorial notebooks to run, or read the rendered notebooks at [https://tutorial.xarray.dev](https://tutorial.xarray.dev)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/overview/learning-paths.md b/overview/learning-paths.md deleted file mode 100644 index 9e7922e4..00000000 --- a/overview/learning-paths.md +++ /dev/null @@ -1,11 +0,0 @@ -# Learning Paths - -Learning Paths are guided tours through the material on the site. -These paths were originally built for in-person tutorials at the SciPy conference. -They are useful if you would like to work through the material in an organized manner. - -Follow the links below to try out each path. - -```{tableofcontents} - -``` diff --git a/overview/xarray-in-45-min.ipynb b/overview/xarray-in-45-min.ipynb deleted file mode 100644 index d771a368..00000000 --- a/overview/xarray-in-45-min.ipynb +++ /dev/null @@ -1,1257 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "\n", - "# Xarray in 45 minutes\n", - "\n", - "In this lesson, we cover the basics of Xarray data structures. By the\n", - "end of the lesson, we will be able to:\n", - "\n", - "- Understand the basic data structures in Xarray\n", - "- Inspect `DataArray` and `Dataset` objects.\n", - "- Read and write netCDF files using Xarray.\n", - "- Understand that there are many packages that build on top of xarray\n", - "\n", - "\n", - "We'll start by reviewing the various components of the Xarray data model, represented here visually:\n", - "\n", - "" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "xr.set_options(keep_attrs=True, display_expand_data=False)\n", - "np.set_printoptions(threshold=10, edgeitems=2)\n", - "\n", - "%xmode minimal\n", - "%matplotlib inline\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Xarray has a few small real-world tutorial datasets hosted in the [xarray-data](https://github.com/pydata/xarray-data) GitHub repository.\n", - "\n", - "[xarray.tutorial.load_dataset](https://docs.xarray.dev/en/stable/generated/xarray.tutorial.open_dataset.html#xarray.tutorial.open_dataset) is a convenience function to download and open DataSets by name (listed at that link).\n", - "\n", - "Here we'll use `air temperature` from the [National Center for Environmental Prediction](https://www.weather.gov/ncep/). Xarray objects have convenient HTML representations to give an overview of what we're working with:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.tutorial.load_dataset(\"air_temperature\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that behind the scenes the `tutorial.open_dataset` downloads a file. 
It then uses [`xarray.open_dataset`](https://docs.xarray.dev/en/stable/generated/xarray.open_dataset.html#xarray-open-dataset) function to open that file (which for this datasets is a [netCDF](https://www.unidata.ucar.edu/software/netcdf/) file). \n", - "\n", - "A few things are done automatically upon opening, but controlled by keyword arguments. For example, try passing the keyword argument `mask_and_scale=False`... what happens?" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## What's in a Dataset? \n", - "\n", - "*Many DataArrays!* \n", - "\n", - "Datasets are dictionary-like containers of \"DataArray\"s. They are a mapping of\n", - "variable name to DataArray:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# pull out \"air\" dataarray with dictionary syntax\n", - "ds[\"air\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can save some typing by using the \"attribute\" or \"dot\" notation. This won't\n", - "work for variable names that clash with a built-in method name (like `mean` for\n", - "example).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# pull out dataarray using dot notation\n", - "ds.air" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## What's in a DataArray? \n", - "\n", - "*data + (a lot of) metadata*" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Name (optional)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da = ds.air\n", - "\n", - "da.name" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Named dimensions \n", - "\n", - "`.dims` correspond to the axes of your data. \n", - "\n", - "In this case we have 2 spatial dimensions (`latitude` and `longitude` are stored with shorthand names `lat` and `lon`) and one temporal dimension (`time`)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.dims" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Coordinate variables \n", - "\n", - "`.coords` is a simple [data container](https://docs.xarray.dev/en/stable/user-guide/data-structures.html#coordinates)\n", - "for coordinate variables.\n", - "\n", - "Here we see the actual timestamps and spatial positions of our air temperature data:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.coords" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Coordinates objects support similar indexing notation\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# extracting coordinate variables\n", - "da.lon" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# extracting coordinate variables from .coords\n", - "da.coords[\"lon\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "It is useful to think of the values in these coordinate variables as axis\n", - "\"labels\" such as \"tick labels\" in a figure. 
These are coordinate locations on a\n", - "grid at which you have data.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Arbitrary attributes \n", - "\n", - "`.attrs` is a dictionary that can contain arbitrary Python objects (strings, lists, integers, dictionaries, etc.) Your only\n", - "limitation is that some attributes may not be writeable to certain file formats." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.attrs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# assign your own attributes!\n", - "da.attrs[\"who_is_awesome\"] = \"xarray\"\n", - "da.attrs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Underlying data \n", - "\n", - "`.data` contains the [numpy array](https://numpy.org) storing air temperature values.\n", - "\n", - "\n", - "\n", - "Xarray structures wrap underlying simpler array-like data structures. This part of Xarray is quite extensible allowing for distributed array, GPU arrays, sparse arrays, arrays with units etc. We'll briefly look at this later in this tutorial." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# what is the type of the underlying data\n", - "type(da.data)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Review\n", - "\n", - "Xarray provides two main data structures:\n", - "\n", - "1. [`DataArrays`](https://docs.xarray.dev/en/stable/user-guide/data-structures.html#dataarray) that wrap underlying data containers (e.g. numpy arrays) and contain associated metadata\n", - "1. [`Datasets`](https://docs.xarray.dev/en/stable/user-guide/data-structures.html#dataset) that are dictionary-like containers of DataArrays\n", - "\n", - "DataArrays contain underlying arrays and associated metadata:\n", - "1. Name\n", - "2. Dimension names\n", - "3. Coordinate variables\n", - "4. and arbitrary attributes." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Why Xarray? \n", - "\n", - "Metadata provides context and provides code that is more legible. This reduces the likelihood of errors from typos and makes analysis more intuitive and fun!\n", - "\n", - "### Analysis without xarray: `X(`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# plot the first timestep\n", - "lat = ds.air.lat.data # numpy array\n", - "lon = ds.air.lon.data # numpy array\n", - "temp = ds.air.data # numpy array" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "plt.figure()\n", - "plt.pcolormesh(lon, lat, temp[0, :, :]);" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "temp.mean(axis=1) ## what did I just do? I can't tell by looking at this line." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Analysis with xarray `=)`\n", - "\n", - "How readable is this code?\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.isel(time=0).plot(x=\"lon\");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Use dimension names instead of axis numbers\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.mean(dim=\"time\").plot(x=\"lon\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## Extracting data or \"indexing\" \n", - "\n", - "Xarray supports\n", - "\n", - "- label-based indexing using `.sel`\n", - "- position-based indexing using `.isel`\n", - "\n", - "See the [user guide](https://docs.xarray.dev/en/stable/indexing.html) for more." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Label-based indexing\n", - "\n", - "Xarray inherits its label-based indexing rules from pandas; this means great\n", - "support for dates and times!\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# here's what ds looks like\n", - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# pull out data for all of 2013-May\n", - "ds.sel(time=\"2013-05\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate slicing\n", - "ds.sel(time=slice(\"2013-05\", \"2013-07\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.sel(time=\"2013\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate \"nearest\" indexing\n", - "ds.sel(lon=240.2, method=\"nearest\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# \"nearest indexing at multiple points\"\n", - "ds.sel(lon=[240.125, 234], lat=[40.3, 50.3], method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Position-based indexing\n", - "\n", - "This is similar to your usual numpy `array[0, 2, 3]` but with the power of named\n", - "dimensions!\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.data[0, 2, 3]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# pull out time index 0, lat index 2, and lon index 3\n", - "ds.air.isel(time=0, lat=2, lon=3) # much better than ds.air[0, 2, 3]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate slicing\n", - "ds.air.isel(lat=slice(10))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## Concepts for computation\n", - "\n", - "Consider calculating the *mean air temperature per unit surface area* for this dataset. Because latitude and longitude correspond to spherical coordinates for Earth's surface, each 2.5x2.5 degree grid cell actually has a different surface area as you move away from the equator! 
This is because *latitudinal length* is fixed ($ \\delta Lat = R \\delta \\phi $), but *longitudinal length varies with latitude* ($ \\delta Lon = R \\delta \\lambda \\cos(\\phi) $)\n", - "\n", - "So the [area element for lat-lon coordinates](https://en.wikipedia.org/wiki/Spherical_coordinate_system#Integration_and_differentiation_in_spherical_coordinates) is\n", - "\n", - "\n", - "$$ \\delta A = R^2 \\delta\\phi \\, \\delta\\lambda \\cos(\\phi) $$\n", - "\n", - "where $\\phi$ is latitude, $\\delta \\phi$ is the spacing of the points in latitude, $\\delta \\lambda$ is the spacing of the points in longitude, and $R$ is Earth's radius. (In this formula, $\\phi$ and $\\lambda$ are measured in radians)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Earth's average radius in meters\n", - "R = 6.371e6\n", - "\n", - "# Coordinate spacing for this dataset is 2.5 x 2.5 degrees\n", - "dϕ = np.deg2rad(2.5)\n", - "dλ = np.deg2rad(2.5)\n", - "\n", - "dlat = R * dϕ * xr.ones_like(ds.air.lon)\n", - "dlon = R * dλ * np.cos(np.deg2rad(ds.air.lat))\n", - "dlon.name = \"dlon\"\n", - "dlat.name = \"dlat\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "There are two concepts here:\n", - "1. you can call functions like `np.cos` and `np.deg2rad` ([\"numpy ufuncs\"](https://numpy.org/doc/stable/reference/ufuncs.html)) on Xarray objects and receive an Xarray object back.\n", - "2. We used [ones_like](https://docs.xarray.dev/en/stable/generated/xarray.ones_like.html) to create a DataArray that looks like `ds.air.lon` in all respects, except that the data are all ones" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# returns an xarray DataArray!\n", - "np.cos(np.deg2rad(ds.lat))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# cell latitude length is constant with longitude\n", - "dlat" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# cell longitude length changes with latitude\n", - "dlon" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Broadcasting: expanding data\n", - "\n", - "Our longitude and latitude length DataArrays are both 1D with different dimension names. If we multiple these DataArrays together the dimensionality is expanded to 2D by _broadcasting_:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cell_area = dlon * dlat\n", - "cell_area" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The result has two dimensions because xarray realizes that dimensions `lon` and\n", - "`lat` are different so it automatically \"broadcasts\" to get a 2D result. See the\n", - "last row in this image from _Jake VanderPlas Python Data Science Handbook_\n", - "\n", - "\n", - "\n", - "Because xarray knows about dimension names we avoid having to create unnecessary\n", - "size-1 dimensions using `np.newaxis` or `.reshape`. For more, see the [user guide](https://docs.xarray.dev/en/stable/user-guide/computation.html#broadcasting-by-dimension-name)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "### Alignment: putting data on the same grid\n", - "\n", - "When doing arithmetic operations xarray automatically \"aligns\" i.e. 
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "### Alignment: putting data on the same grid\n", - "\n", - "When doing arithmetic operations xarray automatically \"aligns\" i.e. puts the\n", - "data on the same grid. In this case `cell_area` and `ds.air` are at the same\n", - "lat, lon points, so we end up with a result with the same shape (25x53):\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.air.isel(time=1) / cell_area" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now let's make `cell_area` unaligned i.e. change the coordinate labels\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# make a copy of cell_area\n", - "# then add 1e-5 degrees to latitude\n", - "cell_area_bad = cell_area.copy(deep=True)\n", - "cell_area_bad[\"lat\"] = cell_area.lat + 1e-5 # latitudes are off by 1e-5 degrees!\n", - "cell_area_bad" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cell_area_bad * ds.air.isel(time=1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The result is an empty array with no latitude coordinates because none of them were aligned!\n", - "\n", - "```{tip}\n", - "If you notice extra NaNs or missing points after xarray computation, it\n", - "means that your xarray coordinates were not aligned _exactly_.\n", - "```\n", - "\n", - "To make sure variables are aligned as you think they are, do the following:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "raises-exception" - ] - }, - "outputs": [], - "source": [ - "xr.align(cell_area_bad, ds.air, join=\"exact\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The above statement raises an error since the two are not aligned." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{seealso}\n", - "For more, see\n", - "[the Xarray documentation](https://docs.xarray.dev/en/stable/user-guide/computation.html#automatic-alignment). [This tutorial notebook](https://tutorial.xarray.dev/fundamentals/02.3_aligning_data_objects.html) also covers alignment and broadcasting (*highly recommended*)\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## High level computation \n", - "\n", - "(`groupby`, `resample`, `rolling`, `coarsen`, `weighted`)\n", - "\n", - "Xarray has some very useful high level objects that let you do common\n", - "computations:\n", - "\n", - "1. `groupby` :\n", - " [Bin data into groups and reduce](https://docs.xarray.dev/en/stable/groupby.html)\n", - "1. `resample` :\n", - " [Groupby specialized for time axes. Either downsample or upsample your data.](https://docs.xarray.dev/en/stable/user-guide/time-series.html#resampling-and-grouped-operations)\n", - "1. `rolling` :\n", - " [Operate on rolling windows of your data e.g. running mean](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations)\n", - "1. `coarsen` :\n", - " [Downsample your data](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays)\n", - "1. `weighted` :\n", - " [Weight your data before reducing](https://docs.xarray.dev/en/stable/user-guide/computation.html#weighted-array-reductions)\n", - "\n", - "\n", - "Below we quickly demonstrate these patterns. See the user guide links above and [the tutorial](https://tutorial.xarray.dev/intermediate/01-high-level-computation-patterns.html) for more.\n",
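- "\n", - "`rolling` and `coarsen` do not get their own demo below, so here is a minimal sketch of both (using the same `ds`; the window and block sizes are arbitrary choices):\n", - "\n", - "```python\n", - "# 5-point running mean along time (a rough sketch)\n", - "ds.air.rolling(time=5, center=True).mean()\n", - "\n", - "# downsample by averaging 5x5 blocks of grid cells\n", - "ds.air.coarsen(lat=5, lon=5, boundary=\"trim\").mean()\n", - "```"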
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### groupby\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# here's ds\n", - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# seasonal groups\n", - "ds.groupby(\"time.season\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# make a seasonal mean\n", - "seasonal_mean = ds.groupby(\"time.season\").mean()\n", - "seasonal_mean" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The seasons are out of order (they are alphabetically sorted). This is a common\n", - "annoyance. The solution is to use `.sel` to change the order of labels\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "seasonal_mean = seasonal_mean.sel(season=[\"DJF\", \"MAM\", \"JJA\", \"SON\"])\n", - "seasonal_mean" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "seasonal_mean.air.plot(col=\"season\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### resample\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# resample to monthly frequency\n", - "ds.resample(time=\"M\").mean()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### weighted\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# weight by cell_area and take mean over (time, lon)\n", - "ds.weighted(cell_area).mean([\"lon\", \"time\"]).air.plot(y=\"lat\");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## Visualization\n", - "\n", - "(`.plot`)\n", - "\n", - "\n", - "We have seen very simple plots earlier. Xarray also lets you easily visualize\n", - "3D and 4D datasets by presenting multiple facets (or panels or subplots) showing\n", - "variations across rows and/or columns." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# facet the seasonal_mean\n", - "seasonal_mean.air.plot(col=\"season\", col_wrap=2);" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# contours\n", - "seasonal_mean.air.plot.contour(col=\"season\", levels=20, add_colorbar=True);" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# line plots too? wut\n", - "seasonal_mean.air.mean(\"lon\").plot.line(hue=\"season\", y=\"lat\");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For more see the [user guide](https://docs.xarray.dev/en/stable/plotting.html), the [gallery](https://docs.xarray.dev/en/stable/examples/visualization_gallery.html), and [the tutorial material](https://tutorial.xarray.dev/fundamentals/04.0_plotting.html)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## Reading and writing files\n", - "\n", - "Xarray supports many disk formats. Below is a small example using netCDF. 
For\n", - "more see the [documentation](https://docs.xarray.dev/en/stable/user-guide/io.html)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# write to netCDF\n", - "ds.to_netcdf(\"my-example-dataset.nc\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{note}\n", - "To avoid the `SerializationWarning` you can assign a _FillValue for any NaNs in the 'air' array by adding the keyword argument `encoding=dict(air=dict(_FillValue=-9999))`\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# read from disk\n", - "fromdisk = xr.open_dataset(\"my-example-dataset.nc\")\n", - "fromdisk" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# check that the two are identical\n", - "ds.identical(fromdisk)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{tip}\n", - "A common use case is reading datasets that are a collection of many netCDF\n", - "files. See the [documentation](https://docs.xarray.dev/en/stable/user-guide/io.html#reading-multi-file-datasets) for how\n", - "to handle that.\n", - "```\n", - "\n", - "Finally, to read other file formats, you might find yourself reading in the data using a different library and then creating a DataArray ([docs](https://docs.xarray.dev/en/stable/user-guide/data-structures.html#creating-a-dataarray), [tutorial](https://tutorial.xarray.dev/fundamentals/01.1_creating_data_structures.html)) from scratch. For example, you might use `h5py` to open an HDF5 file and then create a Dataset from that.\n", - "For MATLAB files you might use `scipy.io.loadmat` or `h5py` depending on the version of MATLAB file you're opening and then construct a Dataset." - ] - },
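- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As a minimal sketch of that \"from scratch\" route (the array and names below are made up for illustration), you wrap the plain NumPy array with dimension names, coordinates, and attributes:\n", - "\n", - "```python\n", - "import numpy as np\n", - "import xarray as xr\n", - "\n", - "# pretend `temps` was read with h5py, scipy.io.loadmat, etc.\n", - "temps = np.random.rand(4, 3)\n", - "\n", - "da = xr.DataArray(\n", - "    temps,\n", - "    dims=(\"time\", \"station\"),\n", - "    coords={\"station\": [\"A\", \"B\", \"C\"]},\n", - "    attrs={\"units\": \"degC\"},\n", - ")\n", - "ds_scratch = da.to_dataset(name=\"temperature\")\n", - "```\n" - ] - },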
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## The scientific python ecosystem\n", - "\n", - "Xarray ties into the larger scientific python ecosystem and in turn many\n", - "packages build on top of xarray. A long list of such packages is on the Xarray [Ecosystem](https://docs.xarray.dev/en/stable/ecosystem.html) page.\n", - "\n", - "Now we will demonstrate some cool features.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Pandas: tabular data structures\n", - "\n", - "You can easily [convert](https://docs.xarray.dev/en/stable/pandas.html) between xarray and [pandas](https://pandas.pydata.org/) structures. This allows you to conveniently use the extensive pandas \n", - "ecosystem of packages (like [seaborn](https://seaborn.pydata.org/)) for your work.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# convert to pandas dataframe\n", - "df = ds.isel(time=slice(10)).to_dataframe()\n", - "df" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# convert dataframe to xarray\n", - "df.to_xarray()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Alternative array types\n", - "\n", - "This notebook has focused on NumPy arrays. Xarray can wrap [other array](https://docs.xarray.dev/en/stable/user-guide/duckarrays.html) types! For example:\n", - "\n", - " **dask** : [distributed parallel arrays](https://docs.dask.org/en/latest/array.html) & [Xarray user guide on Dask](https://docs.xarray.dev/en/stable/user-guide/dask.html)\n", - "\n", - " **pydata/sparse** : [sparse arrays](https://sparse.pydata.org)\n", - "\n", - " **cupy** : [GPU arrays](https://cupy.dev) & [cupy-xarray](https://cupy-xarray.readthedocs.io/)\n", - "\n", - " **pint** : [unit-aware arrays](https://pint.readthedocs.io) & [pint-xarray](https://github.com/xarray-contrib/pint-xarray)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Dask\n", - "\n", - "Dask cuts up NumPy arrays into blocks and parallelizes your analysis code across\n", - "these blocks.\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate dask dataset\n", - "dasky = xr.tutorial.open_dataset(\n", - " \"air_temperature\",\n", - " chunks={\"time\": 10}, # 10 time steps in each block\n", - ")\n", - "\n", - "dasky.air" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "All computations with dask-backed xarray objects are lazy, allowing you to build\n", - "up a complicated chain of analysis steps quickly\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate lazy mean\n", - "dasky.air.mean(\"lat\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To get concrete values, call `.compute` or `.load`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# \"compute\" the mean\n", - "dasky.air.mean(\"lat\").compute()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### HoloViz\n", - "\n", - "Quickly generate interactive plots from your data!\n", - "\n", - "The [`hvplot` package](https://hvplot.holoviz.org/user_guide/Gridded_Data.html) attaches itself to all\n", - "xarray objects under the `.hvplot` namespace. So instead of using `.plot`, use `.hvplot`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import hvplot.xarray\n", - "\n", - "ds.air.hvplot(groupby=\"time\", clim=(270, 300), widget_location='bottom')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```{note}\n", - "The time slider will only work if you're executing the notebook, rather than viewing the website\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### cf_xarray \n", - "\n", - "[cf_xarray](https://cf-xarray.readthedocs.io/) is a project that tries to\n", - "let you make use of other CF attributes that xarray ignores. 
It attaches itself\n", - "to all xarray objects under the `.cf` namespace.\n", - "\n", - "Where xarray allows you to specify dimension names for analysis, `cf_xarray`\n", - "lets you specify logical names like `\"latitude\"` or `\"longitude\"` instead as\n", - "long as the appropriate CF attributes are set.\n", - "\n", - "For example, the `\"longitude\"` dimension in different files might be labelled as: (lon, LON, long, x…), but cf_xarray lets you always refer to the logical name `\"longitude\"` in your code:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import cf_xarray" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# describe cf attributes in dataset\n", - "ds.air.cf" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The following `mean` operation will work with any dataset that has appropriate\n", - "attributes set that allow detection of the \"latitude\" variable (e.g.\n", - "`units: \"degrees_north\"` or `standard_name: \"latitude\"`)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate equivalent of .mean(\"lat\")\n", - "ds.air.cf.mean(\"latitude\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate indexing\n", - "ds.air.cf.sel(longitude=242.5, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Other cool packages\n", - "\n", - "- [xgcm](https://xgcm.readthedocs.io/) : grid-aware operations with xarray\n", - " objects\n", - "- [xrft](https://xrft.readthedocs.io/) : Fourier transforms with xarray\n", - "- [xclim](https://xclim.readthedocs.io/) : calculating climate indices with\n", - " xarray objects\n", - "- [intake-xarray](https://intake-xarray.readthedocs.io/) : forget about file\n", - " paths\n", - "- [rioxarray](https://corteva.github.io/rioxarray/stable/index.html) : raster\n", - " files and xarray\n", - "- [xesmf](https://xesmf.readthedocs.io/) : regrid using ESMF\n", - "- [MetPy](https://unidata.github.io/MetPy/latest/index.html) : tools for working\n", - " with weather data\n", - "\n", - "Check the Xarray [Ecosystem](https://docs.xarray.dev/en/stable/ecosystem.html) page and [this tutorial](https://tutorial.xarray.dev/intermediate/xarray_ecosystem.html) for even more packages and demonstrations." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Next\n", - "\n", - "1. Read the [tutorial](https://tutorial.xarray.dev) material and [user guide](https://docs.xarray.dev/en/stable/user-guide/index.html)\n", - "1. See the description of [common terms](https://docs.xarray.dev/en/stable/terminology.html) used in the xarray documentation\n", - "1. Answers to common questions on \"how to do X\" with Xarray are [here](https://docs.xarray.dev/en/stable/howdoi.html)\n", - "1. Ryan Abernathey has a book on data analysis with a [chapter on Xarray](https://earth-env-data-science.github.io/lectures/xarray/xarray_intro.html)\n", - "1. [Project Pythia](https://projectpythia.org/) has [foundational](https://foundations.projectpythia.org/landing-page.html) and more [advanced](https://cookbooks.projectpythia.org/) material on Xarray. Pythia also aggregates other [Python learning resources](https://projectpythia.org/resource-gallery.html).\n", - "1. 
The [Xarray Github Discussions](https://github.com/pydata/xarray/discussions) and [Pangeo Discourse](https://discourse.pangeo.io/) are good places to ask questions.\n", - "1. Tell your friends! Tweet!\n", - "\n", - "\n", - "## Welcome!\n", - "\n", - "Xarray is an open-source project and gladly welcomes all kinds of contributions. This could include reporting bugs, discussing new enhancements, contributing code, helping answer user questions, contributing documentation (even small edits like fixing spelling mistakes or rewording to make the text clearer). Welcome!" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/pixi.lock b/pixi.lock deleted file mode 100644 index 07a87ab6..00000000 --- a/pixi.lock +++ /dev/null @@ -1,12815 +0,0 @@ -version: 6 -environments: - default: - channels: - - url: https://conda.anaconda.org/conda-forge/ - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-47.0-unix_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.19.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.12-py312h178313f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aioitertools-0.12.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.8.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.1-h205f482_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.1-h1a47875_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.10.6-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-h4e1184b_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-h7959bf6_11.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hefd7a92_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-h173a860_6.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-h11f4f37_12.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.9-he1b24dc_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.2-h4e1184b_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-h4e1184b_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.9-he0e7f3f_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.489-h4d475cb_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/botocore-1.36.3-pyge310_1234567_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2025.1.31-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/crc32c-2.7.1-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.1-py312hda17c39_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-labextension-7.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dataclasses-0.8-pyhc8e2a94_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/datashader-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.12-py312h2ec8cdc_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.18-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/docopt-ng-0.9.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/docutils-0.20.1-py312h7900ff3_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/donfig-0.8.1.post1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-h166bdaf_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/filelock-3.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flexcache-0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flexparser-0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flox-0.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.56.0-py312h178313f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h178313f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.4-h3551947_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geoviews-core-1.14.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.82.2-h4833e2c_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.38.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-storage-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/google-crc32c-1.1.2-py312hb42adb9_6.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.68.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.2.1-h5ae0cbf_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.1.1-py312h2ec8cdc_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/grpcio-1.67.1-py312hacea422_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gsw-3.6.19-py312hc0a28a1_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h021d004_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.5.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.12.1-nompi_py312hd203070_103.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.2.0-h4bba637_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/holoviews-1.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hvplot-0.11.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.32.0-pyh907856f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter-book-1.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-resource-usage-1.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-server-proxy-4.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_bokeh-4.0.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-myst-2.4.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.8-py312h84d6215_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.2-pyh41aed27_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-19.0.1-hfa2a6e7_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-19.0.1-hcb10f89_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-19.0.1-hcb10f89_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-19.0.1-h08228c5_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-31_h59b9bed_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-31_he106b2a_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.12.1-h332b0f4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6f5c62b_11.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.2-h3359108_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.35.0-h2b5623c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.35.0-h0121fbd_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.67.1-h25350d4_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libheif-1.19.5-gpl_hc21c24c_100.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h4ce23a2_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-31_h7ac8fdf_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-ha7bfdaf_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.4-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.29-pthreads_h94d23a6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.18.0-hfcad708_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-headers-1.18.0-ha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-19.0.1-h081d1f1_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.47-h943b412_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_12.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.49.1-hee588c1_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.10.0-h4c51ac1_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.8.0-hc4a0caf_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.6-h8d12d68_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/linkify-it-py-2.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312h374181b_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hf0f0c11_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_2.conda - - 
conda: https://conda.anaconda.org/conda-forge/noarch/multipledispatch-0.6.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/myst-nb-1.2.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/myst-parser-2.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.61.0-py312h2e6246c_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numbagg-0.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.15.1-py312hf9745cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.3-py312h58c1407_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numpy_groupies-0.11.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.1-h7b32b05_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-h12ee42a_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/panel-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.1-h861ebed_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/param-2.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pint-0.24.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pint-xarray-0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.1.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.50-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py312h178313f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.26.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/protobuf-5.28.3-py312h2ec8cdc_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.8-py312h98912ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-19.0.1-py312h7900ff3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-19.0.1-py312h01725c0_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pybtex-docutils-1.0.3-py312h7900ff3_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyct-0.5.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydap-3.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.10.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-25.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.1-py312he630544_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.9-h9e4cc4f_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh91182bf_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyviz_comms-3.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h178313f_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.1-py312hbf22597_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.4.3-py312h8cae83d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h9925aae_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.18.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.22.3-py312h12e396e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.11-h072c03f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/s3fs-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.2-py312ha707e6e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.7-py312h391bc85_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/simpervisor-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-7.4.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-book-theme-1.1.4-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-codeautolink-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-comments-0.0.3-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-external-toc-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-jupyterbook-latex-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-multitoc-numbering-0.1.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-notfound-page-1.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-thebe-0.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-togglebutton-0.3.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-bibtex-2.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-mermaid-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxext-rediraffe-0.2.7-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.38-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.49.1-h9eae976_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/uc-micro-py-1.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.17.2-py312h66e93f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.11-h4f16b4b_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h178313f_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/zarr-3.0.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - osx-arm64: - - conda: https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-47.0-unix_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.19.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aiohttp-3.11.12-py312h998013c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aioitertools-0.12.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.8.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aom-3.9.1-h7bae524_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/argon2-cffi-bindings-21.2.0-py312h024a12e_5.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/atk-1.0-2.38.0-hd03087b_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.8.1-hfc2798a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.8.1-hc8a0bd2_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.10.6-h5505292_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.3.0-hc8a0bd2_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.5.0-h54f970a_11.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.9.2-h96aa502_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.15.3-haba67d1_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.11.0-h24f418c_12.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.7.9-hf37e03c_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.2.2-hc8a0bd2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.2.2-hc8a0bd2_4.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.29.9-ha81f72f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.489-h0e5014b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.14.0-hd50102c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.10.0-hc602bab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.13.0-h7585a09_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.8.0-h9ca1f76_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.12.0-hcdd55da_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/blosc-1.21.6-h7dd00d9_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/botocore-1.36.3-pyge310_1234567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-hd74edd7_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-hd74edd7_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312hde4cb15_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.4-h5505292_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2025.1.31-hf0a4a13_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.2-h6a3b0d2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cartopy-0.24.0-py312hcd31e36_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h0fad829_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cftime-1.6.4-py312h755e627_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.1-py312hb23fbb9_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/crc32c-2.7.1-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-44.0.1-py312hf9bd80e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.0.1-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-labextension-7.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dataclasses-0.8-pyhc8e2a94_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/datashader-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/dav1d-1.2.1-hb547adb_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.12-py312hd8f9ff3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.18-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/docopt-ng-0.9.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/docutils-0.20.1-py312h81bd7bf_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/donfig-0.8.1.post1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/epoxy-1.5.10-h1c322ee_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flexcache-0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flexparser-0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flox-0.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.15.0-h1383a14_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.56.0-py312h998013c_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.12.1-hadb7bae_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/freexl-2.0.0-h3ab3353_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fribidi-1.0.10-h27ca646_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/frozenlist-1.5.0-py312h998013c_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gdk-pixbuf-2.42.12-h7ddc832_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/geos-3.13.0-hf9b8971_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/geotiff-1.7.4-hbef4fa4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geoviews-core-1.14.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/giflib-5.2.2-h93a5062_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/glib-tools-2.82.2-h1dc7a0c_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.38.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-storage-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/google-crc32c-1.1.2-py312h1fa1217_6.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.68.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/graphite2-1.3.13-hebf3989_1003.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/graphviz-12.2.1-hff64154_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.1.1-py312hd8f9ff3_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/grpcio-1.67.1-py312he4e58e5_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gsw-3.6.19-py312h755e627_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gtk3-3.24.43-he7bb075_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gts-0.7.6-he42f4ea_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.5.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.12.1-nompi_py312h34530d4_103.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/harfbuzz-10.2.0-ha0dd535_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf4-4.2.15-h2ee6834_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.4-nompi_ha698983_105.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/hicolor-icon-theme-0.17-hce30654_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/holoviews-1.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hvplot-0.11.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-75.1-hfee45f7_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.32.0-pyh907856f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/json-c-0.18-he4178ee_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-book-1.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-resource-usage-1.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-server-proxy-4.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_bokeh-4.0.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-myst-2.4.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.8-py312h2c4a281_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.21.3-h237132a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.17-h7eeda09_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.2-pyh41aed27_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.0.0-h9a09cb3_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20240722.0-cxx17_h07bc746_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libaec-1.1.3-hebf3989_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarchive-3.7.7-h3b16cec_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-19.0.1-h0945df6_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-19.0.1-hf07054f_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-19.0.1-hf07054f_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-19.0.1-h4239455_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libasprintf-0.23.1-h493aca8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libavif16-1.1.1-h45b7238_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-31_h10e41b3_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-hd74edd7_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-hd74edd7_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-hd74edd7_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-31_hb3479ef_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.12.1-h73640d1_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.7-ha82da77_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libde265-1.0.15-h2ffa867_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.23-hec38601_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libevent-2.1.12-h2757513_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgd-2.3.3-hb2c3a21_11.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-core-3.10.2-h9ef0d2d_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libgettextpo-0.23.1-h493aca8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-5.0.0-13_2_0_hd922786_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-13.2.0-hf226fd6_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.82.2-hdff4504_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.35.0-hdbe95d5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.35.0-h7081f7f_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.67.1-h0a426d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libheif-1.19.5-gpl_h297b2c4_100.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-hfe07756_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.23.1-h493aca8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.0.0-hb547adb_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libkml-1.3.0-he250239_1021.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-31_hc9a63f6_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libllvm15-15.0.7-h4429f82_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.6.4-h39f12f2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnetcdf-4.9.2-nompi_h6569565_116.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.64.0-h6d7220d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.29-openmp_hf332438_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.18.0-h0c05b2d_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.18.0-hce30654_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-19.0.1-h636d7b7_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libpng-1.6.47-h3783ad8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-5.28.3-h3bd63a1_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2024.07.02-h07bc746_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/librsvg-2.58.4-h266df6f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/librttopo-1.1.0-ha2cf0f4_17.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsodium-1.0.20-h99b78c6_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libspatialite-5.1.0-hf92fc0a_12.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.49.1-h3f77e49_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h9cc3647_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.21.0-h64651cc_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.0-h551f018_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.10.0-hda25de7_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libwebp-base-1.5.0-h2471fea_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxcb-1.17.0-hdb1d25a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.6-h178c5d8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzip-1.11.2-h1336266_0.conda - 
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/linkify-it-py-2.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-19.1.7-hdb05f8b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312h728bc31_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.3.3-py312hf263c89_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lzo-2.10-h93a5062_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312h998013c_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.0-py312hdbc7e53_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/minizip-4.0.7-hff1a8ea_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.0-py312h6142ec9_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/multidict-6.1.0-py312hdb8e49c_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/multipledispatch-0.6.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/myst-nb-1.2.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/myst-parser-2.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/netcdf4-1.7.2-nompi_py312haae1a11_101.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nlohmann_json-3.11.3-h00cdb27_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.61.0-py312hdf12f13_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numbagg-0.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numcodecs-0.15.1-py312hcb1e3ce_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.1.3-py312h94ee1e1_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numpy_groupies-0.11.2-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.3-h8a3d83b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.1-h81ee809_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.0.3-h0ff2369_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.2.3-py312hcd31e36_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/panel-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pango-1.56.1-h73f1e88_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/param-2.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.44-h297a79d_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.1.0-py312h50aef2c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pint-0.24.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pint-xarray-0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.44.2-h2f9eb0b_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.1.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/proj-9.5.1-h1318a7e_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/prometheus-cpp-1.3.0-h0967b3e_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.50-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/propcache-0.2.1-py312h998013c_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.26.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/protobuf-5.28.3-py312hd8f9ff3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-5.9.8-py312he37b823_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-hd74edd7_1002.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-19.0.1-py312h1f38498_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-core-19.0.1-py312hc40f475_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pybtex-docutils-1.0.3-py312h81bd7bf_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyct-0.5.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydap-3.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.10.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-11.0-py312hb9d441b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-11.0-py312hb9d441b_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-25.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyproj-3.7.1-py312h4b98159_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.9-hc22306f_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh91182bf_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyviz_comms-3.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.2-py312h998013c_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyzmq-26.2.1-py312hf4875e0_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/qhull-2020.2-h420ef59_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rasterio-1.4.3-py312h02264c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rav1e-0.6.6-h69fbcac_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2024.07.02-h6589ca4_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.18.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.22.3-py312hcd83bfe_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/s3fs-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.15.2-py312h99a188d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/shapely-2.0.7-py312ha6455e5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/simpervisor-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.1-h98b9ce2_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-7.4.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-book-theme-1.1.4-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-codeautolink-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-comments-0.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-external-toc-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-jupyterbook-latex-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-multitoc-numbering-0.1.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-notfound-page-1.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-thebe-0.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-togglebutton-0.3.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-bibtex-2.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-mermaid-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxext-rediraffe-0.2.7-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.38-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sqlite-3.49.1-hd7222ec_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/svt-av1-2.3.0-hf24288c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.4.2-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uc-micro-py-1.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ukkonen-1.0.1-py312h6142ec9_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/uriparser-0.9.8-h00cdb27_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/wrapt-1.17.2-py312hea69d52_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/x265-3.5-hbc6ce65_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.2-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/xerces-c-3.2.5-h92fc2f4_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxau-1.0.12-h5505292_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxdmcp-1.1.5-hd74edd7_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h3422bc3_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.18.3-py312h998013c_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zarr-3.0.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc1bb282_7.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.1-h8359307_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.23.0-py312h15fbf35_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.6-hb46c0d2_0.conda - win-64: - - conda: https://conda.anaconda.org/conda-forge/win-64/_libavif_api-1.1.1-h57928b3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.19.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aiohttp-3.11.12-py312h31fea79_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aioitertools-0.12.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.8.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aom-3.9.1-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.8.1-hd11252f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.8.1-h099ea23_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.10.6-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.3.0-h099ea23_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.5.0-h85d8506_11.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.9.2-h3888f84_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.15.3-hc5a9e45_6.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.11.0-h2c94728_12.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.7.9-h6a47413_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.2.2-h099ea23_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.2.2-h099ea23_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.29.9-he488853_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.489-h7d73209_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-hfd34d9b_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/botocore-1.36.3-pyge310_1234567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.4-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2025.1.31-h56e8100_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cairo-1.18.2-h5782bbf_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cartopy-0.24.0-py312h72972c8_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cftime-1.6.4-py312h1a27103_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh7428d3b_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.1-py312hd5eb7cc_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.9-py312hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/crc32c-2.7.1-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cryptography-44.0.1-py312h9500af3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.1-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-labextension-7.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dataclasses-0.8-pyhc8e2a94_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/datashader-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/dav1d-1.2.1-hcfcfb64_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.12-py312h275cf98_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.18-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/docopt-ng-0.9.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/docutils-0.20.1-py312h2e8e312_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/donfig-0.8.1.post1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flexcache-0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flexparser-0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/flox-0.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.15.0-h765892d_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.56.0-py312h31fea79_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-hdaf720e_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/freexl-2.0.0-hf297d47_2.conda - - 
conda: https://conda.anaconda.org/conda-forge/win-64/fribidi-1.0.10-h8d14728_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/frozenlist-1.5.0-py312h31fea79_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.4-h887f4e7_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geoviews-core-1.14.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/getopt-win32-0.1-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.38.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-storage-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/google-crc32c-1.1.2-py312he3df1c8_6.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.68.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/graphviz-12.2.1-hf40819d_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/greenlet-3.1.1-py312h275cf98_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/grpcio-1.67.1-py312h5b982ce_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/gsw-3.6.19-py312h1a27103_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/gts-0.7.6-h6b5321d_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.5.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/h5py-3.12.1-nompi_py312h0db4ba1_103.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-10.2.0-h885c0d4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h5557f11_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/hdf5-1.14.4-nompi_hd5d9e70_105.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/holoviews-1.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hvplot-0.11.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/icu-75.1-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.32.0-pyh9ab4c32_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-book-1.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-resource-usage-1.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-server-proxy-4.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_bokeh-4.0.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-myst-2.4.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.8-py312hc790b64_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/lcms2-2.17-hbcf6048_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.2-pyh41aed27_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_h4eb7d71_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.7-h979ed78_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-19.0.1-h8dcb746_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-19.0.1-h7d8d6a5_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-19.0.1-h7d8d6a5_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-19.0.1-h3dbecdf_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libavif16-1.1.1-h4d049a7_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-31_h641d27c_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-31_h5e41251_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.12.1-h88aaa65_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libde265-1.0.15-h91493d7_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.23-h9062f6e_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.4-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.6-h537db12_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.2.0-h1383e82_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgd-2.3.3-h7208af6_11.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.10.2-h095903c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.2-h7025463_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.2.0-h1383e82_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.35.0-h95c5cb2_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.35.0-he5eb982_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.67.1-h0ac93cb_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libheif-1.19.5-gpl_hc631cee_100.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.11.2-default_ha69328c_1001.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.18-h135ad9c_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-3.0.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-31_h1aa476e_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/liblzma-5.6.4-h2466b09_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h5bdc103_116.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-19.0.1-ha850022_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.47-had7236b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.28.3-h8309712_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2024.07.02-h4eb7d71_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_12.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.49.1-h67fdade_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libssh2-1.11.1-he619c9f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.21.0-hbe90ef8_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.7.0-h797046b_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libutf8proc-2.10.0-hf9b99b7_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.5.0-h3b0e114_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libwinpthread-12.0.0.r4.gg4f2fc60ca-h57928b3_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.17.0-h0e4246c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.13.6-he286e8c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.11.2-h3135430_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/linkify-it-py-2.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.44.0-py312h1f7db74_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h032eceb_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.10.0-h2466b09_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.10.0-py312h90004f6_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/minizip-4.0.7-h9fa1bad_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.2.2-h66d3029_15.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/multidict-6.1.0-py312h31fea79_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/multipledispatch-0.6.0-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/myst-nb-1.2.0-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/myst-parser-2.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.7.2-nompi_py312h57e6fe7_101.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.61.0-py312hcccf92d_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numbagg-0.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/numcodecs-0.15.1-py312h72972c8_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.1.3-py312h49bc9c5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numpy_groupies-0.11.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.3-h4d64b90_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.4.1-ha4e3fda_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.3-haf104fe_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.3-py312h72972c8_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/panel-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pango-1.56.1-h286b592_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/param-2.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pcre2-10.44-h3d7b363_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-11.1.0-py312h078707f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pint-0.24.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pint-xarray-0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.44.2-had0cd8c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.1.0-pyha770c72_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/proj-9.5.1-h4f671f6_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.50-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/propcache-0.2.1-py312h31fea79_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.26.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/protobuf-5.28.3-py312h275cf98_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-5.9.8-py312he70551f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-h0e40799_1002.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-19.0.1-py312h2e8e312_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-19.0.1-py312h6a9c419_0_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pybtex-docutils-1.0.3-py312h2e8e312_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyct-0.5.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydap-3.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.10.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-25.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.7.1-py312ha24589b_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh09c184e_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.9-h3f84c4b_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh91182bf_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyviz_comms-3.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-307-py312h275cf98_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.15-py312h275cf98_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h31fea79_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.1-py312hd7027bb_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.4.3-py312hc0daee4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rav1e-0.6.6-h975169c_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2024.07.02-haf4117d_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.18.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.22.3-py312h2615798_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/s3fs-2025.2.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.15.2-py312h451d5c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.7-py312h0c580ee_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/simpervisor-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/snappy-1.2.1-h500f7fa_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-7.4.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-book-theme-1.1.4-pyh29332c3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-codeautolink-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-comments-0.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-external-toc-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-jupyterbook-latex-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-multitoc-numbering-0.1.3-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/sphinx-notfound-page-1.0.4-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-thebe-0.3.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-togglebutton-0.3.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-bibtex-2.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-mermaid-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxext-rediraffe-0.2.7-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/sqlalchemy-2.0.38-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.49.1-h2466b09_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/svt-av1-2.3.0-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-h62715c5_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.2-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uc-micro-py-1.0.3-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/unicodedata2-16.0.0-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda - 
- conda: https://conda.anaconda.org/conda-forge/win-64/uriparser-0.9.8-h5a68840_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h5fd82a7_24.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.42.34433-h6356254_24.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.42.34433-hfef2bbc_24.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_8.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.17.2-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/x265-3.5-h2d74725_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libice-1.1.2-h0e40799_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libsm-1.2.5-h0e40799_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libx11-1.8.11-hf48077a_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.12-h0e40799_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.5-h0e40799_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxext-1.3.6-h0e40799_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxpm-3.5.17-h0e40799_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxt-1.3.1-h0e40799_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/yarl-1.18.3-py312h31fea79_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zarr-3.0.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_7.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda -packages: -- conda: https://conda.anaconda.org/conda-forge/win-64/_libavif_api-1.1.1-h57928b3_2.conda - sha256: b99b8948a170ff721ea958ee04a4431797070e85dd6942cb27b73ac3102e5145 - md5: 76cf1f62c9a62d6b8f44339483e0f016 - size: 9286 - timestamp: 1730268773319 -- 
conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 - md5: d7c89558ba9fa0495403155b64376d81 - license: None - size: 2562 - timestamp: 1578324546067 -- conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - build_number: 16 - sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 - md5: 73aaf86a425cc6e73fcf236a5a46396d - depends: - - _libgcc_mutex 0.1 conda_forge - - libgomp >=7.5.0 - constrains: - - openmp_impl 9999 - license: BSD-3-Clause - license_family: BSD - size: 23621 - timestamp: 1650670423406 -- conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda - build_number: 8 - sha256: 1a62cd1f215fe0902e7004089693a78347a30ad687781dfda2289cab000e652d - md5: 37e16618af5c4851a3f3d66dd0e11141 - depends: - - libgomp >=7.5.0 - - libwinpthread >=12.0.0.r2.ggc561118da - constrains: - - openmp_impl 9999 - - msys2-conda-epoch <0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 49468 - timestamp: 1718213032772 -- conda: https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda - sha256: 1307719f0d8ee694fc923579a39c0621c23fdaa14ccdf9278a5aac5665ac58e9 - md5: 74ac5069774cdbc53910ec4d631a3999 - depends: - - pygments - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 1326096 - timestamp: 1734956217254 -- conda: https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-47.0-unix_0.conda - sha256: 188dca9a847f474b3df71eda9fe828fbe10b53aa6f4313c7e117f3114b1dd84e - md5: 49436a5c604f99058473d84580f0e341 - depends: - - __unix - - hicolor-icon-theme - - librsvg - license: LGPL-3.0-or-later OR CC-BY-SA-3.0 - license_family: LGPL - size: 566980 - timestamp: 1728314504182 -- conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda - sha256: 0deeaf0c001d5543719db9b2686bc1920c86c7e142f9bec74f35e1ce611b1fc2 - md5: 8c4061f499edec6b8ac7000f6d586829 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 19164 - timestamp: 1733762153202 -- conda: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.19.0-pyhd8ed1ab_1.conda - sha256: 0072feb6220733066b0b8a4293fa7d4e170490d52bab64f4491818325b2f6ffd - md5: 6dc626c926419c14546daedf1cffb4d4 - depends: - - aiohttp >=3.9.2,<4.0.0 - - aioitertools >=0.5.1,<1.0.0 - - botocore >=1.36.0,<1.36.4 - - jmespath >=0.7.1,<2.0.0 - - multidict >=6.0.0,<7.0.0 - - python >=3.9 - - python-dateutil >=2.1,<3.0.0 - - urllib3 >=1.25.4,!=2.2.0,<3 - - wrapt >=1.10.10,<2.0.0 - license: Apache-2.0 - license_family: Apache - size: 67401 - timestamp: 1738429355887 -- conda: https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.6-pyhd8ed1ab_0.conda - sha256: a2a5579be9fb21f9397f51a4ba09599782c93e9117951a5105d8ee4b80d648c1 - md5: 5b7d3ceeb36e8e6783eae78acd4c18e1 - depends: - - python >=3.9 - license: PSF-2.0 - license_family: PSF - size: 19236 - timestamp: 1739175837817 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.12-py312h178313f_0.conda - sha256: 223f271deceaf71d0cbee21162084104a6eca06e79c04ecb322706be3e406ea1 - md5: 9f96d8b6fb9bab11e46c12132283b5b1 - depends: - - __glibc >=2.17,<3.0.a0 - - aiohappyeyeballs >=2.3.0 - - aiosignal >=1.1.2 - - attrs >=17.3.0 - - frozenlist >=1.1.1 - - libgcc >=13 - - multidict >=4.5,<7.0 - - propcache >=0.2.0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - yarl >=1.17.0,<2.0 - license: MIT AND 
Apache-2.0 - license_family: Apache - size: 915782 - timestamp: 1738824701518 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aiohttp-3.11.12-py312h998013c_0.conda - sha256: a4d04942bdeedbb7260b41eafb0302b6f8c3799f578c4389984c084a8fc34c16 - md5: 7675cee14b7e7d9ccf17ad37a4bdf53a - depends: - - __osx >=11.0 - - aiohappyeyeballs >=2.3.0 - - aiosignal >=1.1.2 - - attrs >=17.3.0 - - frozenlist >=1.1.1 - - multidict >=4.5,<7.0 - - propcache >=0.2.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - - yarl >=1.17.0,<2.0 - license: MIT AND Apache-2.0 - license_family: Apache - size: 889104 - timestamp: 1738823362301 -- conda: https://conda.anaconda.org/conda-forge/win-64/aiohttp-3.11.12-py312h31fea79_0.conda - sha256: 3ba3d384a48b01cd17b5c2c9f41a7a7484a1ac2891bb717313fa628b42d01649 - md5: 5685ef8b08b1e32378246cab51348a98 - depends: - - aiohappyeyeballs >=2.3.0 - - aiosignal >=1.1.2 - - attrs >=17.3.0 - - frozenlist >=1.1.1 - - multidict >=4.5,<7.0 - - propcache >=0.2.0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - yarl >=1.17.0,<2.0 - license: MIT AND Apache-2.0 - license_family: Apache - size: 859766 - timestamp: 1738823652042 -- conda: https://conda.anaconda.org/conda-forge/noarch/aioitertools-0.12.0-pyhd8ed1ab_1.conda - sha256: 7d56e547a819a03c058dd8793ca9df6ff9825812da52c214192edb61a7de1c95 - md5: 3eb47adbffac44483f59e580f8600a1e - depends: - - python >=3.9 - - typing_extensions >=4.0 - license: MIT - license_family: MIT - size: 25063 - timestamp: 1735329177103 -- conda: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda - sha256: 7de8ced1918bbdadecf8e1c1c68237fe5709c097bd9e0d254f4cad118f4345d0 - md5: 1a3981115a398535dbe3f6d5faae3d36 - depends: - - frozenlist >=1.1.0 - - python >=3.9 - license: Apache-2.0 - license_family: APACHE - size: 13229 - timestamp: 1734342253061 -- conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda - sha256: fd39ad2fabec1569bbb0dfdae34ab6ce7de6ec09dcec8638f83dad0373594069 - md5: def531a3ac77b7fb8c21d17bb5d0badb - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 18365 - timestamp: 1704848898483 -- conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.8.0-pyhd8ed1ab_0.conda - sha256: f1455d2953e3eb6d71bc49881c8558d8e01888469dfd21061dd48afb6183e836 - md5: 848d25bfbadf020ee4d4ba90e5668252 - depends: - - exceptiongroup >=1.0.2 - - idna >=2.8 - - python >=3.9 - - sniffio >=1.1 - - typing_extensions >=4.5 - constrains: - - trio >=0.26.1 - - uvloop >=0.21 - license: MIT - license_family: MIT - size: 115305 - timestamp: 1736174485476 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda - sha256: b08ef033817b5f9f76ce62dfcac7694e7b6b4006420372de22494503decac855 - md5: 346722a0be40f6edc53f12640d301338 - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: BSD-2-Clause - license_family: BSD - size: 2706396 - timestamp: 1718551242397 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aom-3.9.1-h7bae524_0.conda - sha256: ec238f18ce8140485645252351a0eca9ef4f7a1c568a420f240a585229bc12ef - md5: 7adba36492a1bb22d98ffffe4f6fc6de - depends: - - __osx >=11.0 - - libcxx >=16 - license: BSD-2-Clause - license_family: BSD - size: 2235747 - timestamp: 1718551382432 -- conda: https://conda.anaconda.org/conda-forge/win-64/aom-3.9.1-he0c23c2_0.conda - sha256: 
0524d0c0b61dacd0c22ac7a8067f977b1d52380210933b04141f5099c5b6fec7 - md5: 3d7c14285d3eb3239a76ff79063f27a5 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 1958151 - timestamp: 1718551737234 -- conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_1.conda - sha256: 8f032b140ea4159806e4969a68b4a3c0a7cab1ad936eb958a2b5ffe5335e19bf - md5: 54898d0f524c9dee622d44bbb081a8ab - depends: - - python >=3.9 - license: BSD-2-Clause - license_family: BSD - size: 10076 - timestamp: 1733332433806 -- conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda - sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f - md5: a7ee488b71c30ada51c48468337b85ba - depends: - - argon2-cffi-bindings - - python >=3.9 - - typing-extensions - constrains: - - argon2_cffi ==999 - license: MIT - license_family: MIT - size: 18594 - timestamp: 1733311166338 -- conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda - sha256: 3cbc3b026f5c3f26de696ead10607db8d80cbb003d87669ac3b02e884f711978 - md5: 1505fc57c305c0a3174ea7aae0a0db25 - depends: - - __glibc >=2.17,<3.0.a0 - - cffi >=1.0.1 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 34847 - timestamp: 1725356749774 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/argon2-cffi-bindings-21.2.0-py312h024a12e_5.conda - sha256: 0e32ddd41f273f505956254d81ffadaf982ed1cb7dfd70d9251a8c5b705c7267 - md5: 6ccaeafe1a52b0d0e7ebfbf53a374649 - depends: - - __osx >=11.0 - - cffi >=1.0.1 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 32838 - timestamp: 1725356954187 -- conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda - sha256: 8764a8a9416d90264c7d36526de77240a454d0ee140841db545bdd5825ebd6f1 - md5: 53943e7ecba6b3e3744b292dc3fb4ae2 - depends: - - cffi >=1.0.1 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 34399 - timestamp: 1725357069475 -- conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda - sha256: c4b0bdb3d5dee50b60db92f99da3e4c524d5240aafc0a5fcc15e45ae2d1a3cd1 - md5: 46b53236fdd990271b03c3978d4218a9 - depends: - - python >=3.9 - - python-dateutil >=2.7.0 - - types-python-dateutil >=2.8.10 - license: Apache-2.0 - license_family: Apache - size: 99951 - timestamp: 1733584345583 -- conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda - sha256: 93b14414b3b3ed91e286e1cbe4e7a60c4e1b1c730b0814d1e452a8ac4b9af593 - md5: 8f587de4bcf981e26228f268df374a9b - depends: - - python >=3.9 - constrains: - - astroid >=2,<4 - license: Apache-2.0 - license_family: Apache - size: 28206 - timestamp: 1733250564754 -- conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_1.conda - sha256: 344157f396dfdc929d1dff8fe010abe173cd168d22a56648583e616495f2929e - md5: 40c673c7d585623b8f1ee650c8734eb6 - depends: - - python >=3.9 - - typing_extensions >=4.0.0 - license: MIT - license_family: MIT - size: 15318 - timestamp: 1733584388228 -- conda: https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2 - sha256: 
26ab9386e80bf196e51ebe005da77d57decf6d989b4f34d96130560bc133479c - md5: 6b889f174df1e0f816276ae69281af4d - depends: - - at-spi2-core >=2.40.0,<2.41.0a0 - - atk-1.0 >=2.36.0 - - dbus >=1.13.6,<2.0a0 - - libgcc-ng >=9.3.0 - - libglib >=2.68.1,<3.0a0 - license: LGPL-2.1-or-later - license_family: LGPL - size: 339899 - timestamp: 1619122953439 -- conda: https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2 - sha256: c4f9b66bd94c40d8f1ce1fad2d8b46534bdefda0c86e3337b28f6c25779f258d - md5: 8cb2fc4cd6cc63f1369cfa318f581cc3 - depends: - - dbus >=1.13.6,<2.0a0 - - libgcc-ng >=9.3.0 - - libglib >=2.68.3,<3.0a0 - - xorg-libx11 - - xorg-libxi - - xorg-libxtst - license: LGPL-2.1-or-later - license_family: LGPL - size: 658390 - timestamp: 1625848454791 -- conda: https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda - sha256: df682395d05050cd1222740a42a551281210726a67447e5258968dd55854302e - md5: f730d54ba9cd543666d7220c9f7ed563 - depends: - - libgcc-ng >=12 - - libglib >=2.80.0,<3.0a0 - - libstdcxx-ng >=12 - constrains: - - atk-1.0 2.38.0 - license: LGPL-2.0-or-later - license_family: LGPL - size: 355900 - timestamp: 1713896169874 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/atk-1.0-2.38.0-hd03087b_2.conda - sha256: b0747f9b1bc03d1932b4d8c586f39a35ac97e7e72fe6e63f2b2a2472d466f3c1 - md5: 57301986d02d30d6805fdce6c99074ee - depends: - - __osx >=11.0 - - libcxx >=16 - - libglib >=2.80.0,<3.0a0 - - libintl >=0.22.5,<1.0a0 - constrains: - - atk-1.0 2.38.0 - license: LGPL-2.0-or-later - license_family: LGPL - size: 347530 - timestamp: 1713896411580 -- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda - sha256: 1f267886522dfb9ae4e5ebbc3135b5eb13cff27bdbfe8d881a4d893459166ab4 - md5: 2cc3f588512f04f3a0c64b4e9bedc02d - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 56370 - timestamp: 1737819298139 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.1-h205f482_0.conda - sha256: ebe5e33249f37f6bb481de99581ebdc92dbfcf1b6915609bcf3c9e78661d6352 - md5: 9c500858e88df50af3cc883d194de78a - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-c-sdkutils >=0.2.2,<0.2.3.0a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 108111 - timestamp: 1737509831651 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.8.1-hfc2798a_0.conda - sha256: 5a60d196a585b25d1446fb973009e4e648e8d70beaa2793787243ede6da0fd9a - md5: 0abd67c0f7b60d50348fbb32fef50b65 - depends: - - __osx >=11.0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-c-sdkutils >=0.2.2,<0.2.3.0a0 - license: Apache-2.0 - license_family: Apache - size: 92562 - timestamp: 1737509877079 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.8.1-hd11252f_0.conda - sha256: 248332efb7528e512502fa03488c7694ab022cafd446cc586f5e59383c6386a5 - md5: fe0091e429538d2687ad3353decfe532 - depends: - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-c-sdkutils >=0.2.2,<0.2.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 103199 - timestamp: 1737510053257 -- conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.1-h1a47875_3.conda - sha256: 095ac824ea9303eff67e04090ae531d9eb33d2bf8f82eaade39b839c421e16e8 - md5: 55a8561fdbbbd34f50f57d9be12ed084 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - libgcc >=13 - - openssl >=3.3.1,<4.0a0 - license: Apache-2.0 - license_family: Apache - size: 47601 - timestamp: 1733991564405 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.8.1-hc8a0bd2_3.conda - sha256: 1f44be36e1daa17b4b081debb8aee492d13571084f38b503ad13e869fef24fe4 - md5: 8b0ce61384e5a33d2b301a64f3d22ac5 - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - openssl >=3.3.1,<4.0a0 - license: Apache-2.0 - license_family: Apache - size: 39925 - timestamp: 1733991649383 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.8.1-h099ea23_3.conda - sha256: e345717c4cbef8472b3f4f90b75d326ad66a84574bfb02740a860d8de6414c44 - md5: 767b18a469cf18d7476cab915f9fe207 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - openssl >=3.3.1,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 47436 - timestamp: 1733991914197 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.10.6-hb9d3cd8_0.conda - sha256: 496e92f2150fdc351eacf6e236015deedb3d0d3114f8e5954341cbf9f3dda257 - md5: d7d4680337a14001b0e043e96529409b - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 236574 - timestamp: 1733975453350 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.10.6-h5505292_0.conda - sha256: 3bde135c8e74987c0f79ecd4fa17ec9cff0d658b3090168727ca1af3815ae57a - md5: 145e5b4c9702ed279d7d68aaf096f77d - depends: - - __osx >=11.0 - license: Apache-2.0 - license_family: Apache - size: 221863 - timestamp: 1733975576886 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.10.6-h2466b09_0.conda - sha256: 348af25291f2b4106d8453fddb8dcbfed452067bddfa0eeadd24f1c710617a4a - md5: 44a7e180f2054340401499de93ae39ba - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 235514 - timestamp: 1733975788721 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-h4e1184b_5.conda - sha256: 62ca84da83585e7814a40240a1e750b1563b2680b032a471464eccc001c3309b - md5: 3f4c1197462a6df2be6dc8241828fe93 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 19086 - timestamp: 1733991637424 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.3.0-hc8a0bd2_5.conda - sha256: 47b2813f652ce7e64ac442f771b2a5f7d4af4ad0d07ff51f6075ea80ed2e3f09 - md5: a8b6c17732d14ed49d0e9b59c43186bc - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - license: Apache-2.0 - license_family: Apache - size: 18068 - timestamp: 1733991869211 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.3.0-h099ea23_5.conda - sha256: f30956b5c450e0a21adc3d523fdbe2d0dcc79125b135f5ccc4497d97f8733891 - md5: b4303abff1423285a2e5063d796e1614 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 22364 - timestamp: 1733991973284 -- conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-h7959bf6_11.conda - sha256: 10d7240c7db0c941fb1a59c4f8ea6689a434b03309ee7b766fa15a809c553c02 - md5: 9b3fb60fe57925a92f399bc3fc42eccf - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - libgcc >=13 - - libstdcxx >=13 - license: Apache-2.0 - license_family: Apache - size: 54003 - timestamp: 1734024480949 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.5.0-h54f970a_11.conda - sha256: f0667935f4e0d4c25e0e51da035640310b5ceeb8f723156734439bde8b848d7d - md5: ba41238f8e653998d7d2f42e3a8db054 - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - libcxx >=18 - license: Apache-2.0 - license_family: Apache - size: 47078 - timestamp: 1734024749727 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.5.0-h85d8506_11.conda - sha256: bd7d3849ae0a12e170d4d442f7d2db7de98827d8d3505d0a60d12b1170b1ab0d - md5: a32c029b7e933cf93c5066b186560e62 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 54426 - timestamp: 1734024881523 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hefd7a92_4.conda - sha256: 4a330206bd51148f6c13ca0b7a4db40f29a46f090642ebacdeb88b8a4abd7f99 - md5: 5ce4df662d32d3123ea8da15571b6f51 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-compression >=0.3.0,<0.3.1.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 197731 - timestamp: 1734008380764 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.9.2-h96aa502_4.conda - sha256: 22e4737c8a885995b7c1ae1d79c1f6e78d489e16ec079615980fdde067aeaf76 - md5: 495c93a4f08b17deb3c04894512330e6 - depends: - - __osx >=11.0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-compression >=0.3.0,<0.3.1.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - license: Apache-2.0 - license_family: Apache - size: 152983 - timestamp: 1734008451473 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.9.2-h3888f84_4.conda - sha256: ce0cedbe65e36f6e6dc9a8e07336f9c6ceecb09f0ed8eebdd01d74d261b59d16 - md5: 4e7cf9b498fcc5dee5abcdf24e64a96d - depends: - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-compression >=0.3.0,<0.3.1.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 182269 - timestamp: 1734008780813 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-h173a860_6.conda - sha256: 335d822eead0a097ffd23677a288e1f18ea22f47a92d4f877419debb93af0e81 - md5: 9a063178f1af0a898526cc24ba7be486 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - libgcc >=13 - - s2n >=1.5.11,<1.5.12.0a0 - license: Apache-2.0 - license_family: Apache - size: 157263 - timestamp: 1737207617838 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.15.3-haba67d1_6.conda - sha256: 73722dd175af78b6cbfa033066f0933351f5382a1a737f6c6d9b8cfa84022161 - md5: 
d02e8f40ff69562903e70a1c6c48b009 - depends: - - __osx >=11.0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - license: Apache-2.0 - license_family: Apache - size: 136048 - timestamp: 1737207681224 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.15.3-hc5a9e45_6.conda - sha256: 0cbf3ddd55835ba99726ffcc0118124fc8430fec41e81bb7b1d8c0c6e0d272e0 - md5: 48a9b0c65a94282ffa149ea7c0a53239 - depends: - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 159815 - timestamp: 1737207711320 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-h11f4f37_12.conda - sha256: 512d3969426152d9d5fd886e27b13706122dc3fa90eb08c37b0d51a33d7bb14a - md5: 96c3e0221fa2da97619ee82faa341a73 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 194672 - timestamp: 1734025626798 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.11.0-h24f418c_12.conda - sha256: 96575ea1dd2a9ea94763882e40a66dcbff9c41f702bf37c9514c4c719b3c11dd - md5: c072045a6206f88015d02fcba1705ea1 - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - license: Apache-2.0 - license_family: Apache - size: 134371 - timestamp: 1734025379525 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.11.0-h2c94728_12.conda - sha256: bfe3e2c5de01e285e67ac8119de58a11e594d202b3ebcfaa55ffd138a3b28279 - md5: bad2afca289f8854d431acdcc8f1cea8 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 186987 - timestamp: 1734025825190 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.9-he1b24dc_1.conda - sha256: 15fbdedc56850f8be5be7a5bcaea1af09c97590e631c024ae089737fc932fc42 - md5: caafc32928a5f7f3f7ef67d287689144 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-auth >=0.8.1,<0.8.2.0a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - libgcc >=13 - - openssl >=3.4.0,<4.0a0 - license: Apache-2.0 - license_family: Apache - size: 115413 - timestamp: 1737558687616 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.7.9-hf37e03c_1.conda - sha256: 92e8ca4eefcbbdf4189584c9410382884a06ed3030e5ecaac656dab8c95e6a80 - md5: de65f5e4ab5020103fe70a0eba9432a0 - depends: - - __osx >=11.0 - - aws-c-auth >=0.8.1,<0.8.2.0a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - license: Apache-2.0 - license_family: Apache - size: 98731 - timestamp: 1737558731831 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.7.9-h6a47413_1.conda - sha256: 8761e823ae49514f352155135030e9a57d4fe70f363ce2fa7f8c38dd8c3835d7 - md5: 2a5283c5df98c20e695bfdf2d4019335 - depends: - - aws-c-auth >=0.8.1,<0.8.2.0a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - 
aws-checksums >=0.2.2,<0.2.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 109742 - timestamp: 1737559137789 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.2-h4e1184b_0.conda - sha256: 0424e380c435ba03b5948d02e8c958866c4eee50ed29e57f99473a5f795a4cfc - md5: dcd498d493818b776a77fbc242fbf8e4 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 55911 - timestamp: 1736535960724 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.2.2-hc8a0bd2_0.conda - sha256: ea4f0f1e99056293c69615f581a997d65ba7e229e296e402e0d8ef750648a5b5 - md5: e7b5498ac7b7ab921a907be38f3a8080 - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - license: Apache-2.0 - license_family: Apache - size: 49872 - timestamp: 1736536152332 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.2.2-h099ea23_0.conda - sha256: af9cc0696b9fb60e7d0738b140b3d93efcf7f354e56c3034f459fc1651d53921 - md5: 6292ef653d6002edc721d2dc9356aa57 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 55109 - timestamp: 1736536467087 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-h4e1184b_4.conda - sha256: 1ed9a332d06ad595694907fad2d6d801082916c27cd5076096fda4061e6d24a8 - md5: 74e8c3e4df4ceae34aa2959df4b28101 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 72762 - timestamp: 1733994347547 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.2.2-hc8a0bd2_4.conda - sha256: 215086d95e8ff1d3fcb0197ada116cc9d7db1fdae7573f5e810d20fa9215b47c - md5: e70e88a357a3749b67679c0788c5b08a - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - license: Apache-2.0 - license_family: Apache - size: 70186 - timestamp: 1733994496998 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.2.2-h099ea23_4.conda - sha256: 577e62dbf1750219cfb017d36c9022f40d7dc287b597fd7dec1ca04cade0108c - md5: 5a8ce497f17cf1e6ae745f122b6a2bc3 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 91909 - timestamp: 1733994821424 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.9-he0e7f3f_2.conda - sha256: c1930569713bd5231d48d885a5e3707ac917b428e8f08189d14064a2bb128adc - md5: 8a4e6fc8a3b285536202b5456a74a940 - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-auth >=0.8.1,<0.8.2.0a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-event-stream >=0.5.0,<0.5.1.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-c-mqtt >=0.11.0,<0.11.1.0a0 - - aws-c-s3 >=0.7.9,<0.7.10.0a0 - - aws-c-sdkutils >=0.2.2,<0.2.3.0a0 - - libgcc >=13 - - libstdcxx >=13 - license: Apache-2.0 - license_family: Apache - size: 353222 - timestamp: 1737565463079 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.29.9-ha81f72f_2.conda - sha256: ed5f1d19aad53787fdebe13db4709c97eae2092536cc55d3536eba320c4286e1 - md5: c9c034d3239bf25687ca4dd985007ecd - depends: - - __osx >=11.0 - - aws-c-auth >=0.8.1,<0.8.2.0a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - 
aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-event-stream >=0.5.0,<0.5.1.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-c-mqtt >=0.11.0,<0.11.1.0a0 - - aws-c-s3 >=0.7.9,<0.7.10.0a0 - - aws-c-sdkutils >=0.2.2,<0.2.3.0a0 - - libcxx >=18 - license: Apache-2.0 - license_family: Apache - size: 235976 - timestamp: 1737565563139 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.29.9-he488853_2.conda - sha256: dff67543a0cec319973ef17750760392623a5a0b726081378548a99f3899975f - md5: fd6464ad7158760f808c9b4b044cbcc0 - depends: - - aws-c-auth >=0.8.1,<0.8.2.0a0 - - aws-c-cal >=0.8.1,<0.8.2.0a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-event-stream >=0.5.0,<0.5.1.0a0 - - aws-c-http >=0.9.2,<0.9.3.0a0 - - aws-c-io >=0.15.3,<0.15.4.0a0 - - aws-c-mqtt >=0.11.0,<0.11.1.0a0 - - aws-c-s3 >=0.7.9,<0.7.10.0a0 - - aws-c-sdkutils >=0.2.2,<0.2.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 262083 - timestamp: 1737566019782 -- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.489-h4d475cb_0.conda - sha256: 08d6b7d2ed17bfcc7deb903c7751278ee434abdb27e3be0dceb561f30f030c75 - md5: b775e9f46dfa94b228a81d8e8c6d8b1d - depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-event-stream >=0.5.0,<0.5.1.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - aws-crt-cpp >=0.29.9,<0.29.10.0a0 - - libcurl >=8.11.1,<9.0a0 - - libgcc >=13 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - license: Apache-2.0 - license_family: Apache - size: 3144364 - timestamp: 1737576036746 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.489-h0e5014b_0.conda - sha256: d82451530ddf363d8bb31a8a7391bb9699f745e940ace91d78c0e6170deef03c - md5: 156cfb45a1bb8cffc81e59047bb34f51 - depends: - - __osx >=11.0 - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-event-stream >=0.5.0,<0.5.1.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - aws-crt-cpp >=0.29.9,<0.29.10.0a0 - - libcurl >=8.11.1,<9.0a0 - - libcxx >=18 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - license: Apache-2.0 - license_family: Apache - size: 2874126 - timestamp: 1737577023623 -- conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.489-h7d73209_0.conda - sha256: 634c2d4cf07c049e36028294d94120532ca6697c29257191b0660ee9886e4269 - md5: 38c6bbaa9437ebd25885ce508853dc76 - depends: - - aws-c-common >=0.10.6,<0.10.7.0a0 - - aws-c-event-stream >=0.5.0,<0.5.1.0a0 - - aws-checksums >=0.2.2,<0.2.3.0a0 - - aws-crt-cpp >=0.29.9,<0.29.10.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 3010024 - timestamp: 1737576786156 -- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda - sha256: fe07debdb089a3db17f40a7f20d283d75284bb4fc269ef727b8ba6fc93f7cb5a - md5: 0a8838771cc2e985cd295e01ae83baf1 - depends: - - __glibc >=2.17,<3.0.a0 - - libcurl >=8.10.1,<9.0a0 - - libgcc >=13 - - libstdcxx >=13 - - openssl >=3.3.2,<4.0a0 - license: MIT - license_family: MIT - size: 345117 - timestamp: 1728053909574 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.14.0-hd50102c_0.conda - sha256: f5b91329ed59ffc0be8747784c6e4cc7e56250c54032883a83bc11808ef6a87e - md5: f093a11dcf3cdcca010b20a818fcc6dc - depends: - - __osx >=11.0 - - libcurl >=8.10.1,<9.0a0 - - libcxx >=17 - - openssl >=3.3.2,<4.0a0 - 
license: MIT - license_family: MIT - size: 294299 - timestamp: 1728054014060 -- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda - sha256: 286b31616c191486626cb49e9ceb5920d29394b9e913c23adb7eb637629ba4de - md5: 73f73f60854f325a55f1d31459f2ab73 - depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - libgcc >=13 - - libstdcxx >=13 - - openssl >=3.3.2,<4.0a0 - license: MIT - license_family: MIT - size: 232351 - timestamp: 1728486729511 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.10.0-hc602bab_0.conda - sha256: bde446b916fff5150606f8ed3e6058ffc55a3aa72381e46f1ab346590b1ae40a - md5: d7b71593a937459f2d4b67e1a4727dc2 - depends: - - __osx >=11.0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - libcxx >=17 - - openssl >=3.3.2,<4.0a0 - license: MIT - license_family: MIT - size: 166907 - timestamp: 1728486882502 -- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda - sha256: 2606260e5379eed255bcdc6adc39b93fb31477337bcd911c121fc43cd29bf394 - md5: 7eb66060455c7a47d9dcdbfa9f46579b - depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libgcc >=13 - - libstdcxx >=13 - license: MIT - license_family: MIT - size: 549342 - timestamp: 1728578123088 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.13.0-h7585a09_1.conda - sha256: 08d52d130addc0fb55d5ba10d9fa483e39be25d69bac7f4c676c2c3069207590 - md5: 704238ef05d46144dae2e6b5853df8bc - depends: - - __osx >=11.0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libcxx >=17 - license: MIT - license_family: MIT - size: 438636 - timestamp: 1728578216193 -- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda - sha256: 273475f002b091b66ce7366da04bf164c3732c03f8692ab2ee2d23335b6a82ba - md5: 13de36be8de3ae3f05ba127631599213 - depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - libgcc >=13 - - libstdcxx >=13 - - libxml2 >=2.12.7,<3.0a0 - - openssl >=3.3.2,<4.0a0 - license: MIT - license_family: MIT - size: 149312 - timestamp: 1728563338704 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.8.0-h9ca1f76_1.conda - sha256: 77ab04e8fe5636a2de9c718f72a43645f7502cd208868c8a91ffba385547d585 - md5: 7a187cd7b1445afc80253bb186a607cc - depends: - - __osx >=11.0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - libcxx >=17 - - libxml2 >=2.12.7,<3.0a0 - - openssl >=3.3.2,<4.0a0 - license: MIT - license_family: MIT - size: 121278 - timestamp: 1728563418777 -- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda - sha256: 5371e4f3f920933bb89b926a85a67f24388227419abd6e99f6086481e5e8d5f2 - md5: 7c1980f89dd41b097549782121a73490 - depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libgcc >=13 - - libstdcxx >=13 - license: MIT - license_family: MIT - size: 287366 - timestamp: 1728729530295 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.12.0-hcdd55da_1.conda - sha256: f48523f8aa0b5b80f45a92f0556b388dd96f44ac2dc2f44a01d08c1822eec97d - md5: c49fbc5233fcbaa86391162ff1adef38 - depends: - - __osx >=11.0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - 
- azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libcxx >=17 - license: MIT - license_family: MIT - size: 196032 - timestamp: 1728729672889 -- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda - sha256: 1c656a35800b7f57f7371605bc6507c8d3ad60fbaaec65876fce7f73df1fc8ac - md5: 0a01c169f0ab0f91b26e77a3301fbfe4 - depends: - - python >=3.9 - - pytz >=2015.7 - license: BSD-3-Clause - license_family: BSD - size: 6938256 - timestamp: 1738490268466 -- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - sha256: 4ce42860292a57867cfc81a5d261fb9886fc709a34eca52164cc8bbf6d03de9f - md5: 373374a3ed20141090504031dc7b693e - depends: - - python >=3.9 - - soupsieve >=1.2 - - typing-extensions - license: MIT - license_family: MIT - size: 145482 - timestamp: 1738740460562 -- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda - sha256: a05971bb80cca50ce9977aad3f7fc053e54ea7d5321523efc7b9a6e12901d3cd - md5: f0b4c8e370446ef89797608d60a564b3 - depends: - - python >=3.9 - - webencodings - - python - constrains: - - tinycss >=1.1.0,<1.5 - license: Apache-2.0 AND MIT - size: 141405 - timestamp: 1737382993425 -- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda - sha256: 0aba699344275b3972bd751f9403316edea2ceb942db12f9f493b63c74774a46 - md5: a30e9406c873940383555af4c873220d - depends: - - bleach ==6.2.0 pyh29332c3_4 - - tinycss2 - license: Apache-2.0 AND MIT - size: 4213 - timestamp: 1737382993425 -- conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - sha256: f7efd22b5c15b400ed84a996d777b6327e5c402e79e3c534a7e086236f1eb2dc - md5: 42834439227a4551b939beeeb8a4b085 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 13934 - timestamp: 1731096548765 -- conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda - sha256: e7af5d1183b06a206192ff440e08db1c4e8b2ca1f8376ee45fb2f3a85d4ee45d - md5: 2c2fae981fd2afd00812c92ac47d023d - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - snappy >=1.2.1,<1.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-3-Clause - license_family: BSD - size: 48427 - timestamp: 1733513201413 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/blosc-1.21.6-h7dd00d9_1.conda - sha256: c3fe902114b9a3ac837e1a32408cc2142c147ec054c1038d37aec6814343f48a - md5: 925acfb50a750aa178f7a0aced77f351 - depends: - - __osx >=11.0 - - libcxx >=18 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - snappy >=1.2.1,<1.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-3-Clause - license_family: BSD - size: 33602 - timestamp: 1733513285902 -- conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-hfd34d9b_1.conda - sha256: 9303a7a0e03cf118eab3691013f6d6cbd1cbac66efbc70d89b20f5d0145257c0 - md5: 357d7be4146d5fec543bfaa96a8a40de - depends: - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - snappy >=1.2.1,<1.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-3-Clause - license_family: BSD - size: 49840 - timestamp: 1733513605730 -- conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.3-pyhd8ed1ab_0.conda - sha256: 6cc6841b1660cd3246890d4f601baf51367526afe6256dfd8a8d9a8f7db651fe - md5: 606498329a91bd9d5c0439fb2815816f - depends: - - contourpy >=1.2 - - jinja2 
>=2.9 - - numpy >=1.16 - - packaging >=16.8 - - pandas >=1.2 - - pillow >=7.1.0 - - python >=3.10 - - pyyaml >=3.10 - - tornado >=6.2 - - xyzservices >=2021.09.1 - license: BSD-3-Clause - license_family: BSD - size: 4524790 - timestamp: 1738843545439 -- conda: https://conda.anaconda.org/conda-forge/noarch/botocore-1.36.3-pyge310_1234567_0.conda - sha256: 3d41462f9f40d0e15b665ad0123693877b9445eca3ed1f16fa698fc5c2e66948 - md5: d21b74ea6fe0795af13a62f00f5258f3 - depends: - - jmespath >=0.7.1,<2.0.0 - - python >=3.10 - - python-dateutil >=2.1,<3.0.0 - - urllib3 >=1.25.4,!=2.2.0,<3 - license: Apache-2.0 - license_family: Apache - size: 7579612 - timestamp: 1737535716603 -- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda - sha256: fcb0b5b28ba7492093e54f3184435144e074dfceab27ac8e6a9457e736565b0b - md5: 98514fe74548d768907ce7a13f680e8f - depends: - - __glibc >=2.17,<3.0.a0 - - brotli-bin 1.1.0 hb9d3cd8_2 - - libbrotlidec 1.1.0 hb9d3cd8_2 - - libbrotlienc 1.1.0 hb9d3cd8_2 - - libgcc >=13 - license: MIT - license_family: MIT - size: 19264 - timestamp: 1725267697072 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-hd74edd7_2.conda - sha256: a086f36ff68d6e30da625e910547f6211385246fb2474b144ac8c47c32254576 - md5: 215e3dc8f2f837906d066e7f01aa77c0 - depends: - - __osx >=11.0 - - brotli-bin 1.1.0 hd74edd7_2 - - libbrotlidec 1.1.0 hd74edd7_2 - - libbrotlienc 1.1.0 hd74edd7_2 - license: MIT - license_family: MIT - size: 19588 - timestamp: 1725268044856 -- conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda - sha256: d8fd7d1b446706776117d2dcad1c0289b9f5e1521cb13405173bad38568dd252 - md5: 378f1c9421775dfe644731cb121c8979 - depends: - - brotli-bin 1.1.0 h2466b09_2 - - libbrotlidec 1.1.0 h2466b09_2 - - libbrotlienc 1.1.0 h2466b09_2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 19697 - timestamp: 1725268293988 -- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda - sha256: 261364d7445513b9a4debc345650fad13c627029bfc800655a266bf1e375bc65 - md5: c63b5e52939e795ba8d26e35d767a843 - depends: - - __glibc >=2.17,<3.0.a0 - - libbrotlidec 1.1.0 hb9d3cd8_2 - - libbrotlienc 1.1.0 hb9d3cd8_2 - - libgcc >=13 - license: MIT - license_family: MIT - size: 18881 - timestamp: 1725267688731 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-hd74edd7_2.conda - sha256: 28f1af63b49fddf58084fb94e5512ad46e9c453eb4be1d97449c67059e5b0680 - md5: b8512db2145dc3ae8d86cdc21a8d421e - depends: - - __osx >=11.0 - - libbrotlidec 1.1.0 hd74edd7_2 - - libbrotlienc 1.1.0 hd74edd7_2 - license: MIT - license_family: MIT - size: 16772 - timestamp: 1725268026061 -- conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda - sha256: f3bf2893613540ac256c68f211861c4de618d96291719e32178d894114ac2bc2 - md5: d22534a9be5771fc58eb7564947f669d - depends: - - libbrotlidec 1.1.0 h2466b09_2 - - libbrotlienc 1.1.0 h2466b09_2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 20837 - timestamp: 1725268270219 -- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - sha256: f2a59ccd20b4816dea9a2a5cb917eb69728271dbf1aeab4e1b7e609330a50b6f - md5: b0b867af6fc74b2a0aa206da29c0f3cf - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - 
constrains: - - libbrotlicommon 1.1.0 hb9d3cd8_2 - license: MIT - license_family: MIT - size: 349867 - timestamp: 1725267732089 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312hde4cb15_2.conda - sha256: 254b411fa78ccc226f42daf606772972466f93e9bc6895eabb4cfda22f5178af - md5: a83c2ef76ccb11bc2349f4f17696b15d - depends: - - __osx >=11.0 - - libcxx >=17 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - constrains: - - libbrotlicommon 1.1.0 hd74edd7_2 - license: MIT - license_family: MIT - size: 339360 - timestamp: 1725268143995 -- conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - sha256: f83baa6f6bcba7b73f6921d5c1aa95ffc5d8b246ade933ade79250de0a4c9c4c - md5: a99aec1ac46794a5fb1cd3cf5d2b6110 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - libbrotlicommon 1.1.0 h2466b09_2 - license: MIT - license_family: MIT - size: 321874 - timestamp: 1725268491976 -- conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - sha256: 5ced96500d945fb286c9c838e54fa759aa04a7129c59800f0846b4335cee770d - md5: 62ee74e96c5ebb0af99386de58cf9553 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - license: bzip2-1.0.6 - license_family: BSD - size: 252783 - timestamp: 1720974456583 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 - md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab - depends: - - __osx >=11.0 - license: bzip2-1.0.6 - license_family: BSD - size: 122909 - timestamp: 1720974522888 -- conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - sha256: 35a5dad92e88fdd7fc405e864ec239486f4f31eec229e31686e61a140a8e573b - md5: 276e7ffe9ffe39688abc665ef0f45596 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: bzip2-1.0.6 - license_family: BSD - size: 54927 - timestamp: 1720974860185 -- conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda - sha256: d4f28d87b6339b94f74762c0076e29c8ef8ddfff51a564a92da2843573c18320 - md5: e2775acf57efd5af15b8e3d1d74d72d3 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 206085 - timestamp: 1734208189009 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.4-h5505292_0.conda - sha256: 09c0c8476e50b2955f474a4a1c17c4c047dd52993b5366b6ea8e968e583b921f - md5: c1c999a38a4303b29d75c636eaa13cf9 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 179496 - timestamp: 1734208291879 -- conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.4-h2466b09_0.conda - sha256: f364f7de63a7c35a62c8d90383dd7747b46fa6b9c35c16c99154a8c45685c86b - md5: d387e6f147273d548f068f49a4291aef - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 193862 - timestamp: 1734208384429 -- conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2025.1.31-hbcca054_0.conda - sha256: bf832198976d559ab44d6cdb315642655547e26d826e34da67cbee6624cda189 - md5: 19f3a56f68d2fd06c516076bff482c52 - license: ISC - size: 158144 - timestamp: 1738298224464 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2025.1.31-hf0a4a13_0.conda - sha256: 
7e12816618173fe70f5c638b72adf4bfd4ddabf27794369bb17871c5bb75b9f9 - md5: 3569d6a9141adc64d2fe4797f3289e06 - license: ISC - size: 158425 - timestamp: 1738298167688 -- conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2025.1.31-h56e8100_0.conda - sha256: 1bedccdf25a3bd782d6b0e57ddd97cdcda5501716009f2de4479a779221df155 - md5: 5304a31607974dfc2110dfbb662ed092 - license: ISC - size: 158690 - timestamp: 1738298232550 -- conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - noarch: python - sha256: 561e6660f26c35d137ee150187d89767c988413c978e1b712d53f27ddf70ea17 - md5: 9b347a7ec10940d3f7941ff6c460b551 - depends: - - cached_property >=1.5.2,<1.5.3.0a0 - license: BSD-3-Clause - license_family: BSD - size: 4134 - timestamp: 1615209571450 -- conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - sha256: 6dbf7a5070cc43d90a1e4c2ec0c541c69d8e30a0e25f50ce9f6e4a432e42c5d7 - md5: 576d629e47797577ab0f1b351297ef4a - depends: - - python >=3.6 - license: BSD-3-Clause - license_family: BSD - size: 11065 - timestamp: 1615209567874 -- conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.2-pyhd8ed1ab_0.conda - sha256: 1823dc939b2c2b5354b6add5921434f9b873209a99569b3a2f24dca6c596c0d6 - md5: bf9c1698e819fab31f67dbab4256f7ba - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 15220 - timestamp: 1740094145914 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda - sha256: de7d0d094e53decc005cb13e527be2635b8f604978da497d4c0d282c7dc08385 - md5: b34c2833a1f56db610aeb27f206d800d - depends: - - __glibc >=2.17,<3.0.a0 - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - freetype >=2.12.1,<3.0a0 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libglib >=2.82.2,<3.0a0 - - libpng >=1.6.44,<1.7.0a0 - - libstdcxx >=13 - - libxcb >=1.17.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pixman >=0.44.2,<1.0a0 - - xorg-libice >=1.1.1,<2.0a0 - - xorg-libsm >=1.2.4,<2.0a0 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxrender >=0.9.11,<0.10.0a0 - license: LGPL-2.1-only or MPL-1.1 - size: 978868 - timestamp: 1733790976384 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.2-h6a3b0d2_1.conda - sha256: 9a28344e806b89c87fda0cdabd2fb961e5d2ff97107dba25bac9f5dc57220cc3 - md5: 8e3666c3f6e2c3e57aa261ab103a3600 - depends: - - __osx >=11.0 - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - freetype >=2.12.1,<3.0a0 - - icu >=75.1,<76.0a0 - - libcxx >=18 - - libexpat >=2.6.4,<3.0a0 - - libglib >=2.82.2,<3.0a0 - - libpng >=1.6.44,<1.7.0a0 - - libzlib >=1.3.1,<2.0a0 - - pixman >=0.44.2,<1.0a0 - license: LGPL-2.1-only or MPL-1.1 - size: 894517 - timestamp: 1733791145035 -- conda: https://conda.anaconda.org/conda-forge/win-64/cairo-1.18.2-h5782bbf_1.conda - sha256: 86fb783e19f7c46ad781d853b650f4cef1c3f2b1b07dd112afe1fc278bc73020 - md5: 63ff2bf400dde4fad0bed56debee5c16 - depends: - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - freetype >=2.12.1,<3.0a0 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libglib >=2.82.2,<3.0a0 - - libpng >=1.6.44,<1.7.0a0 - - libzlib >=1.3.1,<2.0a0 - - pixman >=0.44.2,<1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-2.1-only or MPL-1.1 - size: 1515969 - timestamp: 1733791355894 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda - sha256: 
3e85b3aa555b7ea989dc80c47d714d89086d388359855ee7e19da988f797698b - md5: ea213e31805199cb7d0da457b879ceed - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - matplotlib-base >=3.6 - - numpy >=1.19,<3 - - packaging >=21 - - pyproj >=3.3.1 - - pyshp >=2.3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - shapely >=1.8 - license: BSD-3-Clause - license_family: BSD - size: 1520747 - timestamp: 1728342419990 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cartopy-0.24.0-py312hcd31e36_0.conda - sha256: 0edb3f7385ae58280dfbb09c11bbb7e7d41e2e4a19657ee8b9ab84e939906e6f - md5: 07dc477c85e1b5b4dac919b9f53d22e6 - depends: - - __osx >=11.0 - - libcxx >=17 - - matplotlib-base >=3.6 - - numpy >=1.19,<3 - - packaging >=21 - - pyproj >=3.3.1 - - pyshp >=2.3 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - - shapely >=1.8 - license: BSD-3-Clause - license_family: BSD - size: 1501531 - timestamp: 1728342435897 -- conda: https://conda.anaconda.org/conda-forge/win-64/cartopy-0.24.0-py312h72972c8_0.conda - sha256: 0e3df8fc39026877a6808dbaf90e2697803b3316de078aef474cc0446d9052fb - md5: a02c6799b4908a046d36aa2f0b69c9bc - depends: - - matplotlib-base >=3.6 - - numpy >=1.19,<3 - - packaging >=21 - - pyproj >=3.3.1 - - pyshp >=2.3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - shapely >=1.8 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 1567026 - timestamp: 1728343083360 -- conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - sha256: 42a78446da06a2568cb13e69be3355169fbd0ea424b00fc80b7d840f5baaacf3 - md5: c207fa5ac7ea99b149344385a9c0880d - depends: - - python >=3.9 - license: ISC - size: 162721 - timestamp: 1739515973129 -- conda: https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda - sha256: 4672b9d13d3ac4578a389a38a07c0a94e424f5e554d4ad48c0b7b55cd6a15305 - md5: ed15dcf944706ae6ea54968dfa4a06a5 - depends: - - python >=3.10 - - xarray >=2022.03.0 - license: Apache-2.0 - license_family: APACHE - size: 62573 - timestamp: 1734437564292 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - sha256: cba6ea83c4b0b4f5b5dc59cb19830519b28f95d7ebef7c9c5cf1c14843621457 - md5: a861504bbea4161a9170b85d4d2be840 - depends: - - __glibc >=2.17,<3.0.a0 - - libffi >=3.4,<4.0a0 - - libgcc >=13 - - pycparser - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 294403 - timestamp: 1725560714366 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.17.1-py312h0fad829_0.conda - sha256: 8d91a0d01358b5c3f20297c6c536c5d24ccd3e0c2ddd37f9d0593d0f0070226f - md5: 19a5456f72f505881ba493979777b24e - depends: - - __osx >=11.0 - - libffi >=3.4,<4.0a0 - - pycparser - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 281206 - timestamp: 1725560813378 -- conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - sha256: ac007bf5fd56d13e16d95eea036433012f2e079dc015505c8a79efebbad1fcbc - md5: 08310c1a22ef957d537e547f8d484f92 - depends: - - pycparser - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 288142 - timestamp: 1725560896359 -- conda: 
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda - sha256: d5696636733b3c301054b948cdd793f118efacce361d9bd4afb57d5980a9064f - md5: 57df494053e17dce2ac3a0b33e1b2a2e - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 12973 - timestamp: 1734267180483 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda - sha256: f881ead7671e89367003eaedcba8108828661d01d6fb1e160a6ad93145301328 - md5: 990033147b0a998e756eaaed6b28f48d - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 247446 - timestamp: 1725400651615 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cftime-1.6.4-py312h755e627_1.conda - sha256: fe33603ceba5022485da697d6dada0cf4624638ab10465b86203ed5335f38e27 - md5: 4bc8fd608d8c259fd10fdcac6b4b6c12 - depends: - - __osx >=11.0 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 200432 - timestamp: 1725400849542 -- conda: https://conda.anaconda.org/conda-forge/win-64/cftime-1.6.4-py312h1a27103_1.conda - sha256: 24d85f9737258940b6de2d52c5bb3e8deaead62849b4992f32f5d2c5d6244373 - md5: dc76be2943a23a41c999fa0c233fc345 - depends: - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 178922 - timestamp: 1725401137650 -- conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - sha256: 4e0ee91b97e5de3e74567bdacea27f0139709fceca4db8adffbe24deffccb09b - md5: e83a31202d1c0a000fce3e9cf3825875 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 47438 - timestamp: 1735929811779 -- conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda - sha256: c920d23cd1fcf565031c679adb62d848af60d6fbb0edc2d50ba475cea4f0d8ab - md5: f22f4d4970e09d68a10b922cbb0408d3 - depends: - - __unix - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 84705 - timestamp: 1734858922844 -- conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh7428d3b_0.conda - sha256: c889ed359ae47eead4ffe8927b7206b22c55e67d6e74a9044c23736919d61e8d - md5: 90e5571556f7a45db92ee51cb8f97af6 - depends: - - __win - - colorama - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 85169 - timestamp: 1734858972635 -- conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda - sha256: e7e2371a2561fbda9d50deb895d56fb16ccefe54f6d81b35ba8f1d33d3cc6957 - md5: 82bea35e4dac4678ba623cf10e95e375 - depends: - - click >=3.0 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 12057 - timestamp: 1733731217399 -- conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda - sha256: 1a52ae1febfcfb8f56211d1483a1ac4419b0028b7c3e9e61960a298978a42396 - md5: 55c7804f428719241a90b152016085a1 - depends: - - click >=4.0 - - python >=3.9,<4.0 - license: BSD-3-Clause - license_family: BSD - size: 12521 - timestamp: 1733750069604 -- conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda - sha256: 21ecead7268241007bf65691610cd7314da68c1f88113092af690203b5780db5 - md5: 364ba6c9fb03886ac979b482f39ebb92 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 
25870 - timestamp: 1736947650712 -- conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda - sha256: ab29d57dc70786c1269633ba3dff20288b81664d3ff8d21af995742e2bb03287 - md5: 962b9857ee8e7018c22f2776ffa0b2d7 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 27011 - timestamp: 1733218222191 -- conda: https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda - sha256: 46055a0524ed3a48b23cd27a52246c89ac059ccce90a50b2eeb84d2f833ae827 - md5: 91d7152c744dc0f18ef8beb3cbc9980a - depends: - - python >=3.9 - license: CC-BY-4.0 - size: 173950 - timestamp: 1734007415513 -- conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda - sha256: 7e87ef7c91574d9fac19faedaaee328a70f718c9b4ddadfdc0ba9ac021bd64af - md5: 74673132601ec2b7fc592755605f4c1b - depends: - - python >=3.9 - - traitlets >=5.3 - license: BSD-3-Clause - license_family: BSD - size: 12103 - timestamp: 1733503053903 -- conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda - sha256: e977af50b844b5b8cfec358131a4e923f0aa718e8334321cf8d84f5093576259 - md5: f5fbba0394ee45e9a64a73c2a994126a - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - numpy >=1.23 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 276332 - timestamp: 1731428454756 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.1-py312hb23fbb9_0.conda - sha256: fa1f8505f45eac22f25c48cd46809da0d26bcb028c37517b3474bacddd029b0a - md5: f4408290387836e05ac267cd7ec80c5c - depends: - - __osx >=11.0 - - libcxx >=18 - - numpy >=1.23 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 245638 - timestamp: 1731428781337 -- conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.1-py312hd5eb7cc_0.conda - sha256: b5643ea0dd0bf57e1847679f5985feb649289de872b85c3db900f4110ac83cdd - md5: 83f7a2ec652abd37a178e35493dfd029 - depends: - - numpy >=1.23 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 216484 - timestamp: 1731428831843 -- conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.9-py312hd8ed1ab_0.conda - noarch: generic - sha256: f5c7ad0bd23fa8645ac279d99bddba656ff61483dc6312af12aae13910dfb210 - md5: a5b10f166467fecec692abaee84d16aa - depends: - - python 3.12.9.* - - python_abi * *_cp312 - license: Python-2.0 - size: 44836 - timestamp: 1739519561557 -- conda: https://conda.anaconda.org/conda-forge/linux-64/crc32c-2.7.1-py312h66e93f0_0.conda - sha256: 1a56db4294d49d8c5a9b44be20a7ac034ec9e1913c204a72d0a2b65a1a9086a8 - md5: 85a6111a6fb1b1e569e68484df606963 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: GNU Lesser General Public v2 or later (LGPLv2+) - license_family: LGPL - size: 49381 - timestamp: 1727173033763 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/crc32c-2.7.1-py312hea69d52_0.conda - sha256: 48540e34d6f28dbc64da25c60b57536b9ee5948eaa79cff58288b46caaa85b65 - md5: 5e2553ce7523be62e5dc1348515b5bfe - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: GNU Lesser General Public v2 or later (LGPLv2+) - license_family: LGPL - 
size: 47837 - timestamp: 1736964240234 -- conda: https://conda.anaconda.org/conda-forge/win-64/crc32c-2.7.1-py312h4389bb4_0.conda - sha256: 0d0d151ad663a4cb8470db72b715c0d773688002cccbf738d6c25c07b25e6386 - md5: 8ee9d7611da87df1fca2659c1810836e - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: GNU Lesser General Public v2 or later (LGPLv2+) - license_family: LGPL - size: 52279 - timestamp: 1727173561347 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.1-py312hda17c39_0.conda - sha256: d52873bbcdc2979a4a0f1a0a84e461e9d9113246738b762e1425a7f1a1c67042 - md5: 6e8c59c750da59e0b97bed7b2e44029d - depends: - - __glibc >=2.17,<3.0.a0 - - cffi >=1.12 - - libgcc >=13 - - openssl >=3.4.0,<4.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - constrains: - - __glibc >=2.17 - license: Apache-2.0 AND BSD-3-Clause AND PSF-2.0 AND MIT - license_family: BSD - size: 1591295 - timestamp: 1739299240416 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-44.0.1-py312hf9bd80e_0.conda - sha256: c8fb6557dd84461e68b6106a3dac6412ba2ebab11f2dcc41c1d9219048aa5d79 - md5: 75315ace218049c951167bad6634656c - depends: - - __osx >=11.0 - - cffi >=1.12 - - openssl >=3.4.0,<4.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - constrains: - - __osx >=11.0 - license: Apache-2.0 AND BSD-3-Clause AND PSF-2.0 AND MIT - license_family: BSD - size: 1479866 - timestamp: 1739299535869 -- conda: https://conda.anaconda.org/conda-forge/win-64/cryptography-44.0.1-py312h9500af3_0.conda - sha256: 2071164f11a935d901eaa3daffc9235c636c7f0e7e7ed329be687c2596714b7e - md5: 608bb166d3ec5d6c000de25e87df152a - depends: - - cffi >=1.12 - - openssl >=3.4.0,<4.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 AND BSD-3-Clause AND PSF-2.0 AND MIT - license_family: BSD - size: 1349776 - timestamp: 1739299853352 -- conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda - sha256: 9827efa891e507a91a8a2acf64e210d2aff394e1cde432ad08e1f8c66b12293c - md5: 44600c4667a319d67dbe0681fc0bc833 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 13399 - timestamp: 1733332563512 -- conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda - sha256: 63a64d4e71148c4efd8db17b4a19b8965990d1e08ed2e24b84bc36b6c166a705 - md5: 6198b134b1c08173f33653896974d477 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - toolz >=0.10.0 - license: BSD-3-Clause - license_family: BSD - size: 394309 - timestamp: 1734107344014 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.0.1-py312hea69d52_0.conda - sha256: 0df5e51c5598d5c098ac79c249f42f04bd6cb77969bc91a832c1ee763e40f55a - md5: e674d71e573746c29e99659a00391809 - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - - toolz >=0.10.0 - license: BSD-3-Clause - license_family: BSD - size: 338844 - timestamp: 1734107464832 -- conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.1-py312h4389bb4_0.conda - sha256: e657e468fdae72302951bba92f94bcb31566a237e5f979a7dd205603a0750b59 - md5: fba0567971249f5d0cce4d35b1184c75 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* 
*_cp312 - - toolz >=0.10.0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 316347 - timestamp: 1734107735311 -- conda: https://conda.anaconda.org/conda-forge/noarch/dask-2025.2.0-pyhd8ed1ab_0.conda - sha256: 8be4982c98f4829a92b690dd47f516474d8e69d00f992bbf89764e08d535b679 - md5: 60455cddc5f868d7ad37a504ff4ffd37 - depends: - - bokeh >=3.1.0 - - cytoolz >=0.11.0 - - dask-core >=2025.2.0,<2025.2.1.0a0 - - distributed >=2025.2.0,<2025.2.1.0a0 - - jinja2 >=2.10.3 - - lz4 >=4.3.2 - - numpy >=1.24 - - pandas >=2.0 - - pyarrow >=14.0.1 - - python >=3.10 - constrains: - - openssl !=1.1.1e - license: BSD-3-Clause - license_family: BSD - size: 7598 - timestamp: 1739495288724 -- conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.2.0-pyhd8ed1ab_0.conda - sha256: 22ae6c5125a08cfe6569eb729900ba7fb96320e66fe08de1c32f1191eb7e08af - md5: 3bc22d25e3ee83d709804a2040b4463c - depends: - - click >=8.1 - - cloudpickle >=3.0.0 - - fsspec >=2021.09.0 - - importlib-metadata >=4.13.0 - - packaging >=20.0 - - partd >=1.4.0 - - python >=3.10 - - pyyaml >=5.3.1 - - toolz >=0.10.0 - license: BSD-3-Clause - license_family: BSD - size: 968347 - timestamp: 1739488681467 -- conda: https://conda.anaconda.org/conda-forge/noarch/dask-labextension-7.0.0-pyhd8ed1ab_1.conda - sha256: a157787c6c524686d200d35f77ffc12e22d2bcd13e2de3d25c70842d9d3e0ac2 - md5: 9aef5e5bebe4a054efd88c298beaeae8 - depends: - - bokeh >=1.0.0,!=2.0.0 - - distributed >=1.24.1 - - jupyter-server-proxy >=1.3.2 - - jupyterlab >=4.0.0,<5 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 39682 - timestamp: 1735227027456 -- conda: https://conda.anaconda.org/conda-forge/noarch/dataclasses-0.8-pyhc8e2a94_3.tar.bz2 - sha256: 63a83e62e0939bc1ab32de4ec736f6403084198c4639638b354a352113809c92 - md5: a362b2124b06aad102e2ee4581acee7d - depends: - - python >=3.7 - license: Apache-2.0 - license_family: APACHE - size: 9870 - timestamp: 1628958582931 -- conda: https://conda.anaconda.org/conda-forge/noarch/datashader-0.17.0-pyhd8ed1ab_0.conda - sha256: 85b1ba89064501b4ae9695aeb819db4acfec74cd3acbb56df11c93b91ac0cbd4 - md5: e7511f05f4938cb0d988026507fed69a - depends: - - colorcet - - multipledispatch - - numba - - numpy - - packaging - - pandas - - param - - pyct - - python >=3.10 - - requests - - scipy - - toolz - - xarray - license: BSD-3-Clause - license_family: BSD - size: 17226809 - timestamp: 1738222917452 -- conda: https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda - sha256: 22053a5842ca8ee1cf8e1a817138cdb5e647eb2c46979f84153f6ad7bde73020 - md5: 418c6ca5929a611cbd69204907a83995 - depends: - - libgcc-ng >=12 - license: BSD-2-Clause - license_family: BSD - size: 760229 - timestamp: 1685695754230 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/dav1d-1.2.1-hb547adb_0.conda - sha256: 93e077b880a85baec8227e8c72199220c7f87849ad32d02c14fb3807368260b8 - md5: 5a74cdee497e6b65173e10d94582fae6 - license: BSD-2-Clause - license_family: BSD - size: 316394 - timestamp: 1685695959391 -- conda: https://conda.anaconda.org/conda-forge/win-64/dav1d-1.2.1-hcfcfb64_0.conda - sha256: 2aa2083c9c186da7d6f975ccfbef654ed54fff27f4bc321dbcd12cee932ec2c4 - md5: ed2c27bda330e3f0ab41577cf8b9b585 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 618643 - timestamp: 1685696352968 -- conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - 
sha256: 8f5f995699a2d9dbdd62c61385bfeeb57c82a681a7c8c5313c395aa0ccab68a5 - md5: ecfff944ba3960ecb334b9a2663d708d - depends: - - expat >=2.4.2,<3.0a0 - - libgcc-ng >=9.4.0 - - libglib >=2.70.2,<3.0a0 - license: GPL-2.0-or-later - license_family: GPL - size: 618596 - timestamp: 1640112124844 -- conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.12-py312h2ec8cdc_0.conda - sha256: f88c3a7ff384d1726aea2cb2342cf67f1502915391860335c40ab81d7e381e30 - md5: 6be6dcb4bffd1d456bdad28341d507bd - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 2646757 - timestamp: 1737269937348 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.12-py312hd8f9ff3_0.conda - sha256: 0ba7ba5f5529bd9cf103d4684e2e9af8a7791a8732c3a0ac689f2d6f2223feca - md5: 92ebf61ce320b7060ead08666dbc9369 - depends: - - __osx >=11.0 - - libcxx >=18 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 2564438 - timestamp: 1737270030625 -- conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.12-py312h275cf98_0.conda - sha256: e171edeeb28bb8d8a10bc6040606a25490827590c73bfcbdfb1cfc45b2b1523d - md5: 62f81383dba2fb096df3ee7b0df1467f - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 3672189 - timestamp: 1737270151760 -- conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda - sha256: 84e5120c97502a3785e8c3241c3bf51f64b4d445f13b4d2445db00d9816fe479 - md5: d622d8d7ee8868870f9cbe259f381181 - depends: - - python >=3.9 - license: BSD-2-Clause - license_family: BSD - size: 14068 - timestamp: 1733236549190 -- conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - sha256: 9717a059677553562a8f38ff07f3b9f61727bd614f505658b0a5ecbcf8df89be - md5: 961b3a227b437d82ad7054484cfa71b2 - depends: - - python >=3.6 - license: PSF-2.0 - license_family: PSF - size: 24062 - timestamp: 1615232388757 -- conda: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.18-pyhd8ed1ab_0.conda - sha256: d614bcff10696f1efc714df07651b50bf3808401fcc03814309ecec242cc8870 - md5: 0cef44b1754ae4d6924ac0eef6b9fdbe - depends: - - python >=3.9 - - wrapt <2,>=1.10 - license: MIT - license_family: MIT - size: 14382 - timestamp: 1737987072859 -- conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda - sha256: 0e160c21776bd881b79ce70053e59736f51036784fa43a50da10a04f0c1b9c45 - md5: 8d88f4a2242e6b96f9ecff9a6a05b2f1 - depends: - - python >=3.9 - license: Apache-2.0 - license_family: APACHE - size: 274151 - timestamp: 1733238487461 -- conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2025.2.0-pyhd8ed1ab_0.conda - sha256: ccac7437df729ea2f249aef22b6e412ea7c63722cc094c4708d35453518b5c6d - md5: 54562a2b30c8f357097e2be75295601e - depends: - - click >=8.0 - - cloudpickle >=3.0.0 - - cytoolz >=0.11.2 - - dask-core >=2025.2.0,<2025.2.1.0a0 - - jinja2 >=2.10.3 - - locket >=1.0.0 - - msgpack-python >=1.0.2 - - packaging >=20.0 - - psutil >=5.8.0 - - python >=3.10 - - pyyaml >=5.4.1 - - sortedcontainers >=2.0.5 - - tblib >=1.6.0 - - toolz >=0.11.2 - - tornado >=6.2.0 - - urllib3 >=1.26.5 - - zict >=3.0.0 - constrains: - - openssl !=1.1.1e - license: BSD-3-Clause - license_family: BSD - size: 800317 - 
timestamp: 1739491744587 -- conda: https://conda.anaconda.org/conda-forge/noarch/docopt-ng-0.9.0-pyhd8ed1ab_1.conda - sha256: fb8c1b918b3c28ff9cdf21279aad9a50a659dd3bcbdb95d687044fb35b58b2df - md5: 7635e4907164a088d932f7d8965db7ab - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 21886 - timestamp: 1734684011188 -- conda: https://conda.anaconda.org/conda-forge/linux-64/docutils-0.20.1-py312h7900ff3_3.conda - sha256: b9fb75d806afc53d9d7b98edb0c45ac38a3cc983916b8dac4ad7ddac5c18a024 - md5: 1b90835ae26b9b8250b302649359a989 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: CC-PDDC AND BSD-3-Clause AND BSD-2-Clause AND ZPL-2.1 - size: 898253 - timestamp: 1701882735141 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/docutils-0.20.1-py312h81bd7bf_3.conda - sha256: e1ad41c6401ab2ada143d6e1dbbe6ae8afbe3e651211bb414b9ae1f0f8c13249 - md5: 50ea9a1ab48349f343ca2ae82833bda4 - depends: - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: CC-PDDC AND BSD-3-Clause AND BSD-2-Clause AND ZPL-2.1 - size: 901757 - timestamp: 1701883118423 -- conda: https://conda.anaconda.org/conda-forge/win-64/docutils-0.20.1-py312h2e8e312_3.conda - sha256: c00c5f6d840da8cde00794f793bc624139321db2249f633486e3e21f1f831741 - md5: 3adb364864b61a8cb8a530d8c960762c - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: CC-PDDC AND BSD-3-Clause AND BSD-2-Clause AND ZPL-2.1 - size: 951313 - timestamp: 1701883281097 -- conda: https://conda.anaconda.org/conda-forge/noarch/donfig-0.8.1.post1-pyhd8ed1ab_1.conda - sha256: d58e97d418f71703e822c422af5b9c431e3621a0ecdc8b0334c1ca33e076dfe7 - md5: c56a7fa5597ad78b62e1f5d21f7f8b8f - depends: - - python >=3.9 - - pyyaml - license: MIT - license_family: MIT - size: 22491 - timestamp: 1734368817583 -- conda: https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-h166bdaf_1.tar.bz2 - sha256: 1e58ee2ed0f4699be202f23d49b9644b499836230da7dd5b2f63e6766acff89e - md5: a089d06164afd2d511347d3f87214e0b - depends: - - libgcc-ng >=10.3.0 - license: MIT - license_family: MIT - size: 1440699 - timestamp: 1648505042260 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/epoxy-1.5.10-h1c322ee_1.tar.bz2 - sha256: 8b93dbebab0fe12ece4767e6a2dc53a6600319ece0b8ba5121715f28c7b0f8d1 - md5: 20dd7359a6052120d52e1e13b4c818b9 - license: MIT - license_family: MIT - size: 355201 - timestamp: 1648505273975 -- conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - sha256: cbde2c64ec317118fc06b223c5fd87c8a680255e7348dd60e7b292d2e103e701 - md5: a16662747cdeb9abbac74d0057cc976e - depends: - - python >=3.9 - license: MIT and PSF-2.0 - size: 20486 - timestamp: 1733208916977 -- conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda - sha256: 28d25ea375ebab4bf7479228f8430db20986187b04999136ff5c722ebd32eb60 - md5: ef8b5fca76806159fc25b4f48d8737eb - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 28348 - timestamp: 1733569440265 -- conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda - sha256: 1848c7db9e264e3b8036ee133d570dd880422983cd20dd9585a505289606d276 - md5: 1d6afef758879ef5ee78127eb4cd2c4a - depends: - - __glibc >=2.17,<3.0.a0 - - libexpat 2.6.4 h5888daf_0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 138145 - timestamp: 1730967050578 -- conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.17.0-pyhd8ed1ab_0.conda - sha256: 
006d7e5a0c17a6973596dd86bfc80d74ce541144d2aee2d22d46fd41df560a63 - md5: 7f402b4a1007ee355bc50ce4d24d4a57 - depends: - - python >=3.9 - license: Unlicense - size: 17544 - timestamp: 1737517924333 -- conda: https://conda.anaconda.org/conda-forge/noarch/flexcache-0.3-pyhd8ed1ab_1.conda - sha256: acdb7b73d84268773fcc8192965994554411edc488ec3447925a62154e9d3baa - md5: f1e618f2f783427019071b14a111b30d - depends: - - python >=3.9 - - typing-extensions - license: BSD-3-Clause - license_family: BSD - size: 16674 - timestamp: 1733663669958 -- conda: https://conda.anaconda.org/conda-forge/noarch/flexparser-0.4-pyhd8ed1ab_1.conda - sha256: 9bdad0cd9fb6d67e48798c03930d634ea2d33a894d30439d3d7bdffd3c21af7b - md5: 6dc4e43174cd552452fdb8c423e90e69 - depends: - - python >=3.9 - - typing-extensions - - typing_extensions - license: BSD-3-Clause - license_family: BSD - size: 28686 - timestamp: 1733663636245 -- conda: https://conda.anaconda.org/conda-forge/noarch/flox-0.10.0-pyhd8ed1ab_0.conda - sha256: 59638d04cfa726c982a42edd096fa0a81e8283eac0d8f7d8b0ca0d98c3dc500e - md5: 93b0fb0f6eba16b40f19d2b7f297ac71 - depends: - - numpy >=1.22 - - numpy_groupies >=0.9.19 - - packaging >=21.3 - - pandas >=1.5 - - python >=3.10 - - scipy >=1.9 - - toolz - constrains: - - numbagg >=0.6 - license: Apache-2.0 - license_family: APACHE - size: 63582 - timestamp: 1738215850712 -- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - sha256: 58d7f40d2940dd0a8aa28651239adbf5613254df0f75789919c4e6762054403b - md5: 0c96522c6bdaed4b1566d11387caaf45 - license: BSD-3-Clause - license_family: BSD - size: 397370 - timestamp: 1566932522327 -- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - sha256: c52a29fdac682c20d252facc50f01e7c2e7ceac52aa9817aaf0bb83f7559ec5c - md5: 34893075a5c9e55cdafac56607368fc6 - license: OFL-1.1 - license_family: Other - size: 96530 - timestamp: 1620479909603 -- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - sha256: 00925c8c055a2275614b4d983e1df637245e19058d79fc7dd1a93b8d9fb4b139 - md5: 4d59c254e01d9cde7957100457e2d5fb - license: OFL-1.1 - license_family: Other - size: 700814 - timestamp: 1620479612257 -- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - sha256: 2821ec1dc454bd8b9a31d0ed22a7ce22422c0aef163c59f49dfdf915d0f0ca14 - md5: 49023d73832ef61042f6a237cb2687e7 - license: LicenseRef-Ubuntu-Font-Licence-Version-1.0 - license_family: Other - size: 1620504 - timestamp: 1727511233259 -- conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda - sha256: 7093aa19d6df5ccb6ca50329ef8510c6acb6b0d8001191909397368b65b02113 - md5: 8f5b0b297b59e1ac160ad4beec99dbee - depends: - - __glibc >=2.17,<3.0.a0 - - freetype >=2.12.1,<3.0a0 - - libexpat >=2.6.3,<3.0a0 - - libgcc >=13 - - libuuid >=2.38.1,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - license: MIT - license_family: MIT - size: 265599 - timestamp: 1730283881107 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.15.0-h1383a14_1.conda - sha256: f79d3d816fafbd6a2b0f75ebc3251a30d3294b08af9bb747194121f5efa364bc - md5: 7b29f48742cea5d1ccb5edd839cb5621 - depends: - - __osx >=11.0 - - freetype >=2.12.1,<3.0a0 - - libexpat >=2.6.3,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - license: MIT - license_family: MIT - size: 234227 - timestamp: 1730284037572 -- conda: https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.15.0-h765892d_1.conda - 
sha256: ed122fc858fb95768ca9ca77e73c8d9ddc21d4b2e13aaab5281e27593e840691 - md5: 9bb0026a2131b09404c59c4290c697cd - depends: - - freetype >=2.12.1,<3.0a0 - - libexpat >=2.6.3,<3.0a0 - - libiconv >=1.17,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 192355 - timestamp: 1730284147944 -- conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - sha256: a997f2f1921bb9c9d76e6fa2f6b408b7fa549edd349a77639c9fe7a23ea93e61 - md5: fee5683a3f04bd15cbd8318b096a27ab - depends: - - fonts-conda-forge - license: BSD-3-Clause - license_family: BSD - size: 3667 - timestamp: 1566974674465 -- conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - sha256: 53f23a3319466053818540bcdf2091f253cbdbab1e0e9ae7b9e509dcaa2a5e38 - md5: f766549260d6815b0c52253f1fb1bb29 - depends: - - font-ttf-dejavu-sans-mono - - font-ttf-inconsolata - - font-ttf-source-code-pro - - font-ttf-ubuntu - license: BSD-3-Clause - license_family: BSD - size: 4102 - timestamp: 1566932280397 -- conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.56.0-py312h178313f_0.conda - sha256: 76ca95b4111fe27e64d74111b416b3462ad3db99f7109cbdf50e6e4b67dcf5b7 - md5: 2f8a66f2f9eb931cdde040d02c6ab54c - depends: - - __glibc >=2.17,<3.0.a0 - - brotli - - libgcc >=13 - - munkres - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - unicodedata2 >=15.1.0 - license: MIT - license_family: MIT - size: 2834054 - timestamp: 1738940929849 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.56.0-py312h998013c_0.conda - sha256: 6b003a5100ec58e1bd456bf55d0727606f7b067628aed1a7c5d8cf4f0174bfc5 - md5: a5cf7d0629863be81d90054882de908c - depends: - - __osx >=11.0 - - brotli - - munkres - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - - unicodedata2 >=15.1.0 - license: MIT - license_family: MIT - size: 2753059 - timestamp: 1738940607300 -- conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.56.0-py312h31fea79_0.conda - sha256: 31f245d4ceb7a8e9df8d292ff1efdb4be9a8fa7a9be7a1d0394465aa7f824d50 - md5: 7c08698c54ca6390314c19167c16745e - depends: - - brotli - - munkres - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - unicodedata2 >=15.1.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 2413563 - timestamp: 1738940929060 -- conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda - sha256: 2509992ec2fd38ab27c7cdb42cf6cadc566a1cc0d1021a2673475d9fa87c6276 - md5: d3549fd50d450b6d9e7dddff25dd2110 - depends: - - cached-property >=1.3.0 - - python >=3.9,<4 - license: MPL-2.0 - license_family: MOZILLA - size: 16705 - timestamp: 1733327494780 -- conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - sha256: b2e3c449ec9d907dd4656cb0dc93e140f447175b125a3824b31368b06c666bb6 - md5: 9ae35c3d96db2c94ce0cef86efdfa2cb - depends: - - libgcc-ng >=12 - - libpng >=1.6.39,<1.7.0a0 - - libzlib >=1.2.13,<2.0.0a0 - license: GPL-2.0-only OR FTL - size: 634972 - timestamp: 1694615932610 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.12.1-hadb7bae_2.conda - sha256: 791673127e037a2dc0eebe122dc4f904cb3f6e635bb888f42cbe1a76b48748d9 - md5: e6085e516a3e304ce41a8ee08b9b89ad - depends: - - libpng >=1.6.39,<1.7.0a0 - - libzlib >=1.2.13,<2.0.0a0 - license: GPL-2.0-only OR FTL - size: 596430 - 
timestamp: 1694616332835 -- conda: https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-hdaf720e_2.conda - sha256: 2c53ee8879e05e149a9e525481d36adfd660a6abda26fd731376fa64ff03e728 - md5: 3761b23693f768dc75a8fd0a73ca053f - depends: - - libpng >=1.6.39,<1.7.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: GPL-2.0-only OR FTL - size: 510306 - timestamp: 1694616398888 -- conda: https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda - sha256: c8960e00a6db69b85c16c693ce05484facf20f1a80430552145f652a880e0d2a - md5: ecb5d11305b8ba1801543002e69d2f2f - depends: - - __glibc >=2.17,<3.0.a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libiconv >=1.17,<2.0a0 - - minizip >=4.0.7,<5.0a0 - license: MPL-1.1 - license_family: MOZILLA - size: 59299 - timestamp: 1734014884486 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/freexl-2.0.0-h3ab3353_2.conda - sha256: b4146ac9ba1676494e3d812ca39664dd7dd454e4d0984f3665fd6feec318c71c - md5: dd655a29b40fe0d1bf95c64cf3cb348d - depends: - - __osx >=11.0 - - libexpat >=2.6.4,<3.0a0 - - libiconv >=1.17,<2.0a0 - - minizip >=4.0.7,<5.0a0 - license: MPL-1.1 - license_family: MOZILLA - size: 53378 - timestamp: 1734014980768 -- conda: https://conda.anaconda.org/conda-forge/win-64/freexl-2.0.0-hf297d47_2.conda - sha256: 1e62cbc6daa74656034dc4a6e58faa2d50291719c1cba53cc0b1946f0d2b9404 - md5: d6a8059de245e53478b581742b53f71d - depends: - - libexpat >=2.6.4,<3.0a0 - - libiconv >=1.17,<2.0a0 - - minizip >=4.0.7,<5.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MPL-1.1 - license_family: MOZILLA - size: 77528 - timestamp: 1734015193826 -- conda: https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2 - sha256: 5d7b6c0ee7743ba41399e9e05a58ccc1cfc903942e49ff6f677f6e423ea7a627 - md5: ac7bc6a654f8f41b352b38f4051135f8 - depends: - - libgcc-ng >=7.5.0 - license: LGPL-2.1 - size: 114383 - timestamp: 1604416621168 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fribidi-1.0.10-h27ca646_0.tar.bz2 - sha256: 4b37ea851a2cf85edf0a63d2a63266847ec3dcbba4a31156d430cdd6aa811303 - md5: c64443234ff91d70cb9c7dc926c58834 - license: LGPL-2.1 - size: 60255 - timestamp: 1604417405528 -- conda: https://conda.anaconda.org/conda-forge/win-64/fribidi-1.0.10-h8d14728_0.tar.bz2 - sha256: e0323e6d7b6047042970812ee810c6b1e1a11a3af4025db26d0965ae5d206104 - md5: 807e81d915f2bb2e49951648615241f6 - depends: - - vc >=14.1,<15.0a0 - - vs2015_runtime >=14.16.27012 - license: LGPL-2.1 - size: 64567 - timestamp: 1604417122064 -- conda: https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h178313f_1.conda - sha256: 501e20626798b6d7f130f4db0fb02c0385d8f4c11ca525925602a4208afb343f - md5: fb986e1c089021979dc79606af78ef8f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: APACHE - size: 60939 - timestamp: 1737645356438 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/frozenlist-1.5.0-py312h998013c_1.conda - sha256: d503ac8c050abdbd129253973f23be34944978d510de78ef5a3e6aa1e3d9552d - md5: 5eb3715c7e3fa9b533361375bfefe6ee - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: APACHE - size: 57256 - timestamp: 1737645503377 -- conda: 
https://conda.anaconda.org/conda-forge/win-64/frozenlist-1.5.0-py312h31fea79_1.conda - sha256: cd76ed5939c4a8ade6305fb204c8816a4ba1e1dfed8c589aa05ec4005369c5cd - md5: 2f8a0fcbbd39e3c5d363a1918198d434 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: APACHE - size: 54592 - timestamp: 1737645777248 -- conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.2.0-pyhd8ed1ab_0.conda - sha256: 7433b8469074985b651693778ec6f03d2a23fad9919a515e3b8545996b5e721a - md5: d9ea16b71920b03beafc17fcca16df90 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 138186 - timestamp: 1738501352608 -- conda: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2025.2.0-pyhd8ed1ab_0.conda - sha256: a70d302d860f32bf7080bc015a4d0d2d8c26b2aecfd597d4ae60644fef42f7b5 - md5: 938f33e5b3a56cfeb7f60df98afdf548 - depends: - - aiohttp - - decorator >4.1.2 - - fsspec 2025.2.0 - - google-auth >=1.2 - - google-auth-oauthlib - - google-cloud-storage >1.40 - - python >=3.9 - - requests - license: BSD-3-Clause - license_family: BSD - size: 37641 - timestamp: 1738526426772 -- conda: https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda - sha256: d5283b95a8d49dcd88d29b360d8b38694aaa905d968d156d72ab71d32b38facb - md5: 201db6c2d9a3c5e46573ac4cb2e92f4f - depends: - - libgcc-ng >=12 - - libglib >=2.80.2,<3.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libpng >=1.6.43,<1.7.0a0 - - libtiff >=4.6.0,<4.8.0a0 - license: LGPL-2.1-or-later - license_family: LGPL - size: 528149 - timestamp: 1715782983957 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gdk-pixbuf-2.42.12-h7ddc832_0.conda - sha256: 72bcf0a4d3f9aa6d99d7d1d224d19f76ccdb3a4fa85e60f77d17e17985c81bd2 - md5: 151309a7e1eb57a3c2ab8088a1d74f3e - depends: - - __osx >=11.0 - - libglib >=2.80.2,<3.0a0 - - libintl >=0.22.5,<1.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libpng >=1.6.43,<1.7.0a0 - - libtiff >=4.6.0,<4.8.0a0 - license: LGPL-2.1-or-later - license_family: LGPL - size: 509570 - timestamp: 1715783199780 -- conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda - sha256: 5c70d6d16e044859edca85feb9d4f1c3c6062aaf88d650826f5ccdf8c44336de - md5: 40b4ab956c90390e407bb177f8a58bab - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - license: LGPL-2.1-only - size: 1869233 - timestamp: 1725676083126 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/geos-3.13.0-hf9b8971_0.conda - sha256: 273381020b72bde1597d4e07e855ed50ffac083512e61ccbdd99d93f03c6cbf2 - md5: 45b2e9adb9663644b1eefa5300b9eef3 - depends: - - __osx >=11.0 - - libcxx >=17 - license: LGPL-2.1-only - size: 1481430 - timestamp: 1725676193541 -- conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda - sha256: 2b46d6f304f70dfca304169299908b558bd1e83992acb5077766eefa3d3fe35f - md5: 08a30fe29a645fc5c768c0968db116d3 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-2.1-only - size: 1665961 - timestamp: 1725676536384 -- conda: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.4-h3551947_0.conda - sha256: a5c6bf5654cf7e96d44aaac68b4b654a9e148b811e5b0f36ba7d70db87416fff - md5: 5998212641e3feb3660295eacc717139 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libstdcxx >=13 - - libtiff >=4.7.0,<4.8.0a0 - - libzlib >=1.3.1,<2.0a0 - - proj >=9.5.1,<9.6.0a0 - - 
zlib - license: MIT - license_family: MIT - size: 129359 - timestamp: 1739974781272 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/geotiff-1.7.4-hbef4fa4_0.conda - sha256: e0d914bab03a578ace37cb45446249f8e23a36d80cf866e37c582e7f9d6eca0e - md5: c01fde51346f834d3f80affab45f0740 - depends: - - __osx >=11.0 - - libcxx >=18 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libzlib >=1.3.1,<2.0a0 - - proj >=9.5.1,<9.6.0a0 - - zlib - license: MIT - license_family: MIT - size: 112457 - timestamp: 1739974826028 -- conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.4-h887f4e7_0.conda - sha256: f0435dd63c97ad9e8d35a2c1d55c823c50dac82a8dffd8d41c79c0305fa0cc2b - md5: d5edee34ab83553450b1225cf0a14273 - depends: - - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libzlib >=1.3.1,<2.0a0 - - proj >=9.5.1,<9.6.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zlib - license: MIT - license_family: MIT - size: 123341 - timestamp: 1739975151946 -- conda: https://conda.anaconda.org/conda-forge/noarch/geoviews-core-1.14.0-pyha770c72_0.conda - sha256: 34af4b5a1ef9360418ddb45ccf06b093b5d222909816432a889f327107209789 - md5: 7363b7d4fc02240b69c81ad96af5611e - depends: - - bokeh >=3.6.0 - - cartopy >=0.18.0 - - holoviews >=1.16.0 - - numpy >=1.0 - - packaging - - panel >=1.0.0 - - param >=1.9.3,<3.0 - - pyproj - - python >=3.10 - - shapely - - xyzservices - constrains: - - geoviews 1.14.0 - license: BSD-3-Clause - license_family: BSD - size: 403670 - timestamp: 1734509718143 -- conda: https://conda.anaconda.org/conda-forge/win-64/getopt-win32-0.1-hcfcfb64_1.conda - sha256: f3b6e689724a62f36591f6f0e4657db5507feca78e7ef08690a6b2a384216a5c - md5: 714d0882dc5e692ca4683d8e520f73c6 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-3.0-only - license_family: GPL - size: 21903 - timestamp: 1694400856979 -- conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda - sha256: 6c33bf0c4d8f418546ba9c250db4e4221040936aef8956353bc764d4877bc39a - md5: d411fc29e338efb48c5fd4576d71d881 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - license: BSD-3-Clause - license_family: BSD - size: 119654 - timestamp: 1726600001928 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda - sha256: fd56ed8a1dab72ab90d8a8929b6f916a6d9220ca297ff077f8f04c5ed3408e20 - md5: 57a511a5905caa37540eb914dfcbf1fb - depends: - - __osx >=11.0 - - libcxx >=17 - license: BSD-3-Clause - license_family: BSD - size: 82090 - timestamp: 1726600145480 -- conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - sha256: aac402a8298f0c0cc528664249170372ef6b37ac39fdc92b40601a6aed1e32ff - md5: 3bf7b9fd5a7136126e0234db4b87c8b6 - depends: - - libgcc-ng >=12 - license: MIT - license_family: MIT - size: 77248 - timestamp: 1712692454246 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/giflib-5.2.2-h93a5062_0.conda - sha256: 843b3f364ff844137e37d5c0a181f11f6d51adcedd216f019d074e5aa5d7e09c - md5: 95fa1486c77505330c20f7202492b913 - license: MIT - license_family: MIT - size: 71613 - timestamp: 1712692611426 -- conda: https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.82.2-h4833e2c_1.conda - sha256: 5d8a48abdb1bc2b54f1380d2805cb9cd6cd9609ed0e5c3ed272aef92ab53b190 - md5: e2e44caeaef6e4b107577aa46c95eb12 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libglib 2.82.2 h2ff4ddf_1 - license: LGPL-2.1-or-later 
- size: 115452 - timestamp: 1737037532892 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/glib-tools-2.82.2-h1dc7a0c_1.conda - sha256: b6874fea5674855149f929899126e4298d020945f3d9c6a7955d14ede1855e3a - md5: bdc35b7b75b7cd2bcfd288e399333f29 - depends: - - __osx >=11.0 - - libglib 2.82.2 hdff4504_1 - - libintl >=0.22.5,<1.0a0 - license: LGPL-2.1-or-later - size: 101008 - timestamp: 1737037840312 -- conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - sha256: dc824dc1d0aa358e28da2ecbbb9f03d932d976c8dca11214aa1dcdfcbd054ba2 - md5: ff862eebdfeb2fd048ae9dc92510baca - depends: - - gflags >=2.2.2,<2.3.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: BSD-3-Clause - license_family: BSD - size: 143452 - timestamp: 1718284177264 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda - sha256: 9fc77de416953aa959039db72bc41bfa4600ae3ff84acad04a7d0c1ab9552602 - md5: fef68d0a95aa5b84b5c1a4f6f3bf40e1 - depends: - - __osx >=11.0 - - gflags >=2.2.2,<2.3.0a0 - - libcxx >=16 - license: BSD-3-Clause - license_family: BSD - size: 112215 - timestamp: 1718284365403 -- conda: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.24.1-pyhd8ed1ab_0.conda - sha256: b60cb5d2b11c3fd71e04948c6afd860fa233cd5b8d65478cbb3db67e1f32cfcb - md5: 9e68d88fc56d20ff627e9f4f87e0569b - depends: - - google-auth >=2.14.1,<3.0.dev0 - - googleapis-common-protos >=1.56.2,<2.0.dev0 - - proto-plus >=1.25.0,<2.0.0dev - - protobuf >=3.19.5,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5 - - python >=3.9 - - requests >=2.18.0,<3.0.0.dev0 - license: Apache-2.0 - license_family: APACHE - size: 91181 - timestamp: 1738075026463 -- conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.38.0-pyhd8ed1ab_0.conda - sha256: 0bbff264a2a50af0e2a61a4445c1b2353c6f44d87b83ffb36c95cca5d8fd4aaa - md5: c48abda87ffa7a0cc9f819cb8a384a9a - depends: - - aiohttp >=3.6.2,<4.0.0 - - cachetools >=2.0.0,<6.0 - - cryptography >=38.0.3 - - pyasn1-modules >=0.2.1 - - pyopenssl >=20.0.0 - - python >=3.9 - - pyu2f >=0.1.5 - - requests >=2.20.0,<3.0.0 - - rsa >=3.1.4,<5 - license: Apache-2.0 - license_family: Apache - size: 116328 - timestamp: 1737618370547 -- conda: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.1-pyhd8ed1ab_1.conda - sha256: bc24ca2adc93a827a20e076e6ac0b9c0beaa1eb8d3cd6c5f6cf027f53113a93c - md5: 0fd0e6681f01076477c713ff70dbdf75 - depends: - - click >=6.0.0 - - google-auth >=2.15.0 - - python >=3.9 - - requests-oauthlib >=0.7.0 - license: Apache-2.0 - license_family: Apache - size: 25474 - timestamp: 1734029033935 -- conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_1.conda - sha256: 28af1a03f9debc80d6dddebfd50c45b75579f02095a44b583e5fecea7b347626 - md5: 574cda1b3e3c74cab4632659d39fdf07 - depends: - - google-api-core >=1.31.6,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0 - - google-auth >=1.25.0,<3.0dev - - grpcio >=1.38.0,<2.0.0dev - - python >=3.9 - license: Apache-2.0 - license_family: Apache - size: 28169 - timestamp: 1733699052493 -- conda: https://conda.anaconda.org/conda-forge/noarch/google-cloud-storage-3.0.0-pyhd8ed1ab_0.conda - sha256: 920e3e888966bacc5660b9c13acfaeac9466ac483234c391b2296da053cb5fe8 - md5: 784e98bf458e4693c7ff17967bb72309 - depends: - - google-api-core >=2.15.0,<3.0.0dev - - google-auth >=2.26.1,<3.0dev - - google-cloud-core >=2.3.0,<3.0dev - - google-crc32c >=1.0,<2.0dev - - google-resumable-media >=2.7.2 - - protobuf <6.0.0dev - - 
python >=3.9 - - requests >=2.18.0,<3.0.0dev - license: Apache-2.0 - license_family: APACHE - size: 116555 - timestamp: 1738249730923 -- conda: https://conda.anaconda.org/conda-forge/linux-64/google-crc32c-1.1.2-py312hb42adb9_6.conda - sha256: 155c689b286e968a3ce5681a487d56c7977ab355d53a5eb99868d9482513d096 - md5: 2403f17e36b46604b2b794d50d59b290 - depends: - - __glibc >=2.17,<3.0.a0 - - cffi >=1.0.0 - - libcrc32c >=1.1.2,<1.2.0a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - setuptools - license: Apache-2.0 - license_family: Apache - size: 25313 - timestamp: 1726579630239 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/google-crc32c-1.1.2-py312h1fa1217_6.conda - sha256: 21d8cd51c4aa40f9d7c32c0fe5ebf274e4807cbb4bdfbeb01416d000347dcfdd - md5: 95fd1e032b32f21cf19b6ce968362feb - depends: - - __osx >=11.0 - - cffi >=1.0.0 - - libcrc32c >=1.1.2,<1.2.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - - setuptools - license: Apache-2.0 - license_family: Apache - size: 25106 - timestamp: 1726579766856 -- conda: https://conda.anaconda.org/conda-forge/win-64/google-crc32c-1.1.2-py312he3df1c8_6.conda - sha256: 6d343474e0c2cc1d40861aa411a1fa264ed25f85c9cea1e66f4907bbd8ef3503 - md5: 084fc2e220c458df4bb0a7fb48e1a976 - depends: - - cffi >=1.0.0 - - libcrc32c >=1.1.2,<1.2.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - setuptools - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 28131 - timestamp: 1726580095103 -- conda: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_2.conda - sha256: 53f613ff22203c9d8a81ac9eb2351d0b9dea44e92922e62cdd2d45a676582cc7 - md5: 1792ca195c71d1304b3f7c783a3d7419 - depends: - - google-crc32c >=1.0,<2.0dev - - python >=3.9 - constrains: - - requests >=2.18.0,<3.0.0dev - - aiohttp >=3.6.2,<4.0.0dev - license: Apache-2.0 - license_family: APACHE - size: 46566 - timestamp: 1733728567440 -- conda: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.68.0-pyhd8ed1ab_0.conda - sha256: efaca61923e45849a3d763280305cb3a00a769546ce5df2b1bbe24db40aa23af - md5: 3b8e056a42f71c9398857ecf0b8fbcb6 - depends: - - protobuf >=3.20.2,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5 - - python >=3.9 - license: Apache-2.0 - license_family: APACHE - size: 67755 - timestamp: 1740136133858 -- conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - sha256: 0595b009f20f8f60f13a6398e7cdcbd2acea5f986633adcf85f5a2283c992add - md5: f87c7b7c2cb45f323ffbce941c78ab7c - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: LGPL-2.0-or-later - license_family: LGPL - size: 96855 - timestamp: 1711634169756 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/graphite2-1.3.13-hebf3989_1003.conda - sha256: 2eadafbfc52f5e7df3da3c3b7e5bbe34d970bea1d645ffe60b0b1c3a216657f5 - md5: 339991336eeddb70076d8ca826dac625 - depends: - - libcxx >=16 - license: LGPL-2.0-or-later - license_family: LGPL - size: 79774 - timestamp: 1711634444608 -- conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - sha256: 25040a4f371b9b51663f546bac620122c237fa1d5d32968e21b0751af9b7f56f - md5: 3194499ee7d1a67404a87d0eefdd92c6 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-2.0-or-later - license_family: LGPL - size: 95406 - timestamp: 
1711634622644 -- conda: https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.2.1-h5ae0cbf_1.conda - sha256: e6866409ba03df392ac5ec6f0d6ff9751a685ed917bfbcd8a73f550c5fe83c2b - md5: df7835d2c73cd1889d377cfd6694ada4 - depends: - - __glibc >=2.17,<3.0.a0 - - adwaita-icon-theme - - cairo >=1.18.2,<2.0a0 - - fonts-conda-ecosystem - - gdk-pixbuf >=2.42.12,<3.0a0 - - gtk3 >=3.24.43,<4.0a0 - - gts >=0.7.6,<0.8.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libgd >=2.3.3,<2.4.0a0 - - libglib >=2.82.2,<3.0a0 - - librsvg >=2.58.4,<3.0a0 - - libstdcxx >=13 - - libwebp-base >=1.5.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pango >=1.56.1,<2.0a0 - license: EPL-1.0 - license_family: Other - size: 2413095 - timestamp: 1738602910851 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/graphviz-12.2.1-hff64154_1.conda - sha256: 54e3ce5668b17ea41fed515e57fbd9e805969df468eaf7ff65389d7f53b46d54 - md5: b0b656550a16dfba7efa1479756c5b63 - depends: - - __osx >=11.0 - - adwaita-icon-theme - - cairo >=1.18.2,<2.0a0 - - fonts-conda-ecosystem - - gdk-pixbuf >=2.42.12,<3.0a0 - - gtk3 >=3.24.43,<4.0a0 - - gts >=0.7.6,<0.8.0a0 - - libcxx >=18 - - libexpat >=2.6.4,<3.0a0 - - libgd >=2.3.3,<2.4.0a0 - - libglib >=2.82.2,<3.0a0 - - librsvg >=2.58.4,<3.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pango >=1.56.1,<2.0a0 - license: EPL-1.0 - license_family: Other - size: 2189259 - timestamp: 1738603343083 -- conda: https://conda.anaconda.org/conda-forge/win-64/graphviz-12.2.1-hf40819d_1.conda - sha256: f68aa78450917dd0e3c18340b249bdaed05425e0ab5d64e1ebbe16c1416b807c - md5: 981641a62e6786479ac4d425dc853989 - depends: - - cairo >=1.18.2,<2.0a0 - - getopt-win32 >=0.1,<0.2.0a0 - - gts >=0.7.6,<0.8.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgd >=2.3.3,<2.4.0a0 - - libglib >=2.82.2,<3.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pango >=1.56.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: EPL-1.0 - license_family: Other - size: 1172679 - timestamp: 1738603383430 -- conda: https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.1.1-py312h2ec8cdc_1.conda - sha256: f366599a0ebc1cbbf5a31ba2f107e8270a6720b017284e08290895ce3a1fe76c - md5: 38c2f46af8b45f914f55911006cd7056 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 237610 - timestamp: 1734532954563 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.1.1-py312hd8f9ff3_1.conda - sha256: b723598c11f28d97f8e0219d7e956dbf42bd303558c32bcfeef9b3b4ef7dec0c - md5: a86b17a70c836a899dc5b3098594d343 - depends: - - __osx >=11.0 - - libcxx >=18 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 232021 - timestamp: 1734533015935 -- conda: https://conda.anaconda.org/conda-forge/win-64/greenlet-3.1.1-py312h275cf98_1.conda - sha256: c6b6c56b900407618f9f5e57414c284ddefde2d161fff06e94f1c0df3841ab7a - md5: db1ff6ac27d2d7bea014a15713ac622a - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 221637 - timestamp: 1734533240587 -- conda: https://conda.anaconda.org/conda-forge/linux-64/grpcio-1.67.1-py312hacea422_1.conda - sha256: 1fd739d765ab628bd1c7629200a8af37075102c46ef3a79800b7b3ae0976fed8 - md5: d4bf39ccd6b59d7a8b37f674eff22d3a - depends: - - 
__glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libgrpc 1.67.1 h25350d4_1 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: APACHE - size: 900397 - timestamp: 1735585168178 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/grpcio-1.67.1-py312he4e58e5_1.conda - sha256: c74ec354382fb64113109e37d25edd108a32124d9b20e9bcdeacf4d8d5bb795d - md5: 54af0bfb0365a4168e6af459641864bc - depends: - - __osx >=11.0 - - libcxx >=18 - - libgrpc 1.67.1 h0a426d6_1 - - libzlib >=1.3.1,<2.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: APACHE - size: 815408 - timestamp: 1735585327211 -- conda: https://conda.anaconda.org/conda-forge/win-64/grpcio-1.67.1-py312h5b982ce_1.conda - sha256: 78667b06d24868c77777ad43c480254df1d4748bb2e9494a2d0e5f46fb3db5b2 - md5: 02fd018f386b90c9becda1f45aac9527 - depends: - - libgrpc 1.67.1 h0ac93cb_1 - - libzlib >=1.3.1,<2.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: APACHE - size: 725272 - timestamp: 1735634401600 -- conda: https://conda.anaconda.org/conda-forge/linux-64/gsw-3.6.19-py312hc0a28a1_1.conda - sha256: 412ce0b648a1000152eddb8c9fc07433e7d690d806d1a5e139f08d57c941e84a - md5: 139e155f54733b36f621db9c2cf33ef7 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 2115465 - timestamp: 1726898005861 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gsw-3.6.19-py312h755e627_1.conda - sha256: 2909e6f29918166baf49e9242114c9a60636873a8387cc7b426f5a7b04bd3305 - md5: 1a5355e9657b669d0e1739e9c4aec0fa - depends: - - __osx >=11.0 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 2146718 - timestamp: 1726898020239 -- conda: https://conda.anaconda.org/conda-forge/win-64/gsw-3.6.19-py312h1a27103_1.conda - sha256: e6eaee104164be25e3c73e857ddc0f8bcda956317e90aec3587881d4e7ed07bc - md5: a422b7a91b9b29811d4fefd1429c04e0 - depends: - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 2116009 - timestamp: 1726898369345 -- conda: https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h021d004_3.conda - sha256: c8f939497b43d90fa2ac9d99b44ed25759a798c305237300508e526de5e78de7 - md5: 56c679bcdb8c1d824e927088725862cb - depends: - - __glibc >=2.17,<3.0.a0 - - at-spi2-atk >=2.38.0,<3.0a0 - - atk-1.0 >=2.38.0 - - cairo >=1.18.2,<2.0a0 - - epoxy >=1.5.10,<1.6.0a0 - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - fribidi >=1.0.10,<2.0a0 - - gdk-pixbuf >=2.42.12,<3.0a0 - - glib-tools - - harfbuzz >=10.2.0,<11.0a0 - - hicolor-icon-theme - - libcups >=2.3.3,<2.4.0a0 - - libcups >=2.3.3,<3.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libglib >=2.82.2,<3.0a0 - - liblzma >=5.6.3,<6.0a0 - - libxkbcommon >=1.7.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pango >=1.56.0,<2.0a0 - - wayland >=1.23.1,<2.0a0 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxcomposite >=0.4.6,<1.0a0 - - xorg-libxcursor >=1.2.3,<2.0a0 - - xorg-libxdamage >=1.1.6,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - 
xorg-libxfixes >=6.0.1,<7.0a0 - - xorg-libxi >=1.8.2,<2.0a0 - - xorg-libxinerama >=1.1.5,<1.2.0a0 - - xorg-libxrandr >=1.5.4,<2.0a0 - - xorg-libxrender >=0.9.12,<0.10.0a0 - license: LGPL-2.0-or-later - license_family: LGPL - size: 5565328 - timestamp: 1737497685605 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gtk3-3.24.43-he7bb075_3.conda - sha256: 5f52152c0af1953c220e9faf8132f010c4eb85a749319889abc2e17e6c430651 - md5: bf683088766bb687f27d39f5e128d2b0 - depends: - - __osx >=11.0 - - atk-1.0 >=2.38.0 - - cairo >=1.18.2,<2.0a0 - - epoxy >=1.5.10,<1.6.0a0 - - fribidi >=1.0.10,<2.0a0 - - gdk-pixbuf >=2.42.12,<3.0a0 - - glib-tools - - harfbuzz >=10.2.0,<11.0a0 - - hicolor-icon-theme - - libasprintf >=0.22.5,<1.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgettextpo >=0.22.5,<1.0a0 - - libglib >=2.82.2,<3.0a0 - - libintl >=0.22.5,<1.0a0 - - liblzma >=5.6.3,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - - pango >=1.56.0,<2.0a0 - license: LGPL-2.0-or-later - license_family: LGPL - size: 8923896 - timestamp: 1737499184255 -- conda: https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda - sha256: b5cd16262fefb836f69dc26d879b6508d29f8a5c5948a966c47fe99e2e19c99b - md5: 4d8df0b0db060d33c9a702ada998a8fe - depends: - - libgcc-ng >=12 - - libglib >=2.76.3,<3.0a0 - - libstdcxx-ng >=12 - license: LGPL-2.0-or-later - license_family: LGPL - size: 318312 - timestamp: 1686545244763 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gts-0.7.6-he42f4ea_4.conda - sha256: e0f8c7bc1b9ea62ded78ffa848e37771eeaaaf55b3146580513c7266862043ba - md5: 21b4dd3098f63a74cf2aa9159cbef57d - depends: - - libcxx >=15.0.7 - - libglib >=2.76.3,<3.0a0 - license: LGPL-2.0-or-later - license_family: LGPL - size: 304331 - timestamp: 1686545503242 -- conda: https://conda.anaconda.org/conda-forge/win-64/gts-0.7.6-h6b5321d_4.conda - sha256: b79755d2f9fc2113b6949bfc170c067902bc776e2c20da26e746e780f4f5a2d4 - md5: a41f14768d5e377426ad60c613f2923b - depends: - - libglib >=2.76.3,<3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-2.0-or-later - license_family: LGPL - size: 188688 - timestamp: 1686545648050 -- conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda - sha256: 622516185a7c740d5c7f27016d0c15b45782c1501e5611deec63fd70344ce7c8 - md5: 7ee49e89531c0dcbba9466f6d115d585 - depends: - - python >=3.9 - - typing_extensions - license: MIT - license_family: MIT - size: 51846 - timestamp: 1733327599467 -- conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda - sha256: 0aa1cdc67a9fe75ea95b5644b734a756200d6ec9d0dff66530aec3d1c1e9df75 - md5: b4754fb1bdcb70c8fd54f918301582c6 - depends: - - hpack >=4.1,<5 - - hyperframe >=6.1,<7 - - python >=3.9 - license: MIT - license_family: MIT - size: 53888 - timestamp: 1738578623567 -- conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.5.0-pyhd8ed1ab_0.conda - sha256: 2dd1aa54eb0b0f0e15db77d6dd3f16e532903f99a0375283142dd3df4d990e46 - md5: af8ab1ff0815078c40ba96f47f48f353 - depends: - - h5py - - packaging - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 46375 - timestamp: 1737887368948 -- conda: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.12.1-nompi_py312hd203070_103.conda - sha256: bc385c98d6d2f233ea472b0fb50e9ca796d926f1c15d12bb07fed2cb40905dd4 - md5: 9bd82d55b98c65f49c44201339245cde - depends: - - __glibc >=2.17,<3.0.a0 - - cached-property - - hdf5 >=1.14.4,<1.14.5.0a0 - - libgcc >=13 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - 
python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 1384262 - timestamp: 1734545269624 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.12.1-nompi_py312h34530d4_103.conda - sha256: 8d1441742c14e7e989e3845d9251717882d8ae8c49769830fa3e12b94170fe9a - md5: 343b1fbff8c9cca25ccfd7a61ed44f99 - depends: - - __osx >=11.0 - - cached-property - - hdf5 >=1.14.4,<1.14.5.0a0 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 1187566 - timestamp: 1734546853116 -- conda: https://conda.anaconda.org/conda-forge/win-64/h5py-3.12.1-nompi_py312h0db4ba1_103.conda - sha256: 3968c4f7abff11264398d06c17d79549d5d9c6f5ca82c5c28f04f6eb36155d58 - md5: 2b2b69668e15d4dec167f504e96706c5 - depends: - - cached-property - - hdf5 >=1.14.4,<1.14.5.0a0 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 1096148 - timestamp: 1734547162668 -- conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.2.0-h4bba637_0.conda - sha256: 94426eca8c60b43f57beb3338d3298dda09452c7a42314bbbb4ebfa552542a84 - md5: 9e38e86167e8b1ea0094747d12944ce4 - depends: - - __glibc >=2.17,<3.0.a0 - - cairo >=1.18.2,<2.0a0 - - freetype >=2.12.1,<3.0a0 - - graphite2 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libglib >=2.82.2,<3.0a0 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - license: MIT - license_family: MIT - size: 1646987 - timestamp: 1736702906600 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/harfbuzz-10.2.0-ha0dd535_0.conda - sha256: e9d148870adbe8efd9913fb036461d337609359b5d4474d0963d8ebe6b9789b2 - md5: 30377b8ff7d4e8a2c08be6957999c100 - depends: - - __osx >=11.0 - - cairo >=1.18.2,<2.0a0 - - freetype >=2.12.1,<3.0a0 - - graphite2 - - icu >=75.1,<76.0a0 - - libcxx >=18 - - libexpat >=2.6.4,<3.0a0 - - libglib >=2.82.2,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - license: MIT - license_family: MIT - size: 1473375 - timestamp: 1736703265901 -- conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-10.2.0-h885c0d4_0.conda - sha256: d366a5a6da75254e912f90a342af909e8eeeb306613e09f164bc30139b73c5e5 - md5: faaf912396cba72bd54c8b3772944ab7 - depends: - - cairo >=1.18.2,<2.0a0 - - freetype >=2.12.1,<3.0a0 - - graphite2 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libglib >=2.82.2,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 1103154 - timestamp: 1736704125064 -- conda: https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda - sha256: 0d09b6dc1ce5c4005ae1c6a19dc10767932ef9a5e9c755cfdbb5189ac8fb0684 - md5: bd77f8da987968ec3927990495dc22e4 - depends: - - libgcc-ng >=12 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 756742 - timestamp: 1695661547874 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf4-4.2.15-h2ee6834_7.conda - sha256: c3b01e3c3fe4ca1c4d28c287eaa5168a4f2fd3ffd76690082ac919244c22fa90 - md5: ff5d749fd711dc7759e127db38005924 - depends: - - libcxx >=15.0.7 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 762257 - timestamp: 1695661864625 -- conda: 
https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h5557f11_7.conda - sha256: 52fa5dde69758c19c69ab68a3d7ebfb2c9042e3a55d405c29a59d3b0584fd790 - md5: 84344a916a73727c1326841007b52ca8 - depends: - - libjpeg-turbo >=3.0.0,<4.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 779637 - timestamp: 1695662145568 -- conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda - sha256: 93d2bfc672f3ee0988d277ce463330a467f3686d3f7ee37812a3d8ca11776d77 - md5: d76fff0092b6389a12134ddebc0929bd - depends: - - __glibc >=2.17,<3.0.a0 - - libaec >=1.1.3,<2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libgcc >=13 - - libgfortran - - libgfortran5 >=13.3.0 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 3950601 - timestamp: 1733003331788 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.4-nompi_ha698983_105.conda - sha256: 1746cd2465832bf23d1e91b680935655dea9053d51e526deea86b0afb0b9d6a3 - md5: 7e85ea8b6a35b163a516e8c483960600 - depends: - - __osx >=11.0 - - libaec >=1.1.3,<2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libcxx >=18 - - libgfortran 5.* - - libgfortran5 >=13.2.0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 3485821 - timestamp: 1733002735281 -- conda: https://conda.anaconda.org/conda-forge/win-64/hdf5-1.14.4-nompi_hd5d9e70_105.conda - sha256: e8ced65c604a3b9e4803758a25149d71d8096f186fe876817a0d1d97190550c0 - md5: 4381be33460283890c34341ecfa42d97 - depends: - - libaec >=1.1.3,<2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 2048450 - timestamp: 1733003052575 -- conda: https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2 - sha256: 336f29ceea9594f15cc8ec4c45fdc29e10796573c697ee0d57ebb7edd7e92043 - md5: bbf6f174dcd3254e19a2f5d2295ce808 - license: GPL-2.0-or-later - license_family: GPL - size: 13841 - timestamp: 1605162808667 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/hicolor-icon-theme-0.17-hce30654_2.tar.bz2 - sha256: 286e33fb452f61133a3a61d002890235d1d1378554218ab063d6870416440281 - md5: 237b05b7eb284d7eebc3c5d93f5e4bca - license: GPL-2.0-or-later - license_family: GPL - size: 13800 - timestamp: 1611053664863 -- conda: https://conda.anaconda.org/conda-forge/noarch/holoviews-1.20.1-pyhd8ed1ab_0.conda - sha256: bf28b371fad75d6627b2fb4a37f6b5036e2197753355f40c6c5d450a32f8f680 - md5: 995a8f8782e4b572141e77d0c7e0ff1e - depends: - - bokeh >=3.1 - - colorcet - - matplotlib-base >=3.0 - - numpy >=1.21 - - packaging - - pandas >=1.3 - - panel >=1.0 - - param >=2.0,<3.0 - - python >=3.9 - - pyviz_comms >=2.1 - license: BSD-3-Clause - license_family: BSD - size: 3996135 - timestamp: 1739986603096 -- conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda - sha256: 6ad78a180576c706aabeb5b4c8ceb97c0cb25f1e112d76495bff23e3779948ba - md5: 0a802cb9888dd14eeefc611f05c40b6e - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 30731 - timestamp: 1737618390337 -- conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda - sha256: c84d012a245171f3ed666a8bf9319580c269b7843ffa79f26468842da3abd5df - md5: 2ca8e6dbc86525c8b95e3c0ffa26442e - 
depends: - - python >=3.8 - - h11 >=0.13,<0.15 - - h2 >=3,<5 - - sniffio 1.* - - anyio >=3.0,<5.0 - - certifi - license: BSD-3-Clause - license_family: BSD - size: 48959 - timestamp: 1731707562362 -- conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda - sha256: cd0f1de3697b252df95f98383e9edb1d00386bfdd03fdf607fa42fe5fcb09950 - md5: d6989ead454181f4f9bc987d3dc4e285 - depends: - - anyio - - certifi - - httpcore 1.* - - idna - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 63082 - timestamp: 1733663449209 -- conda: https://conda.anaconda.org/conda-forge/noarch/hvplot-0.11.2-pyhd8ed1ab_0.conda - sha256: cc4367490e9f159d4fc91a2aecb12e37621fe38c0a9c244d3086f11a35a3186b - md5: a8143fe7133f43b62a96a77455c30ffe - depends: - - bokeh >=3.1 - - colorcet >=2 - - holoviews >=1.19.0 - - numpy >=1.21 - - packaging - - pandas >=1.3 - - panel >=1.0 - - param >=1.12.0,<3.0 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 247667 - timestamp: 1734388824728 -- conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - sha256: 77af6f5fe8b62ca07d09ac60127a30d9069fdc3c68d6b256754d0ffb1f7779f8 - md5: 8e6923fc12f1fe8f8c4e5c9f343256ac - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 17397 - timestamp: 1737618427549 -- conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda - sha256: 71e750d509f5fa3421087ba88ef9a7b9be11c53174af3aa4d06aff4c18b38e8e - md5: 8b189310083baabfb622af68fd9d3ae3 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: MIT - license_family: MIT - size: 12129203 - timestamp: 1720853576813 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-75.1-hfee45f7_0.conda - sha256: 9ba12c93406f3df5ab0a43db8a4b4ef67a5871dfd401010fbe29b218b2cbe620 - md5: 5eb22c1d7b3fc4abb50d92d621583137 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 11857802 - timestamp: 1720853997952 -- conda: https://conda.anaconda.org/conda-forge/win-64/icu-75.1-he0c23c2_0.conda - sha256: 1d04369a1860a1e9e371b9fc82dd0092b616adcf057d6c88371856669280e920 - md5: 8579b6bb8d18be7c0b27fb08adeeeb40 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 14544252 - timestamp: 1720853966338 -- conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.7-pyhd8ed1ab_0.conda - sha256: 27c1c5c1f3090ce826a08d50e70c53c77a8b6b39561b2967388f014fa2d52297 - md5: 1027da8216437467b4588fa79e143d89 - depends: - - python >=3.9 - - ukkonen - license: MIT - license_family: MIT - size: 78597 - timestamp: 1739058677860 -- conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - sha256: d7a472c9fd479e2e8dcb83fb8d433fce971ea369d704ece380e876f9c3494e87 - md5: 39a4f67be3286c86d696df570b1201b7 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 49765 - timestamp: 1733211921194 -- conda: https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2 - sha256: c2bfd7043e0c4c12d8b5593de666c1e81d67b83c474a0a79282cc5c4ef845460 - md5: 7de5386c8fea29e76b303f37dde4c352 - depends: - - python >=3.4 - license: MIT - license_family: MIT - size: 10164 - timestamp: 1656939625410 -- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - sha256: 598951ebdb23e25e4cec4bbff0ae369cec65ead80b50bc08b441d8e54de5cf03 - md5: f4b39bf00c69f56ac01e020ebfac066c - depends: - - 
python >=3.9 - - zipp >=0.5 - license: Apache-2.0 - license_family: APACHE - size: 29141 - timestamp: 1737420302391 -- conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda - sha256: acc1d991837c0afb67c75b77fdc72b4bf022aac71fedd8b9ea45918ac9b08a80 - md5: c85c76dc67d75619a92f51dfbce06992 - depends: - - python >=3.9 - - zipp >=3.1.0 - constrains: - - importlib-resources >=6.5.2,<6.5.3.0a0 - license: Apache-2.0 - license_family: APACHE - size: 33781 - timestamp: 1736252433366 -- conda: https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - sha256: 0fd2b0b84c854029041b0ede8f4c2369242ee92acc0092f8407b1fe9238a8209 - md5: 2d89243bfb53652c182a7c73182cce4f - license: LicenseRef-IntelSimplifiedSoftwareOct2022 - license_family: Proprietary - size: 1852356 - timestamp: 1723739573141 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda - sha256: 33cfd339bb4efac56edf93474b37ddc049e08b1b4930cf036c893cc1f5a1f32a - md5: b40131ab6a36ac2c09b7c57d4d3fbf99 - depends: - - __linux - - comm >=0.1.1 - - debugpy >=1.6.5 - - ipython >=7.23.1 - - jupyter_client >=6.1.12 - - jupyter_core >=4.12,!=5.0.* - - matplotlib-inline >=0.1 - - nest-asyncio - - packaging - - psutil - - python >=3.8 - - pyzmq >=24 - - tornado >=6.1 - - traitlets >=5.4.0 - license: BSD-3-Clause - license_family: BSD - size: 119084 - timestamp: 1719845605084 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda - sha256: dc569094125127c0078aa536f78733f383dd7e09507277ef8bcd1789786e7086 - md5: 18df5fc4944a679e085e0e8f31775fc8 - depends: - - __win - - comm >=0.1.1 - - debugpy >=1.6.5 - - ipython >=7.23.1 - - jupyter_client >=6.1.12 - - jupyter_core >=4.12,!=5.0.* - - matplotlib-inline >=0.1 - - nest-asyncio - - packaging - - psutil - - python >=3.8 - - pyzmq >=24 - - tornado >=6.1 - - traitlets >=5.4.0 - license: BSD-3-Clause - license_family: BSD - size: 119853 - timestamp: 1719845858082 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda - sha256: 072534d4d379225b2c3a4e38bc7730b65ae171ac7f0c2d401141043336e97980 - md5: 9eb15d654daa0ef5a98802f586bb4ffc - depends: - - __osx - - appnope - - comm >=0.1.1 - - debugpy >=1.6.5 - - ipython >=7.23.1 - - jupyter_client >=6.1.12 - - jupyter_core >=4.12,!=5.0.* - - matplotlib-inline >=0.1 - - nest-asyncio - - packaging - - psutil - - python >=3.8 - - pyzmq >=24 - - tornado >=6.1 - - traitlets >=5.4.0 - license: BSD-3-Clause - license_family: BSD - size: 119568 - timestamp: 1719845667420 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.32.0-pyh907856f_0.conda - sha256: b1b940cfe85d5f0aaed83ef8c9f07ee80daa68acb05feeb5142d620472b01e0d - md5: 9de86472b8f207fb098c69daaad50e67 - depends: - - __unix - - pexpect >4.3 - - python >=3.10 - - decorator - - exceptiongroup - - jedi >=0.16 - - matplotlib-inline - - pickleshare - - prompt-toolkit >=3.0.41,<3.1.0 - - pygments >=2.4.0 - - stack_data - - traitlets >=5.13.0 - - typing_extensions >=4.6 - - python - license: BSD-3-Clause - license_family: BSD - size: 636676 - timestamp: 1738421264236 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.32.0-pyh9ab4c32_0.conda - sha256: 970b10688d376dd7a9963478e78f80d62708df73b368fed9295ef100a99b6b04 - md5: e34c8a3475d6e2743f4f5093a39004fd - depends: - - __win - - colorama - - python >=3.10 - - decorator - - exceptiongroup - - jedi >=0.16 - - matplotlib-inline - - pickleshare - - prompt-toolkit >=3.0.41,<3.1.0 - 
- pygments >=2.4.0 - - stack_data - - traitlets >=5.13.0 - - typing_extensions >=4.6 - - python - license: BSD-3-Clause - license_family: BSD - size: 636000 - timestamp: 1738421304330 -- conda: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_1.conda - sha256: f419657566e3d9bea85b288a0ce3a8e42d76cd82ac1697c6917891df3ae149ab - md5: bb19ad65196475ab6d0bb3532d7f8d96 - depends: - - comm >=0.1.3 - - ipython >=6.1.0 - - jupyterlab_widgets >=3.0.13,<3.1.0 - - python >=3.9 - - traitlets >=4.3.1 - - widgetsnbextension >=4.0.13,<4.1.0 - license: BSD-3-Clause - license_family: BSD - size: 113982 - timestamp: 1733493669268 -- conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda - sha256: 08e838d29c134a7684bca0468401d26840f41c92267c4126d7b43a6b533b0aed - md5: 0b0154421989637d424ccf0f104be51a - depends: - - arrow >=0.15.0 - - python >=3.9 - license: MIT - license_family: MIT - size: 19832 - timestamp: 1733493720346 -- conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda - sha256: 92c4d217e2dc68983f724aa983cca5464dcb929c566627b26a2511159667dba8 - md5: a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 - depends: - - parso >=0.8.3,<0.9.0 - - python >=3.9 - license: Apache-2.0 AND MIT - size: 843646 - timestamp: 1733300981994 -- conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda - sha256: 98977694b9ecaa3218662f843425f39501f81973c450f995eec68f1803ed71c3 - md5: 2752a6ed44105bfb18c9bef1177d9dcd - depends: - - markupsafe >=2.0 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 112561 - timestamp: 1734824044952 -- conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_1.conda - sha256: 3d2f20ee7fd731e3ff55c189db9c43231bc8bde957875817a609c227bcb295c6 - md5: 972bdca8f30147135f951847b30399ea - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 23708 - timestamp: 1733229244590 -- conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda - sha256: 09e706cb388d3ea977fabcee8e28384bdaad8ce1fc49340df5f868a2bd95a7da - md5: 38f5dbc9ac808e31c00650f7be1db93f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 82709 - timestamp: 1726487116178 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/json-c-0.18-he4178ee_0.conda - sha256: 73179a1cd0b45c09d4f631cb359d9e755e6e573c5d908df42006728e0bf8297c - md5: 94f14ef6157687c30feb44e1abecd577 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 73715 - timestamp: 1726487214495 -- conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda - sha256: 61bca2dac194c44603446944745566d7b4e55407280f6f6cea8bbe4de26b558f - md5: cd170f82d8e5b355dfdea6adab23e4af - depends: - - python >=3.9 - license: Apache-2.0 - license_family: APACHE - size: 31573 - timestamp: 1733272196759 -- conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - sha256: 76ccb7bffc7761d1d3133ffbe1f7f1710a0f0d9aaa9f7ea522652e799f3601f4 - md5: 6b51f7459ea4073eeb5057207e2e1e3d - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 17277 - timestamp: 1725303032027 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/jsonpointer-3.0.0-py312h81bd7bf_1.conda - sha256: f6fb3734e967d1cd0cde32844ee952809f6c0a49895da7ec1c8cfdf97739b947 - md5: 80f403c03290e1662be03e026fb5f8ab - depends: - - python >=3.12,<3.13.0a0 - - python 
>=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 17865 - timestamp: 1725303130815 -- conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda - sha256: 6865b97780e795337f65592582aee6f25e5b96214c64ffd3f8cdf580fd64ba22 - md5: e3ceda014d8461a11ca8552830a978f9 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 42235 - timestamp: 1725303419414 -- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda - sha256: be992a99e589146f229c58fe5083e0b60551d774511c494f91fe011931bd7893 - md5: a3cead9264b331b32fe8f0aabc967522 - depends: - - attrs >=22.2.0 - - importlib_resources >=1.4.0 - - jsonschema-specifications >=2023.03.6 - - pkgutil-resolve-name >=1.3.10 - - python >=3.9 - - referencing >=0.28.4 - - rpds-py >=0.7.1 - license: MIT - license_family: MIT - size: 74256 - timestamp: 1733472818764 -- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - sha256: 37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 - md5: 3b519bc21bc80e60b456f1e62962a766 - depends: - - python >=3.9 - - referencing >=0.31.0 - license: MIT - license_family: MIT - size: 16170 - timestamp: 1733493624968 -- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda - sha256: 6e0184530011961a0802fda100ecdfd4b0eca634ed94c37e553b72e21c26627d - md5: a5b1a8065857cc4bd8b7a38d063bb728 - depends: - - fqdn - - idna - - isoduration - - jsonpointer >1.13 - - jsonschema >=4.23.0,<4.23.1.0a0 - - rfc3339-validator - - rfc3986-validator >0.1.0 - - uri-template - - webcolors >=24.6.0 - license: MIT - license_family: MIT - size: 7135 - timestamp: 1733472820035 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-book-1.0.3-pyhd8ed1ab_1.conda - sha256: f028c32b5d97d24df44b1a41f771a9932e07815c60c02e24acd9bd2eca31097f - md5: 739a29ac73026e68405153b50d0c60c2 - depends: - - click >=7.1,<9 - - importlib-metadata >=4.8.3 - - jinja2 - - jsonschema <5 - - linkify-it-py >=2,<3 - - myst-nb >=1,<3 - - myst-parser >=1,<3 - - python >=3.9 - - pyyaml - - sphinx >=5,<8 - - sphinx-book-theme >=1.1.0,<2 - - sphinx-comments - - sphinx-copybutton - - sphinx-design >=0.5,<1 - - sphinx-external-toc >=1.0.1,<2 - - sphinx-jupyterbook-latex >=1,<2 - - sphinx-multitoc-numbering >=0.1.3,<1 - - sphinx-thebe >=0.3.1,<1 - - sphinx-togglebutton - - sphinxcontrib-bibtex >=2.5.0,<3 - license: BSD-3-Clause - license_family: BSD - size: 44976 - timestamp: 1735574196497 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.1-pyhff2d567_0.conda - sha256: 054d397dd45ed08bffb0976702e553dfb0d0b0a477da9cff36e2ea702e928f48 - md5: b0ee650829b8974202a7abe7f8b81e5a - depends: - - attrs - - click - - importlib-metadata - - nbclient >=0.2 - - nbformat - - python >=3.9 - - pyyaml - - sqlalchemy >=1.3.12,<3 - - tabulate - license: MIT - license_family: MIT - size: 31236 - timestamp: 1731777189586 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda - sha256: 1565c8b1423a37fca00fe0ab2a17cd8992c2ecf23e7867a1c9f6f86a9831c196 - md5: 0b4c3908e5a38ea22ebb98ee5888c768 - depends: - - importlib-metadata >=4.8.3 - - jupyter_server >=1.1.2 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 55221 - timestamp: 1733493006611 -- conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter-resource-usage-1.1.1-pyhd8ed1ab_0.conda - sha256: 93df25a9cda37db4c62c3aa2da070af4d721dc02508b0011c41a61ff9c7394ed - md5: 2d7371814049db8228fb4d945d2b455e - depends: - - jupyter_server >=2.0.0,<3 - - psutil >=5.6.0,<6 - - python >=3.9 - - pyzmq >=19 - license: BSD-3-Clause - license_family: BSD - size: 42515 - timestamp: 1739452443904 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-server-proxy-4.4.0-pyhd8ed1ab_1.conda - sha256: 51006cf07d38c410890a8ae42d5e85fc806c3fb0740a7d7749afe48bff0e5580 - md5: 4696e23d86b32957ff469870e74c8c97 - depends: - - aiohttp - - importlib-metadata >=4.8.3 - - jupyter_server >=1.24.0 - - python >=3.9 - - simpervisor >=1.0.0 - - tornado >=6.1.0 - - traitlets >=5.1.1 - license: BSD-3-Clause - license_family: BSD - size: 37140 - timestamp: 1734379307021 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_bokeh-4.0.5-pyhd8ed1ab_1.conda - sha256: 6a0dc91d7a93f3afba5f1ad224603986fc80df1e41c87ba8c52fb6c1dd96e958 - md5: ffaa7f642a00edf0be1bb3ff7306cc52 - depends: - - bokeh 3.* - - ipywidgets 8.* - - python >=3.9 - constrains: - - jupyterlab 4.* - license: BSD-3-Clause - license_family: BSD - size: 76539 - timestamp: 1734648716202 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda - sha256: 19d8bd5bb2fde910ec59e081eeb59529491995ce0d653a5209366611023a0b3a - md5: 4ebae00eae9705b0c3d6d1018a81d047 - depends: - - importlib-metadata >=4.8.3 - - jupyter_core >=4.12,!=5.0.* - - python >=3.9 - - python-dateutil >=2.8.2 - - pyzmq >=23.0 - - tornado >=6.2 - - traitlets >=5.3 - license: BSD-3-Clause - license_family: BSD - size: 106342 - timestamp: 1733441040958 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - sha256: 732b1e8536bc22a5a174baa79842d79db2f4956d90293dd82dc1b3f6099bcccd - md5: 0a2980dada0dd7fd0998f0342308b1b1 - depends: - - __unix - - platformdirs >=2.5 - - python >=3.8 - - traitlets >=5.3 - license: BSD-3-Clause - license_family: BSD - size: 57671 - timestamp: 1727163547058 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda - sha256: 7c903b2d62414c3e8da1f78db21f45b98de387aae195f8ca959794113ba4b3fd - md5: 46d87d1c0ea5da0aae36f77fa406e20d - depends: - - __win - - cpython - - platformdirs >=2.5 - - python >=3.8 - - pywin32 >=300 - - traitlets >=5.3 - license: BSD-3-Clause - license_family: BSD - size: 58269 - timestamp: 1727164026641 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda - sha256: 37e6ac3ccf7afcc730c3b93cb91a13b9ae827fd306f35dd28f958a74a14878b5 - md5: f56000b36f09ab7533877e695e4e8cb0 - depends: - - jsonschema-with-format-nongpl >=4.18.0 - - packaging - - python >=3.9 - - python-json-logger >=2.0.4 - - pyyaml >=5.3 - - referencing - - rfc3339-validator - - rfc3986-validator >=0.1.1 - - traitlets >=5.3 - - python - license: BSD-3-Clause - license_family: BSD - size: 23647 - timestamp: 1738765986736 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda - sha256: be5f9774065d94c4a988f53812b83b67618bec33fcaaa005a98067d506613f8a - md5: 6ba8c206b5c6f52b82435056cf74ee46 - depends: - - anyio >=3.1.0 - - argon2-cffi >=21.1 - - jinja2 >=3.0.3 - - jupyter_client >=7.4.4 - - jupyter_core >=4.12,!=5.0.* - - jupyter_events >=0.11.0 - - jupyter_server_terminals >=0.4.4 - - nbconvert-core >=6.4.4 - - nbformat >=5.3.0 - - overrides >=5.0 - - packaging >=22.0 - - 
prometheus_client >=0.9 - - python >=3.9 - - pyzmq >=24 - - send2trash >=1.8.2 - - terminado >=0.8.3 - - tornado >=6.2.0 - - traitlets >=5.6.0 - - websocket-client >=1.7 - license: BSD-3-Clause - license_family: BSD - size: 327747 - timestamp: 1734702771032 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda - sha256: 0890fc79422191bc29edf17d7b42cff44ba254aa225d31eb30819f8772b775b8 - md5: 2d983ff1b82a1ccb6f2e9d8784bdd6bd - depends: - - python >=3.9 - - terminado >=0.8.3 - license: BSD-3-Clause - license_family: BSD - size: 19711 - timestamp: 1733428049134 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.5-pyhd8ed1ab_0.conda - sha256: 9d033314060993522e1ad999ded9da316a8b928d11b7a58c254597382239a72e - md5: ec1f95d39ec862a7a87de0662a98ce3e - depends: - - async-lru >=1.0.0 - - httpx >=0.25.0 - - importlib-metadata >=4.8.3 - - ipykernel >=6.5.0 - - jinja2 >=3.0.3 - - jupyter-lsp >=2.0.0 - - jupyter_core - - jupyter_server >=2.4.0,<3 - - jupyterlab_server >=2.27.1,<3 - - notebook-shim >=0.2 - - packaging - - python >=3.9 - - setuptools >=40.8.0 - - tomli >=1.2.2 - - tornado >=6.2.0 - - traitlets - license: BSD-3-Clause - license_family: BSD - size: 7614652 - timestamp: 1738184813883 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-myst-2.4.2-pyhd8ed1ab_1.conda - sha256: 9ac0455d6ed74d317e7394dc89158d44d99089f35a144eee8964015903699475 - md5: 1a76cdfb2080ae8007cd58b79707fb6b - depends: - - jupyter_server >=2.0.1,<3 - - python >=3.9 - constrains: - - jupyterlab >=4,<5 - license: BSD-3-Clause - license_family: BSD - size: 2125353 - timestamp: 1736260161474 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda - sha256: dc24b900742fdaf1e077d9a3458fd865711de80bca95fe3c6d46610c532c6ef0 - md5: fd312693df06da3578383232528c468d - depends: - - pygments >=2.4.1,<3 - - python >=3.9 - constrains: - - jupyterlab >=4.0.8,<5.0.0 - license: BSD-3-Clause - license_family: BSD - size: 18711 - timestamp: 1733328194037 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda - sha256: d03d0b7e23fa56d322993bc9786b3a43b88ccc26e58b77c756619a921ab30e86 - md5: 9dc4b2b0f41f0de41d27f3293e319357 - depends: - - babel >=2.10 - - importlib-metadata >=4.8.3 - - jinja2 >=3.0.3 - - json5 >=0.9.0 - - jsonschema >=4.18 - - jupyter_server >=1.21,<3 - - packaging >=21.3 - - python >=3.9 - - requests >=2.31 - constrains: - - openapi-core >=0.18.0,<0.19.0 - license: BSD-3-Clause - license_family: BSD - size: 49449 - timestamp: 1733599666357 -- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_1.conda - sha256: 206489e417408d2ffc2a7b245008b4735a8beb59df6c9109d4f77e7bc5969d5d - md5: b26e487434032d7f486277beb0cead3a - depends: - - python >=3.9 - constrains: - - jupyterlab >=3,<5 - license: BSD-3-Clause - license_family: BSD - size: 186358 - timestamp: 1733428156991 -- conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - sha256: 150c05a6e538610ca7c43beb3a40d65c90537497a4f6a5f4d15ec0451b6f5ebb - md5: 30186d27e2c9fa62b45fb1476b7200e3 - depends: - - libgcc-ng >=10.3.0 - license: LGPL-2.1-or-later - size: 117831 - timestamp: 1646151697040 -- conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.8-py312h84d6215_0.conda - sha256: 3ce99d721c1543f6f8f5155e53eef11be47b2f5942a8d1060de6854f9d51f246 - md5: 6713467dc95509683bfa3aca08524e8a - depends: - - __glibc >=2.17,<3.0.a0 - - 
libgcc >=13 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 71649 - timestamp: 1736908364705 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.4.8-py312h2c4a281_0.conda - sha256: 01366fa9d65bedb4069266d08c8a7a2ebbe6f25cedf60eebeeb701067f162f68 - md5: a94f3ac940c391e7716b6ffd332d7463 - depends: - - __osx >=11.0 - - libcxx >=18 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 61368 - timestamp: 1736908431125 -- conda: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.8-py312hc790b64_0.conda - sha256: 2cce3d9bcc95c68069e3032cda25b732f69be7b025f94685ee4783d7b54588dd - md5: 7ef59428fc0dcb8a78a5e23dc4f50aa3 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 71318 - timestamp: 1736908754898 -- conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - sha256: 99df692f7a8a5c27cd14b5fb1374ee55e756631b9c3d659ed3ee60830249b238 - md5: 3f43953b7d3fb3aaa1d0d0723d91e368 - depends: - - keyutils >=1.6.1,<2.0a0 - - libedit >=3.1.20191231,<3.2.0a0 - - libedit >=3.1.20191231,<4.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - openssl >=3.3.1,<4.0a0 - license: MIT - license_family: MIT - size: 1370023 - timestamp: 1719463201255 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.21.3-h237132a_0.conda - sha256: 4442f957c3c77d69d9da3521268cad5d54c9033f1a73f99cde0a3658937b159b - md5: c6dc8a0fdec13a0565936655c33069a1 - depends: - - __osx >=11.0 - - libcxx >=16 - - libedit >=3.1.20191231,<3.2.0a0 - - libedit >=3.1.20191231,<4.0a0 - - openssl >=3.3.1,<4.0a0 - license: MIT - license_family: MIT - size: 1155530 - timestamp: 1719463474401 -- conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda - sha256: 18e8b3430d7d232dad132f574268f56b3eb1a19431d6d5de8c53c29e6c18fa81 - md5: 31aec030344e962fbd7dbbbbd68e60a9 - depends: - - openssl >=3.3.1,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 712034 - timestamp: 1719463874284 -- conda: https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2 - sha256: 5210d31c8f2402dd1ad1b3edcf7a53292b9da5de20cd14d9c243dbf9278b1c4f - md5: 8d67904973263afd2985ba56aa2d6bb4 - depends: - - python - - six - license: MIT - license_family: MIT - size: 18212 - timestamp: 1592937373647 -- conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda - sha256: d6a61830a354da022eae93fa896d0991385a875c6bba53c82263a289deda9db8 - md5: 000e85703f0fd9594c81710dd5066471 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - license: MIT - license_family: MIT - size: 248046 - timestamp: 1739160907615 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.17-h7eeda09_0.conda - sha256: 310a62c2f074ebd5aa43b3cd4b00d46385ce680fa2132ecee255a200e2d2f15f - md5: 92a61fd30b19ebd5c1621a5bfe6d8b5f - depends: - - __osx >=11.0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - license: MIT - license_family: MIT - size: 212125 - timestamp: 1739161108467 -- conda: https://conda.anaconda.org/conda-forge/win-64/lcms2-2.17-hbcf6048_0.conda - sha256: 
7712eab5f1a35ca3ea6db48ead49e0d6ac7f96f8560da8023e61b3dbe4f3b25d - md5: 3538827f77b82a837fa681a4579e37a1 - depends: - - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 510641 - timestamp: 1739161381270 -- conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_3.conda - sha256: f754b85b9c4528e09749fea56466e3a4f38439dd70b34f0f9bebff9b31055cf1 - md5: 5d5839b8df5d356d701bdd3450af6955 - depends: - - __glibc >=2.17,<3.0.a0 - constrains: - - binutils_impl_linux-64 2.43 - license: GPL-3.0-only - size: 671098 - timestamp: 1740102063271 -- conda: https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.2-pyh41aed27_1.conda - sha256: aca06fc1d0cdbb342951c5d1187569c5fcfaa6fc10310a7ca88e5e44520f398d - md5: 097b37f4503a319a9631020ecfe0e845 - depends: - - python >=3.10 - - python - license: EPL-2.0 - size: 29762 - timestamp: 1738017223129 -- conda: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 - sha256: cb55f36dcd898203927133280ae1dc643368af041a48bcf7c026acb7c47b0c12 - md5: 76bbff344f0134279f225174e9064c8f - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: Apache-2.0 - license_family: Apache - size: 281798 - timestamp: 1657977462600 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.0.0-h9a09cb3_0.tar.bz2 - sha256: 6f068bb53dfb6147d3147d981bb851bb5477e769407ad4e6a68edf482fdcb958 - md5: de462d5aacda3b30721b512c5da4e742 - depends: - - libcxx >=13.0.1 - license: Apache-2.0 - license_family: Apache - size: 215721 - timestamp: 1657977558796 -- conda: https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2 - sha256: f4f39d7f6a2f9b407f8fb567a6c25755270421731d70f0ff331f5de4fa367488 - md5: 1900cb3cab5055833cfddb0ba233b074 - depends: - - vc >=14.2,<15 - - vs2015_runtime >=14.29.30037 - license: Apache-2.0 - license_family: Apache - size: 194365 - timestamp: 1657977692274 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda - sha256: 143a586aa67d50622ef703de57b9d43f44945836d6568e0e7aa174bd8c45e0d4 - md5: 488f260ccda0afaf08acb286db439c2f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - constrains: - - libabseil-static =20240722.0=cxx17* - - abseil-cpp =20240722.0 - license: Apache-2.0 - license_family: Apache - size: 1311599 - timestamp: 1736008414161 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20240722.0-cxx17_h07bc746_4.conda - sha256: 05fa5e5e908962b9c5aba95f962e2ca81d9599c4715aebe5e4ddb72b309d1770 - md5: c2d95bd7aa8d564a9bd7eca5e571a5b3 - depends: - - __osx >=11.0 - - libcxx >=18 - constrains: - - libabseil-static =20240722.0=cxx17* - - abseil-cpp =20240722.0 - license: Apache-2.0 - license_family: Apache - size: 1178260 - timestamp: 1736008642885 -- conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_h4eb7d71_4.conda - sha256: 846eacff96d36060fe5f7b351e4df6fafae56bf34cc6426497f12b5c13f317cf - md5: c57ee7f404d1aa84deb3e15852bec6fa - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - abseil-cpp =20240722.0 - - libabseil-static =20240722.0=cxx17* - license: Apache-2.0 - license_family: Apache - size: 1784929 - timestamp: 1736008778245 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - sha256: 2ef420a655528bca9d269086cf33b7e90d2f54ad941b437fb1ed5eca87cee017 - md5: 
5e97e271911b8b2001a8b71860c32faa - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: BSD-2-Clause - license_family: BSD - size: 35446 - timestamp: 1711021212685 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libaec-1.1.3-hebf3989_0.conda - sha256: 896189b7b48a194c46a3556ea04943ef81cbe0498521231f8eb25816a68bc8ed - md5: 6f0b8e56d2e7bae12a18fc5b2cd9f310 - depends: - - libcxx >=16 - license: BSD-2-Clause - license_family: BSD - size: 28451 - timestamp: 1711021498493 -- conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - sha256: f5c293d3cfc00f71dfdb64bd65ab53625565f8778fc2d5790575bef238976ebf - md5: 8723000f6ffdbdaef16025f0a01b64c5 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 32567 - timestamp: 1711021603471 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda - sha256: 2466803e26ae9dbd2263de3a102b572b741c056549875c04b6ec10830bd5d338 - md5: a28808eae584c7f519943719b2a2b386 - depends: - - __glibc >=2.17,<3.0.a0 - - bzip2 >=1.0.8,<2.0a0 - - libgcc >=13 - - liblzma >=5.6.3,<6.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - lzo >=2.10,<3.0a0 - - openssl >=3.4.0,<4.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-2-Clause - license_family: BSD - size: 878021 - timestamp: 1734020918345 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarchive-3.7.7-h3b16cec_3.conda - sha256: cbce64423e72bcd3576b5cfe0e4edd255900100f72467d5b4ea1d77449ac1ce9 - md5: 1c2eda2163510220b9f9d56a85c8da9d - depends: - - __osx >=11.0 - - bzip2 >=1.0.8,<2.0a0 - - libiconv >=1.17,<2.0a0 - - liblzma >=5.6.3,<6.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - lzo >=2.10,<3.0a0 - - openssl >=3.4.0,<4.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-2-Clause - license_family: BSD - size: 772780 - timestamp: 1734021109752 -- conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.7-h979ed78_3.conda - sha256: 3a44d5584db995497ea96d911a2419b6920317b927af7f1df8464cd5492f5ab3 - md5: 7c29b6918c2aa6a44ed32e2cf816da7b - depends: - - bzip2 >=1.0.8,<2.0a0 - - liblzma >=5.6.3,<6.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - lzo >=2.10,<3.0a0 - - openssl >=3.4.0,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-2-Clause - license_family: BSD - size: 1082930 - timestamp: 1734021400781 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-19.0.1-hfa2a6e7_0_cpu.conda - sha256: 7b1f61045b37266989023a007d6331875062bb658068a6e6ab49720495ca3543 - md5: 11b712ed1316c98592f6bae7ccfaa86c - depends: - - __glibc >=2.17,<3.0.a0 - - aws-crt-cpp >=0.29.9,<0.29.10.0a0 - - aws-sdk-cpp >=1.11.489,<1.11.490.0a0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - azure-identity-cpp >=1.10.0,<1.10.1.0a0 - - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - - bzip2 >=1.0.8,<2.0a0 - - glog >=0.7.1,<0.8.0a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libbrotlidec >=1.1.0,<1.2.0a0 - - libbrotlienc >=1.1.0,<1.2.0a0 - - libgcc >=13 - - libgoogle-cloud >=2.35.0,<2.36.0a0 - - libgoogle-cloud-storage >=2.35.0,<2.36.0a0 - - libopentelemetry-cpp >=1.18.0,<1.19.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libre2-11 >=2024.7.2 - - libstdcxx >=13 - - libutf8proc >=2.10.0,<2.11.0a0 - - libzlib 
>=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - orc >=2.0.3,<2.0.4.0a0 - - re2 - - snappy >=1.2.1,<1.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - constrains: - - arrow-cpp <0.0a0 - - parquet-cpp <0.0a0 - - apache-arrow-proc =*=cpu - license: Apache-2.0 - license_family: APACHE - size: 8967810 - timestamp: 1739768880886 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-19.0.1-h0945df6_0_cpu.conda - sha256: e34199bea635b1bf9f3819205b291f714ddd47db1bf6e6d10a4eb61da7330214 - md5: 21bcb04df4b1a99721199c5aa6273f53 - depends: - - __osx >=11.0 - - aws-crt-cpp >=0.29.9,<0.29.10.0a0 - - aws-sdk-cpp >=1.11.489,<1.11.490.0a0 - - azure-core-cpp >=1.14.0,<1.14.1.0a0 - - azure-identity-cpp >=1.10.0,<1.10.1.0a0 - - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - - bzip2 >=1.0.8,<2.0a0 - - glog >=0.7.1,<0.8.0a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libbrotlidec >=1.1.0,<1.2.0a0 - - libbrotlienc >=1.1.0,<1.2.0a0 - - libcxx >=18 - - libgoogle-cloud >=2.35.0,<2.36.0a0 - - libgoogle-cloud-storage >=2.35.0,<2.36.0a0 - - libopentelemetry-cpp >=1.18.0,<1.19.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libre2-11 >=2024.7.2 - - libutf8proc >=2.10.0,<2.11.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - orc >=2.0.3,<2.0.4.0a0 - - re2 - - snappy >=1.2.1,<1.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - constrains: - - apache-arrow-proc =*=cpu - - arrow-cpp <0.0a0 - - parquet-cpp <0.0a0 - license: Apache-2.0 - license_family: APACHE - size: 5571369 - timestamp: 1739767084108 -- conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-19.0.1-h8dcb746_0_cpu.conda - sha256: 567d1cf9d14d1dcea3877cd063f3381e3f5c9fd51cef72e38114f7ba48195921 - md5: 9df767d91d5f573b1bc1d18c27f2f48a - depends: - - aws-crt-cpp >=0.29.9,<0.29.10.0a0 - - aws-sdk-cpp >=1.11.489,<1.11.490.0a0 - - bzip2 >=1.0.8,<2.0a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libbrotlidec >=1.1.0,<1.2.0a0 - - libbrotlienc >=1.1.0,<1.2.0a0 - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl >=8.12.1,<9.0a0 - - libgoogle-cloud >=2.35.0,<2.36.0a0 - - libgoogle-cloud-storage >=2.35.0,<2.36.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libre2-11 >=2024.7.2 - - libutf8proc >=2.10.0,<2.11.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - orc >=2.0.3,<2.0.4.0a0 - - re2 - - snappy >=1.2.1,<1.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.42.34433 - - zstd >=1.5.6,<1.6.0a0 - constrains: - - apache-arrow-proc =*=cpu - - parquet-cpp <0.0a0 - - arrow-cpp <0.0a0 - license: Apache-2.0 - license_family: APACHE - size: 5303286 - timestamp: 1739770845910 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-19.0.1-hcb10f89_0_cpu.conda - sha256: 9a3c38a8f1516fe5b7801d0407ff704efd53955ebd63f7fbc439ec3b563d19cc - md5: 0d63e2dea06c44c9d2c8be3e7e38eea9 - depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 19.0.1 hfa2a6e7_0_cpu - - libgcc >=13 - - libstdcxx >=13 - license: Apache-2.0 - license_family: APACHE - size: 638054 - timestamp: 1739768924910 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-19.0.1-hf07054f_0_cpu.conda - sha256: b15f5fab53d941917143bb1cf22c5a0eacffb8ff2a010ee2e909afab3821d5f9 - md5: 9213d80ffba1921b86bfdf5fdd2c10c4 - depends: - - __osx >=11.0 - - libarrow 19.0.1 h0945df6_0_cpu - - libcxx >=18 - license: Apache-2.0 - license_family: APACHE - size: 500147 - timestamp: 1739767179329 -- conda: 
https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-19.0.1-h7d8d6a5_0_cpu.conda - sha256: 3942a53d93fd743d3297757d82b7b9ee7ebdb0854d12e1e43c6946530ec65b7b - md5: 8b3eab29d714ce61b13aad5417ffa668 - depends: - - libarrow 19.0.1 h8dcb746_0_cpu - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.42.34433 - license: Apache-2.0 - license_family: APACHE - size: 449963 - timestamp: 1739770921236 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-19.0.1-hcb10f89_0_cpu.conda - sha256: f756208d787db50b6be68210cb9eec3644b8291a8a353bb2071ea4451bfc1412 - md5: ec52b3b990be399f4267a9acabb73070 - depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 19.0.1 hfa2a6e7_0_cpu - - libarrow-acero 19.0.1 hcb10f89_0_cpu - - libgcc >=13 - - libparquet 19.0.1 h081d1f1_0_cpu - - libstdcxx >=13 - license: Apache-2.0 - license_family: APACHE - size: 604500 - timestamp: 1739769034226 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-19.0.1-hf07054f_0_cpu.conda - sha256: 21fcb9a09e5872b4f1d483d8d950a1804ccb6804881881ca6fe6c5968a5e4dbc - md5: 0695382a64b393765b4bc9e1ee99250c - depends: - - __osx >=11.0 - - libarrow 19.0.1 h0945df6_0_cpu - - libarrow-acero 19.0.1 hf07054f_0_cpu - - libcxx >=18 - - libparquet 19.0.1 h636d7b7_0_cpu - license: Apache-2.0 - license_family: APACHE - size: 501234 - timestamp: 1739768239766 -- conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-19.0.1-h7d8d6a5_0_cpu.conda - sha256: e7691b0f521f2f6baaf3f3ca8b1aaeb00e438612d00db531a8bb3eb67d398a98 - md5: f880e06be679f2b9edb1abb2505f03a9 - depends: - - libarrow 19.0.1 h8dcb746_0_cpu - - libarrow-acero 19.0.1 h7d8d6a5_0_cpu - - libparquet 19.0.1 ha850022_0_cpu - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.42.34433 - license: Apache-2.0 - license_family: APACHE - size: 434909 - timestamp: 1739771142936 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-19.0.1-h08228c5_0_cpu.conda - sha256: e0b3ed06ce74c6a083dab59fb3059fdbc40fc71ff94ce470ca0a7c7ffe8d0317 - md5: 792e2359bb93513324326cbe3ee4ebdd - depends: - - __glibc >=2.17,<3.0.a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libarrow 19.0.1 hfa2a6e7_0_cpu - - libarrow-acero 19.0.1 hcb10f89_0_cpu - - libarrow-dataset 19.0.1 hcb10f89_0_cpu - - libgcc >=13 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libstdcxx >=13 - license: Apache-2.0 - license_family: APACHE - size: 523313 - timestamp: 1739769085090 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-19.0.1-h4239455_0_cpu.conda - sha256: 0b5c0839102b396f8b0ba376189562a727ebbed3c6bdab74aaf56227ee45cb73 - md5: 2893dd55f7804b9106126c2f00712ec2 - depends: - - __osx >=11.0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libarrow 19.0.1 h0945df6_0_cpu - - libarrow-acero 19.0.1 hf07054f_0_cpu - - libarrow-dataset 19.0.1 hf07054f_0_cpu - - libcxx >=18 - - libprotobuf >=5.28.3,<5.28.4.0a0 - license: Apache-2.0 - license_family: APACHE - size: 450361 - timestamp: 1739768396169 -- conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-19.0.1-h3dbecdf_0_cpu.conda - sha256: 03b6d6dd152865196d757a053ec8b1aad55489c8a292748dedf71429b8491ede - md5: d59244ba3e95ce67e8889726cb40aa1f - depends: - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libarrow 19.0.1 h8dcb746_0_cpu - - libarrow-acero 19.0.1 h7d8d6a5_0_cpu - - libarrow-dataset 19.0.1 h7d8d6a5_0_cpu - - libprotobuf >=5.28.3,<5.28.4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - 
vc14_runtime >=14.42.34433 - license: Apache-2.0 - license_family: APACHE - size: 363280 - timestamp: 1739771244591 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libasprintf-0.23.1-h493aca8_0.conda - sha256: 2b27d2ede7867fd362f94644aac1d7fb9af7f7fc3f122cb014647b47ffd402a4 - md5: baf9e4423f10a15ca7eab26480007639 - depends: - - __osx >=11.0 - - libcxx >=18 - license: LGPL-2.1-or-later - size: 41679 - timestamp: 1739039255705 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda - sha256: e06da844b007a64a9ac35d4e3dc4dbc66583f79b57d08166cf58f2f08723a6e8 - md5: 21e468ed3786ebcb2124b123aa2484b7 - depends: - - __glibc >=2.17,<3.0.a0 - - aom >=3.9.1,<3.10.0a0 - - dav1d >=1.2.1,<1.2.2.0a0 - - libgcc >=13 - - rav1e >=0.6.6,<1.0a0 - - svt-av1 >=2.3.0,<2.3.1.0a0 - license: BSD-2-Clause - license_family: BSD - size: 116202 - timestamp: 1730268687453 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libavif16-1.1.1-h45b7238_2.conda - sha256: c671365e8c822d29b53f20c4573fdbc70f18b50ff9a4b5b2b6b3c8f7ad2ac2a9 - md5: 7571064a60bc193ff5c25f36ed23394a - depends: - - __osx >=11.0 - - aom >=3.9.1,<3.10.0a0 - - dav1d >=1.2.1,<1.2.2.0a0 - - rav1e >=0.6.6,<1.0a0 - - svt-av1 >=2.3.0,<2.3.1.0a0 - license: BSD-2-Clause - license_family: BSD - size: 96781 - timestamp: 1730268761553 -- conda: https://conda.anaconda.org/conda-forge/win-64/libavif16-1.1.1-h4d049a7_2.conda - sha256: f74662ac8325dedbc786bf4f3faef39ad4981739cf0239c2ea2d80c791b04de5 - md5: e7e7405d962ebcb6803f29dc4eabae69 - depends: - - _libavif_api >=1.1.1,<1.1.2.0a0 - - aom >=3.9.1,<3.10.0a0 - - dav1d >=1.2.1,<1.2.2.0a0 - - rav1e >=0.6.6,<1.0a0 - - svt-av1 >=2.3.0,<2.3.1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 97828 - timestamp: 1730269135854 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-31_h59b9bed_openblas.conda - build_number: 31 - sha256: 9839fc4ac0cbb0aa3b9eea520adfb57311838959222654804e58f6f2d1771db5 - md5: 728dbebd0f7a20337218beacffd37916 - depends: - - libopenblas >=0.3.29,<0.3.30.0a0 - - libopenblas >=0.3.29,<1.0a0 - constrains: - - liblapacke =3.9.0=31*_openblas - - liblapack =3.9.0=31*_openblas - - blas =2.131=openblas - - mkl <2025 - - libcblas =3.9.0=31*_openblas - license: BSD-3-Clause - size: 16859 - timestamp: 1740087969120 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-31_h10e41b3_openblas.conda - build_number: 31 - sha256: 369586e7688b59b4f92c709b99d847d66d4d095425db327dd32ee5e6ab74697f - md5: 39b053da5e7035c6592102280aa7612a - depends: - - libopenblas >=0.3.29,<0.3.30.0a0 - - libopenblas >=0.3.29,<1.0a0 - constrains: - - liblapacke =3.9.0=31*_openblas - - libcblas =3.9.0=31*_openblas - - blas =2.131=openblas - - mkl <2025 - - liblapack =3.9.0=31*_openblas - license: BSD-3-Clause - size: 17123 - timestamp: 1740088119350 -- conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-31_h641d27c_mkl.conda - build_number: 31 - sha256: 7bb4d5b591e98fe607279520ee78e3571a297b5720aa789a2536041ad5540de8 - md5: d05563c577fe2f37693a554b3f271e8f - depends: - - mkl 2024.2.2 h66d3029_15 - constrains: - - libcblas =3.9.0=31*_mkl - - blas =2.131=mkl - - liblapacke =3.9.0=31*_mkl - - liblapack =3.9.0=31*_mkl - license: BSD-3-Clause - size: 3733728 - timestamp: 1740088452830 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda - sha256: d9db2de60ea917298e658143354a530e9ca5f9c63471c65cf47ab39fd2f429e3 - md5: 
41b599ed2b02abcfdd84302bff174b23 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 68851 - timestamp: 1725267660471 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-hd74edd7_2.conda - sha256: 839dacb741bdbb25e58f42088a2001b649f4f12195aeb700b5ddfca3267749e5 - md5: d0bf1dff146b799b319ea0434b93f779 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 68426 - timestamp: 1725267943211 -- conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda - sha256: 33e8851c6cc8e2d93059792cd65445bfe6be47e4782f826f01593898ec95764c - md5: f7dc9a8f21d74eab46456df301da2972 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 70526 - timestamp: 1725268159739 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda - sha256: 2892d512cad096cb03f1b66361deeab58b64e15ba525d6592bb6d609e7045edf - md5: 9566f0bd264fbd463002e759b8a82401 - depends: - - __glibc >=2.17,<3.0.a0 - - libbrotlicommon 1.1.0 hb9d3cd8_2 - - libgcc >=13 - license: MIT - license_family: MIT - size: 32696 - timestamp: 1725267669305 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-hd74edd7_2.conda - sha256: 6c6862eb274f21a7c0b60e5345467a12e6dda8b9af4438c66d496a2c1a538264 - md5: 55e66e68ce55523a6811633dd1ac74e2 - depends: - - __osx >=11.0 - - libbrotlicommon 1.1.0 hd74edd7_2 - license: MIT - license_family: MIT - size: 28378 - timestamp: 1725267980316 -- conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda - sha256: 234fc92f4c4f1cf22f6464b2b15bfc872fa583c74bf3ab9539ff38892c43612f - md5: 9bae75ce723fa34e98e239d21d752a7e - depends: - - libbrotlicommon 1.1.0 h2466b09_2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 32685 - timestamp: 1725268208844 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - sha256: 779f58174e99de3600e939fa46eddb453ec5d3c60bb46cdaa8b4c127224dbf29 - md5: 06f70867945ea6a84d35836af780f1de - depends: - - __glibc >=2.17,<3.0.a0 - - libbrotlicommon 1.1.0 hb9d3cd8_2 - - libgcc >=13 - license: MIT - license_family: MIT - size: 281750 - timestamp: 1725267679782 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-hd74edd7_2.conda - sha256: eeb1eb0d58b9d02bc1b98dc0a058f104ab168eb2f7d1c7bfa0570a12cfcdb7b7 - md5: 4f3a434504c67b2c42565c0b85c1885c - depends: - - __osx >=11.0 - - libbrotlicommon 1.1.0 hd74edd7_2 - license: MIT - license_family: MIT - size: 279644 - timestamp: 1725268003553 -- conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - sha256: 3d0dd7ef505962f107b7ea8f894e0b3dd01bf46852b362c8a7fc136b039bc9e1 - md5: 85741a24d97954a991e55e34bc55990b - depends: - - libbrotlicommon 1.1.0 h2466b09_2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 245929 - timestamp: 1725268238259 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-31_he106b2a_openblas.conda - build_number: 31 - sha256: ede8545011f5b208b151fe3e883eb4e31d495ab925ab7b9ce394edca846e0c0d - md5: abb32c727da370c481a1c206f5159ce9 - depends: - - libblas 3.9.0 31_h59b9bed_openblas - constrains: - - liblapacke =3.9.0=31*_openblas - - liblapack =3.9.0=31*_openblas - - blas =2.131=openblas - license: BSD-3-Clause - 
size: 16796 - timestamp: 1740087984429 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-31_hb3479ef_openblas.conda - build_number: 31 - sha256: f237486cc9118d09d0f3ff8820280de34365f98ee7b7dc5ab923b04c7cbf25a5 - md5: 7353c2bf0e90834cb70545671996d871 - depends: - - libblas 3.9.0 31_h10e41b3_openblas - constrains: - - liblapacke =3.9.0=31*_openblas - - blas =2.131=openblas - - liblapack =3.9.0=31*_openblas - license: BSD-3-Clause - size: 17032 - timestamp: 1740088127097 -- conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-31_h5e41251_mkl.conda - build_number: 31 - sha256: 609f455b099919bd4d15d4a733f493dc789e02da73fe4474f1cca73afafb95b8 - md5: 43c100b94ad2607382b0cf0f3a6b0bf3 - depends: - - libblas 3.9.0 31_h641d27c_mkl - constrains: - - blas =2.131=mkl - - liblapacke =3.9.0=31*_mkl - - liblapack =3.9.0=31*_mkl - license: BSD-3-Clause - size: 3733549 - timestamp: 1740088502127 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - sha256: fd1d153962764433fe6233f34a72cdeed5dcf8a883a85769e8295ce940b5b0c5 - md5: c965a5aa0d5c1c37ffc62dff36e28400 - depends: - - libgcc-ng >=9.4.0 - - libstdcxx-ng >=9.4.0 - license: BSD-3-Clause - license_family: BSD - size: 20440 - timestamp: 1633683576494 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 - sha256: 58477b67cc719060b5b069ba57161e20ba69b8695d154a719cb4b60caf577929 - md5: 32bd82a6a625ea6ce090a81c3d34edeb - depends: - - libcxx >=11.1.0 - license: BSD-3-Clause - license_family: BSD - size: 18765 - timestamp: 1633683992603 -- conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - sha256: 75e60fbe436ba8a11c170c89af5213e8bec0418f88b7771ab7e3d9710b70c54e - md5: cd4cc2d0c610c8cb5419ccc979f2d6ce - depends: - - vc >=14.1,<15.0a0 - - vs2015_runtime >=14.16.27012 - license: BSD-3-Clause - license_family: BSD - size: 25694 - timestamp: 1633684287072 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - sha256: bc67b9b21078c99c6bd8595fe7e1ed6da1f721007726e717f0449de7032798c4 - md5: d4529f4dff3057982a7617c7ac58fde3 - depends: - - krb5 >=1.21.1,<1.22.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - license: Apache-2.0 - license_family: Apache - size: 4519402 - timestamp: 1689195353551 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.12.1-h332b0f4_0.conda - sha256: 2ebc3039af29269e4cdb858fca36265e5e400c1125a4bcd84ae73a596e0e76ca - md5: 45e9dc4e7b25e2841deb392be085500e - depends: - - __glibc >=2.17,<3.0.a0 - - krb5 >=1.21.3,<1.22.0a0 - - libgcc >=13 - - libnghttp2 >=1.64.0,<2.0a0 - - libssh2 >=1.11.1,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.1,<4.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: curl - license_family: MIT - size: 426675 - timestamp: 1739512336799 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.12.1-h73640d1_0.conda - sha256: 0bddd1791eb0602c8c6aa465802e9d4526d3ec1251d900b209e767753565d5df - md5: 105f0cceef753644912f42e11c1ae9cf - depends: - - __osx >=11.0 - - krb5 >=1.21.3,<1.22.0a0 - - libnghttp2 >=1.64.0,<2.0a0 - - libssh2 >=1.11.1,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.1,<4.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: curl - license_family: MIT - size: 387893 - timestamp: 1739512564746 -- conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.12.1-h88aaa65_0.conda - sha256: 4c8e62fd32d59e5fbfad0f37e33083928bbb3c8800258650d4e7911e6f6fd1aa - md5: 
2b1c729d91f3b07502981b6e0c7727cc - depends: - - krb5 >=1.21.3,<1.22.0a0 - - libssh2 >=1.11.1,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: curl - license_family: MIT - size: 349696 - timestamp: 1739512628733 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.7-ha82da77_0.conda - sha256: 776092346da87a2a23502e14d91eb0c32699c4a1522b7331537bd1c3751dcff5 - md5: 5b3e1610ff8bd5443476b91d618f5b77 - depends: - - __osx >=11.0 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 523505 - timestamp: 1736877862502 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda - sha256: 7cf7e294e1a7c8219065885e186d8f52002fb900bf384d815f159b5874204e3d - md5: 407fee7a5d7ab2dca12c9ca7f62310ad - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: LGPL-3.0-or-later - license_family: LGPL - size: 411814 - timestamp: 1703088639063 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libde265-1.0.15-h2ffa867_0.conda - sha256: 13747fa634f7f16d7f222b7d3869e3c1aab9d3a2791edeb2fc632a87663950e0 - md5: 7c718ee6d8497702145612fa0898a12d - depends: - - libcxx >=15 - license: LGPL-3.0-or-later - license_family: LGPL - size: 277861 - timestamp: 1703089176970 -- conda: https://conda.anaconda.org/conda-forge/win-64/libde265-1.0.15-h91493d7_0.conda - sha256: f52c603151743486d2faec37e161c60731001d9c955e0f12ac9ad334c1119116 - md5: 9dc3c1fbc1c7bc6204e8a603f45e156b - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-3.0-or-later - license_family: LGPL - size: 252968 - timestamp: 1703089151021 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda - sha256: 511d801626d02f4247a04fff957cc6e9ec4cc7e8622bd9acd076bcdc5de5fe66 - md5: 8dfae1d2e74767e9ce36d5fa0d8605db - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 72255 - timestamp: 1734373823254 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.23-hec38601_0.conda - sha256: 887c02deaed6d583459eba6367023e36d8761085b2f7126e389424f57155da53 - md5: 1d8b9588be14e71df38c525767a1ac30 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 54132 - timestamp: 1734373971372 -- conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.23-h9062f6e_0.conda - sha256: 96c47725a8258159295996ea2758fa0ff9bea330e72b59641642e16be8427ce8 - md5: a9624935147a25b06013099d3038e467 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 155723 - timestamp: 1734374084110 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda - sha256: d789471216e7aba3c184cd054ed61ce3f6dac6f87a50ec69291b9297f8c18724 - md5: c277e0a4d549b03ac1e9d6cbbe3d017b - depends: - - ncurses - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - ncurses >=6.5,<7.0a0 - license: BSD-2-Clause - license_family: BSD - size: 134676 - timestamp: 1738479519902 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda - sha256: 66aa216a403de0bb0c1340a88d1a06adaff66bae2cfd196731aa24db9859d631 - md5: 44083d2d2c2025afca315c7a172eab2b - depends: - - ncurses - - __osx >=11.0 - - ncurses >=6.5,<7.0a0 - license: BSD-2-Clause - license_family: BSD - size: 107691 - timestamp: 1738479560845 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda 
- sha256: 1cd6048169fa0395af74ed5d8f1716e22c19a81a8a36f934c110ca3ad4dd27b4 - md5: 172bf1cd1ff8629f2b1179945ed45055 - depends: - - libgcc-ng >=12 - license: BSD-2-Clause - license_family: BSD - size: 112766 - timestamp: 1702146165126 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda - sha256: 95cecb3902fbe0399c3a7e67a5bed1db813e5ab0e22f4023a5e0f722f2cc214f - md5: 36d33e440c31857372a72137f78bacf5 - license: BSD-2-Clause - license_family: BSD - size: 107458 - timestamp: 1702146414478 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - sha256: 2e14399d81fb348e9d231a82ca4d816bf855206923759b69ad006ba482764131 - md5: a1cfcc585f0c42bf8d5546bb1dfb668d - depends: - - libgcc-ng >=12 - - openssl >=3.1.1,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 427426 - timestamp: 1685725977222 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libevent-2.1.12-h2757513_1.conda - sha256: 8c136d7586259bb5c0d2b913aaadc5b9737787ae4f40e3ad1beaf96c80b919b7 - md5: 1a109764bff3bdc7bdd84088347d71dc - depends: - - openssl >=3.1.1,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 368167 - timestamp: 1685726248899 -- conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - sha256: af03882afb7a7135288becf340c2f0cf8aa8221138a9a7b108aaeb308a486da1 - md5: 25efbd786caceef438be46da78a7b5ef - depends: - - openssl >=3.1.1,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 410555 - timestamp: 1685726568668 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda - sha256: 56541b98447b58e52d824bd59d6382d609e11de1f8adf20b23143e353d2b8d26 - md5: db833e03127376d461e1e13e76f09b6c - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - constrains: - - expat 2.6.4.* - license: MIT - license_family: MIT - size: 73304 - timestamp: 1730967041968 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda - sha256: e42ab5ace927ee7c84e3f0f7d813671e1cf3529f5f06ee5899606630498c2745 - md5: 38d2656dd914feb0cab8c629370768bf - depends: - - __osx >=11.0 - constrains: - - expat 2.6.4.* - license: MIT - license_family: MIT - size: 64693 - timestamp: 1730967175868 -- conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.4-he0c23c2_0.conda - sha256: 0c0447bf20d1013d5603499de93a16b6faa92d7ead870d96305c0f065b6a5a12 - md5: eb383771c680aa792feb529eaf9df82f - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - expat 2.6.4.* - license: MIT - license_family: MIT - size: 139068 - timestamp: 1730967442102 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_0.conda - sha256: 67a6c95e33ebc763c1adc3455b9a9ecde901850eb2fceb8e646cc05ef3a663da - md5: e3eb7806380bc8bcecba6d749ad5f026 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 53415 - timestamp: 1739260413716 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 - sha256: 41b3d13efb775e340e4dba549ab5c029611ea6918703096b2eaa9c015c0750ca - md5: 086914b672be056eb70fd4285b6783b6 - license: MIT - license_family: MIT - size: 39020 - timestamp: 1636488587153 -- conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.6-h537db12_0.conda - sha256: 77922d8dd2faf88ac6accaeebf06409d1820486fde710cff6b554d12273e46be - md5: 31d5107f75b2f204937728417e2e39e5 - depends: - 
- ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 40830 - timestamp: 1739260917585 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda - sha256: 53eb8a79365e58849e7b1a068d31f4f9e718dc938d6f2c03e960345739a03569 - md5: 3cb76c3f10d3bc7f1105b2fc9db984df - depends: - - _libgcc_mutex 0.1 conda_forge - - _openmp_mutex >=4.5 - constrains: - - libgomp 14.2.0 h77fa898_1 - - libgcc-ng ==14.2.0=*_1 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 848745 - timestamp: 1729027721139 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.2.0-h1383e82_1.conda - sha256: ef840e797714440bb10b69446d815966fff41fdac79f79c4e19c475d81cd375d - md5: 75fdd34824997a0f9950a703b15d8ac5 - depends: - - _openmp_mutex >=4.5 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - constrains: - - libgcc-ng ==14.2.0=*_1 - - libgomp 14.2.0 h1383e82_1 - - msys2-conda-epoch <0.0a0 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 666386 - timestamp: 1729089506769 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda - sha256: 3a76969c80e9af8b6e7a55090088bc41da4cffcde9e2c71b17f44d37b7cb87f7 - md5: e39480b9ca41323497b05492a63bc35b - depends: - - libgcc 14.2.0 h77fa898_1 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 54142 - timestamp: 1729027726517 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6f5c62b_11.conda - sha256: 19e5be91445db119152217e8e8eec4fd0499d854acc7d8062044fb55a70971cd - md5: 68fc66282364981589ef36868b1a7c78 - depends: - - __glibc >=2.17,<3.0.a0 - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - freetype >=2.12.1,<3.0a0 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libpng >=1.6.45,<1.7.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - license: GD - license_family: BSD - size: 177082 - timestamp: 1737548051015 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgd-2.3.3-hb2c3a21_11.conda - sha256: be038eb8dfe296509aee2df21184c72cb76285b0340448525664bc396aa6146d - md5: 4581aa3cfcd1a90967ed02d4a9f3db4b - depends: - - __osx >=11.0 - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - freetype >=2.12.1,<3.0a0 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libiconv >=1.17,<2.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libpng >=1.6.45,<1.7.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - license: GD - license_family: BSD - size: 156868 - timestamp: 1737548290283 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgd-2.3.3-h7208af6_11.conda - sha256: 485a30af9e710feeda8d5b537b2db1e32e41f29ef24683bbe7deb6f7fd915825 - md5: 2070a706123b2d5e060b226a00e96488 - depends: - - fontconfig >=2.15.0,<3.0a0 - - fonts-conda-ecosystem - - freetype >=2.12.1,<3.0a0 - - icu >=75.1,<76.0a0 - - libexpat >=2.6.4,<3.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libpng >=1.6.45,<1.7.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - xorg-libxpm >=3.5.17,<4.0a0 - license: GD - license_family: BSD - size: 165838 - timestamp: 1737548342665 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.2-h3359108_0.conda - sha256: a3b549ab4a096561ce4046955505c2806bab721948d35c57dcc4cf2a64a19691 - 
md5: f460a299f5a2f34a8049071f47a81949 - depends: - - __glibc >=2.17,<3.0.a0 - - blosc >=1.21.6,<2.0a0 - - geos >=3.13.0,<3.13.1.0a0 - - geotiff >=1.7.3,<1.8.0a0 - - giflib >=5.2.2,<5.3.0a0 - - json-c >=0.18,<0.19.0a0 - - lerc >=4.0.0,<5.0a0 - - libarchive >=3.7.7,<3.8.0a0 - - libcurl >=8.12.1,<9.0a0 - - libdeflate >=1.23,<1.24.0a0 - - libexpat >=2.6.4,<3.0a0 - - libgcc >=13 - - libheif >=1.19.5,<1.20.0a0 - - libiconv >=1.17,<2.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libkml >=1.3.0,<1.4.0a0 - - liblzma >=5.6.4,<6.0a0 - - libpng >=1.6.46,<1.7.0a0 - - libspatialite >=5.1.0,<5.2.0a0 - - libsqlite >=3.48.0,<4.0a0 - - libstdcxx >=13 - - libtiff >=4.7.0,<4.8.0a0 - - libuuid >=2.38.1,<3.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - openssl >=3.4.1,<4.0a0 - - pcre2 >=10.44,<10.45.0a0 - - proj >=9.5.1,<9.6.0a0 - - xerces-c >=3.2.5,<3.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - constrains: - - libgdal 3.10.2.* - license: MIT - license_family: MIT - size: 10801211 - timestamp: 1739626046005 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-core-3.10.2-h9ef0d2d_0.conda - sha256: bb72a3c879eddcb0e7e01e662245ec3f9fe07c53cf287217a200e52a39115014 - md5: 5982d6c652d39c9cef709bf22cbbb94d - depends: - - __osx >=11.0 - - blosc >=1.21.6,<2.0a0 - - geos >=3.13.0,<3.13.1.0a0 - - geotiff >=1.7.3,<1.8.0a0 - - giflib >=5.2.2,<5.3.0a0 - - json-c >=0.18,<0.19.0a0 - - lerc >=4.0.0,<5.0a0 - - libarchive >=3.7.7,<3.8.0a0 - - libcurl >=8.12.1,<9.0a0 - - libcxx >=18 - - libdeflate >=1.23,<1.24.0a0 - - libexpat >=2.6.4,<3.0a0 - - libheif >=1.19.5,<1.20.0a0 - - libiconv >=1.17,<2.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libkml >=1.3.0,<1.4.0a0 - - liblzma >=5.6.4,<6.0a0 - - libpng >=1.6.46,<1.7.0a0 - - libspatialite >=5.1.0,<5.2.0a0 - - libsqlite >=3.48.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - openssl >=3.4.1,<4.0a0 - - pcre2 >=10.44,<10.45.0a0 - - proj >=9.5.1,<9.6.0a0 - - xerces-c >=3.2.5,<3.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - constrains: - - libgdal 3.10.2.* - license: MIT - license_family: MIT - size: 8463050 - timestamp: 1739626559515 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.10.2-h095903c_0.conda - sha256: e387998eee0468570dd87764d0c78d94f7d9d5846cdfa050f71f3fe01f8941b9 - md5: 91f3b8afc65762e8710b50bdda8116c7 - depends: - - blosc >=1.21.6,<2.0a0 - - geos >=3.13.0,<3.13.1.0a0 - - geotiff >=1.7.3,<1.8.0a0 - - lerc >=4.0.0,<5.0a0 - - libarchive >=3.7.7,<3.8.0a0 - - libcurl >=8.12.1,<9.0a0 - - libdeflate >=1.23,<1.24.0a0 - - libexpat >=2.6.4,<3.0a0 - - libheif >=1.19.5,<1.20.0a0 - - libiconv >=1.17,<2.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - libkml >=1.3.0,<1.4.0a0 - - liblzma >=5.6.4,<6.0a0 - - libpng >=1.6.46,<1.7.0a0 - - libspatialite >=5.1.0,<5.2.0a0 - - libsqlite >=3.48.0,<4.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libwebp-base >=1.5.0,<2.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - openssl >=3.4.1,<4.0a0 - - pcre2 >=10.44,<10.45.0a0 - - proj >=9.5.1,<9.6.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - xerces-c >=3.2.5,<3.3.0a0 - - zstd >=1.5.6,<1.6.0a0 - constrains: - - libgdal 3.10.2.* - license: MIT - license_family: MIT - size: 8392331 - timestamp: 1739628560888 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgettextpo-0.23.1-h493aca8_0.conda - sha256: 
4dbd3f698d027330033f06778567eda5b985e2348ca92900083654a114ddd051 - md5: 18ad77def4cb7326692033eded9c815d - depends: - - __osx >=11.0 - - libiconv >=1.17,<2.0a0 - - libintl 0.23.1 h493aca8_0 - license: GPL-3.0-or-later - license_family: GPL - size: 166929 - timestamp: 1739039303132 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda - sha256: fc9e7f22a17faf74da904ebfc4d88699013d2992e55505e4aa0eb01770290977 - md5: f1fd30127802683586f768875127a987 - depends: - - libgfortran5 14.2.0 hd5240d6_1 - constrains: - - libgfortran-ng ==14.2.0=*_1 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 53997 - timestamp: 1729027752995 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-5.0.0-13_2_0_hd922786_3.conda - sha256: 44e541b4821c96b28b27fef5630883a60ce4fee91fd9c79f25a199f8f73f337b - md5: 4a55d9e169114b2b90d3ec4604cd7bbf - depends: - - libgfortran5 13.2.0 hf226fd6_3 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 110233 - timestamp: 1707330749033 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda - sha256: d149a37ca73611e425041f33b9d8dbed6e52ec506fe8cc1fc0ee054bddeb6d5d - md5: 9822b874ea29af082e5d36098d25427d - depends: - - libgcc >=14.2.0 - constrains: - - libgfortran 14.2.0 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 1462645 - timestamp: 1729027735353 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-13.2.0-hf226fd6_3.conda - sha256: bafc679eedb468a86aa4636061c55966186399ee0a04b605920d208d97ac579a - md5: 66ac81d54e95c534ae488726c1f698ea - depends: - - llvm-openmp >=8.0.0 - constrains: - - libgfortran 5.0.0 13_2_0_*_3 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 997381 - timestamp: 1707330687590 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_1.conda - sha256: f0804a9e46ae7b32ca698d26c1c95aa82a91f71b6051883d4a46bea725be9ea4 - md5: 37d1af619d999ee8f1f73cf5a06f4e2f - depends: - - __glibc >=2.17,<3.0.a0 - - libffi >=3.4,<4.0a0 - - libgcc >=13 - - libiconv >=1.17,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pcre2 >=10.44,<10.45.0a0 - constrains: - - glib 2.82.2 *_1 - license: LGPL-2.1-or-later - size: 3923974 - timestamp: 1737037491054 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.82.2-hdff4504_1.conda - sha256: d002aeaa51424e331f8504a54b6ba4388a6011a0ebcac29296f3d14282bf733b - md5: 849da57c370384ce48bef2e050488882 - depends: - - __osx >=11.0 - - libffi >=3.4,<4.0a0 - - libiconv >=1.17,<2.0a0 - - libintl >=0.22.5,<1.0a0 - - libzlib >=1.3.1,<2.0a0 - - pcre2 >=10.44,<10.45.0a0 - constrains: - - glib 2.82.2 *_1 - license: LGPL-2.1-or-later - size: 3643364 - timestamp: 1737037789629 -- conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.2-h7025463_1.conda - sha256: 77c4e6af9cc4e966a5100f48378ea3fb4ab7ed913f24af9217cc3a43242d65d5 - md5: 40596e78a77327f271acea904efdc911 - depends: - - libffi >=3.4,<4.0a0 - - libiconv >=1.17,<2.0a0 - - libintl >=0.22.5,<1.0a0 - - libzlib >=1.3.1,<2.0a0 - - pcre2 >=10.44,<10.45.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - glib 2.82.2 *_1 - license: LGPL-2.1-or-later - size: 3783933 - timestamp: 1737038122172 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda - sha256: 1911c29975ec99b6b906904040c855772ccb265a1c79d5d75c8ceec4ed89cd63 - md5: cc3573974587f12dda90d96e3e55a702 - depends: - - 
_libgcc_mutex 0.1 conda_forge - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 460992 - timestamp: 1729027639220 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.2.0-h1383e82_1.conda - sha256: d8739b834608f35775209b032f0c2be752ef187863c7ec847afcebe2f681be4e - md5: 9e2d4d1214df6f21cba12f6eff4972f9 - depends: - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - constrains: - - msys2-conda-epoch <0.0a0 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 524249 - timestamp: 1729089441747 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.35.0-h2b5623c_0.conda - sha256: d747d14c69da512d8993a995dc2df90e857778b0a8542f12fb751544128af685 - md5: 1040ab07d7af9f23cf2466ffe4e58db1 - depends: - - __glibc >=2.17,<3.0.a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcurl >=8.11.1,<9.0a0 - - libgcc >=13 - - libgrpc >=1.67.1,<1.68.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libstdcxx >=13 - - openssl >=3.4.0,<4.0a0 - constrains: - - libgoogle-cloud 2.35.0 *_0 - license: Apache-2.0 - license_family: Apache - size: 1258035 - timestamp: 1738662406183 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.35.0-hdbe95d5_0.conda - sha256: 9bee9773540956d8a2ca0b317f73d94916200a4bfd8151319bf7fdcbf704d692 - md5: b1ea94282f38b142f8bc842ef7bcc18c - depends: - - __osx >=11.0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcurl >=8.11.1,<9.0a0 - - libcxx >=18 - - libgrpc >=1.67.1,<1.68.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - openssl >=3.4.0,<4.0a0 - constrains: - - libgoogle-cloud 2.35.0 *_0 - license: Apache-2.0 - license_family: Apache - size: 877733 - timestamp: 1738662822079 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.35.0-h95c5cb2_0.conda - sha256: 5c558b47346a690c490b18da2d17d877207e1e2f3a0650bbbb4433be46f88edf - md5: 6abfc56751ccb4e6bb936f7c5dc93ddf - depends: - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcurl >=8.11.1,<9.0a0 - - libgrpc >=1.67.1,<1.68.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - libgoogle-cloud 2.35.0 *_0 - license: Apache-2.0 - license_family: Apache - size: 14439 - timestamp: 1738663276705 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.35.0-h0121fbd_0.conda - sha256: cb1ef70e55d2c1defbfd8413dbe85b5550782470dda4f8d393f28d41b6d9b007 - md5: 34e2243e0428aac6b3e903ef99b6d57d - depends: - - __glibc >=2.17,<3.0.a0 - - libabseil - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl - - libgcc >=13 - - libgoogle-cloud 2.35.0 h2b5623c_0 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl - license: Apache-2.0 - license_family: Apache - size: 785777 - timestamp: 1738662565066 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.35.0-h7081f7f_0.conda - sha256: 52dc2d18264543b564b59fb80338fbd9cb2296f011d75f41adcd85041795201c - md5: 958beca4e16f59360e30c48ff0351e04 - depends: - - __osx >=11.0 - - libabseil - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl - - libcxx >=18 - - libgoogle-cloud 2.35.0 hdbe95d5_0 - - libzlib >=1.3.1,<2.0a0 - - openssl - license: Apache-2.0 - license_family: Apache - size: 529210 - timestamp: 1738664024959 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.35.0-he5eb982_0.conda - sha256: dbdd164974e2ead7c2912764ddbaefebe81d2b19fb22c5500cf77dda5fb70855 - md5: 6b29ee7cb57c23aa64c00de029483307 - 
depends: - - libabseil - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl - - libgoogle-cloud 2.35.0 h95c5cb2_0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 14355 - timestamp: 1738663584421 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.67.1-h25350d4_1.conda - sha256: 014627485b3cf0ea18e04c0bab07be7fb98722a3aeeb58477acc7e1c3d2f911e - md5: 0c6497a760b99a926c7c12b74951a39c - depends: - - __glibc >=2.17,<3.0.a0 - - c-ares >=1.34.4,<2.0a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libgcc >=13 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libre2-11 >=2024.7.2 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - re2 - constrains: - - grpc-cpp =1.67.1 - license: Apache-2.0 - license_family: APACHE - size: 7792251 - timestamp: 1735584856826 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.67.1-h0a426d6_1.conda - sha256: 630edf63981818ff590367cb95fddbed0f5a390464d0952c90ec81de899e84a6 - md5: 8a3cba079d6ac985e7d73c76a678fbb4 - depends: - - __osx >=11.0 - - c-ares >=1.34.4,<2.0a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcxx >=18 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libre2-11 >=2024.7.2 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - re2 - constrains: - - grpc-cpp =1.67.1 - license: Apache-2.0 - license_family: APACHE - size: 5311706 - timestamp: 1735585137716 -- conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.67.1-h0ac93cb_1.conda - sha256: 4bf4b455fc8c56ac84001d394f93465c0cd42e78d8053a7c99668bba681b0973 - md5: d41dfb3f07ea2f3687e9a2d7db31c506 - depends: - - c-ares >=1.34.4,<2.0a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libre2-11 >=2024.7.2 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - re2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - grpc-cpp =1.67.1 - license: Apache-2.0 - license_family: APACHE - size: 17282979 - timestamp: 1735632501670 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libheif-1.19.5-gpl_hc21c24c_100.conda - sha256: d814dd9203d5ba2f38b4682f53ac02ddd17578324d715a101d29c057610c6545 - md5: 3b57852666eaacc13414ac811dde3f8a - depends: - - __glibc >=2.17,<3.0.a0 - - aom >=3.9.1,<3.10.0a0 - - dav1d >=1.2.1,<1.2.2.0a0 - - libavif16 >=1.1.1,<2.0a0 - - libde265 >=1.0.15,<1.0.16.0a0 - - libgcc >=13 - - libstdcxx >=13 - - x265 >=3.5,<3.6.0a0 - license: LGPL-3.0-or-later - license_family: LGPL - size: 588609 - timestamp: 1735260140647 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libheif-1.19.5-gpl_h297b2c4_100.conda - sha256: f340e8e51519bcf885da9dd12602f19f76f3206347701accb28034dd0112b1a1 - md5: 5e457131dd237050dbfe6b141592f3ea - depends: - - __osx >=11.0 - - aom >=3.9.1,<3.10.0a0 - - dav1d >=1.2.1,<1.2.2.0a0 - - libavif16 >=1.1.1,<2.0a0 - - libcxx >=18 - - libde265 >=1.0.15,<1.0.16.0a0 - - x265 >=3.5,<3.6.0a0 - license: LGPL-3.0-or-later - license_family: LGPL - size: 429678 - timestamp: 1735260330340 -- conda: https://conda.anaconda.org/conda-forge/win-64/libheif-1.19.5-gpl_hc631cee_100.conda - sha256: c0ee7fbbf78e66388146348ba78a206eeadf59602d9ca10ecaf64e019cd70cd3 - md5: 8c77ee62663e5e4bbb60b86ba54fdbeb - depends: - - aom >=3.9.1,<3.10.0a0 - - dav1d >=1.2.1,<1.2.2.0a0 - - libavif16 >=1.1.1,<2.0a0 - - libde265 >=1.0.15,<1.0.16.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - 
x265 >=3.5,<3.6.0a0 - license: LGPL-3.0-or-later - license_family: LGPL - size: 388187 - timestamp: 1735260582529 -- conda: https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.11.2-default_ha69328c_1001.conda - sha256: 850e255997f538d5fb6ed651321141155a33bb781d43d326fc4ff62114dd2842 - md5: b87a0ac5ab6495d8225db5dc72dd21cd - depends: - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - libxml2 >=2.13.4,<3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 2390021 - timestamp: 1731375651179 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h4ce23a2_1.conda - sha256: 18a4afe14f731bfb9cf388659994263904d20111e42f841e9eea1bb6f91f4ab4 - md5: e796ff8ddc598affdf7c173d6145f087 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: LGPL-2.1-only - size: 713084 - timestamp: 1740128065462 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-hfe07756_1.conda - sha256: d30780d24bf3a30b4f116fca74dedb4199b34d500fe6c52cced5f8cc1e926f03 - md5: 450e6bdc0c7d986acf7b8443dce87111 - depends: - - __osx >=11.0 - license: LGPL-2.1-only - size: 681804 - timestamp: 1740128227484 -- conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.18-h135ad9c_1.conda - sha256: ea5ed2b362b6dbc4ba7188eb4eaf576146e3dfc6f4395e9f0db76ad77465f786 - md5: 21fc5dba2cbcd8e5e26ff976a312122c - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: LGPL-2.1-only - size: 638142 - timestamp: 1740128665984 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.23.1-h493aca8_0.conda - sha256: 30d2a8a37070615a61777ce9317968b54c2197d04e9c6c2eea6cdb46e47f94dc - md5: 7b8faf3b5fc52744bda99c4cd1d6438d - depends: - - __osx >=11.0 - - libiconv >=1.17,<2.0a0 - license: LGPL-2.1-or-later - size: 78921 - timestamp: 1739039271409 -- conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda - sha256: c7e4600f28bcada8ea81456a6530c2329312519efcf0c886030ada38976b0511 - md5: 2cf0cf76cc15d360dfa2f17fd6cf9772 - depends: - - libiconv >=1.17,<2.0a0 - license: LGPL-2.1-or-later - size: 95568 - timestamp: 1723629479451 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - sha256: b954e09b7e49c2f2433d6f3bb73868eda5e378278b0f8c1dd10a7ef090e14f2f - md5: ea25936bb4080d843790b586850f82b8 - depends: - - libgcc-ng >=12 - constrains: - - jpeg <0.0.0a - license: IJG AND BSD-3-Clause AND Zlib - size: 618575 - timestamp: 1694474974816 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.0.0-hb547adb_1.conda - sha256: a42054eaa38e84fc1e5ab443facac4bbc9d1b6b6f23f54b7bf4f1eb687e1d993 - md5: 3ff1e053dc3a2b8e36b9bfa4256a58d1 - constrains: - - jpeg <0.0.0a - license: IJG AND BSD-3-Clause AND Zlib - size: 547541 - timestamp: 1694475104253 -- conda: https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-3.0.0-hcfcfb64_1.conda - sha256: 4e7808e3098b4b4ed7e287f63bb24f9045cc4d95bfd39f0db870fc2837d74dff - md5: 3f1b948619c45b1ca714d60c7389092c - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - jpeg <0.0.0a - license: IJG AND BSD-3-Clause AND Zlib - size: 822966 - timestamp: 1694475223854 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - sha256: 721c3916d41e052ffd8b60e77f2da6ee47ff0d18babfca48ccf93606f1e0656a - md5: e8c7620cc49de0c6a2349b6dd6e39beb - depends: - - __glibc >=2.17,<3.0.a0 - - libexpat >=2.6.2,<3.0a0 - - 
libgcc-ng >=13 - - libstdcxx-ng >=13 - - libzlib >=1.3.1,<2.0a0 - - uriparser >=0.9.8,<1.0a0 - license: BSD-3-Clause - license_family: BSD - size: 402219 - timestamp: 1724667059411 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libkml-1.3.0-he250239_1021.conda - sha256: e578ba448489465b8fea743e214272a9fcfccb0d152ba1ff57657aaa76a0cd7d - md5: 891bb2a18eaef684f37bd4fb942cd8b2 - depends: - - __osx >=11.0 - - libcxx >=17 - - libexpat >=2.6.2,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - uriparser >=0.9.8,<1.0a0 - license: BSD-3-Clause - license_family: BSD - size: 281362 - timestamp: 1724667138089 -- conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - sha256: 81a6096a2db500f0c3527ae59398eacca0634c3381559713ab28022d711dd3bd - md5: 431ec3b40b041576811641e2d643954e - depends: - - libexpat >=2.6.2,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - uriparser >=0.9.8,<1.0a0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 1651104 - timestamp: 1724667610262 -- conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-31_h7ac8fdf_openblas.conda - build_number: 31 - sha256: f583661921456e798aba10972a8abbd9d33571c655c1f66eff450edc9cbefcf3 - md5: 452b98eafe050ecff932f0ec832dd03f - depends: - - libblas 3.9.0 31_h59b9bed_openblas - constrains: - - libcblas =3.9.0=31*_openblas - - liblapacke =3.9.0=31*_openblas - - blas =2.131=openblas - license: BSD-3-Clause - size: 16790 - timestamp: 1740087997375 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-31_hc9a63f6_openblas.conda - build_number: 31 - sha256: fe55b9aaf82c6c0192c3d1fcc9b8e884f97492dda9a8de5dae29334b3135fab5 - md5: ff57a55a2cbce171ef5707fb463caf19 - depends: - - libblas 3.9.0 31_h10e41b3_openblas - constrains: - - liblapacke =3.9.0=31*_openblas - - libcblas =3.9.0=31*_openblas - - blas =2.131=openblas - license: BSD-3-Clause - size: 17033 - timestamp: 1740088134988 -- conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-31_h1aa476e_mkl.conda - build_number: 31 - sha256: 9415e807aa6f8968322bbd756aab8f487379d809c74266d37c697b8d85c534ad - md5: 40b47ee720a185289760960fc6185750 - depends: - - libblas 3.9.0 31_h641d27c_mkl - constrains: - - libcblas =3.9.0=31*_mkl - - blas =2.131=mkl - - liblapacke =3.9.0=31*_mkl - license: BSD-3-Clause - size: 3732648 - timestamp: 1740088548986 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-ha7bfdaf_5.conda - sha256: 7dfa43a79a35debdff93328f9acc3b0ad859929dc7e761160ecbd93275e64e6f - md5: f55d1108d59fa85e6a1ded9c70766bd8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 33233890 - timestamp: 1739680079644 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libllvm15-15.0.7-h4429f82_5.conda - sha256: e2806042e60b1a92747298ea30007f50443e879881886c743d2ade30a1bd7da4 - md5: e81ccd3b5e036152fe9b7be87282201b - depends: - - __osx >=11.0 - - libcxx >=18 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 22216441 - timestamp: 1739672571591 -- conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.4-hb9d3cd8_0.conda - sha256: cad52e10319ca4585bc37f0bc7cce99ec7c15dc9168e42ccb96b741b0a27db3f - md5: 42d5b6a0f30d3c10cd88cb8584fda1cb - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc 
>=13 - license: 0BSD - size: 111357 - timestamp: 1738525339684 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.6.4-h39f12f2_0.conda - sha256: 560c59d3834cc652a84fb45531bd335ad06e271b34ebc216e380a89798fe8e2c - md5: e3fd1f8320a100f2b210e690a57cd615 - depends: - - __osx >=11.0 - license: 0BSD - size: 98945 - timestamp: 1738525462560 -- conda: https://conda.anaconda.org/conda-forge/win-64/liblzma-5.6.4-h2466b09_0.conda - sha256: 3f552b0bdefdd1459ffc827ea3bf70a6a6920c7879d22b6bfd0d73015b55227b - md5: c48f6ad0ef0a555b27b233dfcab46a90 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: 0BSD - size: 104465 - timestamp: 1738525557254 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda - sha256: 6c61842c8d8f885019f52a2f989d197b6bf33c030b030226e665f01ca0fa3f71 - md5: f51573abc223afed7e5374f34135ce05 - depends: - - __glibc >=2.17,<3.0.a0 - - blosc >=1.21.6,<2.0a0 - - bzip2 >=1.0.8,<2.0a0 - - hdf4 >=4.2.15,<4.2.16.0a0 - - hdf5 >=1.14.4,<1.14.5.0a0 - - libaec >=1.1.3,<2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libgcc >=13 - - libstdcxx >=13 - - libxml2 >=2.13.5,<3.0a0 - - libzip >=1.11.2,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - zlib - - zstd >=1.5.6,<1.6.0a0 - license: MIT - license_family: MIT - size: 832800 - timestamp: 1733232193218 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnetcdf-4.9.2-nompi_h6569565_116.conda - sha256: 09d0194d8639e1f061f1a11d809a61030abcf335feefb10a10e65e43812a1205 - md5: 6257f1136b1285acf5c3b171249fdf52 - depends: - - __osx >=11.0 - - blosc >=1.21.6,<2.0a0 - - bzip2 >=1.0.8,<2.0a0 - - hdf4 >=4.2.15,<4.2.16.0a0 - - hdf5 >=1.14.4,<1.14.5.0a0 - - libaec >=1.1.3,<2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libcxx >=18 - - libxml2 >=2.13.5,<3.0a0 - - libzip >=1.11.2,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - zlib - - zstd >=1.5.6,<1.6.0a0 - license: MIT - license_family: MIT - size: 685178 - timestamp: 1733232329857 -- conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h5bdc103_116.conda - sha256: fa0591430e03ac302782dec8261bc85e1bb1e374f47e2bbbcd23469680d8e5f2 - md5: a95ec17163d3e07bc0bf3f5ca9c86fde - depends: - - blosc >=1.21.6,<2.0a0 - - bzip2 >=1.0.8,<2.0a0 - - hdf4 >=4.2.15,<4.2.16.0a0 - - hdf5 >=1.14.4,<1.14.5.0a0 - - libaec >=1.1.3,<2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzip >=1.11.2,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zlib - - zstd >=1.5.6,<1.6.0a0 - license: MIT - license_family: MIT - size: 625802 - timestamp: 1733232741492 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda - sha256: b0f2b3695b13a989f75d8fd7f4778e1c7aabe3b36db83f0fe80b2cd812c0e975 - md5: 19e57602824042dfd0446292ef90488b - depends: - - __glibc >=2.17,<3.0.a0 - - c-ares >=1.32.3,<2.0a0 - - libev >=4.33,<4.34.0a0 - - libev >=4.33,<5.0a0 - - libgcc >=13 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - license: MIT - license_family: MIT - size: 647599 - timestamp: 1729571887612 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.64.0-h6d7220d_0.conda - sha256: 00cc685824f39f51be5233b54e19f45abd60de5d8847f1a56906f8936648b72f - md5: 3408c02539cee5f1141f9f11450b6a51 - depends: - - __osx >=11.0 - - c-ares >=1.34.2,<2.0a0 - - libcxx >=17 - - libev >=4.33,<4.34.0a0 - - libev >=4.33,<5.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - 
license: MIT - license_family: MIT - size: 566719 - timestamp: 1729572385640 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 - md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 - depends: - - libgcc-ng >=12 - license: LGPL-2.1-only - license_family: GPL - size: 33408 - timestamp: 1697359010159 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.29-pthreads_h94d23a6_0.conda - sha256: cc5389ea254f111ef17a53df75e8e5209ef2ea6117e3f8aced88b5a8e51f11c4 - md5: 0a4d0252248ef9a0f88f2ba8b8a08e12 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libgfortran - - libgfortran5 >=14.2.0 - constrains: - - openblas >=0.3.29,<0.3.30.0a0 - license: BSD-3-Clause - license_family: BSD - size: 5919288 - timestamp: 1739825731827 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.29-openmp_hf332438_0.conda - sha256: 8989d9e01ec8c9b2d48dbb5efbe70b356fcd15990fb53b64fcb84798982c0343 - md5: 0cd1148c68f09027ee0b0f0179f77c30 - depends: - - __osx >=11.0 - - libgfortran 5.* - - libgfortran5 >=13.2.0 - - llvm-openmp >=18.1.8 - constrains: - - openblas >=0.3.29,<0.3.30.0a0 - license: BSD-3-Clause - license_family: BSD - size: 4168442 - timestamp: 1739825514918 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.18.0-hfcad708_1.conda - sha256: 4ea235e08676f16b0d3c3380befe1478c0fa0141512ee709b011005c55c9619f - md5: 1f5a5d66e77a39dc5bd639ec953705cf - depends: - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcurl >=8.11.1,<9.0a0 - - libgrpc >=1.67.1,<1.68.0a0 - - libopentelemetry-cpp-headers 1.18.0 ha770c72_1 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libzlib >=1.3.1,<2.0a0 - - nlohmann_json - - prometheus-cpp >=1.3.0,<1.4.0a0 - constrains: - - cpp-opentelemetry-sdk =1.18.0 - license: Apache-2.0 - license_family: APACHE - size: 801927 - timestamp: 1735643375271 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.18.0-h0c05b2d_1.conda - sha256: c6bcbd53d62a9e0d8c667e560db0ca2ecb7679277cbb3c23457aabe74fcb8cba - md5: 19c46cc18825f3924251c39ec1b0d983 - depends: - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcurl >=8.11.1,<9.0a0 - - libgrpc >=1.67.1,<1.68.0a0 - - libopentelemetry-cpp-headers 1.18.0 hce30654_1 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libzlib >=1.3.1,<2.0a0 - - nlohmann_json - - prometheus-cpp >=1.3.0,<1.4.0a0 - constrains: - - cpp-opentelemetry-sdk =1.18.0 - license: Apache-2.0 - license_family: APACHE - size: 529588 - timestamp: 1735643889612 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-headers-1.18.0-ha770c72_1.conda - sha256: aa1f7dea79ea8513ff77339ba7c6e9cf10dfa537143e7718b1cfb3af52b649f2 - md5: 4fb055f57404920a43b147031471e03b - license: Apache-2.0 - license_family: APACHE - size: 320359 - timestamp: 1735643346175 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.18.0-hce30654_1.conda - sha256: 82e5f5ba64debbaab3c601b265dfc0cdb4d2880feba9bada5fd2e67b9f91ada5 - md5: e965dad955841507549fdacd8f7f94c0 - license: Apache-2.0 - license_family: APACHE - size: 320565 - timestamp: 1735643673319 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-19.0.1-h081d1f1_0_cpu.conda - sha256: e9c4a07e79886963bfcd05894a15b5d4c7137c1122273de68845315c35d6505d - md5: 8b58c378d65b213c001f04a174a2a70e - depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 19.0.1 hfa2a6e7_0_cpu - - libgcc >=13 - - 
libstdcxx >=13 - - libthrift >=0.21.0,<0.21.1.0a0 - - openssl >=3.4.1,<4.0a0 - license: Apache-2.0 - license_family: APACHE - size: 1244749 - timestamp: 1739769006551 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-19.0.1-h636d7b7_0_cpu.conda - sha256: 54e4a18493d63b7fbd5cf39fadabe665bcf462121a7bc2f394f510b0bcf22031 - md5: 0cce19e6981849babe6c73797abbfa4e - depends: - - __osx >=11.0 - - libarrow 19.0.1 h0945df6_0_cpu - - libcxx >=18 - - libthrift >=0.21.0,<0.21.1.0a0 - - openssl >=3.4.1,<4.0a0 - license: Apache-2.0 - license_family: APACHE - size: 895659 - timestamp: 1739768176454 -- conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-19.0.1-ha850022_0_cpu.conda - sha256: 2c38d3e90d7f087c8e5a8361d1e4557264ecd60e98f7aa982d45563c63aa2304 - md5: f74c0e448b71c8f4bc0c8e8fd7fc7a43 - depends: - - libarrow 19.0.1 h8dcb746_0_cpu - - libthrift >=0.21.0,<0.21.1.0a0 - - openssl >=3.4.1,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.42.34433 - license: Apache-2.0 - license_family: APACHE - size: 824659 - timestamp: 1739771094165 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.47-h943b412_0.conda - sha256: 23367d71da58c9a61c8cbd963fcffb92768d4ae5ffbef9a47cdf1f54f98c5c36 - md5: 55199e2ae2c3651f6f9b2a447b47bdc9 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libzlib >=1.3.1,<2.0a0 - license: zlib-acknowledgement - size: 288701 - timestamp: 1739952993639 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libpng-1.6.47-h3783ad8_0.conda - sha256: dc93cc30f59b28e7812c6f14d2c2e590b509c38092cce7ababe6b23541b7ed8f - md5: 3550e05e3af94a3fa9cef2694417ccdf - depends: - - __osx >=11.0 - - libzlib >=1.3.1,<2.0a0 - license: zlib-acknowledgement - size: 259332 - timestamp: 1739953032676 -- conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.47-had7236b_0.conda - sha256: cf8a594b697de103025dcae2c917ec9c100609caf7c917a94c64a683cb1db1ac - md5: 7d717163d9dab337c65f2bf21a676b8f - depends: - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: zlib-acknowledgement - size: 346101 - timestamp: 1739953426806 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda - sha256: 51125ebb8b7152e4a4e69fd2398489c4ec8473195c27cde3cbdf1cb6d18c5493 - md5: d8703f1ffe5a06356f06467f1d0b9464 - depends: - - __glibc >=2.17,<3.0.a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libgcc >=13 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - license: BSD-3-Clause - license_family: BSD - size: 2960815 - timestamp: 1735577210663 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-5.28.3-h3bd63a1_1.conda - sha256: f58a16b13ad53346903c833e266f83c3d770a43a432659b98710aed85ca885e7 - md5: bdbfea4cf45ae36652c6bbcc2e7ebe91 - depends: - - __osx >=11.0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcxx >=18 - - libzlib >=1.3.1,<2.0a0 - license: BSD-3-Clause - license_family: BSD - size: 2271580 - timestamp: 1735576361997 -- conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.28.3-h8309712_1.conda - sha256: 78c1b917d50c0317579bd9a5714a6d544d69786fd3228a4201dc4e8710ef6348 - md5: 3be9f2fb7dce19d66d5cf1003a34b0e1 - depends: - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 6172959 - timestamp: 1735577517299 -- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_2.conda - sha256: 4420f8362c71251892ba1eeb957c5e445e4e1596c0c651c28d0d8b415fe120c7 - md5: b2fede24428726dd867611664fb372e8 - depends: - - __glibc >=2.17,<3.0.a0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libgcc >=13 - - libstdcxx >=13 - constrains: - - re2 2024.07.02.* - license: BSD-3-Clause - license_family: BSD - size: 209793 - timestamp: 1735541054068 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2024.07.02-h07bc746_2.conda - sha256: 112a73ad483353751d4c5d63648c69a4d6fcebf5e1b698a860a3f5124fc3db96 - md5: 6b1e3624d3488016ca4f1ca0c412efaa - depends: - - __osx >=11.0 - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - libcxx >=18 - constrains: - - re2 2024.07.02.* - license: BSD-3-Clause - license_family: BSD - size: 167155 - timestamp: 1735541067807 -- conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2024.07.02-h4eb7d71_2.conda - sha256: f5bcc036ea1946444dc3adc772dfb045ff9e6d3486e924133ad7d018de651738 - md5: 67612b1af5350b6dcf289db63ec3e685 - depends: - - libabseil * cxx17* - - libabseil >=20240722.0,<20240723.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - re2 2024.07.02.* - license: BSD-3-Clause - license_family: BSD - size: 260655 - timestamp: 1735541391655 -- conda: https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda - sha256: 475013475a3209c24a82f9e80c545d56ccca2fa04df85952852f3d73caa38ff9 - md5: b9846db0abffb09847e2cb0fec4b4db6 - depends: - - __glibc >=2.17,<3.0.a0 - - cairo >=1.18.2,<2.0a0 - - freetype >=2.12.1,<3.0a0 - - gdk-pixbuf >=2.42.12,<3.0a0 - - harfbuzz >=10.1.0,<11.0a0 - - libgcc >=13 - - libglib >=2.82.2,<3.0a0 - - libpng >=1.6.44,<1.7.0a0 - - libxml2 >=2.13.5,<3.0a0 - - pango >=1.54.0,<2.0a0 - constrains: - - __glibc >=2.17 - license: LGPL-2.1-or-later - size: 6342757 - timestamp: 1734902068235 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/librsvg-2.58.4-h266df6f_2.conda - sha256: c1ef2c5855166001967952d7525aa2f29707214495c74c2bbb60e691aee45ef0 - md5: 82c31ce77bac095b5700b1fdaad9a628 - depends: - - __osx >=11.0 - - cairo >=1.18.2,<2.0a0 - - gdk-pixbuf >=2.42.12,<3.0a0 - - libglib >=2.82.2,<3.0a0 - - libxml2 >=2.13.5,<3.0a0 - - pango >=1.54.0,<2.0a0 - constrains: - - __osx >=11.0 - license: LGPL-2.1-or-later - size: 4728552 - timestamp: 1734903448902 -- conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda - sha256: 1fb8a71bdbc236b8e74f0475887786735d5fa6f5d76d9a4135021279c7ff54b8 - md5: e16e9b1333385c502bf915195f421934 - depends: - - __glibc >=2.17,<3.0.a0 - - geos >=3.13.0,<3.13.1.0a0 - - libgcc >=13 - - libstdcxx >=13 - license: GPL-2.0-or-later - license_family: GPL - size: 231770 - timestamp: 1727338518657 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/librttopo-1.1.0-ha2cf0f4_17.conda - sha256: 9ff3162d035a1d9022f6145755a70d0c0ce6c9152792402bc42294354c871a17 - md5: ba729f000ea379b76ed2190119d21e13 - depends: - - __osx >=11.0 - - geos >=3.13.0,<3.13.1.0a0 - - libcxx >=17 - license: GPL-2.0-or-later - license_family: GPL - size: 191064 - timestamp: 1727265842691 -- conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda - sha256: 0f4a1c8ed579f96ccb73245b4002d7152a2a8ecd05a01d49901c5d280561f766 - md5: 06ea16b8c60b4ce1970c06191f8639d4 - depends: - - geos >=3.13.0,<3.13.1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - 
license: GPL-2.0-or-later - license_family: GPL - size: 404515 - timestamp: 1727265928370 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - sha256: 0105bd108f19ea8e6a78d2d994a6d4a8db16d19a41212070d2d1d48a63c34161 - md5: a587892d3c13b6621a6091be690dbca2 - depends: - - libgcc-ng >=12 - license: ISC - size: 205978 - timestamp: 1716828628198 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsodium-1.0.20-h99b78c6_0.conda - sha256: fade8223e1e1004367d7101dd17261003b60aa576df6d7802191f8972f7470b1 - md5: a7ce36e284c5faaf93c220dfc39e3abd - depends: - - __osx >=11.0 - license: ISC - size: 164972 - timestamp: 1716828607917 -- conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - sha256: 7bcb3edccea30f711b6be9601e083ecf4f435b9407d70fc48fbcf9e5d69a0fc6 - md5: 198bb594f202b205c7d18b936fa4524f - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: ISC - size: 202344 - timestamp: 1716828757533 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_12.conda - sha256: a9274b30ecc8967fa87959c1978de3b2bfae081b1a8fea7c5a61588041de818f - md5: 641f91ac6f984a91a78ba2411fe4f106 - depends: - - __glibc >=2.17,<3.0.a0 - - freexl >=2 - - freexl >=2.0.0,<3.0a0 - - geos >=3.13.0,<3.13.1.0a0 - - libgcc >=13 - - librttopo >=1.1.0,<1.2.0a0 - - libsqlite >=3.47.2,<4.0a0 - - libstdcxx >=13 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - proj >=9.5.1,<9.6.0a0 - - sqlite - - zlib - license: MPL-1.1 - license_family: MOZILLA - size: 4033736 - timestamp: 1734001047320 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libspatialite-5.1.0-hf92fc0a_12.conda - sha256: b11e6169fdbef472c307129192fd46133eec543036e41ab2f957615713b03d19 - md5: f05759528e44f74888830119ab32fc81 - depends: - - __osx >=11.0 - - freexl >=2 - - freexl >=2.0.0,<3.0a0 - - geos >=3.13.0,<3.13.1.0a0 - - libcxx >=18 - - libiconv >=1.17,<2.0a0 - - librttopo >=1.1.0,<1.2.0a0 - - libsqlite >=3.47.2,<4.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - proj >=9.5.1,<9.6.0a0 - - sqlite - - zlib - license: MPL-1.1 - license_family: MOZILLA - size: 2943606 - timestamp: 1734001158789 -- conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_12.conda - sha256: fafedc5940e49b3dcce2cd6dfe3cabf64e7cc6b2a0ef7c8fefbf9d6d2c1afb77 - md5: 8b5bfc6caa7c652ec4ec755efb5b7b73 - depends: - - freexl >=2 - - freexl >=2.0.0,<3.0a0 - - geos >=3.13.0,<3.13.1.0a0 - - librttopo >=1.1.0,<1.2.0a0 - - libsqlite >=3.47.2,<4.0a0 - - libxml2 >=2.13.5,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - proj >=9.5.1,<9.6.0a0 - - sqlite - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zlib - license: MPL-1.1 - license_family: MOZILLA - size: 8715367 - timestamp: 1734001064515 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.49.1-hee588c1_1.conda - sha256: 7a09eef804ef7cf4d88215c2297eabb72af8ad0bd5b012060111c289f14bbe7d - md5: 73cea06049cc4174578b432320a003b8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libzlib >=1.3.1,<2.0a0 - license: Unlicense - size: 915956 - timestamp: 1739953155793 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.49.1-h3f77e49_1.conda - sha256: 266639fb10ca92287961574b0b4d6031fa40dd9d723d64a0fcb08513a24dab03 - md5: c83357a21092bd952933c36c5cb4f4d6 - depends: - - __osx >=11.0 - - libzlib >=1.3.1,<2.0a0 - license: Unlicense - size: 898767 - timestamp: 1739953312379 -- conda: 
https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.49.1-h67fdade_1.conda - sha256: 08669790e4de89201079e93e8a8d8c51a3cd57a19dd559bb0d5bc6c9a7970b99 - md5: 88931435901c1f13d4e3a472c24965aa - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Unlicense - size: 1081190 - timestamp: 1739953491995 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda - sha256: 0407ac9fda2bb67e11e357066eff144c845801d00b5f664efbc48813af1e7bb9 - md5: be2de152d8073ef1c01b7728475f2fe7 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 304278 - timestamp: 1732349402869 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h9cc3647_0.conda - sha256: f7047c6ed44bcaeb04432e8c74da87591940d091b0a3940c0d884b7faa8062e9 - md5: ddc7194676c285513706e5fc64f214d7 - depends: - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 279028 - timestamp: 1732349599461 -- conda: https://conda.anaconda.org/conda-forge/win-64/libssh2-1.11.1-he619c9f_0.conda - sha256: 4b3256bd2b4e4b3183005d3bd8826d651eccd1a4740b70625afa2b7e7123d191 - md5: af0cbf037dd614c34399b3b3e568c557 - depends: - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 291889 - timestamp: 1732349796504 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda - sha256: 4661af0eb9bdcbb5fb33e5d0023b001ad4be828fccdcc56500059d56f9869462 - md5: 234a5554c53625688d51062645337328 - depends: - - libgcc 14.2.0 h77fa898_1 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 3893695 - timestamp: 1729027746910 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda - sha256: 25bb30b827d4f6d6f0522cc0579e431695503822f144043b93c50237017fffd8 - md5: 8371ac6457591af2cf6159439c1fd051 - depends: - - libstdcxx 14.2.0 hc0a3c3a_1 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 54105 - timestamp: 1729027780628 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda - sha256: ebb395232973c18745b86c9a399a4725b2c39293c9a91b8e59251be013db42f0 - md5: dcb95c0a98ba9ff737f7ae482aef7833 - depends: - - __glibc >=2.17,<3.0.a0 - - libevent >=2.1.12,<2.1.13.0a0 - - libgcc >=13 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - license: Apache-2.0 - license_family: APACHE - size: 425773 - timestamp: 1727205853307 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.21.0-h64651cc_0.conda - sha256: 7a6c7d5f58cbbc2ccd6493b4b821639fdb0701b9b04c737a949e8cb6adf1c9ad - md5: 7ce2bd2f650f8c31ad7ba4c7bfea61b7 - depends: - - __osx >=11.0 - - libcxx >=17 - - libevent >=2.1.12,<2.1.13.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - license: Apache-2.0 - license_family: APACHE - size: 324342 - timestamp: 1727206096912 -- conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.21.0-hbe90ef8_0.conda - sha256: 81ca4873ba09055c307f8777fb7d967b5c26291f38095785ae52caed75946488 - md5: 7699570e1f97de7001a7107aabf2d677 - depends: - - libevent >=2.1.12,<2.1.13.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - 
license_family: APACHE - size: 633857 - timestamp: 1727206429954 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda - sha256: b224e16b88d76ea95e4af56e2bc638c603bd26a770b98d117d04541d3aafa002 - md5: 0ea6510969e1296cc19966fad481f6de - depends: - - __glibc >=2.17,<3.0.a0 - - lerc >=4.0.0,<5.0a0 - - libdeflate >=1.23,<1.24.0a0 - - libgcc >=13 - - libjpeg-turbo >=3.0.0,<4.0a0 - - liblzma >=5.6.3,<6.0a0 - - libstdcxx >=13 - - libwebp-base >=1.4.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: HPND - size: 428173 - timestamp: 1734398813264 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.0-h551f018_3.conda - sha256: 91417846157e04992801438a496b151df89604b2e7c6775d6f701fcd0cbed5ae - md5: a5d084a957563e614ec0c0196d890654 - depends: - - __osx >=11.0 - - lerc >=4.0.0,<5.0a0 - - libcxx >=18 - - libdeflate >=1.23,<1.24.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - liblzma >=5.6.3,<6.0a0 - - libwebp-base >=1.4.0,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: HPND - size: 370600 - timestamp: 1734398863052 -- conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.7.0-h797046b_3.conda - sha256: c363a8baba4ce12b8f01f0ab74fe8b0dc83facd89c6604f4a191084923682768 - md5: defed79ff7a9164ad40320e3f116a138 - depends: - - lerc >=4.0.0,<5.0a0 - - libdeflate >=1.23,<1.24.0a0 - - libjpeg-turbo >=3.0.0,<4.0a0 - - liblzma >=5.6.3,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zstd >=1.5.6,<1.6.0a0 - license: HPND - size: 978878 - timestamp: 1734399004259 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.10.0-h4c51ac1_0.conda - sha256: 8e41563ee963bf8ded06da45f4e70bf42f913cb3c2e79364eb3218deffa3cd74 - md5: aeccfff2806ae38430638ffbb4be9610 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 82745 - timestamp: 1737244366901 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.10.0-hda25de7_0.conda - sha256: aca3ef31d3dff5cefd3790742a5ee6548f1cf0201d0e8cee08b01da503484eb6 - md5: 5f741aed1d8d393586a5fdcaaa87f45c - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 83628 - timestamp: 1737244450097 -- conda: https://conda.anaconda.org/conda-forge/win-64/libutf8proc-2.10.0-hf9b99b7_0.conda - sha256: 43cbec5355e78be500ec14322a59a6b9aac05fb72aea739356549a7637dd02a4 - md5: a4685a23eaf9ffb3eb6506102f5360b8 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 85371 - timestamp: 1737244781933 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 - md5: 40b61aab5c7ba9ff276c41cfffe6b80b - depends: - - libgcc-ng >=12 - license: BSD-3-Clause - license_family: BSD - size: 33601 - timestamp: 1680112270483 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda - sha256: c45283fd3e90df5f0bd3dbcd31f59cdd2b001d424cf30a07223655413b158eaf - md5: 63f790534398730f59e1b899c3644d4a - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - constrains: - - libwebp 1.5.0 - license: BSD-3-Clause - license_family: BSD - size: 429973 - timestamp: 1734777489810 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libwebp-base-1.5.0-h2471fea_0.conda - sha256: f8bdb876b4bc8cb5df47c28af29188de8911c3fea4b799a33743500149de3f4a - md5: 
569466afeb84f90d5bb88c11cc23d746 - depends: - - __osx >=11.0 - constrains: - - libwebp 1.5.0 - license: BSD-3-Clause - license_family: BSD - size: 290013 - timestamp: 1734777593617 -- conda: https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.5.0-h3b0e114_0.conda - sha256: 1d75274614e83a5750b8b94f7bad2fc0564c2312ff407e697d99152ed095576f - md5: 33f7313967072c6e6d8f865f5493c7ae - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - libwebp 1.5.0 - license: BSD-3-Clause - license_family: BSD - size: 273661 - timestamp: 1734777665516 -- conda: https://conda.anaconda.org/conda-forge/win-64/libwinpthread-12.0.0.r4.gg4f2fc60ca-h57928b3_9.conda - sha256: 373f2973b8a358528b22be5e8d84322c165b4c5577d24d94fd67ad1bb0a0f261 - md5: 08bfa5da6e242025304b206d152479ef - depends: - - ucrt - constrains: - - pthreads-win32 <0.0a0 - - msys2-conda-epoch <0.0a0 - license: MIT AND BSD-3-Clause-Clear - size: 35794 - timestamp: 1737099561703 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - sha256: 666c0c431b23c6cec6e492840b176dde533d48b7e6fb8883f5071223433776aa - md5: 92ed62436b625154323d40d5f2f11dd7 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - pthread-stubs - - xorg-libxau >=1.0.11,<2.0a0 - - xorg-libxdmcp - license: MIT - license_family: MIT - size: 395888 - timestamp: 1727278577118 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxcb-1.17.0-hdb1d25a_0.conda - sha256: bd3816218924b1e43b275863e21a3e13a5db4a6da74cca8e60bc3c213eb62f71 - md5: af523aae2eca6dfa1c8eec693f5b9a79 - depends: - - __osx >=11.0 - - pthread-stubs - - xorg-libxau >=1.0.11,<2.0a0 - - xorg-libxdmcp - license: MIT - license_family: MIT - size: 323658 - timestamp: 1727278733917 -- conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.17.0-h0e4246c_0.conda - sha256: 08dec73df0e161c96765468847298a420933a36bc4f09b50e062df8793290737 - md5: a69bbf778a462da324489976c84cfc8c - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - pthread-stubs - - ucrt >=10.0.20348.0 - - xorg-libxau >=1.0.11,<2.0a0 - - xorg-libxdmcp - license: MIT - license_family: MIT - size: 1208687 - timestamp: 1727279378819 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c - md5: 5aa797f8787fe7a17d1b0821485b5adc - depends: - - libgcc-ng >=12 - license: LGPL-2.1-or-later - size: 100393 - timestamp: 1702724383534 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.8.0-hc4a0caf_0.conda - sha256: 583203155abcfb03938d8473afbf129156b5b30301a0f796c8ecca8c5b7b2ed2 - md5: f1656760dbf05f47f962bfdc59fc3416 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - libxcb >=1.17.0,<2.0a0 - - libxml2 >=2.13.5,<3.0a0 - - xkeyboard-config - - xorg-libxau >=1.0.12,<2.0a0 - license: MIT/X11 Derivative - license_family: MIT - size: 642349 - timestamp: 1738735301999 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.6-h8d12d68_0.conda - sha256: db8af71ea9c0ae95b7cb4a0f59319522ed2243942437a1200ceb391493018d85 - md5: 328382c0e0ca648e5c189d5ec336c604 - depends: - - __glibc >=2.17,<3.0.a0 - - icu >=75.1,<76.0a0 - - libgcc >=13 - - libiconv >=1.18,<2.0a0 - - liblzma >=5.6.4,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - license: MIT - license_family: MIT - size: 690296 - timestamp: 1739952967309 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.6-h178c5d8_0.conda - sha256: 
1d2ebce1a16db1017e3892a67cb7ced4aa2858f549dba6852a60d02a4925c205 - md5: 277864577d514bea4b30f8a9335b8d26 - depends: - - __osx >=11.0 - - icu >=75.1,<76.0a0 - - libiconv >=1.18,<2.0a0 - - liblzma >=5.6.4,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - license: MIT - license_family: MIT - size: 583389 - timestamp: 1739953062282 -- conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.13.6-he286e8c_0.conda - sha256: 2919f4e9fffefbf3ff6ecd8ebe81584d573c069b2b82eaeed797b1f56ac8d97b - md5: c66d5bece33033a9c028bbdf1e627ec5 - depends: - - libiconv >=1.18,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 1669569 - timestamp: 1739953461426 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda - sha256: 991e7348b0f650d495fb6d8aa9f8c727bdf52dabf5853c0cc671439b160dce48 - md5: a7b27c075c9b7f459f1c022090697cba - depends: - - __glibc >=2.17,<3.0.a0 - - bzip2 >=1.0.8,<2.0a0 - - libgcc >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 109043 - timestamp: 1730442108429 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzip-1.11.2-h1336266_0.conda - sha256: 507599a77c1ce823c2d3acaefaae4ead0686f183f3980467a4c4b8ba209eff40 - md5: 7177414f275db66735a17d316b0a81d6 - depends: - - __osx >=11.0 - - bzip2 >=1.0.8,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 125507 - timestamp: 1730442214849 -- conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.11.2-h3135430_0.conda - sha256: 8ed49d8aa0ff908e16c82f92154174027c8906429e8b63d71f0b27ecc987b43e - md5: 09066edc7810e4bd1b41ad01a6cc4706 - depends: - - bzip2 >=1.0.8,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 146856 - timestamp: 1730442305774 -- conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 - md5: edb0dca6bc32e4f4789199455a1dbeb8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - constrains: - - zlib 1.3.1 *_2 - license: Zlib - license_family: Other - size: 60963 - timestamp: 1727963148474 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - sha256: ce34669eadaba351cd54910743e6a2261b67009624dbc7daeeafdef93616711b - md5: 369964e85dc26bfe78f41399b366c435 - depends: - - __osx >=11.0 - constrains: - - zlib 1.3.1 *_2 - license: Zlib - license_family: Other - size: 46438 - timestamp: 1727963202283 -- conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda - sha256: ba945c6493449bed0e6e29883c4943817f7c79cbff52b83360f7b341277c6402 - md5: 41fbfac52c601159df6c01f875de31b9 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - zlib 1.3.1 *_2 - license: Zlib - license_family: Other - size: 55476 - timestamp: 1727963768015 -- conda: https://conda.anaconda.org/conda-forge/noarch/linkify-it-py-2.0.3-pyhd8ed1ab_1.conda - sha256: d975a2015803d4fdaaae3f53e21f64996577d7a069eb61c6d2792504f16eb57b - md5: b02fe519b5dc0dc55e7299810fcdfb8e - depends: - - python >=3.9 - - uc-micro-py - license: MIT - license_family: MIT - size: 24154 - timestamp: 1733781296133 -- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-19.1.7-hdb05f8b_0.conda - sha256: b92a669f2059874ebdcb69041b6c243d68ffc3fb356ac1339cec44aeb27245d7 - md5: c4d54bfd3817313ce758aa76283b118d - depends: - - __osx >=11.0 - constrains: - - openmp 19.1.7|19.1.7.* - license: Apache-2.0 WITH LLVM-exception - license_family: APACHE - size: 280830 - timestamp: 1736986295869 -- conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.44.0-py312h374181b_0.conda - sha256: c05668c8099cd398c4fca015f0189187dd24f5b6763caf85cda299fde0092e5b - md5: 4fec2cf2f40c75c0993964bb7a4c8424 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libllvm15 >=15.0.7,<15.1.0a0 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-2-Clause - license_family: BSD - size: 4031831 - timestamp: 1738108426043 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.44.0-py312h728bc31_0.conda - sha256: 9eb98299e5a7c71128930dd3e152572d2aeba1935f0a638af50e00a2416000b3 - md5: 4ead86be7c51a3dc8e76f2b059bacd86 - depends: - - __osx >=11.0 - - libcxx >=18 - - libllvm15 >=15.0.7,<15.1.0a0 - - libzlib >=1.3.1,<2.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-2-Clause - license_family: BSD - size: 409102 - timestamp: 1738108909555 -- conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.44.0-py312h1f7db74_0.conda - sha256: 94afd860e51d6b4f1780f431d6502da0644ffa5d74d3205faf0d4a4d97ff990f - md5: c84b19c4d5ebe38ae5c63511c411b1f8 - depends: - - libzlib >=1.3.1,<2.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - vs2015_runtime - license: BSD-2-Clause - license_family: BSD - size: 18104073 - timestamp: 1738108864193 -- conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - sha256: 9afe0b5cfa418e8bdb30d8917c5a6cec10372b037924916f1f85b9f4899a67a6 - md5: 91e27ef3d05cc772ce627e51cff111c4 - depends: - - python >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* - license: BSD-2-Clause - license_family: BSD - size: 8250 - timestamp: 1650660473123 -- conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hf0f0c11_2.conda - sha256: 3fa0195a2f3d1fbdd51929154790422b92977c16ade49d325b3053ba93e2d108 - md5: 9a7fd2a97c20b2a078a39e739bae746a - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - lz4-c >=1.10.0,<1.11.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 39147 - timestamp: 1733474350790 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.3.3-py312hf263c89_2.conda - sha256: 5115e04714db96ed793ba04c71d0f63a69311ef9d1b64031e6d77018bf9068af - md5: eb031555cb970d3547b75e55016052cc - depends: - - __osx >=11.0 - - lz4-c >=1.10.0,<1.11.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-3-Clause - license_family: BSD - size: 105517 - timestamp: 1733474478647 -- conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h032eceb_2.conda - sha256: 6a82f7491b0b25696b719daab0dc5d56fb89b2a199e5872b81c022465fb7dbc3 - md5: 7872436e250ac3c1147cfc90c1e70a54 - depends: - - lz4-c >=1.10.0,<1.11.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 42400 - timestamp: 1733474775746 -- 
conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda - sha256: 47326f811392a5fd3055f0f773036c392d26fdb32e4d8e7a8197eed951489346 - md5: 9de5350a85c4a20c685259b889aa6393 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - license: BSD-2-Clause - license_family: BSD - size: 167055 - timestamp: 1733741040117 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda - sha256: 94d3e2a485dab8bdfdd4837880bde3dd0d701e2b97d6134b8806b7c8e69c8652 - md5: 01511afc6cc1909c5303cf31be17b44f - depends: - - __osx >=11.0 - - libcxx >=18 - license: BSD-2-Clause - license_family: BSD - size: 148824 - timestamp: 1733741047892 -- conda: https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.10.0-h2466b09_1.conda - sha256: 632cf3bdaf7a7aeb846de310b6044d90917728c73c77f138f08aa9438fc4d6b5 - md5: 0b69331897a92fac3d8923549d48d092 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 139891 - timestamp: 1733741168264 -- conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - sha256: 88433b98a9dd9da315400e7fb9cd5f70804cb17dca8b1c85163a64f90f584126 - md5: ec7398d21e2651e0dcb0044d03b9a339 - depends: - - libgcc-ng >=12 - license: GPL-2.0-or-later - license_family: GPL2 - size: 171416 - timestamp: 1713515738503 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lzo-2.10-h93a5062_1001.conda - sha256: b68160b0a8ec374cea12de7afb954ca47419cdc300358232e19cec666d60b929 - md5: 915996063a7380c652f83609e970c2a7 - license: GPL-2.0-or-later - license_family: GPL2 - size: 131447 - timestamp: 1713516009610 -- conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - sha256: 39e176b8cc8fe878d87594fae0504c649d1c2c6d5476dd7238237d19eb825751 - md5: 629f4f4e874cf096eb93a23240910cee - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: GPL-2.0-or-later - license_family: GPL2 - size: 142771 - timestamp: 1713516312465 -- conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - sha256: fce1fde00359696983989699c00f9891194c4ebafea647a8d21b7e2e3329b56e - md5: 06e9bebf748a0dea03ecbe1f0e27e909 - depends: - - importlib-metadata >=4.4 - - python >=3.6 - license: BSD-3-Clause - license_family: BSD - size: 78331 - timestamp: 1710435316163 -- conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda - sha256: 0fbacdfb31e55964152b24d5567e9a9996e1e7902fb08eb7d91b5fd6ce60803a - md5: fee3164ac23dfca50cfcc8b85ddefb81 - depends: - - mdurl >=0.1,<1 - - python >=3.9 - license: MIT - license_family: MIT - size: 64430 - timestamp: 1733250550053 -- conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda - sha256: 4a6bf68d2a2b669fecc9a4a009abd1cf8e72c2289522ff00d81b5a6e51ae78f5 - md5: eb227c3e0bf58f5bd69c0532b157975b - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - constrains: - - jinja2 >=3.0.0 - license: BSD-3-Clause - license_family: BSD - size: 24604 - timestamp: 1733219911494 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312h998013c_1.conda - sha256: 4aa997b244014d3707eeef54ab0ee497d12c0d0d184018960cce096169758283 - md5: 46e547061080fddf9cf95a0327e8aba6 - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - constrains: - - 
jinja2 >=3.0.0 - license: BSD-3-Clause - license_family: BSD - size: 24048 - timestamp: 1733219945697 -- conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_1.conda - sha256: bbb9595fe72231a8fbc8909cfa479af93741ecd2d28dfe37f8f205fef5df2217 - md5: 944fdd848abfbd6929e57c790b8174dd - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - jinja2 >=3.0.0 - license: BSD-3-Clause - license_family: BSD - size: 27582 - timestamp: 1733220007802 -- conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda - sha256: eed67ea988883a3c05160c6d02f34f5a4b6405713cf699d9117eb68fb4743017 - md5: c27a17a8c54c0d35cf83bbc0de8f7f77 - depends: - - __glibc >=2.17,<3.0.a0 - - contourpy >=1.0.1 - - cycler >=0.10 - - fonttools >=4.22.0 - - freetype >=2.12.1,<3.0a0 - - kiwisolver >=1.3.1 - - libgcc >=13 - - libstdcxx >=13 - - numpy >=1.19,<3 - - numpy >=1.23 - - packaging >=20.0 - - pillow >=8 - - pyparsing >=2.3.1 - - python >=3.12,<3.13.0a0 - - python-dateutil >=2.7 - - python_abi 3.12.* *_cp312 - - qhull >=2020.2,<2020.3.0a0 - - tk >=8.6.13,<8.7.0a0 - license: PSF-2.0 - license_family: PSF - size: 8210655 - timestamp: 1734380560683 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.0-py312hdbc7e53_0.conda - sha256: 8e53e3e3a7c81aed357b92e5dc0be0199a0081a2ce9cc726f5afba946ed77796 - md5: af50086982d6939b23d2656c21172be0 - depends: - - __osx >=11.0 - - contourpy >=1.0.1 - - cycler >=0.10 - - fonttools >=4.22.0 - - freetype >=2.12.1,<3.0a0 - - kiwisolver >=1.3.1 - - libcxx >=18 - - numpy >=1.19,<3 - - numpy >=1.23 - - packaging >=20.0 - - pillow >=8 - - pyparsing >=2.3.1 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python-dateutil >=2.7 - - python_abi 3.12.* *_cp312 - - qhull >=2020.2,<2020.3.0a0 - license: PSF-2.0 - license_family: PSF - size: 8019543 - timestamp: 1734380918722 -- conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.10.0-py312h90004f6_0.conda - sha256: d2bd259bde388ead1ff6505932592a0f0e49a6bd1b1f186e32fde094d8ed8ef2 - md5: e777aaaf4593e5cb2735f0e1b87b63bc - depends: - - contourpy >=1.0.1 - - cycler >=0.10 - - fonttools >=4.22.0 - - freetype >=2.12.1,<3.0a0 - - kiwisolver >=1.3.1 - - numpy >=1.19,<3 - - numpy >=1.23 - - packaging >=20.0 - - pillow >=8 - - pyparsing >=2.3.1 - - python >=3.12,<3.13.0a0 - - python-dateutil >=2.7 - - python_abi 3.12.* *_cp312 - - qhull >=2020.2,<2020.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: PSF-2.0 - license_family: PSF - size: 8012369 - timestamp: 1734381419845 -- conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda - sha256: 69b7dc7131703d3d60da9b0faa6dd8acbf6f6c396224cf6aef3e855b8c0c41c6 - md5: af6ab708897df59bd6e7283ceab1b56b - depends: - - python >=3.9 - - traitlets - license: BSD-3-Clause - license_family: BSD - size: 14467 - timestamp: 1733417051523 -- conda: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_1.conda - sha256: c63ed79d9745109c0a70397713b0c07f06e7d3561abcb122cfc80a141ab3b449 - md5: af2060041d4f3250a7eb6ab3ec0e549b - depends: - - markdown-it-py >=1.0.0,<4.0.0 - - python >=3.9 - license: MIT - license_family: MIT - size: 42180 - timestamp: 1733854816517 -- conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda - sha256: 
78c1bbe1723449c52b7a9df1af2ee5f005209f67e40b6e1d3c7619127c43b1c7 - md5: 592132998493b3ff25fd7479396e8351 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 14465 - timestamp: 1733255681319 -- conda: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_3.conda - sha256: 9a9459024e9cdc68c799b057de021b8c652de542e24e9e48f2726578e822659c - md5: eec77634ccdb2ba6c231290c399b1dae - depends: - - __glibc >=2.17,<3.0.a0 - - bzip2 >=1.0.8,<2.0a0 - - libgcc >=13 - - libiconv >=1.17,<2.0a0 - - liblzma >=5.6.3,<6.0a0 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: Zlib - license_family: Other - size: 92332 - timestamp: 1734012081442 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/minizip-4.0.7-hff1a8ea_3.conda - sha256: 6d904a6fc5e875e687b9fab244d5b286961222d72f546f9939d8f80ebe873c1c - md5: 666bd61287ad7ee417884eacd9aef2ea - depends: - - __osx >=11.0 - - bzip2 >=1.0.8,<2.0a0 - - libcxx >=18 - - libiconv >=1.17,<2.0a0 - - liblzma >=5.6.3,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - - openssl >=3.4.0,<4.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: Zlib - license_family: Other - size: 77597 - timestamp: 1734012196026 -- conda: https://conda.anaconda.org/conda-forge/win-64/minizip-4.0.7-h9fa1bad_3.conda - sha256: 16f329eac4551fe343f77a0c84cae5f9e68a0fb43a641e6ea2d8553053c3fa2e - md5: 632caee448c60ca5f85bf0748ed24401 - depends: - - bzip2 >=1.0.8,<2.0a0 - - liblzma >=5.6.3,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zstd >=1.5.6,<1.6.0a0 - license: Zlib - license_family: Other - size: 85799 - timestamp: 1734012307818 -- conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.2-pyhd8ed1ab_0.conda - sha256: 63d5308ac732b2f8130702c83ee40ce31c5451ebcb6e70075b771cc8f7df0156 - md5: 0982b0f06168fe3421d09f70596ca1f0 - depends: - - python >=3.9 - - typing_extensions - license: BSD-3-Clause - license_family: BSD - size: 68903 - timestamp: 1739952304731 -- conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.2.2-h66d3029_15.conda - sha256: 20e52b0389586d0b914a49cd286c5ccc9c47949bed60ca6df004d1d295f2edbd - md5: 302dff2807f2927b3e9e0d19d60121de - depends: - - intel-openmp 2024.* - - tbb 2021.* - license: LicenseRef-IntelSimplifiedSoftwareOct2022 - license_family: Proprietary - size: 103106385 - timestamp: 1730232843711 -- conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda - sha256: 4bc53333774dea1330643b7e23aa34fd6880275737fc2e07491795872d3af8dd - md5: 5c9b020a3f86799cdc6115e55df06146 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 105271 - timestamp: 1725975182669 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.0-py312h6142ec9_0.conda - sha256: 2b8c22f8a4e0031c2d6fa81d32814c8afdaf7e7fe2e681bf2369a35ff3eab1fd - md5: 0dfc3750cc6bbc463d72c0b727e60d8a - depends: - - __osx >=11.0 - - libcxx >=17 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 90793 - timestamp: 1725975279147 -- conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - sha256: 3fd45d9c0830e931e34990cb90e88ba53cc7f89fce69fc7d1a8289639d363e85 - md5: ff4f1e63a6438a06d1ab259936e5c2ac - depends: - - python >=3.12,<3.13.0a0 - - 
python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 88169 - timestamp: 1725975418157 -- conda: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_2.conda - sha256: b05bc8252a6e957bf4a776ed5e0e61d1ba88cdc46ccb55890c72cc58b10371f4 - md5: 5b5e3267d915a107eca793d52e1b780a - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: APACHE - size: 61507 - timestamp: 1733913288935 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/multidict-6.1.0-py312hdb8e49c_1.conda - sha256: 482fd09fb798090dc8cce2285fa69f43b1459099122eac2fb112d9b922b9f916 - md5: 0048335516fed938e4dd2c457b4c5b9b - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: APACHE - size: 55968 - timestamp: 1729065664275 -- conda: https://conda.anaconda.org/conda-forge/win-64/multidict-6.1.0-py312h31fea79_1.conda - sha256: 374050b314f35b7d869b7f085284a8ac3de2030f5b26e4992845e3f881626846 - md5: f5489605efd8bf8a850383d146f00d84 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: APACHE - size: 56283 - timestamp: 1729066082188 -- conda: https://conda.anaconda.org/conda-forge/noarch/multipledispatch-0.6.0-pyhd8ed1ab_1.conda - sha256: c6216a21154373b340c64f321f22fec51db4ee6156c2e642fa58368103ac5d09 - md5: 121a57fce7fff0857ec70fa03200962f - depends: - - python >=3.6 - - six - license: BSD-3-Clause - license_family: BSD - size: 17254 - timestamp: 1721907640382 -- conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - sha256: f86fb22b58e93d04b6f25e0d811b56797689d598788b59dcb47f59045b568306 - md5: 2ba8498c1018c1e9c61eb99b973dfe19 - depends: - - python - license: Apache-2.0 - license_family: Apache - size: 12452 - timestamp: 1600387789153 -- conda: https://conda.anaconda.org/conda-forge/noarch/myst-nb-1.2.0-pyh29332c3_0.conda - sha256: de3e58d54126fdb667a55921675693fb8eee23757fd3be6116f6565cae710279 - md5: 4f63865e1bb08e05476fa136a2dfe2ac - depends: - - importlib-metadata - - ipykernel - - ipython - - jupyter-cache >=0.5 - - myst-parser >=1.0.0 - - nbclient - - nbformat >=5.0 - - python >=3.9 - - pyyaml - - sphinx >=5 - - typing_extensions - - python - license: BSD-3-Clause - license_family: BSD - size: 66384 - timestamp: 1739024493029 -- conda: https://conda.anaconda.org/conda-forge/noarch/myst-parser-2.0.0-pyhd8ed1ab_0.conda - sha256: 59cdc52d9875f623a4df82896d80f304e436138f8410cbef969a7e4452c6bab7 - md5: 70699181909e468875f12076e1b0a8a9 - depends: - - docutils >=0.16,<0.21 - - jinja2 - - markdown-it-py >=3.0.0,<4.0.0 - - mdit-py-plugins >=0.4,<1 - - python >=3.8 - - pyyaml - - sphinx >=6,<8 - license: MIT - license_family: MIT - size: 67063 - timestamp: 1686686421092 -- conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda - sha256: a20cff739d66c2f89f413e4ba4c6f6b59c50d5c30b5f0d840c13e8c9c2df9135 - md5: 6bb0d77277061742744176ab555b723c - depends: - - jupyter_client >=6.1.12 - - jupyter_core >=4.12,!=5.0.* - - nbformat >=5.1 - - python >=3.8 - - traitlets >=5.4 - license: BSD-3-Clause - license_family: BSD - size: 28045 - timestamp: 1734628936013 -- conda: 
https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda - sha256: dcccb07c5a1acb7dc8be94330e62d54754c0e9c9cb2bb6865c8e3cfe44cf5a58 - md5: d24beda1d30748afcc87c429454ece1b - depends: - - beautifulsoup4 - - bleach-with-css !=5.0.0 - - defusedxml - - importlib-metadata >=3.6 - - jinja2 >=3.0 - - jupyter_core >=4.7 - - jupyterlab_pygments - - markupsafe >=2.0 - - mistune >=2.0.3,<4 - - nbclient >=0.5.0 - - nbformat >=5.7 - - packaging - - pandocfilters >=1.4.1 - - pygments >=2.4.1 - - python >=3.9 - - traitlets >=5.1 - - python - constrains: - - pandoc >=2.9.2,<4.0.0 - - nbconvert ==7.16.6 *_0 - license: BSD-3-Clause - license_family: BSD - size: 200601 - timestamp: 1738067871724 -- conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda - sha256: 7a5bd30a2e7ddd7b85031a5e2e14f290898098dc85bea5b3a5bf147c25122838 - md5: bbe1963f1e47f594070ffe87cdf612ea - depends: - - jsonschema >=2.6 - - jupyter_core >=4.12,!=5.0.* - - python >=3.9 - - python-fastjsonschema >=2.15 - - traitlets >=5.1 - license: BSD-3-Clause - license_family: BSD - size: 100945 - timestamp: 1733402844974 -- conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda - sha256: 3fde293232fa3fca98635e1167de6b7c7fda83caf24b9d6c91ec9eefb4f4d586 - md5: 47e340acb35de30501a76c7c799c41d7 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: X11 AND BSD-3-Clause - size: 891641 - timestamp: 1738195959188 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda - sha256: 2827ada40e8d9ca69a153a45f7fd14f32b2ead7045d3bbb5d10964898fe65733 - md5: 068d497125e4bf8a66bf707254fff5ae - depends: - - __osx >=11.0 - license: X11 AND BSD-3-Clause - size: 797030 - timestamp: 1738196177597 -- conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda - sha256: bb7b21d7fd0445ddc0631f64e66d91a179de4ba920b8381f29b9d006a42788c0 - md5: 598fd7d4d0de2455fb74f56063969a97 - depends: - - python >=3.9 - license: BSD-2-Clause - license_family: BSD - size: 11543 - timestamp: 1733325673691 -- conda: https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda - sha256: 5a6ce5997c155abfc1941de7d2fba207bd3f269987d847937caa51799625d5c3 - md5: 7e41ca6012a6bf609539aec0dfee93f7 - depends: - - __glibc >=2.17,<3.0.a0 - - certifi - - cftime - - hdf5 >=1.14.4,<1.14.5.0a0 - - libgcc >=13 - - libnetcdf >=4.9.2,<4.9.3.0a0 - - libzlib >=1.3.1,<2.0a0 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 1151656 - timestamp: 1733253250333 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/netcdf4-1.7.2-nompi_py312haae1a11_101.conda - sha256: 48882b5f465c88239ce7d1d5cbbbbda5470d1e90992f5493fa32215af3155cb9 - md5: 3dd4c6f36cf1da9280026e1c16fd2725 - depends: - - __osx >=11.0 - - certifi - - cftime - - hdf5 >=1.14.4,<1.14.5.0a0 - - libnetcdf >=4.9.2,<4.9.3.0a0 - - libzlib >=1.3.1,<2.0a0 - - numpy >=1.19,<3 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 1038072 - timestamp: 1733255332022 -- conda: https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.7.2-nompi_py312h57e6fe7_101.conda - sha256: 0f0319713cab0c5c94e2ad1d0b83fe8c2cca60e1a2eccad7ae6541262b2d7e90 - md5: 4f71bb0116e80e682056eaf4c0bd1cec - depends: - - certifi - - cftime - - hdf5 >=1.14.4,<1.14.5.0a0 - - libnetcdf >=4.9.2,<4.9.3.0a0 - - libzlib >=1.3.1,<2.0a0 - - numpy 
>=1.19,<3 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 998261 - timestamp: 1733256360287 -- conda: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda - sha256: ce4bcced4f8eea71b7cac8bc3daac097abf7a5792f278cd811dedada199500c1 - md5: e46f7ac4917215b49df2ea09a694a3fa - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: MIT - license_family: MIT - size: 122743 - timestamp: 1723652407663 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/nlohmann_json-3.11.3-h00cdb27_1.conda - sha256: 3f4e6a4fa074bb297855f8111ab974dab6d9f98b7d4317d4dd46f8687ee2363b - md5: d2dee849c806430eee64d3acc98ce090 - depends: - - __osx >=11.0 - - libcxx >=16 - license: MIT - license_family: MIT - size: 123250 - timestamp: 1723652704997 -- conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda - sha256: 3636eec0e60466a00069b47ce94b6d88b01419b6577d8e393da44bb5bc8d3468 - md5: 7ba3f09fceae6a120d664217e58fe686 - depends: - - python >=3.9 - - setuptools - license: BSD-3-Clause - license_family: BSD - size: 34574 - timestamp: 1734112236147 -- conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda - sha256: 7b920e46b9f7a2d2aa6434222e5c8d739021dbc5cc75f32d124a8191d86f9056 - md5: e7f89ea5f7ea9401642758ff50a2d9c1 - depends: - - jupyter_server >=1.8,<3 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 16817 - timestamp: 1733408419340 -- conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.61.0-py312h2e6246c_1.conda - sha256: 1ebd4f29d7ffa7aa8320a16caee7e6722b719daf4819c08cdb30c8c636f005b9 - md5: f65d300639d0d9d2777cd4cb10440eab - depends: - - __glibc >=2.17,<3.0.a0 - - _openmp_mutex >=4.5 - - libgcc >=13 - - libstdcxx >=13 - - llvmlite >=0.44.0,<0.45.0a0 - - numpy >=1.19,<3 - - numpy >=1.24,<2.2 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - constrains: - - libopenblas !=0.3.6 - - cuda-version >=11.2 - - cuda-python >=11.6 - - scipy >=1.0 - - tbb >=2021.6.0 - - cudatoolkit >=11.2 - license: BSD-2-Clause - license_family: BSD - size: 5811114 - timestamp: 1739224921661 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.61.0-py312hdf12f13_1.conda - sha256: ad53ce23810a69901ffca9492c1bafcecac53256a8064209be6f8de6153ee966 - md5: c71f93305452b543043100f7dc71f9ac - depends: - - __osx >=11.0 - - libcxx >=18 - - llvm-openmp >=18.1.8 - - llvm-openmp >=19.1.7 - - llvmlite >=0.44.0,<0.45.0a0 - - numpy >=1.19,<3 - - numpy >=1.24,<2.2 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - constrains: - - scipy >=1.0 - - cuda-version >=11.2 - - tbb >=2021.6.0 - - cudatoolkit >=11.2 - - libopenblas >=0.3.18, !=0.3.20 - - cuda-python >=11.6 - license: BSD-2-Clause - license_family: BSD - size: 5827517 - timestamp: 1739225028923 -- conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.61.0-py312hcccf92d_1.conda - sha256: 09efe54f11c3022ec875316a7d31efa5cf2d9abbf452790e088abb2c7d8b6e8b - md5: 1859be3163feedb04c9602cded099296 - depends: - - llvmlite >=0.44.0,<0.45.0a0 - - numpy >=1.19,<3 - - numpy >=1.24,<2.2 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - libopenblas !=0.3.6 - - cudatoolkit >=11.2 - - tbb >=2021.6.0 - - cuda-version >=11.2 - - cuda-python >=11.6 - - scipy 
>=1.0 - license: BSD-2-Clause - license_family: BSD - size: 5790829 - timestamp: 1739225202263 -- conda: https://conda.anaconda.org/conda-forge/noarch/numbagg-0.9.0-pyhd8ed1ab_0.conda - sha256: 44db759dfff3fe13d2570eccf664380d3776ed7cab6b86067ada20f52b752cd6 - md5: 902b209784862ad1d8843861659108ad - depends: - - numba - - numpy - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 45956 - timestamp: 1739061750868 -- conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.15.1-py312hf9745cd_0.conda - sha256: 209a84599e36db68865dce5618c3328a2d57267d339255204815885b220a20f2 - md5: 8a1f88d4985ee1c16b0db1af39a8554d - depends: - - __glibc >=2.17,<3.0.a0 - - deprecated - - libgcc >=13 - - libstdcxx >=13 - - msgpack-python - - numpy >=1.19,<3 - - numpy >=1.24 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 848654 - timestamp: 1739285119780 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/numcodecs-0.15.1-py312hcb1e3ce_0.conda - sha256: 86a69d03bb12f4c4600941e07210c883adbd4e19b5401b3f5e233b68447741cd - md5: c83fe9947503b337f3e5d993de6bcaea - depends: - - __osx >=11.0 - - deprecated - - libcxx >=18 - - msgpack-python - - numpy >=1.19,<3 - - numpy >=1.24 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 690518 - timestamp: 1739285377173 -- conda: https://conda.anaconda.org/conda-forge/win-64/numcodecs-0.15.1-py312h72972c8_0.conda - sha256: ce01a82077b12bffd6c3e5281f02bc6a690a8e0e3750c44e3c624c68f6a70d9e - md5: bba8bf88b520170565f2f51e99926683 - depends: - - deprecated - - msgpack-python - - numpy >=1.19,<3 - - numpy >=1.24 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 546655 - timestamp: 1739285369282 -- conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.3-py312h58c1407_0.conda - sha256: e4c14f71588a5627a6935d3e7d9ca78a8387229ec8ebc91616b0988ce57ba0dc - md5: dfdbc12e6d81889ba4c494a23f23eba8 - depends: - - __glibc >=2.17,<3.0.a0 - - libblas >=3.9.0,<4.0a0 - - libcblas >=3.9.0,<4.0a0 - - libgcc >=13 - - liblapack >=3.9.0,<4.0a0 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - constrains: - - numpy-base <0a0 - license: BSD-3-Clause - license_family: BSD - size: 8388631 - timestamp: 1730588649810 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.1.3-py312h94ee1e1_0.conda - sha256: cd287b6c270ee8af77d200c46d56fdfe1e2a9deeff68044439718b8d073214dd - md5: a2af54c86582e08718805c69af737897 - depends: - - __osx >=11.0 - - libblas >=3.9.0,<4.0a0 - - libcblas >=3.9.0,<4.0a0 - - libcxx >=18 - - liblapack >=3.9.0,<4.0a0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - constrains: - - numpy-base <0a0 - license: BSD-3-Clause - license_family: BSD - size: 6398123 - timestamp: 1730588490904 -- conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.1.3-py312h49bc9c5_0.conda - sha256: f7e6648e2e55de450c8022008eb86158c55786f360aacc91fe3a5a53ba52d5d8 - md5: 4d03cad3ea6c6cc575f1fd811691432f - depends: - - libblas >=3.9.0,<4.0a0 - - libcblas >=3.9.0,<4.0a0 - - liblapack >=3.9.0,<4.0a0 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - numpy-base <0a0 - license: BSD-3-Clause - license_family: BSD - size: 
6965471 - timestamp: 1730589010831 -- conda: https://conda.anaconda.org/conda-forge/noarch/numpy_groupies-0.11.2-pyhd8ed1ab_1.conda - sha256: bc453d60a0eff86f500a0c114fe3996543731b019e5998e664347d2ab52ee880 - md5: 7ec5afe3dc4c585abd49bb40edc96428 - depends: - - numpy - - python >=3.9 - license: BSD-2-Clause - license_family: BSD - size: 37633 - timestamp: 1734512747767 -- conda: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_1.conda - sha256: bec65607d36759e85aab2331ff7f056cb32be0bca92ee2b955aea3306330bd1b - md5: bf5f2c90d503d43a8c45cedf766b4b8e - depends: - - blinker - - cryptography - - pyjwt >=1.0.0 - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 97604 - timestamp: 1733752957557 -- conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda - sha256: 5bee706ea5ba453ed7fd9da7da8380dd88b865c8d30b5aaec14d2b6dd32dbc39 - md5: 9e5816bc95d285c115a3ebc2f8563564 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libpng >=1.6.44,<1.7.0a0 - - libstdcxx >=13 - - libtiff >=4.7.0,<4.8.0a0 - - libzlib >=1.3.1,<2.0a0 - license: BSD-2-Clause - license_family: BSD - size: 342988 - timestamp: 1733816638720 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.3-h8a3d83b_0.conda - sha256: 1d59bc72ca7faac06d349c1a280f5cfb8a57ee5896f1e24225a997189d7418c7 - md5: 4b71d78648dbcf68ce8bf22bb07ff838 - depends: - - __osx >=11.0 - - libcxx >=18 - - libpng >=1.6.44,<1.7.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libzlib >=1.3.1,<2.0a0 - license: BSD-2-Clause - license_family: BSD - size: 319362 - timestamp: 1733816781741 -- conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.3-h4d64b90_0.conda - sha256: 410175815df192f57a07c29a6b3fdd4231937173face9e63f0830c1234272ce3 - md5: fc050366dd0b8313eb797ed1ffef3a29 - depends: - - libpng >=1.6.44,<1.7.0a0 - - libtiff >=4.7.0,<4.8.0a0 - - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 240148 - timestamp: 1733817010335 -- conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.1-h7b32b05_0.conda - sha256: cbf62df3c79a5c2d113247ddea5658e9ff3697b6e741c210656e239ecaf1768f - md5: 41adf927e746dc75ecf0ef841c454e48 - depends: - - __glibc >=2.17,<3.0.a0 - - ca-certificates - - libgcc >=13 - license: Apache-2.0 - license_family: Apache - size: 2939306 - timestamp: 1739301879343 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.1-h81ee809_0.conda - sha256: 4f8e2389e1b711b44182a075516d02c80fa7a3a7e25a71ff1b5ace9eae57a17a - md5: 75f9f0c7b1740017e2db83a53ab9a28e - depends: - - __osx >=11.0 - - ca-certificates - license: Apache-2.0 - license_family: Apache - size: 2934522 - timestamp: 1739301896733 -- conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.4.1-ha4e3fda_0.conda - sha256: 56dcc2b4430bfc1724e32661c34b71ae33a23a14149866fc5645361cfd3b3a6a - md5: 0730f8094f7088592594f9bf3ae62b3f - depends: - - ca-certificates - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 8515197 - timestamp: 1739304103653 -- conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-h12ee42a_2.conda - sha256: dff5cc8023905782c86b3459055f26d4b97890e403b0698477c9fed15d8669cc - md5: 4f6f9f3f80354ad185e276c120eac3f0 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libstdcxx >=13 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - 
snappy >=1.2.1,<1.3.0a0 - - tzdata - - zstd >=1.5.6,<1.6.0a0 - license: Apache-2.0 - license_family: Apache - size: 1188881 - timestamp: 1735630209320 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.0.3-h0ff2369_2.conda - sha256: cca330695f3bdb8c0e46350c29cd4af3345865544e36f1d7c9ba9190ad22f5f4 - md5: 24b1897c0d24afbb70704ba998793b78 - depends: - - __osx >=11.0 - - libcxx >=18 - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - snappy >=1.2.1,<1.3.0a0 - - tzdata - - zstd >=1.5.6,<1.6.0a0 - license: Apache-2.0 - license_family: Apache - size: 438520 - timestamp: 1735630624140 -- conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.3-haf104fe_2.conda - sha256: 35522ebcdd10f9d8600cbffa99efd59053bf2148965cfbb4575680e61c1d41dd - md5: c8abacd8bdb242c9ba9c9a6c7ec09b01 - depends: - - libprotobuf >=5.28.3,<5.28.4.0a0 - - libzlib >=1.3.1,<2.0a0 - - lz4-c >=1.10.0,<1.11.0a0 - - snappy >=1.2.1,<1.3.0a0 - - tzdata - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zstd >=1.5.6,<1.6.0a0 - license: Apache-2.0 - license_family: Apache - size: 902551 - timestamp: 1735630416110 -- conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda - sha256: 1840bd90d25d4930d60f57b4f38d4e0ae3f5b8db2819638709c36098c6ba770c - md5: e51f1e4089cad105b6cac64bd8166587 - depends: - - python >=3.9 - - typing_utils - license: Apache-2.0 - license_family: APACHE - size: 30139 - timestamp: 1734587755455 -- conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - sha256: da157b19bcd398b9804c5c52fc000fcb8ab0525bdb9c70f95beaa0bb42f85af1 - md5: 3bfed7e6228ebf2f7b9eaa47f1b4e2aa - depends: - - python >=3.8 - license: Apache-2.0 - license_family: APACHE - size: 60164 - timestamp: 1733203368787 -- conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda - sha256: ad275a83bfebfa8a8fee9b0569aaf6f513ada6a246b2f5d5b85903d8ca61887e - md5: 8bce4f6caaf8c5448c7ac86d87e26b4b - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - numpy >=1.19,<3 - - numpy >=1.22.4 - - python >=3.12,<3.13.0a0 - - python-dateutil >=2.8.1 - - python-tzdata >=2022a - - python_abi 3.12.* *_cp312 - - pytz >=2020.1,<2024.2 - license: BSD-3-Clause - license_family: BSD - size: 15436913 - timestamp: 1726879054912 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.2.3-py312hcd31e36_1.conda - sha256: ff0cb54b5d058c7987b4a0984066e893642d1865a7bb695294b6172e2fcdc457 - md5: c68bfa69e6086c381c74e16fd72613a8 - depends: - - __osx >=11.0 - - libcxx >=17 - - numpy >=1.19,<3 - - numpy >=1.22.4 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python-dateutil >=2.8.1 - - python-tzdata >=2022a - - python_abi 3.12.* *_cp312 - - pytz >=2020.1,<2024.2 - license: BSD-3-Clause - license_family: BSD - size: 14470437 - timestamp: 1726878887799 -- conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.3-py312h72972c8_1.conda - sha256: dfd30e665b1ced1b783ca303799e250d8acc40943bcefb3a9b2bb13c3b17911c - md5: bf6f01c03e0688523d4b5cff8fe8c977 - depends: - - numpy >=1.19,<3 - - numpy >=1.22.4 - - python >=3.12,<3.13.0a0 - - python-dateutil >=2.8.1 - - python-tzdata >=2022a - - python_abi 3.12.* *_cp312 - - pytz >=2020.1,<2024.2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 14218658 - timestamp: 1726879426348 -- conda: 
[conda lock file hunk: platform-resolved package records (noarch, linux-64, osx-arm64, win-64), each giving the conda-forge URL, sha256, md5, depends, license, size, and timestamp. Entries in this span: pandocfilters 1.5.0, panel 1.6.1, pango 1.56.1, param 2.2.0, parso 0.8.4, partd 1.4.2, pcre2 10.44, pexpect 4.9.0, pickleshare 0.7.5, pillow 11.1.0, pint 0.24.4, pint-xarray 0.4, pixman 0.44.2, pkgutil-resolve-name 1.3.10, platformdirs 4.3.6, pooch 1.8.2, pre-commit 4.1.0, proj 9.5.1, prometheus-cpp 1.3.0, prometheus_client 0.21.1, prompt-toolkit 3.0.50, propcache 0.2.1, proto-plus 1.26.0, protobuf 5.28.3, psutil 5.9.8, pthread-stubs 0.4, ptyprocess 0.7.0, pure_eval 0.2.3, pyarrow 19.0.1, pyarrow-core 19.0.1, pyasn1 0.6.1, pyasn1-modules 0.4.1, pybtex 0.24.0, pybtex-docutils 1.0.3, pycparser 2.22, pyct 0.5.0, pydap 3.5.3, pydata-sphinx-theme 0.15.4, pygments 2.19.1, pyjwt 2.10.1, pyobjc-core 11.0, pyobjc-framework-cocoa 11.0, pyopenssl 25.0.0, pyparsing 3.2.1, pyproj 3.7.1, pyshp 2.3.1, pysocks 1.7.1, python 3.12.9, python-dateutil 2.9.0.post0, python-fastjsonschema 2.21.1, python-graphviz 0.20.3, python-json-logger 2.0.7, python-tzdata 2025.1, python_abi 3.12, pytz 2024.1, pyu2f 0.1.5, pyviz_comms 3.0.4, pywin32 307, pywinpty 2.0.15, pyyaml 6.0.2, pyzmq 26.2.1, qhull 2020.2, rasterio 1.4.3, rav1e 0.6.6, re2 2024.07.02, readline 8.2, referencing 0.36.2, requests 2.32.3, requests-oauthlib 2.0.0, rfc3339-validator 0.1.4, rfc3986-validator 0.1.1, rioxarray 0.18.2, rpds-py 0.22.3, rsa 4.9, s2n 1.5.11, s3fs 2025.2.0, scipy 1.15.2, send2trash 1.8.3, setuptools 75.8.0, shapely 2.0.7, simpervisor 1.0.0, six 1.17.0, snappy 1.2.1, sniffio 1.3.1, snowballstemmer 2.2.0, snuggs 1.4.7, sortedcontainers 2.4.0, soupsieve 2.5, sphinx 7.4.7, sphinx-book-theme 1.1.4, sphinx-codeautolink 0.17.0, sphinx-comments 0.0.3, sphinx-copybutton 0.5.2, sphinx-design 0.6.1, sphinx-external-toc 1.0.1, sphinx-jupyterbook-latex 1.0.0, sphinx-multitoc-numbering 0.1.3, sphinx-notfound-page 1.0.4, sphinx-thebe 0.3.1, sphinx-togglebutton 0.3.2, sphinxcontrib-applehelp 2.0.0, sphinxcontrib-bibtex 2.6.3, sphinxcontrib-devhelp 2.0.0, sphinxcontrib-htmlhelp 2.1.0, sphinxcontrib-jsmath 1.0.1, sphinxcontrib-mermaid 1.0.0, sphinxcontrib-qthelp 2.0.0, sphinxcontrib-serializinghtml 1.1.10, sphinxext-rediraffe 0.2.7, sqlalchemy 2.0.38, sqlite 3.49.1, stack_data 0.6.3, svt-av1 2.3.0, tabulate 0.9.0, tbb 2021.13.0, tblib 3.0.0, terminado 0.18.1, tinycss2 1.4.0, tk 8.6.13.]
fc048363eb8f03cd1737600a5d08aafe - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: TCL - license_family: BSD - size: 3503410 - timestamp: 1699202577803 -- conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda - sha256: 18636339a79656962723077df9a56c0ac7b8a864329eb8f847ee3d38495b863e - md5: ac944244f1fed2eb49bae07193ae8215 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 19167 - timestamp: 1733256819729 -- conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda - sha256: eda38f423c33c2eaeca49ed946a8d3bf466cc3364970e083a65eb2fd85258d87 - md5: 40d0ed782a8aaa16ef248e68c06c168d - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 52475 - timestamp: 1733736126261 -- conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda - sha256: 062a3a3a37fa8615ce57929ba7e982c76f5a5810bcebd435950f6d6c4147c310 - md5: e417822cb989e80a0d2b1b576fdd1657 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 840414 - timestamp: 1732616043734 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.4.2-py312hea69d52_0.conda - sha256: 964a2705a36c50040c967b18b45b9cc8de3c2aff4af546979a574e0b38e58e39 - md5: fb0605888a475d6a380ae1d1a819d976 - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 842549 - timestamp: 1732616081362 -- conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.2-py312h4389bb4_0.conda - sha256: e21f24e5d598d9a31c604f510c82fbe73d756696bc70a69f11811a2ea9dd5d95 - md5: f06104f71f496b0784b35b23e30e7990 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 844347 - timestamp: 1732616435803 -- conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_1.conda - sha256: 11e2c85468ae9902d24a27137b6b39b4a78099806e551d390e394a8c34b48e40 - md5: 9efbfdc37242619130ea42b1cc4ed861 - depends: - - colorama - - python >=3.9 - license: MPL-2.0 or MIT - size: 89498 - timestamp: 1735661472632 -- conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda - sha256: f39a5620c6e8e9e98357507262a7869de2ae8cc07da8b7f84e517c9fd6c2b959 - md5: 019a7385be9af33791c989871317e1ed - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 110051 - timestamp: 1733367480074 -- conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda - sha256: 8b98cd9464837174ab58aaa912fc95d5831879864676650a383994033533b8d1 - md5: 1dbc4a115e2ad9fb7f9d5b68397f66f9 - depends: - - python >=3.9 - license: Apache-2.0 AND MIT - size: 22104 - timestamp: 1733612458611 -- conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - noarch: python - sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 - md5: b6a408c64b78ec7b779a3e5c7a902433 - depends: - - typing_extensions 4.12.2 pyha770c72_1 - license: PSF-2.0 - license_family: PSF - size: 10075 - timestamp: 1733188758872 -- conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - sha256: 
337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 - md5: d17f13df8b65464ca316cbc000a3cb64 - depends: - - python >=3.9 - license: PSF-2.0 - license_family: PSF - size: 39637 - timestamp: 1733188758212 -- conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda - sha256: 3088d5d873411a56bf988eee774559335749aed6f6c28e07bf933256afb9eb6c - md5: f6d7aa696c67756a650e91e15e88223c - depends: - - python >=3.9 - license: Apache-2.0 - license_family: APACHE - size: 15183 - timestamp: 1733331395943 -- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda - sha256: c4b1ae8a2931fe9b274c44af29c5475a85b37693999f8c792dad0f8c6734b1de - md5: dbcace4706afdfb7eb891f7b37d07c04 - license: LicenseRef-Public-Domain - size: 122921 - timestamp: 1737119101255 -- conda: https://conda.anaconda.org/conda-forge/noarch/uc-micro-py-1.0.3-pyhd8ed1ab_1.conda - sha256: a2f837780af450d633efc052219c31378bcad31356766663fb88a99e8e4c817b - md5: 9c96c9876ba45368a03056ddd0f20431 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 11199 - timestamp: 1733784280160 -- conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda - sha256: db8dead3dd30fb1a032737554ce91e2819b43496a0db09927edf01c32b577450 - md5: 6797b005cd0f439c4c5c9ac565783700 - constrains: - - vs2015_runtime >=14.29.30037 - license: LicenseRef-MicrosoftWindowsSDK10 - size: 559710 - timestamp: 1728377334097 -- conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda - sha256: 9fb020083a7f4fee41f6ece0f4840f59739b3e249f157c8a407bb374ffb733b5 - md5: f9664ee31aed96c85b7319ab0a693341 - depends: - - __glibc >=2.17,<3.0.a0 - - cffi - - libgcc >=13 - - libstdcxx >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 13904 - timestamp: 1725784191021 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ukkonen-1.0.1-py312h6142ec9_5.conda - sha256: 1e4452b4a12d8a69c237f14b876fbf0cdc456914170b49ba805779c749c31eca - md5: 2b485a809d1572cbe7f0ad9ee107e4b0 - depends: - - __osx >=11.0 - - cffi - - libcxx >=17 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: MIT - license_family: MIT - size: 13605 - timestamp: 1725784243533 -- conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda - sha256: f1944f3d9645a6fa2770966ff010791136e7ce0eaa0c751822b812ac04fee7d6 - md5: d8c5ef1991a5121de95ea8e44c34e13a - depends: - - cffi - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MIT - license_family: MIT - size: 17213 - timestamp: 1725784449622 -- conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda - sha256: 638916105a836973593547ba5cf4891d1f2cb82d1cf14354fcef93fd5b941cdc - md5: 617f5d608ff8c28ad546e5d9671cbb95 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 404401 - timestamp: 1736692621599 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-16.0.0-py312hea69d52_0.conda - sha256: c6ca9ea11eecc650df4bce4b3daa843821def6d753eeab6d81de35bb43f9d984 - md5: 9a835052506b91ea8f0d8e352cd12246 - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - 
license_family: Apache - size: 409745 - timestamp: 1736692768349 -- conda: https://conda.anaconda.org/conda-forge/win-64/unicodedata2-16.0.0-py312h4389bb4_0.conda - sha256: 0889ccb541d0b63cbf42ea5b1f1686b772e872bfcddd3a18787dc4437ebbd7c6 - md5: 3b124c38c7852704ba6a42a170c152a1 - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 400974 - timestamp: 1736693037551 -- conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda - sha256: e0eb6c8daf892b3056f08416a96d68b0a358b7c46b99c8a50481b22631a4dfc0 - md5: e7cb0f5745e4c5035a460248334af7eb - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 23990 - timestamp: 1733323714454 -- conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - sha256: 2aad2aeff7c69a2d7eecd7b662eef756b27d6a6b96f3e2c2a7071340ce14543e - md5: d71d3a66528853c0a1ac2c02d79a0284 - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - license: BSD-3-Clause - license_family: BSD - size: 48270 - timestamp: 1715010035325 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/uriparser-0.9.8-h00cdb27_0.conda - sha256: fa0bcbfb20a508ca9bf482236fe799581cbd0eab016e47a865e9fa44dbe3c512 - md5: e8ff9e11babbc8cd77af5a4258dc2802 - depends: - - __osx >=11.0 - - libcxx >=16 - license: BSD-3-Clause - license_family: BSD - size: 40625 - timestamp: 1715010029254 -- conda: https://conda.anaconda.org/conda-forge/win-64/uriparser-0.9.8-h5a68840_0.conda - sha256: ed0eed8ed0343d29cdbfaeb1bfd141f090af696547d69f91c18f46350299f00d - md5: 28b4cf9065681f43cc567410edf8243d - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 49181 - timestamp: 1715010467661 -- conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda - sha256: 114919ffa80c328127dab9c8e7a38f9d563c617691fb81fccb11c1e86763727e - md5: 32674f8dbfb7b26410ed580dd3c10a29 - depends: - - brotli-python >=1.0.9 - - h2 >=4,<5 - - pysocks >=1.5.6,<2.0,!=1.5.7 - - python >=3.9 - - zstandard >=0.18.0 - license: MIT - license_family: MIT - size: 100102 - timestamp: 1734859520452 -- conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h5fd82a7_24.conda - sha256: 7ce178cf139ccea5079f9c353b3d8415d1d49b0a2f774662c355d3f89163d7b4 - md5: 00cf3a61562bd53bd5ea99e6888793d0 - depends: - - vc14_runtime >=14.40.33810 - track_features: - - vc14 - license: BSD-3-Clause - license_family: BSD - size: 17693 - timestamp: 1737627189024 -- conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.42.34433-h6356254_24.conda - sha256: abda97b8728cf6e3c37df8f1178adde7219bed38b96e392cb3be66336386d32e - md5: 2441e010ee255e6a38bf16705a756e94 - depends: - - ucrt >=10.0.20348.0 - constrains: - - vs2015_runtime 14.42.34433.* *_24 - license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime - license_family: Proprietary - size: 753531 - timestamp: 1737627061911 -- conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.2-pyhd8ed1ab_0.conda - sha256: c50a4ab0f5f1164230d42a29f12f61ece9c7b102f57ed1c607d2cd7c77e107b5 - md5: d8a3ee355d5ecc9ee2565cafba1d3573 - depends: - - distlib >=0.3.7,<1 - - filelock >=3.12.2,<4 - - platformdirs >=3.9.1,<5 - - python >=3.9 - license: MIT - license_family: MIT - size: 3519478 - timestamp: 1739263533376 -- conda: 
https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.42.34433-hfef2bbc_24.conda - sha256: 09102e0bd283af65772c052d85028410b0c31989b3cd96c260485d28e270836e - md5: 117fcc5b86c48f3b322b0722258c7259 - depends: - - vc14_runtime >=14.42.34433 - license: BSD-3-Clause - license_family: BSD - size: 17669 - timestamp: 1737627066773 -- conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - sha256: 0884b2023a32d2620192cf2e2fc6784b8d1e31cf9f137e49e00802d4daf7d1c1 - md5: 0a732427643ae5e0486a727927791da1 - depends: - - __glibc >=2.17,<3.0.a0 - - libexpat >=2.6.2,<3.0a0 - - libffi >=3.4,<4.0a0 - - libgcc-ng >=13 - - libstdcxx-ng >=13 - license: MIT - license_family: MIT - size: 321561 - timestamp: 1724530461598 -- conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda - sha256: f21e63e8f7346f9074fd00ca3b079bd3d2fa4d71f1f89d5b6934bf31446dc2a5 - md5: b68980f2495d096e71c7fd9d7ccf63e6 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 32581 - timestamp: 1733231433877 -- conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda - sha256: 08315dc2e61766a39219b2d82685fc25a56b2817acf84d5b390176080eaacf99 - md5: b49f7b291e15494aafb0a7d74806f337 - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 18431 - timestamp: 1733359823938 -- conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 - md5: 2841eb5bfc75ce15e9a0054b98dcd64d - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 15496 - timestamp: 1733236131358 -- conda: https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda - sha256: 75d5dc901bf80b1cdfc3ab06aa712971034a8efb426b15355c16166d0de58898 - md5: 293718ddac83a0fbc0f2193ff77d1e1c - depends: - - legacy-cgi >=2.6 - - python >=3.9 - license: MIT - license_family: MIT - size: 92706 - timestamp: 1733185749219 -- conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda - sha256: 1dd84764424ffc82030c19ad70607e6f9e3b9cb8e633970766d697185652053e - md5: 84f8f77f0a9c6ef401ee96611745da8f - depends: - - python >=3.9 - license: Apache-2.0 - license_family: APACHE - size: 46718 - timestamp: 1733157432924 -- conda: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_1.conda - sha256: a750202ae2a31d8e5ee5a5c127fcc7fa783cd0fbedbc0bf1ab549a109881fa9f - md5: 237db148cc37a466e4222d589029b53e - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 898402 - timestamp: 1733128654300 -- conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_8.conda - sha256: 93807369ab91f230cf9e6e2a237eaa812492fe00face5b38068735858fba954f - md5: 46e441ba871f524e2b067929da3051c2 - depends: - - __win - - python >=3.9 - license: LicenseRef-Public-Domain - size: 9555 - timestamp: 1733130678956 -- conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 - sha256: 9df10c5b607dd30e05ba08cbd940009305c75db242476f4e845ea06008b0a283 - md5: 1cee351bf20b830d991dbe0bc8cd7dfe - license: MIT - license_family: MIT - size: 1176306 -- conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.17.2-py312h66e93f0_0.conda - sha256: ed3a1700ecc5d38c7e7dc7d2802df1bc1da6ba3d6f6017448b8ded0affb4ae00 - md5: 669e63af87710f8d52fdec9d4d63b404 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - python 
>=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: BSD-2-Clause - license_family: BSD - size: 63590 - timestamp: 1736869574299 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/wrapt-1.17.2-py312hea69d52_0.conda - sha256: 6a3e68b57de29802e8703d1791dcacb7613bfdc17bbb087c6b2ea2796e6893ef - md5: e49608c832fcf438f70cbcae09c3adc5 - depends: - - __osx >=11.0 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: BSD-2-Clause - license_family: BSD - size: 61198 - timestamp: 1736869673767 -- conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.17.2-py312h4389bb4_0.conda - sha256: a1b86d727cc5f9d016a6fc9d8ac8b3e17c8e137764e018555ecadef05979ce93 - md5: b9a81b36e0d35c9a172587ead532273b - depends: - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-2-Clause - license_family: BSD - size: 62232 - timestamp: 1736869967220 -- conda: https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2 - sha256: 76c7405bcf2af639971150f342550484efac18219c0203c5ee2e38b8956fe2a0 - md5: e7f6ed84d4623d52ee581325c1587a6b - depends: - - libgcc-ng >=10.3.0 - - libstdcxx-ng >=10.3.0 - license: GPL-2.0-or-later - license_family: GPL - size: 3357188 - timestamp: 1646609687141 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/x265-3.5-hbc6ce65_3.tar.bz2 - sha256: 2fed6987dba7dee07bd9adc1a6f8e6c699efb851431bcb6ebad7de196e87841d - md5: b1f7f2780feffe310b068c021e8ff9b2 - depends: - - libcxx >=12.0.1 - license: GPL-2.0-or-later - license_family: GPL - size: 1832744 - timestamp: 1646609481185 -- conda: https://conda.anaconda.org/conda-forge/win-64/x265-3.5-h2d74725_3.tar.bz2 - sha256: 02b9874049112f2b7335c9a3e880ac05d99a08d9a98160c5a98898b2b3ac42b2 - md5: ca7129a334198f08347fb19ac98a2de9 - depends: - - vc >=14.1,<15 - - vs2015_runtime >=14.16.27033 - license: GPL-2.0-or-later - license_family: GPL - size: 5517425 - timestamp: 1646611941216 -- conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.2-pyhd8ed1ab_0.conda - sha256: 0f59c2718573770b01d849e05a56a7fe1461f55bf7525c4df5552079c5c03427 - md5: b8d9af89c48fa3359f05f3324809fcde - depends: - - numpy >=1.24 - - packaging >=23.2 - - pandas >=2.1 - - python >=3.10 - constrains: - - pint >=0.22 - - netcdf4 >=1.6.0 - - cartopy >=0.22 - - iris >=3.7 - - h5py >=3.8 - - nc-time-axis >=1.4 - - sparse >=0.14 - - bottleneck >=1.3 - - scipy >=1.11 - - numba >=0.57 - - toolz >=0.12 - - h5netcdf >=1.3 - - distributed >=2023.11 - - dask-core >=2023.11 - - flox >=0.7 - - seaborn-base >=0.13 - - zarr >=2.16 - - matplotlib-base >=3.8 - - cftime >=1.6 - - hdf5 >=1.12 - license: Apache-2.0 - license_family: APACHE - size: 837969 - timestamp: 1738313762187 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda - sha256: 339ab0ff05170a295e59133cd0fa9a9c4ba32b6941c8a2a73484cc13f81e248a - md5: 9dda9667feba914e0e80b95b82f7402b - depends: - - __glibc >=2.17,<3.0.a0 - - icu >=75.1,<76.0a0 - - libgcc >=13 - - libnsl >=2.0.1,<2.1.0a0 - - libstdcxx >=13 - license: Apache-2.0 - license_family: Apache - size: 1648243 - timestamp: 1727733890754 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xerces-c-3.2.5-h92fc2f4_2.conda - sha256: 863a7c2a991a4399d362d42c285ebc20748a4ea417647ebd3a171e2220c7457d - md5: 50b7325437ef0901fe25dc5c9e743b88 - depends: - - __osx >=11.0 - - icu >=75.1,<76.0a0 - - libcxx >=17 - license: Apache-2.0 - license_family: Apache - size: 1277884 - 
timestamp: 1727733870250 -- conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_2.conda - sha256: 759ae22a0a221dc1c0ba39684b0dcf696aab4132478e17e56a0366ded519e54e - md5: 82b6eac3c198271e98b48d52d79726d8 - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 3574017 - timestamp: 1727734520239 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda - sha256: 0d89b5873515a1f05d311f37ea4e087bbccc0418afa38f2f6189e97280db3179 - md5: f725c7425d6d7c15e31f3b99a88ea02f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 389475 - timestamp: 1727840188958 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda - sha256: c12396aabb21244c212e488bbdc4abcdef0b7404b15761d9329f5a4a39113c4b - md5: fb901ff28063514abb6046c9ec2c4a45 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 58628 - timestamp: 1734227592886 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libice-1.1.2-h0e40799_0.conda - sha256: bf1d34142b1bf9b5a4eed96bcc77bc4364c0e191405fd30d2f9b48a04d783fd3 - md5: 105cb93a47df9c548e88048dc9cbdbc9 - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 236306 - timestamp: 1734228116846 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda - sha256: 760f43df6c2ce8cbbbcb8f2f3b7fc0f306716c011e28d1d340f3dfa8ccf29185 - md5: 4c3e9fab69804ec6077697922d70c6e2 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libuuid >=2.38.1,<3.0a0 - - xorg-libice >=1.1.2,<2.0a0 - license: MIT - license_family: MIT - size: 27198 - timestamp: 1734229639785 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libsm-1.2.5-h0e40799_0.conda - sha256: f005a6b5d77f97aa59583fb0ce66777b36e9e47fdc8696a949116b7256dd53c4 - md5: 6a9bc84b3780f5c6f32dc53078fda7f5 - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - - xorg-libice >=1.1.1,<2.0a0 - license: MIT - license_family: MIT - size: 96698 - timestamp: 1734229863516 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.11-h4f16b4b_0.conda - sha256: a0e7fca9e341dc2455b20cd320fc1655e011f7f5f28367ecf8617cccd4bb2821 - md5: b6eb6d0cb323179af168df8fe16fb0a1 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libxcb >=1.17.0,<2.0a0 - license: MIT - license_family: MIT - size: 835157 - timestamp: 1738613163812 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libx11-1.8.11-hf48077a_0.conda - sha256: 7f460b3aecf2807858ba3d650f5bc7597607e30999232e05d7d4fa24e78aa99f - md5: 7d971d982bf20fd0dbc23ec41a45659c - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - libxcb >=1.17.0,<2.0a0 - - ucrt >=10.0.20348.0 - license: MIT - license_family: MIT - size: 947677 - timestamp: 1738614121022 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda - sha256: ed10c9283974d311855ae08a16dfd7e56241fac632aec3b92e3cfe73cff31038 - md5: f6ebe2cb3f82ba6c057dde5d9debe4f7 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 14780 - timestamp: 1734229004433 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxau-1.0.12-h5505292_0.conda - sha256: 
f33e6f013fc36ebc200f09ddead83468544cb5c353a3b50499b07b8c34e28a8d - md5: 50901e0764b7701d8ed7343496f4f301 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 13593 - timestamp: 1734229104321 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.12-h0e40799_0.conda - sha256: 047836241b2712aab1e29474a6f728647bff3ab57de2806b0bb0a6cf9a2d2634 - md5: 2ffbfae4548098297c033228256eb96e - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - license: MIT - license_family: MIT - size: 108013 - timestamp: 1734229474049 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda - sha256: 753f73e990c33366a91fd42cc17a3d19bb9444b9ca5ff983605fa9e953baf57f - md5: d3c295b50f092ab525ffe3c2aa4b7413 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxfixes >=6.0.1,<7.0a0 - license: MIT - license_family: MIT - size: 13603 - timestamp: 1727884600744 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda - sha256: 832f538ade441b1eee863c8c91af9e69b356cd3e9e1350fff4fe36cc573fc91a - md5: 2ccd714aa2242315acaf0a67faea780b - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxfixes >=6.0.1,<7.0a0 - - xorg-libxrender >=0.9.11,<0.10.0a0 - license: MIT - license_family: MIT - size: 32533 - timestamp: 1730908305254 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda - sha256: 43b9772fd6582bf401846642c4635c47a9b0e36ca08116b3ec3df36ab96e0ec0 - md5: b5fcc7172d22516e1f965490e65e33a4 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxfixes >=6.0.1,<7.0a0 - license: MIT - license_family: MIT - size: 13217 - timestamp: 1727891438799 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda - sha256: 6b250f3e59db07c2514057944a3ea2044d6a8cdde8a47b6497c254520fade1ee - md5: 8035c64cb77ed555e3f150b7b3972480 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 19901 - timestamp: 1727794976192 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxdmcp-1.1.5-hd74edd7_0.conda - sha256: 9939a166d780700d81023546759102b33fdc2c5f11ef09f5f66c77210fd334c8 - md5: 77c447f48cab5d3a15ac224edb86a968 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 18487 - timestamp: 1727795205022 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.5-h0e40799_0.conda - sha256: 9075f98dcaa8e9957e4a3d9d30db05c7578a536950a31c200854c5c34e1edb2c - md5: 8393c0f7e7870b4eb45553326f81f0ff - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - license: MIT - license_family: MIT - size: 69920 - timestamp: 1727795651979 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda - sha256: da5dc921c017c05f38a38bd75245017463104457b63a1ce633ed41f214159c14 - md5: febbab7d15033c913d53c7a2c102309d - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 50060 - timestamp: 1727752228921 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxext-1.3.6-h0e40799_0.conda - sha256: 7fdc3135a340893aa544921115c3994ef4071a385d47cc11232d818f006c63e4 - md5: 4cd74e74f063fb6900d6eed2e9288112 - depends: - - libgcc >=13 - - 
libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 284715 - timestamp: 1727752838922 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda - sha256: 2fef37e660985794617716eb915865ce157004a4d567ed35ec16514960ae9271 - md5: 4bdb303603e9821baf5fe5fdff1dc8f8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 19575 - timestamp: 1727794961233 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda - sha256: 1a724b47d98d7880f26da40e45f01728e7638e6ec69f35a3e11f92acd05f9e7a - md5: 17dcc85db3c7886650b8908b183d6876 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxfixes >=6.0.1,<7.0a0 - license: MIT - license_family: MIT - size: 47179 - timestamp: 1727799254088 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda - sha256: 1b9141c027f9d84a9ee5eb642a0c19457c788182a5a73c5a9083860ac5c20a8c - md5: 5e2eb9bf77394fc2e5918beefec9f9ab - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - license: MIT - license_family: MIT - size: 13891 - timestamp: 1727908521531 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxpm-3.5.17-h0e40799_1.conda - sha256: a605b43b2622a4cae8df6edc148c02b527da4ea165ec67cabb5c9bc4f3f8ef13 - md5: e8b816fb37bc61aa3f1c08034331ef53 - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxt >=1.3.0,<2.0a0 - license: MIT - license_family: MIT - size: 236112 - timestamp: 1727801849623 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda - sha256: ac0f037e0791a620a69980914a77cb6bb40308e26db11698029d6708f5aa8e0d - md5: 2de7f99d6581a4a7adbff607b5c278ca - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxrender >=0.9.11,<0.10.0a0 - license: MIT - license_family: MIT - size: 29599 - timestamp: 1727794874300 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda - sha256: 044c7b3153c224c6cedd4484dd91b389d2d7fd9c776ad0f4a34f099b3389f4a1 - md5: 96d57aba173e878a2089d5638016dc5e - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 33005 - timestamp: 1734229037766 -- conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxt-1.3.1-h0e40799_0.conda - sha256: c940a6b71a1e59450b01ebfb3e21f3bbf0a8e611e5fbfc7982145736b0f20133 - md5: 31baf0ce8ef19f5617be73aee0527618 - depends: - - libgcc >=13 - - libwinpthread >=12.0.0.r4.gg4f2fc60ca - - ucrt >=10.0.20348.0 - - xorg-libice >=1.1.1,<2.0a0 - - xorg-libsm >=1.2.4,<2.0a0 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 918674 - timestamp: 1731861024233 -- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda - sha256: 752fdaac5d58ed863bbf685bb6f98092fe1a488ea8ebb7ed7b606ccfce08637a - md5: 7bbe9a0cc0df0ac5f5a8ad6d6a11af2f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxi >=1.7.10,<2.0a0 - license: MIT - 
license_family: MIT - size: 32808 - timestamp: 1727964811275 -- conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.1.0-pyhd8ed1ab_0.conda - sha256: 9978c22319e85026d5a4134944f73bac820c948ca6b6c32af6b6985b5221cd8a - md5: fdf07e281a9e5e10fc75b2dd444136e9 - depends: - - python >=3.8 - license: BSD-3-Clause - license_family: BSD - size: 48641 - timestamp: 1737234992057 -- conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - sha256: a4e34c710eeb26945bdbdaba82d3d74f60a78f54a874ec10d373811a5d217535 - md5: 4cb3ad778ec2d5a7acbdf254eb1c42ae - depends: - - libgcc-ng >=9.4.0 - license: MIT - license_family: MIT - size: 89141 - timestamp: 1641346969816 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h3422bc3_2.tar.bz2 - sha256: 93181a04ba8cfecfdfb162fc958436d868cc37db504c58078eab4c1a3e57fbb7 - md5: 4bb3f014845110883a3c5ee811fd84b4 - license: MIT - license_family: MIT - size: 88016 - timestamp: 1641347076660 -- conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 - sha256: 4e2246383003acbad9682c7c63178e2e715ad0eb84f03a8df1fbfba455dfedc5 - md5: adbfb9f45d1004a26763652246a33764 - depends: - - vc >=14.1,<15.0a0 - - vs2015_runtime >=14.16.27012 - license: MIT - license_family: MIT - size: 63274 - timestamp: 1641347623319 -- conda: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h178313f_1.conda - sha256: 6b054c93dd19fd7544af51b41a8eacca2ab62271f6c0c5a2a0cffe80dc37a0ce - md5: 6822c49f294d4355f19d314b8b6063d8 - depends: - - __glibc >=2.17,<3.0.a0 - - idna >=2.0 - - libgcc >=13 - - multidict >=4.0 - - propcache >=0.2.1 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 152305 - timestamp: 1737575898300 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.18.3-py312h998013c_1.conda - sha256: 48821d23567ca0f853eee6f7812c74392867e123798b5b3c44f58758d8eb580e - md5: 092d3b40acc67c470f379049be343a7a - depends: - - __osx >=11.0 - - idna >=2.0 - - multidict >=4.0 - - propcache >=0.2.1 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - license: Apache-2.0 - license_family: Apache - size: 145543 - timestamp: 1737576074753 -- conda: https://conda.anaconda.org/conda-forge/win-64/yarl-1.18.3-py312h31fea79_1.conda - sha256: ed25427ab892f0e9aa37514316b408d2f3739583dab600d3c744eaae9cbcf6f8 - md5: 004fb3779f2f70e82c6154369d711125 - depends: - - idna >=2.0 - - multidict >=4.0 - - propcache >=0.2.1 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - size: 141616 - timestamp: 1737576608333 -- conda: https://conda.anaconda.org/conda-forge/noarch/zarr-3.0.3-pyhd8ed1ab_0.conda - sha256: 1a59dc8a8c68413edd3ada0656eb492b3b233ab0db82f09f29907767225d7c24 - md5: 7fc61289bd623366ccfdb821fc5d885a - depends: - - crc32c - - donfig >=0.8 - - numcodecs >=0.14 - - numpy >=1.25 - - packaging >=22.0 - - python >=3.11 - - typing_extensions >=4.9 - constrains: - - fsspec >=2023.10.0 - license: MIT - license_family: MIT - size: 191015 - timestamp: 1739705524309 -- conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda - sha256: a4dc72c96848f764bb5a5176aa93dd1e9b9e52804137b99daeebba277b31ea10 - md5: 3947a35e916fcc6b9825449affbf4214 - depends: - - __glibc >=2.17,<3.0.a0 - - krb5 >=1.21.3,<1.22.0a0 - - libgcc >=13 - - libsodium >=1.0.20,<1.0.21.0a0 - - 
libstdcxx >=13 - license: MPL-2.0 - license_family: MOZILLA - size: 335400 - timestamp: 1731585026517 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc1bb282_7.conda - sha256: 9e585569fe2e7d3bea71972cd4b9f06b1a7ab8fa7c5139f92a31cbceecf25a8a - md5: f7e6b65943cb73bce0143737fded08f1 - depends: - - __osx >=11.0 - - krb5 >=1.21.3,<1.22.0a0 - - libcxx >=18 - - libsodium >=1.0.20,<1.0.21.0a0 - license: MPL-2.0 - license_family: MOZILLA - size: 281565 - timestamp: 1731585108039 -- conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_7.conda - sha256: 15cc8e2162d0a33ffeb3f7b7c7883fd830c54a4b1be6a4b8c7ee1f4fef0088fb - md5: e03f2c245a5ee6055752465519363b1c - depends: - - krb5 >=1.21.3,<1.22.0a0 - - libsodium >=1.0.20,<1.0.21.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: MPL-2.0 - license_family: MOZILLA - size: 2527503 - timestamp: 1731585151036 -- conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda - sha256: 5488542dceeb9f2874e726646548ecc5608060934d6f9ceaa7c6a48c61f9cc8d - md5: e52c2ef711ccf31bb7f70ca87d144b9e - depends: - - python >=3.9 - license: BSD-3-Clause - license_family: BSD - size: 36341 - timestamp: 1733261642963 -- conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda - sha256: 567c04f124525c97a096b65769834b7acb047db24b15a56888a322bf3966c3e1 - md5: 0c3cc595284c5e8f0f9900a9b228a332 - depends: - - python >=3.9 - license: MIT - license_family: MIT - size: 21809 - timestamp: 1732827613585 -- conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda - sha256: 5d7c0e5f0005f74112a34a7425179f4eb6e73c92f5d109e6af4ddeca407c92ab - md5: c9f075ab2f33b3bbee9e62d4ad0a6cd8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libzlib 1.3.1 hb9d3cd8_2 - license: Zlib - license_family: Other - size: 92286 - timestamp: 1727963153079 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.1-h8359307_2.conda - sha256: 58f8860756680a4831c1bf4f294e2354d187f2e999791d53b1941834c4b37430 - md5: e3170d898ca6cb48f1bb567afb92f775 - depends: - - __osx >=11.0 - - libzlib 1.3.1 h8359307_2 - license: Zlib - license_family: Other - size: 77606 - timestamp: 1727963209370 -- conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_2.conda - sha256: 8c688797ba23b9ab50cef404eca4d004a948941b6ee533ead0ff3bf52012528c - md5: be60c4e8efa55fddc17b4131aa47acbd - depends: - - libzlib 1.3.1 h2466b09_2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Zlib - license_family: Other - size: 107439 - timestamp: 1727963788936 -- conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda - sha256: b97015e146437283f2213ff0e95abdc8e2480150634d81fbae6b96ee09f5e50b - md5: 8b7069e9792ee4e5b4919a7a306d2e67 - depends: - - __glibc >=2.17,<3.0.a0 - - cffi >=1.11 - - libgcc >=13 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - zstd >=1.5.6,<1.5.7.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-3-Clause - license_family: BSD - size: 419552 - timestamp: 1725305670210 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.23.0-py312h15fbf35_1.conda - sha256: d00ca25c1e28fd31199b26a94f8c96574475704a825d244d7a6351ad3745eeeb - md5: a4cde595509a7ad9c13b1a3809bcfe51 - depends: - - __osx >=11.0 - - cffi >=1.11 - - python >=3.12,<3.13.0a0 - - python >=3.12,<3.13.0a0 *_cpython - - python_abi 3.12.* *_cp312 - - zstd >=1.5.6,<1.5.7.0a0 - - zstd >=1.5.6,<1.6.0a0 - 
license: BSD-3-Clause - license_family: BSD - size: 330788 - timestamp: 1725305806565 -- conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda - sha256: 3e0c718aa18dcac7f080844dbe0aea41a9cea75083019ce02e8a784926239826 - md5: a92cc3435b2fd6f51463f5a4db5c50b1 - depends: - - cffi >=1.11 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - zstd >=1.5.6,<1.5.7.0a0 - - zstd >=1.5.6,<1.6.0a0 - license: BSD-3-Clause - license_family: BSD - size: 320624 - timestamp: 1725305934189 -- conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - sha256: c558b9cc01d9c1444031bd1ce4b9cff86f9085765f17627a6cd85fc623c8a02b - md5: 4d056880988120e29d75bfff282e0f45 - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 554846 - timestamp: 1714722996770 -- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.6-hb46c0d2_0.conda - sha256: 2d4fd1ff7ee79cd954ca8e81abf11d9d49954dd1fef80f27289e2402ae9c2e09 - md5: d96942c06c3e84bfcc5efb038724a7fd - depends: - - __osx >=11.0 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 405089 - timestamp: 1714723101397 -- conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda - sha256: 768e30dc513568491818fb068ee867c57c514b553915536da09e5d10b4ebf3c3 - md5: 9a17230f95733c04dc40a2b1e5491d74 - depends: - - libzlib >=1.2.13,<2.0.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - size: 349143 - timestamp: 1714723445995 diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index e2e408db..00000000 --- a/pyproject.toml +++ /dev/null @@ -1,98 +0,0 @@ -[project] -name = "coincident" -description = "Xarray Tutorial Website" -readme = "README.md" -license.file = "LICENSE" -requires-python = ">=3.10" -classifiers = [ - "Development Status :: 1 - Planning", - "Intended Audience :: Science/Research", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Topic :: Scientific/Engineering", - "Typing :: Typed", -] - -[tool.black] -line-length = 100 -target-version = ['py38'] -skip-string-normalization = true - -[tool.flake8] -#exclude = -ignore = "E203,E266,E501,W503,E722,E402,C901" -max-line-length = 100 -max-complexity = 18 -select = "B,C,E,F,W,T4,B9" - -[tool.isort] -#known_first_party= -#known_third_party= -multi_line_output=3 -include_trailing_comma=true -force_grid_wrap=0 -combine_as_imports=true -line_length=100 -#skip= - -[tool.codespell] -ignore-words-list="nd" -skip="pixi.lock,.pixi,qaqc.yaml" - -[tool.pixi.project] -name = "xarray-tutorial" -channels = ["conda-forge"] -platforms = ["osx-arm64", "linux-64", "win-64"] - -[tool.pixi.pypi-dependencies] - -[tool.pixi.tasks] -tutorial = "jupyter lab --LabApp.default_url='/lab/tree/overview/xarray-in-45-min.ipynb'" -build = "jupyter-book build ./ --warningiserror --keep-going" -precommit = "pre-commit run --all" -checklinks = "jupyter-book build ./ --builder linkcheck" - -[tool.pixi.dependencies] -jupyter-book = ">=1.0.3,<2" -pre-commit = 
">=4.1.0,<5" -dask-labextension = ">=7.0.0,<8" -jupyterlab = ">=4.3.5,<5" -jupyter_bokeh = ">=4.0.5,<5" -jupyterlab-myst = ">=2.4.2,<3" -jupyter-resource-usage = ">=1.1.1,<2" -cartopy = ">=0.24.0,<0.25" -cf_xarray = ">=0.10.0,<0.11" -dask = ">=2025.2.0,<2026" -datashader = ">=0.17.0,<0.18" -distributed = ">=2025.2.0,<2026" -gcsfs = ">=2025.2.0,<2026" -geoviews-core = ">=1.14.0,<2" -gsw = ">=3.6.19,<4" -hvplot = ">=0.11.2,<0.12" -h5netcdf = ">=1.5.0,<2" -ipykernel = ">=6.29.5,<7" -matplotlib-base = ">=3.10.0,<4" -netcdf4 = ">=1.7.2,<2" -numpy = ">=2.1.3,<3" -pint-xarray = ">=0.4,<0.5" -pydap = ">=3.5.3,<4" -python-graphviz = ">=0.20.3,<0.21" -pooch = ">=1.8.2,<2" -rioxarray = ">=0.18.2,<0.19" -scipy = ">=1.15.2,<2" -sphinx-codeautolink = ">=0.17.0,<0.18" -sphinxcontrib-mermaid = ">=1.0.0,<2" -sphinx-notfound-page = ">=1.0.4,<2" -sphinxext-rediraffe = ">=0.2.7,<0.3" -s3fs = ">=2025.2.0,<2026" -xarray = ">=2025.1.2,<2026" -zarr = ">=3.0.3,<4" -flox = ">=0.10.0,<0.11" -numbagg = ">=0.9.0,<0.10" diff --git a/reference/glossary.md b/reference/glossary.md deleted file mode 100644 index 99ab718d..00000000 --- a/reference/glossary.md +++ /dev/null @@ -1,23 +0,0 @@ -# Glossary - -For Xarray data structure terminology see https://docs.xarray.dev/en/stable/user-guide/terminology.html - -```{glossary} -[Xarray](https://docs.xarray.dev) - An open source project and Python package that makes working with labelled multi-dimensional arrays simple, efficient, and fun! - -[Zarr](https://zarr.readthedocs.io) - A Python package that provides an implementation of chunked, compressed, N-dimensional arrays - -[GDAL](https://gdal.org) - (Geospatial Data Abstraction Library) a translator library for raster and vector geospatial data formats - -[HDF](https://www.hdfgroup.org/solutions/hdf5/) - (Hierarchical Data Format) binary file format for heterogeneous N-dimensional datasets - -[netCDF](https://www.unidata.ucar.edu/software/netcdf/) - (Network Common Data Form) binary file format for N-dimensional self-described datasets that originated in the geosciences - -[CF Conventions](https://cfconventions.org) - Metadata conventions designed to promote the processing and sharing of netCDF files -``` diff --git a/reference/references.bib b/reference/references.bib deleted file mode 100644 index e8bf1925..00000000 --- a/reference/references.bib +++ /dev/null @@ -1,20 +0,0 @@ -@article{hoyerhamman2017, - title = {xarray: {N-D} labeled arrays and datasets in {Python}}, - author = {Hoyer, S. and J. Hamman}, - journal = {Journal of Open Research Software}, - volume = {5}, - number = {1}, - year = {2017}, - publisher = {Ubiquity Press}, - doi = {10.5334/jors.148}, - url = {https://doi.org/10.5334/jors.148} -} - -@misc{xarray_v202230, - author = {Stephan Hoyer and Clark Fitzgerald and Joe Hamman and others}, - title = {xarray: v2022.3.0}, - month = May, - year = 2022, - doi = {10.5281/zenodo.59499}, - url = {https://doi.org/10.5281/zenodo.59499} - } diff --git a/reference/resources.md b/reference/resources.md deleted file mode 100644 index 854da382..00000000 --- a/reference/resources.md +++ /dev/null @@ -1,41 +0,0 @@ -# Keep Exploring! - -To help you go deeper, we've also create a list of notebooks that -demonstrate real-world applications of Xarray in a variety of use cases. These -need not be explored in any particular sequence, instead they are meant to -provide a sampling of what Xarray can be used for. - -```{seealso} -1. 
The [Xarray blog](https://xarray.dev/blog) has a number of user stories showcasing Xarray in -many scientific domains. -2. [Project Pythia Foundations](https://foundations.projectpythia.org/) is a great collection of -material on Xarray and related packages. -``` - -## Weather and Climate - -1. [Global Mean Surface Temperature from CMIP6](https://gallery.pangeo.io/repos/pangeo-gallery/cmip6/): - Start with `global_mean_surface_temp.ipynb` then feel free to explore the - rest of the notebooks. - -1. [National Water Model Streamflow Analysis](https://gallery.pangeo.io/repos/rsignell-usgs/esip-gallery/): - Start with `02_National_Water_Model.ipynb` then feel free to explore the rest - of the notebooks. -1. The [Project Pythia Cookbooks](https://cookbooks.projectpythia.org/) are a great collection of - resources showcasing use of Xarray with real world datasets. -1. [Examples using the Microsoft Planetary Computer](https://github.com/microsoft/PlanetaryComputerExamples) - -## Bayesian Statistical Modeling - -1. [Xarray and PyMC3](https://mybinder.org/v2/gh/pymc-devs/pymc3/main?filepath=%2Fdocs%2Fsource%2Fnotebooks): - Start with `multilevel_modeling.ipynb` then feel free to explore the rest of - the notebooks. Also checkout [Arviz](https://arviz-devs.github.io/arviz/) - which uses Xarray as its data model. - -## Genomics - -1. [From "data analysis for genomic surveillance of African malaria vectors" ](https://anopheles-genomic-surveillance.github.io/workshop-5/module-1-xarray.html) - -## Electrophysiology - -1. ["Analyzing intracranial electrophysiology data with xarray"](https://chrisholdgraf.com/blog/2019/2019-10-22-xarray-neuro) diff --git a/workshops/oceanhackweek2020/README.md b/workshops/oceanhackweek2020/README.md deleted file mode 100644 index 9155eae2..00000000 --- a/workshops/oceanhackweek2020/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# Oceanhackweek 2020 - -Presented August 2020 at [OceanHackWeek](https://oceanhackweek.github.io) by Deepak Cheerian - -This 45-minute tutorial covers the basics of Xarray data structures. - -Video Recording: -https://www.youtube.com/watch?v=q020HHnGAWo - -By the end of the lesson, we will be able to: - -- Understand the basic data structures in Xarray -- Inspect `DataArray` and `Dataset` objects. -- Read and write netCDF files using Xarray. -- Understand that there are many packages that build on top of xarray diff --git a/workshops/online-tutorial-series/01_xarray_fundamentals.ipynb b/workshops/online-tutorial-series/01_xarray_fundamentals.ipynb deleted file mode 100644 index 03eded1c..00000000 --- a/workshops/online-tutorial-series/01_xarray_fundamentals.ipynb +++ /dev/null @@ -1,448 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Xarray Fundamentals\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Learning Objectives\n", - "\n", - "- Provide an overview of xarray\n", - "- Describe the core xarray data structures, the DataArray and the Dataset, and\n", - " the components that make them up\n", - "- Load xarray dataset from a netCDF file\n", - "- View and set attributes\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## What Is Xarray?\n", - "\n", - "- Unlabeled, N-dimensional arrays of numbers (e.g., NumPy’s ndarray) are the\n", - " most widely used data structure in scientific computing. 
However, they lack a\n", - " meaningful representation of the metadata associated with their data.\n", - " Implementing such functionality is left to individual users and\n", - " domain-specific packages.\n", - "\n", - "- xarray expands on the capabilities of NumPy arrays, providing a lot of\n", - " streamline data manipulation.\n", - "\n", - "- Xarray's interface is based largely on the netCDF data model (variables,\n", - " attributes, and dimensions), but it goes beyond the traditional netCDF\n", - " interfaces to provide functionality similar to netCDF-java's Common Data Model\n", - " (CDM).\n", - "\n", - "- xarray is motivated by weather and climate use cases but is **domain\n", - " agnostic**...\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Core Data Structures\n", - "\n", - "- xarray has 2 fundamental data structures:\n", - "\n", - " - `DataArray`, which holds single multi-dimensional variables and its\n", - " coordinates\n", - " - `Dataset`, which holds multiple variables that potentially share the same\n", - " coordinates\n", - "\n", - "![](../../images/xarray-data-structures.png)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Loading data from netCDF\n", - "\n", - "- NetCDF (network Common Data Form) is a file format for storing\n", - " multidimensional array data\n", - "- NetCDF is a self-describing, meaning that a netCDF file includes information\n", - " about the data it contains, and the necessary metadata such as coordinate\n", - " system used, attributes describing the data, etc...\n", - "- NetCDF is used extensively in the geoscience communities\n", - "- Xarray's interface is based largely on the netCDF data model\n", - "\n", - "Learn more about netCDF\n", - "[here](https://docs.unidata.ucar.edu/netcdf-c/current/faq.html#What-Is-netCDF).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Load mean sea surface temperature dataset\n", - "ds = xr.open_dataset(\"../../data/sst.mnmean.nc\", engine=\"netcdf4\")\n", - "\n", - "# xarray's HTML representation\n", - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# If you prefer a text based repr, you can set the display_style='text' by uncommenting the line below\n", - "# xr.set_options(display_style=\"text\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output", - "output_scroll" - ] - }, - "outputs": [], - "source": [ - "# Look at the netCDF representation\n", - "ds.info()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### `Dataset`\n", - "\n", - "- Xarray's `Dataset` is a dict-like container of labeled arrays (`DataArrays`)\n", - " with aligned dimensions. - It is designed as an in-memory representation of a\n", - " netCDF dataset.\n", - "- In addition to the dict-like interface of the dataset itself, which can be\n", - " used to access any `DataArray` in a `Dataset`. 
Datasets have the following key\n", - " properties:\n", - "\n", - "| Attribute | Description |\n", - "| ----------- | ---------------------------------------------------------------------------------------------------------------------------------------- |\n", - "| `data_vars` | OrderedDict of `DataArray` objects corresponding to data variables. |\n", - "| `dims` | dictionary mapping from dimension names to the fixed length of each dimension (e.g., {`lat`: 6, `lon`: 6, `time`: 8}). |\n", - "| `coords` | a dict-like container of arrays (coordinates) that label each point (e.g., 1-dimensional arrays of numbers, datetime objects or strings) |\n", - "| `attrs` | OrderedDict to hold arbitrary metadata pertaining to the dataset. |\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# variables are in our dataset\n", - "ds.data_vars" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# select one variable and pick the first entry along the first axis (time)\n", - "ds.sst[0]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Plot one timestep\n", - "ds.sst[0].plot();" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# dataset dimensions\n", - "ds.dims" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# dataset coordinates\n", - "ds.coords" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "output_scroll" - ] - }, - "outputs": [], - "source": [ - "# dataset global attributes\n", - "ds.attrs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### `DataArray`\n", - "\n", - "The DataArray is xarray's implementation of a labeled, multi-dimensional array.\n", - "It has several key properties:\n", - "\n", - "| Attribute | Description |\n", - "| --------- | ---------------------------------------------------------------------------------------------------------------------------------------- |\n", - "| `data` | `numpy.ndarray` or `dask.array` holding the array's values. |\n", - "| `dims` | dimension names for each axis. For example:(`x`, `y`, `z`) (`lat`, `lon`, `time`). 
|\n", - "| `coords` | a dict-like container of arrays (coordinates) that label each point (e.g., 1-dimensional arrays of numbers, datetime objects or strings) |\n", - "| `attrs` | an `OrderedDict` to hold arbitrary attributes/metadata (such as units) |\n", - "| `name` | an arbitrary name of the array |\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Extract the sst Variable/DataArray\n", - "ds[\"sst\"] # Equivalent to ds.sst" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "# The actual (numpy) array data\n", - "ds.sst.data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# DataArray/Variable dimensions\n", - "ds.sst.dims" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# DataArray/Variable coordinates\n", - "ds.sst.coords" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# DataArray/Variable attributes\n", - "ds.sst.attrs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Coordinates vs dimensions\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- DataArray objects inside a Dataset may have any number of dimensions but are\n", - " presumed to share a common coordinate system.\n", - "- Coordinates can also have any number of dimensions but denote\n", - " constant/independent quantities, unlike the varying/dependent quantities that\n", - " belong in data.\n", - "- A dimension is just a name of an axis, like \"time\"\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.dims" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.coords" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "# extracting a coordinate variable\n", - "ds.sst.lon" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "# extracting a coordinate variable from .coords\n", - "ds.coords[\"time\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Attributes\n", - "\n", - "Attributes can be used to store metadata. What metadata should you store? 
It\n", - "depends on your domain and your needs\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "output_scroll" - ] - }, - "outputs": [], - "source": [ - "# Look at global attributes\n", - "ds.attrs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Look at variable specific attributes\n", - "ds.sst.attrs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Set some arbitrary attribute on a data Variable/DataArray\n", - "ds.sst.attrs[\"my_custom_attribute\"] = \"Foo Bar\"\n", - "ds.sst.attrs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Going Further\n", - "\n", - "- Xarray Documentation on Data Structures:\n", - " https://docs.xarray.dev/en/stable/data-structures.html\n", - "- Xarray Documentation on Reading files and writing files:\n", - " https://docs.xarray.dev/en/stable/user-guide/io.html\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "
\n", - "

Next: Indexing

\n", - "
\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - }, - "toc-autonumbering": true - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/workshops/online-tutorial-series/02_indexing.ipynb b/workshops/online-tutorial-series/02_indexing.ipynb deleted file mode 100644 index 8e7ffe79..00000000 --- a/workshops/online-tutorial-series/02_indexing.ipynb +++ /dev/null @@ -1,351 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Indexing and Selecting Data\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Learning Objectives\n", - "\n", - "- Select data by position using `.isel` with values or slices\n", - "- Select data by label using `.sel` with values or slices\n", - "- Select timeseries data by date/time with values or slices\n", - "- Use nearest-neighbor lookups with `.sel`\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Why do we need label-based indexing?\n", - "\n", - "Scientific data is inherently labeled. For example, time series data includes\n", - "timestamps that label individual periods or points in time, spatial data has\n", - "coordinates (e.g. longitude, latitude, elevation), and model or laboratory\n", - "experiments are often identified by unique identifiers.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr\n", - "\n", - "%config InlineBackend.figure_format='retina'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.open_dataset(\"../../data/sst.mnmean.nc\")\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## NumPy Positional Indexing\n", - "\n", - "When working with numpy, indexing is done by position (slices/ranges/scalars).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "t = ds[\"sst\"].data # numpy array\n", - "t" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "t.shape" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "# extract a time-series for one spatial location\n", - "t[:, 20, 40]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "
\n", - "but wait, what labels go with 20 and 40? Was that lat/lon or lon/lat? Where are the timestamps that go along with this time-series?\n", - "
\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Indexing with xarray\n", - "\n", - "xarray offers extremely flexible indexing routines that combine the best\n", - "features of NumPy and pandas for data selection.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "da = ds[\"sst\"] # Extract data array\n", - "da" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- **NumPy style indexing still works (but preserves the labels/metadata)**\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "da[:, 20, 40]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- **Positional indexing using dimension names**\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.isel(lat=60, lon=40).plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- **Label-based indexing**\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.sel(lat=-32, lon=80).plot();" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.sel(lat=50.0, lon=200.0, time=\"2020\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# demonstrate slicing\n", - "ds.sel(time=slice(\"2019-05\", \"2020-07\"))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- **Nearest Neighbor Lookups**\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "da.sel(lat=52.25, lon=251.8998, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- **All of these indexing methods work on the dataset too:**\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.sel(lat=52.25, lon=251.8998, method=\"nearest\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Vectorized Indexing\n", - "\n", - "Like numpy and pandas, xarray supports indexing many array elements at once in a\n", - "vectorized manner:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# generate a coordinates for a transect of points\n", - "lat_points = xr.DataArray([60, 80, 90], dims=\"points\")\n", - "lon_points = xr.DataArray([250, 250, 250], dims=\"points\")\n", - "lat_points" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "lon_points" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# nearest neighbor selection along the transect\n", - "da.sel(lat=lat_points, lon=lon_points, method=\"nearest\").plot();" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Indexing with `where()`\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "hide-output" - ] - }, - "outputs": [], - "source": [ - "# Let's replace the missing values (nan) with some placeholder\n", - "ds.sst.where(ds.sst.notnull(), -99)" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "## Going Further\n", - "\n", - "- [Xarray Docs - Indexing and Selecting Data](https://docs.xarray.dev/en/stable/indexing.html)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "
\n", - "

Previous: xarray fundamentals

\n", - "

Next: Computation

\n", - "
\n" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/workshops/online-tutorial-series/03_computation.ipynb b/workshops/online-tutorial-series/03_computation.ipynb deleted file mode 100644 index f4ae294e..00000000 --- a/workshops/online-tutorial-series/03_computation.ipynb +++ /dev/null @@ -1,312 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Computation\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Learning Objectives\n", - "\n", - "- Do basic arithmetic with DataArrays and Datasets\n", - "- Perform aggregation (reduction) along one or multiple dimensions of a\n", - " DataArray or Dataset\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Arithmetic Operations\n", - "\n", - "Arithmetic operations with a single DataArray automatically vectorize (like\n", - "numpy) over all array values:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import xarray as xr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds = xr.open_dataset(\"../../data/sst.mnmean.nc\")\n", - "da = ds[\"sst\"]\n", - "da" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da + 273.15" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Aggregation (Reduction) Methods\n", - "\n", - "Xarray supports many of the aggregations methods that numpy has. A partial list\n", - "includes: all, any, argmax, argmin, max, mean, median, min, prod, sum, std, var.\n", - "\n", - "Whereas the numpy syntax would require scalar axes, xarray can use dimension\n", - "names:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_mean = da.mean(dim=\"time\")\n", - "da_mean" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.std(dim=[\"lat\", \"lon\"]).plot()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Broadcasting:\n", - "\n", - "**Broadcasting** allows an operator or a function to act on two or more arrays\n", - "to operate even if these arrays do not have the same shape. 
That said, not all\n", - " the dimensions are compatible for broadcasting; they must follow certain rules.\n", - " The image below illustrates how performing an operation on arrays with\n", - " different coordinates results in automatic broadcasting.\n", - "\n", - "![](../../images/broadcasting.png)\n", - "\n", - "Credit: Stephan Hoyer --\n", - "[xarray ECMWF Python workshop](https://docs.google.com/presentation/d/16CMY3g_OYr6fQplUZIDqVtG-SKZqsG8Ckwoj2oOqepU/)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da.shape, da.dims" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "da_mean.shape, da_mean.dims" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Subtract the mean (2D array) from the original array (3D array)\n", - "x = da - da_mean\n", - "x" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## High-level computation: groupby, resample, rolling, coarsen, weighted\n", - "\n", - "Xarray has some very useful high-level objects that let you do common\n", - "computations:\n", - "\n", - "- `groupby` :\n", - " [Bin data into groups and reduce](https://docs.xarray.dev/en/stable/groupby.html)\n", - "- `resample` :\n", - " [Groupby specialized for time axes. Either downsample or upsample your data](https://docs.xarray.dev/en/stable/user-guide/time-series.html#resampling-and-grouped-operations).\n", - "- `rolling` :\n", - " [Operate on rolling windows of your data, e.g. a running mean](https://docs.xarray.dev/en/stable/user-guide/computation.html#rolling-window-operations)\n", - "- `coarsen` :\n", - " [Downsample your data](https://docs.xarray.dev/en/stable/user-guide/computation.html#coarsen-large-arrays)\n", - "- `weighted` :\n", - " [Weight your data before applying reductions](https://docs.xarray.dev/en/stable/user-guide/computation.html#weighted-array-reductions)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### groupby\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# seasonal groups\n", - "ds.groupby(\"time.season\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# day of the week groups\n", - "ds.groupby(\"time.dayofweek\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# compute a seasonal mean\n", - "seasonal_mean = ds.groupby(\"time.season\").mean()\n", - "seasonal_mean" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# The seasons are out of order (they are alphabetically sorted). This is a common annoyance. 
The solution is to use .reindex\n", - "seasonal_mean = seasonal_mean.reindex(season=[\"DJF\", \"MAM\", \"JJA\", \"SON\"])\n", - "seasonal_mean" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "seasonal_mean.sst.plot(col=\"season\", robust=True, cmap=\"turbo\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### resample\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# resample to bi-monthly frequency\n", - "ds.sst.resample(time=\"2MS\").mean()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### rolling window operations\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# A rolling mean with a window size of 7\n", - "ds.sst.rolling(time=7).mean()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Going Further\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "
\n", - "

Computation with xarray (extended version): Computation with xarray notebook

\n", - "

Plotting and visualization (extended version): Plotting and Visualization notebook

\n", - "
\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": true, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/workshops/online-tutorial-series/README.md b/workshops/online-tutorial-series/README.md deleted file mode 100644 index e777722c..00000000 --- a/workshops/online-tutorial-series/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# Xarray Online Tutorial 2020 - -Presented October 6 2020 by: - -- Anderson Banihirwe (NCAR) -- Deepak Cherian (NCAR) -- Martin Durant (Anaconda) - -This 90 minute tutorial covers fundamentals of Xarray, indexing, computation + visualization. - -Video Recoding: -https://www.youtube.com/watch?v=a339Q5F48UQ&list=PLNemzZpJM7lUu_iGP_lA2m7SeSUwKSIvR&index=8 - -## Schedule - -_These are the rough timings for the tutorial_: -| Topic | Time (min) | -| - | - | -| Xarray fundamentals | 30 | -| Indexing | 15 | -| Break | 5 | -| Computation & visualization | 40 | diff --git a/workshops/scipy2023/README.md b/workshops/scipy2023/README.md deleted file mode 100644 index 1ece6fe9..00000000 --- a/workshops/scipy2023/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# SciPy 2023 - -## Xarray: Friendly, Interactive, and Scalable Scientific Data Analysis - -Organized by: - -- Deepak Cherian (National Center for Atmospheric Research) -- Scott Henderson (Univ. Washington) -- Jessica Scheick (Univ. New Hampshire) -- Negin Sobhani (National Center for Atmospheric Research) -- Tom Nicholas (Lamont-Doherty Earth Observatory) -- Anderson Banihirwe (CarbonPlan) -- Don Setiawan (Univ. Washington) - -## Instructions - -:::{note} -You can access a recording of this tutorial [here](https://www.youtube.com/watch?v=L4FXcIOMlsY) -::: - -### Running Locally - -See instructions to set up the environment for running the tutorial material [here](get-started). - -### Github Codespaces - -This tutorial is available to run within [Github Codespaces](https://github.com/features/codespaces) - "a development environment that's hosted in the cloud" - -[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://github.com/codespaces/new/xarray-contrib/xarray-tutorial/tree/main?devcontainer_path=.devcontainer%2Fscipy2023%2Fdevcontainer.json) - -☝️ Click the button above to go to options window to launch a Github codespace. - -A codespace is a development environment that's hosted in the cloud. -You can choose from a selection of virtual machine types: 2 cores - 4 GB RAM - 32 GB storage, and 4 cores - 8 GB RAM - 32GB storage. -Additionally, you are able to chose from various Dev container configuration, for this specific workshop, please ensure that `Scipy2023` is selected. -GitHub currently gives every user [120 vCPU hours per month for free](https://docs.github.com/en/billing/managing-billing-for-github-codespaces/about-billing-for-github-codespaces#monthly-included-storage-and-core-hours-for-personal-accounts), beyond that you must pay. 
**So be sure to explicitly stop or shut down your codespace when you are done by going to this page (https://github.com/codespaces).** - -Once your codespace is launched, the following happens: - -- [Visual Studio Code](https://code.visualstudio.com/) Interface will open up within your browser. -- A built in terminal will open and it will execute `jupyter lab` automatically. -- Once you see a url to click within the terminal, simply `cmd + click` the given url. -- This will open up another tab in your browser, leading to a [Jupyter Lab](https://jupyterlab.readthedocs.io/en/latest/) Interface. - -## Outline - -```{dropdown} Introduction -{doc}`../../overview/get-started` -``` - -```{dropdown} Indexing -{doc}`../../fundamentals/02.1_indexing_Basic` - -{doc}`../../intermediate/indexing/advanced-indexing` - -{doc}`../../intermediate/indexing/boolean-masking-indexing` -``` - -```{dropdown} Computational Patterns -{doc}`../../intermediate/01-high-level-computation-patterns` -``` - -```{dropdown} Wrapping other arrays: dask -{doc}`../../intermediate/xarray_and_dask` -``` - -```{dropdown} Wrapping custom computation -{doc}`../../advanced/apply_ufunc/simple_numpy_apply_ufunc` - -{doc}`../../advanced/apply_ufunc/core-dimensions` - -{doc}`../../advanced/apply_ufunc/complex-output-numpy` - -{doc}`Explore the remaining material <../../advanced/apply_ufunc/apply_ufunc>` -``` diff --git a/workshops/scipy2023/index.ipynb b/workshops/scipy2023/index.ipynb deleted file mode 100644 index 3072d80e..00000000 --- a/workshops/scipy2023/index.ipynb +++ /dev/null @@ -1,67 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "\n", - "\n", - "\n", - "# Welcome to the Xarray SciPy 2023 Tutorial! \n", - "\n", - "**Xarray**: *Friendly, Interactive, and Scalable Scientific Data Analysis*\n", - "\n", - "\n", - "This *4-hour* workshop will explore content from [the Xarray tutorial](https://tutorial.xarray.dev), which contains a comprehensive collection of hands-on tutorial Jupyter Notebooks. We won't cover it all today, but instead will review a curated set of examples that will prepare you for increasingly complex real-world data analysis tasks!\n", - "\n", - "## Schedule \n", - "*Times in Central Daylight (Austin, TX)\n", - "\n", - "Use the links to navigate to the right notebooks.\n", - "\n", - "| Topic | Time | Notebook Links | \n", - "| :- | - | - | \n", - "| Introduction and Setup | 1:30 (20 min) | --- | \n", - "| Indexing | 1:50 (30 min) | [Quick Introduction to Indexing](../../fundamentals/02.1_indexing_Basic.ipynb)
[Advanced and Vectorized Indexing](../../intermediate/indexing/advanced-indexing.ipynb)
[Boolean Indexing & Masking](../../intermediate/indexing/boolean-masking-indexing.ipynb) | \n", - "| *10 minute Break* \n", - "| Computational Patterns | 2:30 (50 min) | [Computation Patterns](../../intermediate/01-high-level-computation-patterns.ipynb) | \n", - "| *10 minute Break* | \n", - "| Wrapping other arrays | 3:30 (50 min) | [Xarray and Dask](../../intermediate/xarray_and_dask.ipynb) | \n", - "| *10 minute Break* | \n", - "| Wrapping custom computation (apply_ufunc) | 4:30 (30 min)


5:00 (30 min) | [A gentle introduction](../../advanced/apply_ufunc/simple_numpy_apply_ufunc.ipynb)
[Core dimensions](../../advanced/apply_ufunc/core-dimensions.ipynb)
[Handling complex output](../../advanced/apply_ufunc/complex-output-numpy.ipynb)
Explore the rest of the material |\n", - "| | **End 5:30** | |\n", - "\n", - "\n", - "## Notes\n", - "\n", - "- Remember to select `global-global-xarray environment` for the notebooks when prompted.\n", - "\n", - "- These notebooks render best if you choose the \"Medium instance for Xarray\" option at login.\n", - "\n", - "```{tip}\n", - "Check that you see \"tip\" in a green bar 👆🏾.\n", - "```\n", - "\n", - "## Thanks for attending!\n", - "\n", - "Please continue to explore the subfolders in the JupyterLab File Browser for additional tutorial notebooks to run, or read the rendered notebooks at [https://tutorial.xarray.dev](https://tutorial.xarray.dev)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/workshops/scipy2024/index.ipynb b/workshops/scipy2024/index.ipynb deleted file mode 100644 index 18041b98..00000000 --- a/workshops/scipy2024/index.ipynb +++ /dev/null @@ -1,104 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# SciPy 2024\n", - "\n", - "## Welcome to the Xarray SciPy 2024 Tutorial! \n", - "\n", - "\n", - "\n", - "**Xarray**: *Friendly, Interactive, and Scalable Scientific Data Analysis*\n", - "\n", - "July 8, 13:30–17:30 (US/Pacific), Tacoma Convention Center Ballroom B/C\n", - "\n", - "This *4-hour* workshop will explore content from [the Xarray tutorial](https://tutorial.xarray.dev), which contains a comprehensive collection of hands-on tutorial Jupyter Notebooks. We will review a curated set of examples that will prepare you for increasingly complex real-world data analysis tasks!\n", - "\n", - ":::{admonition} Learning Goals\n", - "- Orient yourself to Xarray resources to continue on your Xarray journey!\n", - "- Effectively use Xarray’s multidimensional indexing and computational patterns\n", - "- Understand how Xarray integrates with other libraries in the scientific Python ecosystem\n", - "- Learn how to leverage Xarray’s powerful backend and extension capabilities to customize workflows and open a variety of scientific datasets\n", - ":::\n", - "\n", - "## Schedule \n", - "\n", - "*Times in US/Pacific Timezone (Tacoma, WA)\n", - "\n", - "Use the links to navigate to the right notebooks.\n", - "\n", - "| Topic | Time | Notebook Links | \n", - "| :- | - | - | \n", - "| Introduction and Setup | 1:30 (10 min) | --- | \n", - "| The Xarray Data Model | 1:40 (40 min) | [Data structures](../../fundamentals/01_data_structures.md)
[Basic Indexing](../../fundamentals/02.1_indexing_Basic.ipynb) | \n", - "| *10 minute Break* \n", - "| Indexing & Computational Patterns | 2:30 (50 min) | [Advanced Indexing](../../intermediate/indexing/indexing.md)
[Computational Patterns](../../intermediate/01-high-level-computation-patterns.ipynb)
| \n", - "| *10 minute Break* | \n", - "| Xarray Integrations and Extensions | 3:30 (50 min) | [The Xarray Ecosystem](../../intermediate/xarray_ecosystem.ipynb) | \n", - "| *10 minute Break* | \n", - "| Backends & Remote data| 4:30 (50 min) | [Remote Data](../../intermediate/remote_data/remote-data.ipynb) |\n", - "| | End 5:30 | |\n", - "\n", - "\n", - "### Tutorial Setup\n", - "\n", - "We recommend using a preconfigured GitHub Codespace for this tutorial. This section describes how to access and manage a GitHub Codespace.\n", - "\n", - ":::{note}\n", - "If you prefer to work on your own computer, refer to instructions in the [Getting Started Section](../../overview/get-started.md)\n", - ":::\n", - "\n", - "This tutorial is available to run within [Github Codespaces](https://github.com/features/codespaces) - \"a development environment that's hosted in the cloud\".\n", - "\n", - "[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://github.com/codespaces/new/xarray-contrib/xarray-tutorial/tree/main?devcontainer_path=.devcontainer%2Fscipy2024%2Fdevcontainer.json)\n", - "\n", - "☝️ Click the button above to go to options window to launch a Github Codespace.\n", - "\n", - "GitHub currently gives every user [120 vCPU-hours per month for free](https://docs.github.com/en/billing/managing-billing-for-github-codespaces/about-billing-for-github-codespaces#monthly-included-storage-and-core-hours-for-personal-accounts), beyond that you must pay. **So be sure to explicitly stop your Codespace when you are done by going to this page (https://github.com/codespaces).**\n", - "\n", - "Once your Codespace is launched, the following happens:\n", - "\n", - "- [Visual Studio Code](https://code.visualstudio.com/) Interface will open up within your browser.\n", - "- A built in terminal will open and it will execute `jupyter lab` automatically.\n", - "- Once you see a url to click within the terminal, simply `cmd + click` the given url.\n", - "- This will open up another tab in your browser, leading to a [Jupyter Lab](https://jupyterlab.readthedocs.io/en/latest/) Interface.\n", - "\n", - ":::{warning}\n", - "Consider Codespaces as ephemeral environments. You may lose your connection and any edits you make.\n", - ":::\n", - "\n", - "\n", - "## Thanks for attending!\n", - "\n", - "Please continue to explore the subfolders in the JupyterLab File Browser for additional tutorial notebooks to run, or read the rendered notebooks at [https://tutorial.xarray.dev](https://tutorial.xarray.dev)\n", - "\n", - "### SciPy 2024 Organized by:\n", - "\n", - "- Scott Henderson (Univ. Washington)\n", - "- Jessica Scheick (Univ. 
New Hampshire)\n", - "- Negin Sobhani (National Center for Atmospheric Research)\n", - "- Tom Nicholas ([C]Worthy)\n", - "- Max Jones (CarbonPlan)\n", - "- Wietze Suijker (Space Intelligence)" - ] - } - ], - "metadata": { - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/workshops/thinking-like-xarray/README.md b/workshops/thinking-like-xarray/README.md deleted file mode 100644 index 3d9fdc70..00000000 --- a/workshops/thinking-like-xarray/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Thinking like Xarray 2022 - -Presented March 2022 for the [NCAR Python Seminar Series](https://ncar.github.io/esds/posts/2022/Thinking-with-Xarray/) by Deepak Cherian - -This 60-minute, intermediate-level lesson covers common high-level computing patterns. The version of the notebook here includes material inserted during the presentation. - -Video Recording: -https://www.youtube.com/watch?v=TSw3GF_d2y8&list=PLNemzZpJM7lUu_iGP_lA2m7SeSUwKSIvR