From d33f7a52694a20430db8fde07471469e430432d7 Mon Sep 17 00:00:00 2001 From: daquinteroflex Date: Tue, 4 Nov 2025 14:55:46 +0100 Subject: [PATCH 1/3] chore: python-client autorelease flow --- .github/workflows/README.md | 212 +++++++ .../tidy3d-docs-sync-readthedocs-repo.yml | 70 +- .../tidy3d-python-client-create-tag.yml | 116 ++++ .../workflows/tidy3d-python-client-deploy.yml | 235 +++++++ .../tidy3d-python-client-develop-cli.yml | 35 +- .../tidy3d-python-client-release-tests.yml | 224 +++++++ .../tidy3d-python-client-release.yml | 598 ++++++++++++++---- .../tidy3d-python-client-submodules-test.yml | 104 --- .../workflows/tidy3d-python-client-tests.yml | 209 +++++- .../tidy3d-python-client-update-lockfile.yml | 20 + dev.Dockerfile | 7 + docs/development/release/version.rst | 153 +++++ poetry.lock | 404 ++++++++---- pyproject.toml | 9 +- 14 files changed, 2026 insertions(+), 370 deletions(-) create mode 100644 .github/workflows/README.md create mode 100644 .github/workflows/tidy3d-python-client-create-tag.yml create mode 100644 .github/workflows/tidy3d-python-client-deploy.yml create mode 100644 .github/workflows/tidy3d-python-client-release-tests.yml delete mode 100644 .github/workflows/tidy3d-python-client-submodules-test.yml create mode 100644 docs/development/release/version.rst diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000000..d2f16aad67 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,212 @@ +# GitHub Actions Workflows + +This directory contains the CI/CD workflows for the Tidy3D Python client. + +## Release Workflows + +The release process is composed of modular, manually-triggered workflows: + +### Main Release Workflow + +**`tidy3d-python-client-release.yml`** + +Orchestrates the complete release process through three stages: + +1. **Tag Creation** - Calls `tidy3d-python-client-create-tag.yml` +2. **Testing** - Validates code quality, runs test suites +3. 
**Deployment** - Calls `tidy3d-python-client-deploy.yml` if tests pass + +**Trigger**: Manual via GitHub Actions UI + +**Key Parameters**: +- `release_tag` (required): Version tag (e.g., `v2.10.0`, `v2.10.0rc1`) +- `release_type`: `draft` (testing, no PyPI) or `final` (production, publishes to PyPI) +- `workflow_control`: Stage control for resuming partial releases + - `start-tag`: Full release from beginning (default) + - `start-tests`: Resume from testing + - `start-deploy`: Resume from deployment + - `only-tag`, `only-tests`, `only-tag-tests`, `only-tag-deploy`: Run specific stages +- `deploy_testpypi`, `deploy_pypi`: Control deployment targets +- `client_tests`, `cli_tests`, `submodule_tests`: Enable/disable specific test suites + +**Release Types**: +- **Draft**: Creates GitHub draft release, no PyPI publish, for validation +- **Final**: Publishes to PyPI, creates public release, syncs branches and documentation +- **Non-RC Final** (e.g., `v2.10.0`): Automatically runs submodule tests, pushes to `latest` branch +- **RC Release** (e.g., `v2.10.0rc1`): Pre-release candidate, no `latest` push + +### Component Workflows + +**`tidy3d-python-client-create-tag.yml`** + +Creates and pushes git tags with validation: +- For `release_type: final`, validates `pyproject.toml` version matches tag +- Supports tag recreation (deletes existing if present) +- Tag format: `v{major}.{minor}.{patch}[rc{num}]` +- Can be called independently or as part of release workflow + +**`tidy3d-python-client-deploy.yml`** + +Handles package deployment: +- **TestPyPI**: Validation before production (`deploy_testpypi: true`) +- **PyPI**: Production package distribution (`deploy_pypi: true`) +- Creates GitHub releases (draft or final) +- Syncs documentation to ReadTheDocs mirror +- Syncs branches for final releases (maintains `latest`, `develop` consistency) +- Can be called independently or as part of release workflow + +**`tidy3d-python-client-release-tests.yml`** + +Specialized release 
validation tests: +- **Submodule tests**: Integration tests with dependent packages +- **Version match tests**: Validates version consistency +- Auto-enabled for non-RC final releases +- Can be toggled via workflow parameters + +## Test Workflows + +**`tidy3d-python-client-tests.yml`** + +Main CI test workflow that runs on PRs, manual triggers, and workflow calls: + +- **Code quality**: Linting (ruff), type checking (mypy), schema validation, security (zizmor) +- **Local tests**: Fast tests on self-hosted runners (Python 3.10, 3.13) +- **Remote tests**: Full matrix on GitHub runners (Python 3.10-3.13, Windows/Linux/macOS) +- **PR review tests**: Branch naming, commit message validation +- **Workflow validation**: Centralized job that validates all test results + +Test scope determined dynamically based on: +- PR approval state +- Code changes (files modified) +- Manual workflow inputs +- Workflow call parameters + +**`tidy3d-python-client-develop-cli.yml`** + +Tests for the Tidy3D develop CLI functionality: +- Can be called as part of main test workflow +- Validates CLI commands and functionality + +## Maintenance Workflows + +**`tidy3d-python-client-daily.yml`** + +Scheduled daily workflow for ongoing validation. + +**`tidy3d-python-client-update-lockfile.yml`** + +Updates Poetry lockfile with latest dependencies: +- Configured with AWS CodeArtifact authentication for private dependencies +- Uses secrets: `AWS_CODEARTIFACT_ACCESS_KEY`, `AWS_CODEARTIFACT_ACCESS_SECRET` +- Creates PR with updated lockfile + +## Documentation Workflows + +**`tidy3d-docs-sync-readthedocs-repo.yml`** + +Syncs documentation to ReadTheDocs mirror repository: + +- **Manual trigger**: Specify `source_ref` and optional `target_ref` +- **Workflow call**: Invoked by release workflow for automated sync +- **Outputs**: `workflow_success` and `synced_ref` for validation +- Supports custom source/target ref mapping + +## Best Practices + +### For Releases + +1. 
**Always test first**: Run `draft` release before `final` +2. **Use TestPyPI**: Enable `deploy_testpypi` to validate package before PyPI +3. **Version matching**: Update `pyproject.toml` version before final releases +4. **RC versioning**: Use RC versions (`v2.10.0rc1`) for pre-release testing +5. **Resume capability**: Use `workflow_control` stages to resume failed releases +6. **Monitor validation**: Check `workflow-validation` job for centralized test status + +### Version Validation + +For `release_type: final`, the tag creation validates: + +```bash +# pyproject.toml must contain: +version = "2.10.0" + +# To match release tag (minus 'v' prefix): +release_tag: v2.10.0 +``` + +Mismatches will fail with descriptive error. + +### Recommended Release Flow + +1. **Draft + TestPyPI** - Initial validation + ```yaml + release_tag: v2.10.0rc1 + release_type: draft + deploy_testpypi: true + ``` + +2. **Final RC** - Release candidate + ```yaml + release_tag: v2.10.0rc1 + release_type: final + deploy_pypi: true + ``` + +3. **Final Stable** - Production release + ```yaml + release_tag: v2.10.0 + release_type: final + deploy_pypi: true + # Submodule tests auto-enabled (non-RC) + ``` + +### Troubleshooting + +**Version Mismatch** +``` +Version mismatch! + pyproject.toml: 2.9.0 + Release tag: 2.10.0 +``` +Solution: Update `pyproject.toml` version to match release tag. + +**Tag Already Exists** + +The workflow automatically handles by deleting and recreating. + +**Test Failures Block Deployment** + +Check `workflow-validation` job status. Use `workflow_control: start-deploy` to bypass after fixing externally. 
+ +**Resume from Failure** + +If deployment fails: +```yaml +release_tag: v2.10.0 +release_type: final +workflow_control: start-deploy +``` + +## Workflow Outputs + +### Release Workflow +- `tag_created`: Whether tag was successfully created +- `workflow_success`: Overall workflow success status +- `synced_ref`: Documentation ref that was synced + +### Docs Sync Workflow +- `workflow_success`: Sync success status +- `synced_ref`: The ref synced to the mirror + +## AWS CodeArtifact Integration + +Private dependencies are accessed via AWS CodeArtifact: +- Authentication in `tidy3d-python-client-update-lockfile.yml` +- Credentials injected via GitHub secrets +- Automatically configured for Poetry operations + +## Related Documentation + +- Release workflow details: `docs/development/release/version.rst` +- Development guidelines: `AGENTS.md` +- General repository info: `README.md` diff --git a/.github/workflows/tidy3d-docs-sync-readthedocs-repo.yml b/.github/workflows/tidy3d-docs-sync-readthedocs-repo.yml index 1126f70d2e..981f493417 100644 --- a/.github/workflows/tidy3d-docs-sync-readthedocs-repo.yml +++ b/.github/workflows/tidy3d-docs-sync-readthedocs-repo.yml @@ -3,20 +3,35 @@ name: "docs/tidy3d/sync-to-readthedocs-repo" on: workflow_dispatch: inputs: - target_branch: - description: 'Target mirror repo branch. Defaults to source branch/tag.' + source_ref: + description: 'Source ref (branch/tag) to sync. Defaults to current ref.' required: false type: string - push: - branches: - - main - - latest - - develop - - 'pre/*' - - 'demo/*' - tags: - - 'v*' - - 'demo/*' + default: '' + target_ref: + description: 'Target mirror repo ref. Defaults to source ref.' + required: false + type: string + default: '' + + workflow_call: + inputs: + source_ref: + description: 'Source ref (branch/tag) to sync. Required for workflow_call.' + required: true + type: string + target_ref: + description: 'Target mirror repo ref. Defaults to source ref.' 
+ required: false + type: string + default: '' + outputs: + workflow_success: + description: 'Sync workflow success status' + value: ${{ jobs.build-and-deploy.result == 'success' }} + synced_ref: + description: 'The ref that was synced to the mirror' + value: ${{ jobs.build-and-deploy.outputs.synced_ref }} permissions: contents: read @@ -30,16 +45,26 @@ jobs: - id: extract name: Extract branch or tag name shell: bash + env: + INPUT_SOURCE_REF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.source_ref || inputs.source_ref }} run: | - REF_NAME="${GITHUB_REF#refs/*/}" + if [[ -n "$INPUT_SOURCE_REF" ]]; then + REF_NAME="$INPUT_SOURCE_REF" + echo "Using provided source_ref: $REF_NAME" + else + REF_NAME="${GITHUB_REF#refs/*/}" + echo "Extracted ref from GITHUB_REF: $REF_NAME" + fi echo "ref_name=$REF_NAME" >> $GITHUB_OUTPUT - echo "Extracted ref: $REF_NAME" + echo "Final ref: $REF_NAME" build-and-deploy: permissions: contents: write needs: extract_branch_or_tag runs-on: ubuntu-latest + outputs: + synced_ref: ${{ steps.sync-result.outputs.synced_ref }} steps: - name: full-checkout uses: actions/checkout@v4 @@ -52,20 +77,25 @@ jobs: persist-credentials: true - name: push-mirror-repo + id: sync-result env: GITHUB_TOKEN: ${{ secrets.GH_PAT }} SOURCE_REF: ${{ needs.extract_branch_or_tag.outputs.ref_name }} - TARGET_BRANCH_INPUT: ${{ github.event.inputs.target_branch }} + TARGET_REF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.target_ref || inputs.target_ref }} run: | echo "Source reference: $SOURCE_REF" git pull origin "$SOURCE_REF" git remote add mirror https://github.com/flexcompute-readthedocs/tidy3d-docs.git - if [[ -n "$TARGET_BRANCH_INPUT" && "${{ github.event_name }}" == "workflow_dispatch" ]]; then - echo "Manual trigger detected. Pushing contents of '$SOURCE_REF' to remote branch '$TARGET_BRANCH_INPUT'." 
- git push mirror "$SOURCE_REF:refs/heads/$TARGET_BRANCH_INPUT" --force + if [[ -n "$TARGET_REF" ]]; then + echo "Pushing contents of '$SOURCE_REF' to remote ref '$TARGET_REF'." + git push mirror "$SOURCE_REF:refs/heads/$TARGET_REF" --force + SYNCED_REF="$TARGET_REF" else - echo "Automatic trigger or manual run without target. Pushing '$SOURCE_REF' to the same ref on the mirror." - # This preserves the original behavior: pushes a branch to a branch, or a tag to a tag. + echo "Pushing '$SOURCE_REF' to the same ref on the mirror." git push mirror "$SOURCE_REF" --force + SYNCED_REF="$SOURCE_REF" fi + + echo "synced_ref=$SYNCED_REF" >> $GITHUB_OUTPUT + echo "? Successfully synced to: $SYNCED_REF" diff --git a/.github/workflows/tidy3d-python-client-create-tag.yml b/.github/workflows/tidy3d-python-client-create-tag.yml new file mode 100644 index 0000000000..6e83f26daa --- /dev/null +++ b/.github/workflows/tidy3d-python-client-create-tag.yml @@ -0,0 +1,116 @@ +name: "public/tidy3d/python-client-create-tag" + +on: + workflow_dispatch: + inputs: + release_tag: + description: 'Release Tag (e.g., v2.10.0, v2.10.0rc1)' + required: true + type: string + + release_type: + description: 'Release Type (draft=test, final=PyPI)' + type: choice + default: 'draft' + required: false + options: + - draft + - final + + workflow_call: + inputs: + release_tag: + description: 'Release Tag (e.g., v2.10.0, v2.10.0rc1)' + required: true + type: string + + release_type: + description: 'Release Type (draft=test, final=PyPI)' + type: string + default: 'draft' + required: false + + outputs: + tag_created: + description: 'Whether tag was successfully created' + value: ${{ jobs.create-and-push-tag.result == 'success' }} + +permissions: + contents: write + +jobs: + create-and-push-tag: + runs-on: ubuntu-latest + env: + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + RELEASE_TYPE: ${{ github.event_name == 'workflow_dispatch' && 
github.event.inputs.release_type || inputs.release_type }} + steps: + - name: checkout-code + uses: actions/checkout@v4 + with: + fetch-depth: 1 + persist-credentials: true + token: ${{ secrets.GH_PAT }} + + - name: validate-version-match + if: env.RELEASE_TYPE == 'final' + run: | + set -e + echo "Validating version match with pyproject.toml..." + + # Extract version from pyproject.toml + PYPROJECT_VERSION=$(grep '^version = ' pyproject.toml | sed 's/version = "\(.*\)"/\1/') + echo "pyproject.toml version: $PYPROJECT_VERSION" + + # Strip 'v' prefix from release tag + TAG_VERSION="${RELEASE_TAG#v}" + echo "Release tag version: $TAG_VERSION" + + if [[ "$TAG_VERSION" != "$PYPROJECT_VERSION" ]]; then + echo "Version mismatch!" + echo " pyproject.toml: $PYPROJECT_VERSION" + echo " Release tag: $TAG_VERSION" + echo "" + echo "Please update pyproject.toml version to match the release tag." + exit 1 + fi + + echo "Version matches: $PYPROJECT_VERSION" + + - name: retag + run: | + set -e + echo "Creating and pushing tag: $RELEASE_TAG" + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + # Fetch existing tags + git fetch --tags origin || true + + # Check if tag exists + if git rev-parse "$RELEASE_TAG" >/dev/null 2>&1; then + echo "Tag $RELEASE_TAG already exists" + echo "Deleting existing tag locally and remotely..." + git tag -d "$RELEASE_TAG" || true + git push --delete origin "$RELEASE_TAG" || true + else + echo "Tag $RELEASE_TAG does not exist yet" + fi + + # Create new tag + echo "Creating tag $RELEASE_TAG at commit $(git rev-parse HEAD)" + git tag "$RELEASE_TAG" + + # Push tag + echo "Pushing tag to origin..." 
+ git push origin "$RELEASE_TAG" + + echo "Successfully created and pushed tag $RELEASE_TAG" + + - name: confirm-push + run: | + echo "Tag creation complete" + echo "Tag: $RELEASE_TAG" + echo "Commit: $(git rev-parse HEAD)" + echo "Pushed to: origin" diff --git a/.github/workflows/tidy3d-python-client-deploy.yml b/.github/workflows/tidy3d-python-client-deploy.yml new file mode 100644 index 0000000000..ff4a07d4c9 --- /dev/null +++ b/.github/workflows/tidy3d-python-client-deploy.yml @@ -0,0 +1,235 @@ +name: "public/tidy3d/python-client-deploy" + +on: + workflow_dispatch: + inputs: + release_tag: + description: 'Release tag to deploy (e.g., v2.10.0, v2.10.0rc1)' + required: true + type: string + + deploy_testpypi: + description: 'Deploy to TestPyPI (recommended first step)' + type: boolean + default: false + + deploy_pypi: + description: 'Deploy to production PyPI' + type: boolean + default: false + + workflow_call: + inputs: + release_tag: + description: 'Release tag to deploy' + required: true + type: string + deploy_testpypi: + type: boolean + default: false + deploy_pypi: + type: boolean + default: false + +permissions: + contents: read + +jobs: + validate-inputs: + name: validate-deployment-inputs + runs-on: ubuntu-latest + outputs: + release_tag: ${{ env.RELEASE_TAG }} + deploy_testpypi: ${{ env.DEPLOY_TESTPYPI }} + deploy_pypi: ${{ env.DEPLOY_PYPI }} + env: + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + DEPLOY_TESTPYPI: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_testpypi || inputs.deploy_testpypi }} + DEPLOY_PYPI: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_pypi || inputs.deploy_pypi }} + steps: + - name: validate-inputs + run: | + set -e + echo "=== Deployment Configuration ===" + echo "Release tag: $RELEASE_TAG" + echo "Deploy to TestPyPI: $DEPLOY_TESTPYPI" + echo "Deploy to PyPI: $DEPLOY_PYPI" + echo "" + + # Validate at 
least one target is selected + if [[ "$DEPLOY_TESTPYPI" != "true" && "$DEPLOY_PYPI" != "true" ]]; then + echo "Error: At least one deployment target must be selected" + exit 1 + fi + + # Validate tag format + TAG_REGEX='^v[0-9]+\.[0-9]+\.[0-9]+(rc[0-9]+)?$' + if [[ ! "$RELEASE_TAG" =~ $TAG_REGEX ]]; then + echo " Warning: Tag format doesn't match standard pattern v{major}.{minor}.{patch}[rc{num}]" + echo " Tag: $RELEASE_TAG" + echo " Continuing anyway..." + fi + + echo "Validation passed" + + build-package: + name: build-distribution-package + needs: validate-inputs + runs-on: ubuntu-latest + steps: + - name: checkout-tag + uses: actions/checkout@v4 + with: + ref: ${{ needs.validate-inputs.outputs.release_tag }} + persist-credentials: false + + - name: setup-python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: install-poetry + run: | + python -m pip install --upgrade pip + python -m pip install poetry + + - name: build-package + env: + RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }} + run: | + echo "Building package from tag ${RELEASE_TAG}..." 
+ poetry build + echo "" + echo "Build artifacts:" + ls -lh dist/ + echo "" + echo "Package built successfully" + + - name: upload-artifacts + uses: actions/upload-artifact@v4 + with: + name: dist-${{ needs.validate-inputs.outputs.release_tag }} + path: dist/ + retention-days: 7 + + deploy-testpypi: + name: deploy-to-testpypi + needs: [validate-inputs, build-package] + if: needs.validate-inputs.outputs.deploy_testpypi == 'true' + runs-on: ubuntu-latest + steps: + - name: download-artifacts + uses: actions/download-artifact@v4 + with: + name: dist-${{ needs.validate-inputs.outputs.release_tag }} + path: dist/ + + - name: setup-python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: install-twine + run: | + python -m pip install --upgrade pip + python -m pip install twine + + - name: publish-to-testpypi + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.TEST_PYPI_API_TOKEN }} + run: | # zizmor: ignore[use-trusted-publishing] + echo "Publishing to TestPyPI..." 
+ python -m twine upload \ + --repository-url https://test.pypi.org/legacy/ \ + --verbose \ + dist/* + echo "" + echo "Successfully published to TestPyPI" + echo "View at: https://test.pypi.org/project/tidy3d/" + + deploy-pypi: + name: deploy-to-pypi + needs: [validate-inputs, build-package] + if: | + always() && + needs.validate-inputs.outputs.deploy_pypi == 'true' && + needs.build-package.result == 'success' + runs-on: ubuntu-latest + steps: + - run: echo "hi" + # - name: download-artifacts + # uses: actions/download-artifact@v4 + # with: + # name: dist-${{ needs.validate-inputs.outputs.release_tag }} + # path: dist/ + + # - name: setup-python + # uses: actions/setup-python@v5 + # with: + # python-version: '3.10' + + # - name: install-twine + # run: | + # python -m pip install --upgrade pip + # python -m pip install twine + + # - name: publish-to-pypi + # env: + # TWINE_USERNAME: __token__ + # TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + # run: | # zizmor: ignore[use-trusted-publishing] + # echo "Publishing to production PyPI..." 
+ # python -m twine upload \ + # --repository pypi \ + # --verbose \ + # dist/* + # echo "" + # echo "Successfully published to PyPI" + # echo " View at: https://pypi.org/project/tidy3d/" + + deployment-summary: + name: deployment-summary + needs: [validate-inputs, build-package, deploy-testpypi, deploy-pypi] + if: always() + runs-on: ubuntu-latest + steps: + - name: generate-summary + env: + RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }} + BUILD_RESULT: ${{ needs.build-package.result }} + TESTPYPI_RESULT: ${{ needs.deploy-testpypi.result }} + PYPI_RESULT: ${{ needs.deploy-pypi.result }} + DEPLOY_TESTPYPI: ${{ needs.validate-inputs.outputs.deploy_testpypi }} + DEPLOY_PYPI: ${{ needs.validate-inputs.outputs.deploy_pypi }} + run: | + echo "=== Deployment Summary ===" + echo "Release Tag: ${RELEASE_TAG}" + echo "" + echo "Build Package: ${BUILD_RESULT}" + echo "TestPyPI: ${TESTPYPI_RESULT}" + echo "PyPI: ${PYPI_RESULT}" + echo "" + + # Check for failures + if [[ "${BUILD_RESULT}" == "failure" ]]; then + echo "Build failed" + exit 1 + fi + + # Check if any selected deployment failed + failed=false + if [[ "${DEPLOY_TESTPYPI}" == "true" && "${TESTPYPI_RESULT}" == "failure" ]]; then + echo "TestPyPI deployment failed" + failed=true + fi + if [[ "${DEPLOY_PYPI}" == "true" && "${PYPI_RESULT}" == "failure" ]]; then + echo "PyPI deployment failed" + failed=true + fi + + if [[ "$failed" == "true" ]]; then + exit 1 + fi + + echo "All selected deployments completed successfully" diff --git a/.github/workflows/tidy3d-python-client-develop-cli.yml b/.github/workflows/tidy3d-python-client-develop-cli.yml index 683727fc43..f561e1aa9b 100644 --- a/.github/workflows/tidy3d-python-client-develop-cli.yml +++ b/.github/workflows/tidy3d-python-client-develop-cli.yml @@ -2,8 +2,28 @@ name: "public/tidy3d/python-client-develop-cli" on: workflow_dispatch: + inputs: + release_tag: + description: 'Release Tag (v2.10.0, v2.10.0rc1)' + required: false + type: string + default: '' 
+ + workflow_call: + inputs: + release_tag: + description: 'Release Tag (v2.10.0, v2.10.0rc1)' + required: false + type: string + default: '' + outputs: + workflow_success: + description: 'CLI tests workflow success status' + value: ${{ jobs.test-dev-commands.result == 'success' }} + schedule: - cron: '0 2 * * *' # Runs every day at 2:00 AM UTC + push: branches: - develop @@ -14,16 +34,27 @@ permissions: jobs: test-dev-commands: + env: + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} strategy: matrix: os: [ubuntu-latest, windows-latest, macos-latest] runs-on: ${{ matrix.os }} steps: - - name: Checkout code + - name: Checkout code (HEAD) + if: ${{ !env.RELEASE_TAG }} uses: actions/checkout@v4 with: - ref: develop + fetch-depth: 1 + submodules: false + persist-credentials: false + + - name: Checkout code (TAG) + if: ${{ env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + ref: refs/tags/${{ env.RELEASE_TAG }} fetch-depth: 1 submodules: false persist-credentials: false diff --git a/.github/workflows/tidy3d-python-client-release-tests.yml b/.github/workflows/tidy3d-python-client-release-tests.yml new file mode 100644 index 0000000000..8020e28c01 --- /dev/null +++ b/.github/workflows/tidy3d-python-client-release-tests.yml @@ -0,0 +1,224 @@ +name: "public/tidy3d/python-client-release-tests" + +on: + workflow_dispatch: + inputs: + version_match_tests: + description: 'Run version consistency checks' + type: boolean + default: true + submodule_tests: + description: 'Run submodule tests' + type: boolean + default: true + release_tag: + description: 'Release Tag to validate (e.g., v2.10.0, v2.10.0rc1)' + required: false + type: string + default: '' + + workflow_call: + inputs: + version_match_tests: + description: 'Run version consistency checks' + type: boolean + default: true + submodule_tests: + description: 'Run submodule tests' + type: boolean + default: true + release_tag: + description: 'Release 
Tag to validate (e.g., v2.10.0, v2.10.0rc1)' + required: false + type: string + default: '' + outputs: + workflow_success: + description: 'Whether all release tests passed' + value: ${{ jobs.workflow-validation.result == 'success' }} + +permissions: + contents: read + +jobs: + verify-version-consistency: + name: verify-version-consistency + runs-on: ubuntu-latest + if: ${{ github.event.inputs.version_match_tests || inputs.version_match_tests }} + steps: + - name: checkout-code + uses: actions/checkout@v4 + with: + ref: ${{ inputs.release_tag || github.ref }} + persist-credentials: false + + - name: check-version-consistency + env: + RELEASE_TAG: ${{ inputs.release_tag }} + run: | + set -e + echo "=== Verifying Version Consistency ===" + echo "" + + # Extract version from pyproject.toml + PYPROJECT_VERSION=$(grep '^version = ' pyproject.toml | head -n 1 | sed 's/version = "\(.*\)"/\1/') + echo "pyproject.toml version: $PYPROJECT_VERSION" + + # Extract version from tidy3d/version.py + VERSION_PY=$(grep '__version__ = ' tidy3d/version.py | sed 's/__version__ = "\(.*\)"/\1/') + echo "tidy3d/version.py version: $VERSION_PY" + echo "" + + # Compare versions + if [[ "$PYPROJECT_VERSION" != "$VERSION_PY" ]]; then + echo "❌ ERROR: Version mismatch detected!" + echo " pyproject.toml: $PYPROJECT_VERSION" + echo " tidy3d/version.py: $VERSION_PY" + echo "" + echo "These versions must match before release." + echo "Please update both files to the same version." + exit 1 + fi + + echo "✅ Version consistency check passed: $PYPROJECT_VERSION" + echo "" + + # If release tag provided, validate it matches the version + if [[ -n "$RELEASE_TAG" ]]; then + echo "=== Validating Release Tag ===" + echo "Release tag: $RELEASE_TAG" + + # Strip 'v' prefix from tag if present + TAG_VERSION="${RELEASE_TAG#v}" + echo "Tag version (without 'v'): $TAG_VERSION" + + if [[ "$TAG_VERSION" != "$PYPROJECT_VERSION" ]]; then + echo "❌ ERROR: Release tag does not match package version!" 
+ echo " Release tag: $RELEASE_TAG (version: $TAG_VERSION)" + echo " Package version: $PYPROJECT_VERSION" + echo "" + echo "The release tag should be 'v$PYPROJECT_VERSION'" + exit 1 + fi + + echo "✅ Release tag matches package version" + fi + + echo "" + echo "=== Version Checks Passed ===" + + test-submodules: + name: test-submodules + runs-on: ubuntu-latest + if: ${{ github.event.inputs.submodule_tests || inputs.submodule_tests }} + env: + RELEASE_TAG: ${{ inputs.release_tag }} + steps: + - name: checkout-head + if: ${{ !env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + submodules: 'recursive' + fetch-depth: 0 + persist-credentials: true + + - name: checkout-tag + if: ${{ env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + ref: ${{ env.RELEASE_TAG }} + submodules: 'recursive' + fetch-depth: 0 + persist-credentials: true + + - name: initialize-submodules + run: | + git submodule update --init --recursive + + - name: check-submodules-for-multiple-branches + shell: bash + run: | + BRANCHES=("develop" $LATEST_BRANCH) # Add your branches here + + for BRANCH in "${BRANCHES[@]}"; do + echo "Analyzing branch: $BRANCH" + + # Fetch all branches and tags + git fetch --all --verbose + + # Checkout the branch + git checkout $BRANCH + + NOTEBOOKS_PATH=docs/notebooks + FAQ_PATH=docs/faq + + # Checking Notebooks submodule + echo "Checking $NOTEBOOKS_PATH for updates..." + cd $NOTEBOOKS_PATH + NOTEBOOKS_CURRENT_COMMIT=$(git rev-parse HEAD) + echo $(git fetch --all --verbose) + echo $(git remote get-url origin) + if git show-ref --verify refs/remotes/origin/$BRANCH; then + echo "Branch $BRANCH exists." + else + echo "::error::Branch $BRANCH does not exist on remote." + exit 1 + fi + NOTEBOOKS_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/${BRANCH}) + echo "NOTEBOOKS_LATEST_COMMIT: $NOTEBOOKS_LATEST_COMMIT" + echo "NOTEBOOKS_CURRENT_COMMIT: $NOTEBOOKS_CURRENT_COMMIT" + + cd ../.. 
+ if [ "$NOTEBOOKS_LATEST_COMMIT" != "$NOTEBOOKS_CURRENT_COMMIT" ]; then + echo "::error::Submodule $NOTEBOOKS_PATH is not up to date with the $BRANCH branch. Please update it." + exit 1 + else + echo "Submodule $NOTEBOOKS_PATH is up to date with the $BRANCH branch." + fi + + # Checking FAQs only on the develop branch + if [[ "$BRANCH" == "develop" ]]; then + echo "Checking $FAQ_PATH for updates..." + cd $FAQ_PATH + FAQ_CURRENT_COMMIT=$(git rev-parse HEAD) + echo $(git fetch --all --verbose) + echo $(git remote get-url origin) + FAQ_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/develop) + echo "FAQ_LATEST_COMMIT: $FAQ_LATEST_COMMIT" + echo "FAQ_CURRENT_COMMIT: $FAQ_CURRENT_COMMIT" + cd ../.. + if [ "$FAQ_LATEST_COMMIT" != "$FAQ_CURRENT_COMMIT" ]; then + echo "::error::Submodule $FAQ_PATH is not up to date. Please update it." + exit 1 + else + echo "Submodule $FAQ_PATH is up to date." + fi + fi + done + + echo "" + echo "=== Submodule Checks Passed ===" + + workflow-validation: + name: workflow-validation + runs-on: ubuntu-latest + if: always() + needs: + - verify-version-consistency + - test-submodules + steps: + - name: check-version-consistency-result + if: ${{ (github.event.inputs.version_match_tests || inputs.version_match_tests) && needs.verify-version-consistency.result != 'success' && needs.verify-version-consistency.result != 'skipped' }} + run: | + echo "❌ Version consistency check failed." + exit 1 + + - name: check-submodule-tests-result + if: ${{ (github.event.inputs.submodule_tests || inputs.submodule_tests) && needs.test-submodules.result != 'success' && needs.test-submodules.result != 'skipped' }} + run: | + echo "❌ Submodule tests failed." + exit 1 + + - name: all-checks-passed + if: ${{ success() }} + run: echo "✅ All release tests passed!" 
diff --git a/.github/workflows/tidy3d-python-client-release.yml b/.github/workflows/tidy3d-python-client-release.yml index d6ca5df611..2e1ca7e5e7 100644 --- a/.github/workflows/tidy3d-python-client-release.yml +++ b/.github/workflows/tidy3d-python-client-release.yml @@ -2,129 +2,513 @@ name: "public/tidy3d/python-client-release" on: workflow_dispatch: + inputs: + release_tag: + description: 'Release Tag (e.g., v2.10.0, v2.10.0rc1)' + required: true + type: string + + release_type: + description: 'Release Type (determines deployment targets)' + type: choice + default: 'draft' + required: false + options: + - draft + - testpypi + - pypi + + workflow_control: + description: 'Workflow Stage Control' + default: 'start-tag' + required: false + type: choice + options: + - start-tag + - start-tests + - start-deploy + - only-tag + - only-tests + - only-tag-tests + - only-tag-deploy + + client_tests: + description: 'Run python-client-tests' + type: boolean + default: true + + cli_tests: + description: 'Run develop-cli tests' + type: boolean + default: true + + submodule_tests: + description: 'Run submodule tests' + type: boolean + default: true + + workflow_call: + inputs: + release_tag: + description: 'Release Tag (e.g., v2.10.0, v2.10.0rc1)' + required: true + type: string + + release_type: + description: 'Release Type (determines deployment targets)' + type: string + default: 'draft' + required: false + + workflow_control: + description: 'Workflow Stage Control' + default: 'start-tag' + required: false + type: string + + client_tests: + description: 'Run python-client-tests' + type: boolean + default: true + + cli_tests: + description: 'Run develop-cli tests' + type: boolean + default: true + + submodule_tests: + description: 'Run submodule tests' + type: boolean + default: true + + deploy_testpypi: + description: 'Deploy to TestPyPI' + type: boolean + default: false + + deploy_pypi: + description: 'Deploy to production PyPI' + type: boolean + default: false + + outputs: 
+ workflow_success: + description: 'Overall release workflow success status' + value: ${{ jobs.compile-tests-results.outputs.proceed_deploy == 'true' || jobs.compile-tests-results.result == 'skipped' }} permissions: contents: read + jobs: - test-latest-submodules: + determine-workflow-scope: runs-on: ubuntu-latest - if: contains(github.ref, 'rc') == false + outputs: + release_tag: ${{ env.RELEASE_TAG }} + release_type: ${{ env.RELEASE_TYPE }} + is_rc_release: ${{ steps.determine-workflow-steps.outputs.is_rc_release }} + push_to_latest: ${{ steps.determine-workflow-steps.outputs.push_to_latest }} + run_tag: ${{ steps.determine-workflow-steps.outputs.run_tag }} + run_tests: ${{ steps.determine-workflow-steps.outputs.run_tests }} + run_deploy: ${{ steps.determine-workflow-steps.outputs.run_deploy }} + run_client_tests: ${{ steps.determine-workflow-steps.outputs.run_client_tests }} + run_cli_tests: ${{ steps.determine-workflow-steps.outputs.run_cli_tests }} + run_submodule_tests: ${{ steps.determine-workflow-steps.outputs.run_submodule_tests }} + deploy_github_release: ${{ steps.determine-workflow-steps.outputs.deploy_github_release }} + deploy_testpypi: ${{ steps.determine-workflow-steps.outputs.deploy_testpypi }} + deploy_pypi: ${{ steps.determine-workflow-steps.outputs.deploy_pypi }} + sync_readthedocs: ${{ steps.determine-workflow-steps.outputs.sync_readthedocs }} + sync_branches: ${{ steps.determine-workflow-steps.outputs.sync_branches }} + env: + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + RELEASE_TYPE: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_type || inputs.release_type }} + WORKFLOW_CONTROL: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.workflow_control || inputs.workflow_control }} + CLIENT_TESTS: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.client_tests || inputs.client_tests }} + CLI_TESTS: ${{ 
github.event_name == 'workflow_dispatch' && github.event.inputs.cli_tests || inputs.cli_tests }} + SUBMODULE_TESTS: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.submodule_tests || inputs.submodule_tests }} + DEPLOY_TESTPYPI: ${{ inputs.deploy_testpypi || false }} + DEPLOY_PYPI: ${{ inputs.deploy_pypi || false }} steps: - - name: Checkout repository with submodules - uses: actions/checkout@v4 - with: - submodules: 'recursive' - # This fetches only a single branch by default, so additional fetch is needed - fetch-depth: 0 # Optionally, set to 0 to fetch all history for all branches and tags - persist-credentials: false - - - name: Initialize and update submodule + - name: validate-tag-format run: | - git submodule update --init --recursive - - - name: Check if submodules are up to date - shell: bash + set -e + echo "Validating release tag format..." + echo "Release tag: $RELEASE_TAG" + echo "Release type: $RELEASE_TYPE" + + # Only enforce strict validation for PyPI releases + if [[ "$RELEASE_TYPE" == "pypi" ]]; then + echo "PyPI release detected - applying strict tag validation" + + # Tag must match semantic versioning: v{major}.{minor}.{patch}[rc{num}] + TAG_REGEX='^v[0-9]+\.[0-9]+\.[0-9]+(rc[0-9]+)?$' + + if [[ ! "$RELEASE_TAG" =~ $TAG_REGEX ]]; then + echo "Invalid tag format: $RELEASE_TAG" + echo " Expected format: v{major}.{minor}.{patch}[rc{num}]" + echo " Examples: v2.10.0, v2.10.0rc1, v2.10.1rc2" + exit 1 + fi + + echo "Tag format is valid" + else + echo "Non-PyPI release - skipping strict tag validation" + echo "Tag accepted: $RELEASE_TAG" + fi + + - name: determine-workflow-steps + id: determine-workflow-steps run: | - NOTEBOOKS_PATH=docs/notebooks - FAQ_PATH=docs/faq - - # Checking out Notebooks submodule with the same branch as the main project/develop - echo "Checking $NOTEBOOKS_PATH for updates..." 
- cd $NOTEBOOKS_PATH - NOTEBOOKS_CURRENT_COMMIT=$(git rev-parse HEAD) - echo $(git fetch --all --verbose) - echo $(git remote get-url origin) - if git show-ref --verify refs/remotes/origin/develop; then - echo "Branch develop exists." + set -v + echo "=== Input Parameters ===" + echo "RELEASE_TAG: $RELEASE_TAG" + echo "RELEASE_TYPE: $RELEASE_TYPE" + echo "WORKFLOW_CONTROL: $WORKFLOW_CONTROL" + echo "CLIENT_TESTS: $CLIENT_TESTS" + echo "CLI_TESTS: $CLI_TESTS" + echo "SUBMODULE_TESTS: $SUBMODULE_TESTS" + echo "DEPLOY_TESTPYPI: $DEPLOY_TESTPYPI" + echo "DEPLOY_PYPI: $DEPLOY_PYPI" + echo "" + + # ============================================ + # PART 1: WORKFLOW FLOW CONTROL + # ============================================ + run_tag=false + run_tests=false + run_deploy=false + + case "$WORKFLOW_CONTROL" in + start-tag) + run_tag=true + run_tests=true + run_deploy=true + ;; + start-tests) + run_tests=true + run_deploy=true + ;; + start-deploy) + run_deploy=true + ;; + only-tag) + run_tag=true + ;; + only-tests) + run_tests=true + ;; + only-tag-tests) + run_tag=true + run_tests=true + ;; + only-tag-deploy) + run_tag=true + run_deploy=true + ;; + *) + echo "Invalid WORKFLOW_CONTROL: $WORKFLOW_CONTROL" + exit 1 + ;; + esac + + echo "=== Workflow Stage Control ===" + echo "run_tag: $run_tag" + echo "run_tests: $run_tests" + echo "run_deploy: $run_deploy" + echo "" + + # ============================================ + # PART 2: DETERMINE RC STATUS + # ============================================ + is_rc_release=false + if [[ "$RELEASE_TAG" == *"rc"* ]]; then + is_rc_release=true + fi + + echo "=== Release Type ===" + echo "is_rc_release: $is_rc_release" + echo "" + + # ============================================ + # PART 2.5: DETERMINE PUSH TO LATEST + # ============================================ + push_to_latest=false + SEMVER_REGEX='^v[0-9]+\.[0-9]+\.[0-9]+$' + + # Only push to latest if: + # 1. Release type is pypi + # 2. Not an RC release + # 3. 
Tag matches semantic versioning pattern v{major}.{minor}.{patch} + if [[ "$RELEASE_TYPE" == "pypi" && "$is_rc_release" == "false" && "$RELEASE_TAG" =~ $SEMVER_REGEX ]]; then + push_to_latest=true + echo "=== Push to Latest ===" + echo "Will push to 'latest' branch in readthedocs" + echo " Conditions met: pypi release + non-RC + semantic version tag" else - echo "::error::Branch develop does not exist on remote." - exit 1 + echo "=== Push to Latest ===" + echo " Will NOT push to 'latest' branch" + if [[ "$RELEASE_TYPE" != "pypi" ]]; then + echo " Reason: Not a PyPI release (is $RELEASE_TYPE)" + elif [[ "$is_rc_release" == "true" ]]; then + echo " Reason: RC release" + elif [[ ! "$RELEASE_TAG" =~ $SEMVER_REGEX ]]; then + echo " Reason: Tag doesn't match semantic version pattern (v{major}.{minor}.{patch})" + fi fi - NOTEBOOKS_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/develop) - echo "NOTEBOOKS_LATEST_COMMIT: $NOTEBOOKS_LATEST_COMMIT" - echo "NOTEBOOKS_CURRENT_COMMIT: $NOTEBOOKS_CURRENT_COMMIT" - - - cd ../.. - if [ "$NOTEBOOKS_LATEST_COMMIT" != "$NOTEBOOKS_CURRENT_COMMIT" ]; then - echo "::error ::Submodule $NOTEBOOKS_PATH is not up to date with the develop branch. Please update it." - exit 1 + echo "" + + # ============================================ + # PART 3: TEST CONTROL + # ============================================ + run_client_tests=false + run_cli_tests=false + run_submodule_tests=false + + if [[ "$run_tests" == "true" ]]; then + [[ "$CLIENT_TESTS" == "true" ]] && run_client_tests=true + [[ "$CLI_TESTS" == "true" ]] && run_cli_tests=true + + # Submodule tests: user input OR auto-enable for PyPI non-RC releases + if [[ "$SUBMODULE_TESTS" == "true" ]]; then + run_submodule_tests=true + echo "?? Submodule tests enabled by user input" + elif [[ "$push_to_latest" == "true" ]]; then + run_submodule_tests=true + echo "?? 
Submodule tests auto-enabled for PyPI non-RC release (push_to_latest=true)" + fi + fi + + echo "=== Test Control ===" + echo "run_client_tests: $run_client_tests" + echo "run_cli_tests: $run_cli_tests" + echo "run_submodule_tests: $run_submodule_tests" + echo "" + + # ============================================ + # PART 4: DEPLOYMENT CONTROL + # ============================================ + deploy_github_release=false + deploy_testpypi=false + deploy_pypi=false + sync_readthedocs=false + sync_branches=false + + if [[ "$run_deploy" == "true" ]]; then + # Always create GitHub release and sync docs + deploy_github_release=true + sync_readthedocs=true + + # Deployment target logic: + # 1. If any deployment checkbox is explicitly set, use those + # 2. Otherwise, use automatic defaults based on release_type + + if [[ "$DEPLOY_TESTPYPI" == "true" || "$DEPLOY_PYPI" == "true" ]]; then + # Manual override: use checkbox selections + echo "Using manual deployment target selections" + deploy_testpypi=$DEPLOY_TESTPYPI + deploy_pypi=$DEPLOY_PYPI + else + # Automatic defaults based on release_type + echo "Using automatic deployment defaults for release_type: $RELEASE_TYPE" + case "$RELEASE_TYPE" in + pypi) + # PyPI releases: deploy to PyPI and TestPyPI + deploy_testpypi=true + deploy_pypi=true + ;; + testpypi) + # TestPyPI releases: deploy to TestPyPI + deploy_testpypi=true + ;; + draft) + # Draft releases: no deployment + echo "Draft release - no automatic deployments" + ;; + *) + echo "Unknown release_type: $RELEASE_TYPE - no automatic deployments" + ;; + esac + fi + + # Sync branches on PyPI releases if deploying to PyPI + if [[ "$RELEASE_TYPE" == "pypi" && "$deploy_pypi" == "true" ]]; then + sync_branches=true + fi + fi + + echo "=== Deployment Control ===" + echo "deploy_github_release: $deploy_github_release" + echo "deploy_testpypi: $deploy_testpypi" + echo "deploy_pypi: $deploy_pypi" + echo "sync_readthedocs: $sync_readthedocs" + echo "sync_branches: $sync_branches" + 
echo "" + + # ============================================ + # PART 5: SAVE ALL OUTPUTS + # ============================================ + echo "is_rc_release=$is_rc_release" >> $GITHUB_OUTPUT + echo "push_to_latest=$push_to_latest" >> $GITHUB_OUTPUT + echo "run_tag=$run_tag" >> $GITHUB_OUTPUT + echo "run_tests=$run_tests" >> $GITHUB_OUTPUT + echo "run_deploy=$run_deploy" >> $GITHUB_OUTPUT + echo "run_client_tests=$run_client_tests" >> $GITHUB_OUTPUT + echo "run_cli_tests=$run_cli_tests" >> $GITHUB_OUTPUT + echo "run_submodule_tests=$run_submodule_tests" >> $GITHUB_OUTPUT + echo "deploy_github_release=$deploy_github_release" >> $GITHUB_OUTPUT + echo "deploy_testpypi=$deploy_testpypi" >> $GITHUB_OUTPUT + echo "deploy_pypi=$deploy_pypi" >> $GITHUB_OUTPUT + echo "sync_readthedocs=$sync_readthedocs" >> $GITHUB_OUTPUT + echo "sync_branches=$sync_branches" >> $GITHUB_OUTPUT + + echo "✅ Workflow scope determined" + + create-tag: + name: create-and-push-tag + needs: determine-workflow-scope + if: needs.determine-workflow-scope.outputs.run_tag == 'true' + uses: ./.github/workflows/tidy3d-python-client-create-tag.yml + permissions: + contents: write + with: + release_tag: ${{ needs.determine-workflow-scope.outputs.release_tag }} + release_type: ${{ needs.determine-workflow-scope.outputs.release_type }} + secrets: inherit # zizmor: ignore[secrets-inherit] + + run-client-tests: + name: run-python-client-tests + needs: [determine-workflow-scope, create-tag] + if: | + always() && + needs.determine-workflow-scope.outputs.run_client_tests == 'true' + uses: ./.github/workflows/tidy3d-python-client-tests.yml + permissions: + contents: read + security-events: write + pull-requests: write + with: + release_tag: ${{ needs.determine-workflow-scope.outputs.release_tag }} + local_tests: true + remote_tests: true + cli_tests: ${{ needs.determine-workflow-scope.outputs.run_cli_tests == 'true' }} + submodule_tests: ${{ needs.determine-workflow-scope.outputs.run_submodule_tests == 'true' }} + 
version_match_tests: true + + compile-tests-results: + name: compile-tests-results + if: | + always() && + needs.determine-workflow-scope.outputs.run_tests == 'true' && + needs.determine-workflow-scope.outputs.run_deploy == 'true' + needs: + - determine-workflow-scope + - run-client-tests + runs-on: ubuntu-latest + outputs: + proceed_deploy: ${{ steps.check-tests.outputs.proceed_deploy }} + steps: + - name: check-tests + id: check-tests + env: + RUN_CLIENT_TESTS: ${{ needs.determine-workflow-scope.outputs.run_client_tests }} + CLIENT_TESTS_RESULT: ${{ needs.run-client-tests.result }} + WORKFLOW_SUCCESS: ${{ needs.run-client-tests.outputs.workflow_success }} + run: | + echo "=== Checking Test Results ===" + echo "" + + proceed_deploy=true + + # Check client tests using the workflow_success output from tests workflow + # This output validates all tests: local, remote, CLI, submodule, lint, mypy, etc. + if [[ "$RUN_CLIENT_TESTS" == "true" ]]; then + echo "Client tests workflow result: $CLIENT_TESTS_RESULT" + echo "Client tests validation status: $WORKFLOW_SUCCESS" + + if [[ "$CLIENT_TESTS_RESULT" != "success" ]] || [[ "$WORKFLOW_SUCCESS" != "true" ]]; then + echo "Client tests failed - see workflow-validation job in tests workflow for details" + proceed_deploy=false + else + echo "All client tests passed (local, remote, CLI, submodule, and quality checks)" + fi else - echo "Submodule $NOTEBOOKS_PATH is up to date with the develop branch." + echo "Client tests: not required (skipped)" fi - - # Checking FAQs only on the develop branch. - echo "Checking $FAQ_PATH for updates..." - cd $FAQ_PATH - FAQ_CURRENT_COMMIT=$(git rev-parse HEAD) - echo $(git fetch --all --verbose) - echo $(git remote get-url origin) - FAQ_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/develop) - echo "FAQ_LATEST_COMMIT: $FAQ_LATEST_COMMIT" - echo "FAQ_CURRENT_COMMIT: $FAQ_CURRENT_COMMIT" - cd ../.. 
- if [ "$FAQ_LATEST_COMMIT" != "$FAQ_CURRENT_COMMIT" ]; then - echo "::error ::Submodule $FAQ_PATH is not up to date. Please update it." - exit 1 + + echo "" + echo "=== Final Decision ===" + if [[ "$proceed_deploy" == "true" ]]; then + echo "All required tests passed - deployment can proceed" else - echo "Submodule $FAQ_PATH is up to date." + echo "One or more required tests failed - deployment blocked" + exit 1 fi - + + echo "proceed_deploy=$proceed_deploy" >> $GITHUB_OUTPUT + + sync-readthedocs: + name: sync-docs-to-readthedocs + needs: [determine-workflow-scope, compile-tests-results] + if: | + always() && + (needs.compile-tests-results.outputs.proceed_deploy == 'true' || needs.compile-tests-results.result == 'skipped') && + needs.determine-workflow-scope.outputs.sync_readthedocs == 'true' + uses: ./.github/workflows/tidy3d-docs-sync-readthedocs-repo.yml + permissions: + contents: write + with: + source_ref: ${{ needs.determine-workflow-scope.outputs.release_tag }} + target_ref: ${{ needs.determine-workflow-scope.outputs.push_to_latest == 'true' && 'latest' || '' }} + secrets: inherit # zizmor: ignore[secrets-inherit] + github-release: + name: create-github-release + needs: [determine-workflow-scope, compile-tests-results] + if: | + always() && + (needs.compile-tests-results.outputs.proceed_deploy == 'true' || needs.compile-tests-results.result == 'skipped') && + needs.determine-workflow-scope.outputs.deploy_github_release == 'true' runs-on: ubuntu-latest permissions: - contents: write - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.ref }} - persist-credentials: false - - name: Exit if any RC release - if: contains(github.ref, 'rc') == false - uses: everlytic/branch-merge@c4a244dc23143f824ae6c022a10732566cb8e973 # v1.1.5 - with: - github_token: ${{ secrets.GH_PAT }} - source_ref: ${{ github.ref }} - target_branch: "latest" - commit_message_template: ':tada: RELEASE: Merged {source_ref} into target {target_branch}' - - name: Release - uses: 
softprops/action-gh-release@aec2ec56f94eb8180ceec724245f64ef008b89f5 # v2.4.0 - with: - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GH_PAT }} - pypi-release: - runs-on: ubuntu-latest + contents: write + env: + RELEASE_TAG: ${{ needs.determine-workflow-scope.outputs.release_tag }} + IS_RC_RELEASE: ${{ needs.determine-workflow-scope.outputs.is_rc_release }} steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.ref }} - persist-credentials: false - - uses: actions/setup-python@v2 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install setuptools wheel twine build - - name: Build and publish - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} - run: | # zizmor: ignore[use-trusted-publishing] - python -m build - python -m twine upload --repository pypi dist/* - sync_to_develop: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: "latest" - persist-credentials: false - - name: Exit if any RC release - if: contains(github.ref, 'rc') == false - uses: everlytic/branch-merge@c4a244dc23143f824ae6c022a10732566cb8e973 # v1.1.5 - with: - github_token: ${{ secrets.GH_PAT }} - source_ref: "latest" - target_branch: "develop" - commit_message_template: ':tada: RELEASE: Synced latest into develop' + - run: | + echo "steps" + # - name: checkout-tag + # uses: actions/checkout@v4 + # with: + # ref: ${{ env.RELEASE_TAG }} + # persist-credentials: false + + # - name: create-github-release + # uses: softprops/action-gh-release@c062e08bd532815e2082a85e87e3ef29c3e6d191 # v2.0.8 + # with: + # tag_name: ${{ env.RELEASE_TAG }} + # generate_release_notes: true + # prerelease: ${{ env.IS_RC_RELEASE == 'true' }} + # env: + # GITHUB_TOKEN: ${{ secrets.GH_PAT }} + + deploy-packages: + name: deploy-to-package-repositories + needs: [determine-workflow-scope, compile-tests-results] + if: | + always() && + (needs.compile-tests-results.outputs.proceed_deploy == 'true' || 
needs.compile-tests-results.result == 'skipped') && + needs.determine-workflow-scope.outputs.run_deploy == 'true' && + (needs.determine-workflow-scope.outputs.deploy_testpypi == 'true' || + needs.determine-workflow-scope.outputs.deploy_pypi == 'true') + uses: ./.github/workflows/tidy3d-python-client-deploy.yml + permissions: + contents: read + id-token: write + with: + release_tag: ${{ needs.determine-workflow-scope.outputs.release_tag }} + deploy_testpypi: ${{ needs.determine-workflow-scope.outputs.deploy_testpypi == 'true' }} + deploy_pypi: ${{ needs.determine-workflow-scope.outputs.deploy_pypi == 'true' }} + secrets: + TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/tidy3d-python-client-submodules-test.yml b/.github/workflows/tidy3d-python-client-submodules-test.yml deleted file mode 100644 index b76f7fafec..0000000000 --- a/.github/workflows/tidy3d-python-client-submodules-test.yml +++ /dev/null @@ -1,104 +0,0 @@ -name: "public/tidy3d/python-client-submodule-tests" - -on: - push: - branches: [ latest ] - - pull_request: - branches: - - latest - - workflow_dispatch: - inputs: - run-workflow: - required: true - type: boolean - default: true - - workflow_call: - inputs: - run-workflow: - required: true - type: boolean - default: true - -permissions: - contents: read - -jobs: - test-latest-submodules: - runs-on: ubuntu-latest - if: github.event.inputs.run-workflow || inputs.run-workflow - steps: - - name: Checkout repository with submodules - uses: actions/checkout@v4 - with: - submodules: 'recursive' - # This fetches only a single branch by default, so additional fetch is needed - fetch-depth: 0 # Optionally, set to 0 to fetch all history for all branches and tags - persist-credentials: true - - - name: Initialize and update submodule - run: | - git submodule update --init --recursive - - - name: Check submodules for multiple branches - shell: bash - run: | - BRANCHES=("develop" 
$LATEST_BRANCH) # Add your branches here - - for BRANCH in "${BRANCHES[@]}"; do - echo "Analyzing branch: $BRANCH" - - # Fetch all branches and tags - git fetch --all --verbose - - # Checkout the branch - git checkout $BRANCH - - NOTEBOOKS_PATH=docs/notebooks - FAQ_PATH=docs/faq - - # Checking Notebooks submodule - echo "Checking $NOTEBOOKS_PATH for updates..." - cd $NOTEBOOKS_PATH - NOTEBOOKS_CURRENT_COMMIT=$(git rev-parse HEAD) - echo $(git fetch --all --verbose) - echo $(git remote get-url origin) - if git show-ref --verify refs/remotes/origin/$BRANCH; then - echo "Branch $BRANCH exists." - else - echo "::error::Branch $BRANCH does not exist on remote." - exit 1 - fi - NOTEBOOKS_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/${BRANCH}) - echo "NOTEBOOKS_LATEST_COMMIT: $NOTEBOOKS_LATEST_COMMIT" - echo "NOTEBOOKS_CURRENT_COMMIT: $NOTEBOOKS_CURRENT_COMMIT" - - cd ../.. - if [ "$NOTEBOOKS_LATEST_COMMIT" != "$NOTEBOOKS_CURRENT_COMMIT" ]; then - echo "::error::Submodule $NOTEBOOKS_PATH is not up to date with the $BRANCH branch. Please update it." - exit 1 - else - echo "Submodule $NOTEBOOKS_PATH is up to date with the $BRANCH branch." - fi - - # Checking FAQs only on the develop branch - if [[ "$BRANCH" == "develop" ]]; then - echo "Checking $FAQ_PATH for updates..." - cd $FAQ_PATH - FAQ_CURRENT_COMMIT=$(git rev-parse HEAD) - echo $(git fetch --all --verbose) - echo $(git remote get-url origin) - FAQ_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/develop) - echo "FAQ_LATEST_COMMIT: $FAQ_LATEST_COMMIT" - echo "FAQ_CURRENT_COMMIT: $FAQ_CURRENT_COMMIT" - cd ../.. - if [ "$FAQ_LATEST_COMMIT" != "$FAQ_CURRENT_COMMIT" ]; then - echo "::error::Submodule $FAQ_PATH is not up to date. Please update it." - exit 1 - else - echo "Submodule $FAQ_PATH is up to date." 
- fi - fi - done diff --git a/.github/workflows/tidy3d-python-client-tests.yml b/.github/workflows/tidy3d-python-client-tests.yml index 010e0fb1b6..b851f2264c 100644 --- a/.github/workflows/tidy3d-python-client-tests.yml +++ b/.github/workflows/tidy3d-python-client-tests.yml @@ -2,6 +2,7 @@ name: "public/tidy3d/python-client-tests" on: merge_group: + workflow_dispatch: inputs: remote_tests: @@ -12,6 +13,61 @@ on: description: 'local-tests' type: boolean default: false + cli_tests: + description: 'Run develop-cli tests' + type: boolean + default: false + submodule_tests: + description: 'Run submodule tests' + type: boolean + default: false + version_match_tests: + description: 'Run version consistency checks' + type: boolean + default: false + release_tag: + description: 'Release Tag (v2.10.0, v2.10.0rc1)' + required: false + type: string + default: '' + + workflow_call: + inputs: + remote_tests: + description: 'remote-tests' + type: boolean + required: false + default: true + local_tests: + description: 'local-tests' + type: boolean + required: false + default: true + cli_tests: + description: 'Run develop-cli tests' + type: boolean + required: false + default: false + submodule_tests: + description: 'Run submodule tests' + type: boolean + required: false + default: false + version_match_tests: + description: 'Run version consistency checks' + type: boolean + required: false + default: false + release_tag: + description: 'Release Tag (v2.10.0, v2.10.0rc1)' + required: false + type: string + default: '' + outputs: + workflow_success: + description: 'Overall test workflow success status' + value: ${{ jobs.workflow-validation.result == 'success' }} + pull_request: branches: - latest @@ -36,6 +92,9 @@ jobs: pr_review_tests: ${{ steps.determine-test-type.outputs.pr_review_tests }} local_tests: ${{ steps.determine-test-type.outputs.local_tests }} remote_tests: ${{ steps.determine-test-type.outputs.remote_tests }} + cli_tests: ${{ 
steps.determine-test-type.outputs.cli_tests }} + submodule_tests: ${{ steps.determine-test-type.outputs.submodule_tests }} + version_match_tests: ${{ steps.determine-test-type.outputs.version_match_tests }} pr_approval_state: ${{ steps.approval.outputs.approved }} steps: - name: check-current-approval-status @@ -82,8 +141,11 @@ jobs: EVENT_NAME: ${{ github.event_name }} REVIEW_STATE: ${{ github.event.review.state }} REF: ${{ github.ref }} - INPUT_LOCAL: ${{ github.event.inputs.local_tests }} - INPUT_REMOTE: ${{ github.event.inputs.remote_tests }} + INPUT_LOCAL: ${{ github.event.inputs.local_tests || inputs.local_tests }} + INPUT_REMOTE: ${{ github.event.inputs.remote_tests || inputs.remote_tests }} + INPUT_CLI: ${{ github.event.inputs.cli_tests || inputs.cli_tests }} + INPUT_SUBMODULE: ${{ github.event.inputs.submodule_tests || inputs.submodule_tests }} + INPUT_VERSION_MATCH: ${{ github.event.inputs.version_match_tests || inputs.version_match_tests }} APPROVED: ${{ steps.approval.outputs.approved }} run: | echo "Event: $EVENT_NAME" @@ -92,10 +154,16 @@ jobs: echo "Git REF: $REF" echo "Input local: $INPUT_LOCAL" echo "Input remote: $INPUT_REMOTE" + echo "Input cli: $INPUT_CLI" + echo "Input submodule: $INPUT_SUBMODULE" + echo "Input version_match: $INPUT_VERSION_MATCH" echo "Approved: $APPROVED" remote_tests=false local_tests=false + cli_tests=false + submodule_tests=false + version_match_tests=false code_quality_tests=false pr_review_tests=false @@ -110,6 +178,18 @@ jobs: if [[ "$INPUT_LOCAL" == "true" ]]; then local_tests=true fi + + if [[ "$INPUT_CLI" == "true" ]]; then + cli_tests=true + fi + + if [[ "$INPUT_SUBMODULE" == "true" ]]; then + submodule_tests=true + fi + + if [[ "$INPUT_VERSION_MATCH" == "true" ]]; then + version_match_tests=true + fi fi # All PRs that have been triggered need local tests and approved ones need to re-run the remote tests @@ -145,12 +225,18 @@ jobs: echo "local_tests=$local_tests" >> $GITHUB_OUTPUT echo "remote_tests=$remote_tests" 
>> $GITHUB_OUTPUT + echo "cli_tests=$cli_tests" >> $GITHUB_OUTPUT + echo "submodule_tests=$submodule_tests" >> $GITHUB_OUTPUT + echo "version_match_tests=$version_match_tests" >> $GITHUB_OUTPUT echo "code_quality_tests=$code_quality_tests" >> $GITHUB_OUTPUT echo "pr_review_tests=$pr_review_tests" >> $GITHUB_OUTPUT echo "code_quality_tests=$code_quality_tests" echo "pr_review_tests=$pr_review_tests" echo "local_tests=$local_tests" echo "remote_tests=$remote_tests" + echo "cli_tests=$cli_tests" + echo "submodule_tests=$submodule_tests" + echo "version_match_tests=$version_match_tests" lint: needs: determine-test-scope @@ -466,15 +552,27 @@ jobs: defaults: run: shell: bash - env: # Set environment variables for the whole job + env: PIP_ONLY_BINARY: gdstk MPLBACKEND: agg + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} permissions: pull-requests: write steps: - - uses: actions/checkout@v4 + - name: checkout-head + if: ${{ !env.RELEASE_TAG }} + uses: actions/checkout@v4 with: - fetch-depth: 0 # Required 0 for diff report. 
+ fetch-depth: 0 + submodules: false + persist-credentials: false + + - name: checkout-tag + if: ${{ env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + ref: refs/tags/${{ env.RELEASE_TAG }} + fetch-depth: 0 submodules: false persist-credentials: false @@ -580,17 +678,29 @@ jobs: defaults: run: shell: bash - env: # Set environment variables for the whole job + env: PIP_ONLY_BINARY: gdstk MPLBACKEND: agg + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} steps: - - uses: actions/checkout@v4 + - name: checkout-head + if: ${{ !env.RELEASE_TAG }} + uses: actions/checkout@v4 with: fetch-depth: 1 submodules: false persist-credentials: false + - name: checkout-tag + if: ${{ env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + ref: refs/tags/${{ env.RELEASE_TAG }} + fetch-depth: 1 + submodules: false + persist-credentials: false + - name: install-poetry uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a # v1.4.1 with: @@ -652,12 +762,29 @@ jobs: valColorRange: ${{ env.total }} style: "for-the-badge" - pr-requirements-pass: - name: pr-requirements-pass + develop-cli-tests: + name: develop-cli-tests + needs: determine-test-scope + if: needs.determine-test-scope.outputs.cli_tests == 'true' + uses: ./.github/workflows/tidy3d-python-client-develop-cli.yml + with: + release_tag: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + + release-tests: + name: release-tests + needs: determine-test-scope if: | always() && - (github.event_name == 'pull_request' || github.event_name == 'pull_request_review' || github.event_name == 'merge_group') && - ((needs.determine-test-scope.outputs.pr_approval_state == 'true' && needs.determine-test-scope.outputs.local_tests == 'true') || needs.determine-test-scope.outputs.remote_tests == 'true') + (needs.determine-test-scope.outputs.submodule_tests == 'true' || 
needs.determine-test-scope.outputs.version_match_tests == 'true') + uses: ./.github/workflows/tidy3d-python-client-release-tests.yml + with: + submodule_tests: ${{ needs.determine-test-scope.outputs.submodule_tests == 'true' }} + version_match_tests: ${{ needs.determine-test-scope.outputs.version_match_tests == 'true' }} + release_tag: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + + workflow-validation: + name: workflow-validation + if: always() needs: - determine-test-scope - local-tests @@ -668,65 +795,91 @@ jobs: - lint-commit-messages - lint-branch-name - zizmor + - develop-cli-tests + - release-tests runs-on: ubuntu-latest - env: - NEEDS_LOCAL_TESTS_RESULT: ${{ needs.local-tests.result }} - NEEDS_REMOTE_TESTS_RESULT: ${{ needs.remote-tests.result }} - NEEDS_LINT_RESULT: ${{ needs.lint.result }} - NEEDS_VERIFY_SCHEMA_CHANGE_RESULT: ${{ needs.verify-schema-change.result }} - NEEDS_LINT_COMMIT_MESSAGES_RESULT: ${{ needs.lint-commit-messages.result }} - NEEDS_LINT_BRANCH_NAME_RESULT: ${{ needs.lint-branch-name.result }} - NEEDS_ZIZMOR_RESULT: ${{ needs.zizmor.result }} steps: - name: check-linting-result - if: ${{ needs.lint.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.lint.result != 'success' && needs.lint.result != 'skipped' }} run: | echo "❌ Linting failed." exit 1 - name: check-mypy-result - if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.mypy.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.mypy.result != 'success' && needs.mypy.result != 'skipped' }} run: | echo "❌ Mypy type checking failed." 
exit 1 - name: check-schema-change-verification - if: ${{ needs.verify-schema-change.result != 'success' }} - + if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.verify-schema-change.result != 'success' && needs.verify-schema-change.result != 'skipped' }} run: | echo "❌ Schema change verification failed." exit 1 - name: check-local-tests-result - if: ${{ needs.local-tests.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.local_tests == 'true' && needs.local-tests.result != 'success' && needs.local-tests.result != 'skipped' }} run: | echo "❌ Local tests failed." exit 1 - name: check-remote-tests-result - if: ${{ needs.remote-tests.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.remote_tests == 'true' && needs.remote-tests.result != 'success' && needs.remote-tests.result != 'skipped' }} run: | echo "❌ Remote tests failed." exit 1 - name: check-commit-message-linting - if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.lint-commit-messages.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.lint-commit-messages.result != 'success' && needs.lint-commit-messages.result != 'skipped' }} run: | echo "❌ Commit message linting failed." exit 1 - name: check-branch-name-linting - if: ${{ needs.determine-test-scope.outputs.pr_review_tests == 'true' && needs.lint-branch-name.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.pr_review_tests == 'true' && needs.lint-branch-name.result != 'success' && needs.lint-branch-name.result != 'skipped' }} run: | echo "❌ Branch name linting failed." 
exit 1 - name: check-zizmor-static-analysis - if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.zizmor.result != 'success' }} + if: ${{ needs.determine-test-scope.outputs.code_quality_tests == 'true' && needs.zizmor.result != 'success' && needs.zizmor.result != 'skipped' }} run: | echo "❌ Zizmor static analysis failed." exit 1 + - name: check-cli-tests-result + if: ${{ needs.determine-test-scope.outputs.cli_tests == 'true' && needs.develop-cli-tests.result != 'success' && needs.develop-cli-tests.result != 'skipped' }} + run: | + echo "❌ CLI tests failed." + exit 1 + + - name: check-release-tests-result + if: ${{ (needs.determine-test-scope.outputs.submodule_tests == 'true' || needs.determine-test-scope.outputs.version_match_tests == 'true') && needs.release-tests.result != 'success' && needs.release-tests.result != 'skipped' }} + run: | + echo "❌ Release tests failed." + exit 1 + + - name: all-checks-passed + if: ${{ success() }} + run: echo "✅ All required jobs passed!" + + pr-requirements-pass: + name: pr-requirements-pass + if: | + always() && + (github.event_name == 'pull_request' || github.event_name == 'pull_request_review' || github.event_name == 'merge_group') && + ((needs.determine-test-scope.outputs.pr_approval_state == 'true' && needs.determine-test-scope.outputs.local_tests == 'true') || needs.determine-test-scope.outputs.remote_tests == 'true') + needs: + - determine-test-scope + - workflow-validation + runs-on: ubuntu-latest + steps: + - name: check-workflow-validation + if: ${{ needs.workflow-validation.result != 'success' }} + run: | + echo "❌ Workflow validation failed. See workflow-validation job for details." + exit 1 + - name: all-checks-passed if: ${{ success() }} run: echo "✅ All required jobs passed!" 
diff --git a/.github/workflows/tidy3d-python-client-update-lockfile.yml b/.github/workflows/tidy3d-python-client-update-lockfile.yml index d89c7a3b37..7e63dd8ff7 100644 --- a/.github/workflows/tidy3d-python-client-update-lockfile.yml +++ b/.github/workflows/tidy3d-python-client-update-lockfile.yml @@ -47,6 +47,26 @@ jobs: virtualenvs-create: true virtualenvs-in-project: true + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 + with: + aws-access-key-id: ${{ secrets.AWS_CODEARTIFACT_ACCESS_KEY }} + aws-secret-access-key: ${{ secrets.AWS_CODEARTIFACT_ACCESS_SECRET }} + aws-region: us-east-1 + + - name: Configure CodeArtifact authentication + run: | + set -e + echo "Getting CodeArtifact token..." + CODEARTIFACT_AUTH_TOKEN=$(aws codeartifact get-authorization-token \ + --domain flexcompute \ + --domain-owner 625554095313 \ + --query authorizationToken \ + --output text) + + echo "Configuring Poetry with CodeArtifact credentials..." 
+ poetry config http-basic.codeartifact aws $CODEARTIFACT_AUTH_TOKEN + - name: Update lock file run: | set -e diff --git a/dev.Dockerfile b/dev.Dockerfile index 7982e47693..de7dce7c78 100644 --- a/dev.Dockerfile +++ b/dev.Dockerfile @@ -10,6 +10,13 @@ RUN apt-get update && \ xsel \ xclip +RUN apt-get update && apt-get install -y zip unzip curl \ + && rm -rf /var/lib/apt/lists/* \ + && curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \ + && unzip awscliv2.zip \ + && ./aws/install \ + && rm -rf aws awscliv2.zip + ENV POETRY_HOME=/opt/poetry RUN curl -sSL https://install.python-poetry.org | python3 - ENV PATH="/root/.local/bin:${POETRY_HOME}/bin:${PATH}" diff --git a/docs/development/release/version.rst b/docs/development/release/version.rst new file mode 100644 index 0000000000..ae01557d85 --- /dev/null +++ b/docs/development/release/version.rst @@ -0,0 +1,153 @@ +Release Workflow +----------------- + +The release process is automated through the ``tidy3d-python-client-release`` GitHub Actions workflow. + +Triggering a Release +^^^^^^^^^^^^^^^^^^^^ + +Releases are triggered manually via GitHub Actions: + +1. Go to **Actions** → **public/tidy3d/python-client-release** +2. Click **Run workflow** +3. 
Configure the release parameters + +Release Parameters +^^^^^^^^^^^^^^^^^^ + +**Required:** + +- ``release_tag``: Version tag (e.g., ``v2.10.0``, ``v2.10.0rc1``) + +**Optional:** + +- ``release_type``: + + - ``draft`` (default): Test release, no PyPI publish + - ``final``: Official release, publishes to PyPI + +- ``workflow_control``: Stage control for resuming partial releases + + - ``start-tag``: Full release from tag creation + - ``start-tests``: Resume from tests + - ``start-deploy``: Resume from deployment + - ``only-tag``: Create tag only + - ``only-tests``: Run tests only + - ``only-tag-tests``: Tag + tests only + - ``only-tag-deploy``: Tag + deploy only + +- ``client_tests``, ``cli_tests``, ``submodule_tests``: Enable/disable specific test suites (default: all ``true``) + +Release Workflow Stages +^^^^^^^^^^^^^^^^^^^^^^^^ + +**1. Tag Creation** + +Creates and pushes a git tag for the release version. + +**2. Tests** + +Runs comprehensive test suite including: + +- **Local tests**: Fast tests on self-hosted runners (Python 3.10, 3.13) +- **Remote tests**: Full matrix tests on GitHub runners (Python 3.10-3.13, Windows/Linux/macOS) +- **CLI tests**: Develop CLI functionality tests +- **Submodule tests**: Integration tests with dependent packages +- **Code quality**: Linting, type checking, commit message validation, security analysis + +.. note:: + Submodule tests are **automatically enabled** for final non-RC releases (versions that push to ``latest``). + +**3. Deployment** (if tests pass) + +- Creates GitHub release (draft or final) +- Publishes to PyPI (final releases only) +- Syncs documentation to ReadTheDocs +- Syncs branches (final releases only) + +Test Validation +^^^^^^^^^^^^^^^ + +All tests are validated through a centralized ``workflow-validation`` job in the tests workflow. The release gatekeeper checks this validation before allowing deployment. + +If any required test fails, deployment is automatically blocked. 
+ +Release Types +^^^^^^^^^^^^^ + +**Draft Release** (``release_type: draft``) + +- For testing and verification +- Creates GitHub release as draft +- Does **not** publish to PyPI +- Does **not** sync branches +- Tag format not strictly enforced + +**Final Release** (``release_type: final``) + +- For production deployment +- Creates public GitHub release +- Publishes to PyPI +- Syncs branches to maintain consistency +- Tag format strictly enforced: ``v{major}.{minor}.{patch}[rc{num}]`` + +**Non-RC Final Release** (e.g., ``v2.10.0``) + +- Pushes to ``latest`` branch +- **Automatically runs submodule tests** +- Represents stable production release + +**RC Release** (e.g., ``v2.10.0rc1``) + +- Pre-release candidate +- Does **not** push to ``latest`` +- Submodule tests run only if explicitly enabled + +Examples +^^^^^^^^ + +**Standard Release:** + +.. code-block:: yaml + + release_tag: v2.10.0 + release_type: final + workflow_control: start-tag + # All tests enabled by default + # Submodule tests auto-enabled (non-RC) + +**RC Release:** + +.. code-block:: yaml + + release_tag: v2.10.0rc1 + release_type: final + workflow_control: start-tag + # Submodule tests not auto-enabled (RC) + +**Test-Only Run:** + +.. code-block:: yaml + + release_tag: v2.10.0 + release_type: draft + workflow_control: only-tests + client_tests: true + +**Resume Failed Release (from deployment):** + +.. code-block:: yaml + + release_tag: v2.10.0 + release_type: final + workflow_control: start-deploy + # Skips tag creation and tests + +Best Practices +^^^^^^^^^^^^^^ + +1. **Always test first**: Run a ``draft`` release before ``final`` +2. **Use RC versions**: Test with ``v2.10.0rc1`` before releasing ``v2.10.0`` +3. **Monitor test results**: Check the ``workflow-validation`` job for detailed test status +4. **Resume on failure**: Use ``workflow_control`` stages to resume from failure points +5. 
**Verify submodules**: For final releases, ensure submodule tests pass (auto-enabled for non-RC) diff --git a/poetry.lock b/poetry.lock index 2b302d956a..b929aa8621 100644 --- a/poetry.lock +++ b/poetry.lock @@ -396,18 +396,18 @@ css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "boto3" -version = "1.40.70" +version = "1.40.73" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.40.70-py3-none-any.whl", hash = "sha256:e8c2f4f4cb36297270f1023ebe5b100333e0e88ab6457a9687d80143d2e15bf9"}, - {file = "boto3-1.40.70.tar.gz", hash = "sha256:191443707b391232ed15676bf6bba7e53caec1e71aafa12ccad2e825c5ee15cc"}, + {file = "boto3-1.40.73-py3-none-any.whl", hash = "sha256:85172e11e3b8d5a09504bc532b6589730ac68845410403ca3793d037b8a5d445"}, + {file = "boto3-1.40.73.tar.gz", hash = "sha256:3716703cb8b126607533853d7e2a85f0bb23b0b9d4805c69170abead33d725ef"}, ] [package.dependencies] -botocore = ">=1.40.70,<1.41.0" +botocore = ">=1.40.73,<1.41.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.14.0,<0.15.0" @@ -416,14 +416,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.40.70" +version = "1.40.73" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.40.70-py3-none-any.whl", hash = "sha256:4a394ad25f5d9f1ef0bed610365744523eeb5c22de6862ab25d8c93f9f6d295c"}, - {file = "botocore-1.40.70.tar.gz", hash = "sha256:61b1f2cecd54d1b28a081116fa113b97bf4e17da57c62ae2c2751fe4c528af1f"}, + {file = "botocore-1.40.73-py3-none-any.whl", hash = "sha256:87524c5fe552ecceaea72f51163b37ab35eb82aaa6a64eb80489ade7340c1d23"}, + {file = "botocore-1.40.73.tar.gz", hash = "sha256:0650ceada268824282da9af8615f3e4cf2453be8bf85b820f9207eff958d56d0"}, ] [package.dependencies] @@ -436,27 +436,27 @@ crt = ["awscrt (==0.27.6)"] [[package]] name = "cachetools" -version = "6.2.1" +version = "6.2.2" description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "cachetools-6.2.1-py3-none-any.whl", hash = "sha256:09868944b6dde876dfd44e1d47e18484541eaf12f26f29b7af91b26cc892d701"}, - {file = "cachetools-6.2.1.tar.gz", hash = "sha256:3f391e4bd8f8bf0931169baf7456cc822705f4e2a31f840d218f445b9a854201"}, + {file = "cachetools-6.2.2-py3-none-any.whl", hash = "sha256:6c09c98183bf58560c97b2abfcedcbaf6a896a490f534b031b661d3723b45ace"}, + {file = "cachetools-6.2.2.tar.gz", hash = "sha256:8e6d266b25e539df852251cfd6f990b4bc3a141db73b939058d809ebd2590fc6"}, ] [[package]] name = "certifi" -version = "2025.10.5" +version = "2025.11.12" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, - {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, ] [[package]] @@ -1373,15 +1373,15 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "2.1.1" +version = "2.1.2" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, - {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, + {file = "execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec"}, + {file = "execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd"}, ] [package.extras] @@ -3665,6 +3665,27 @@ traitlets = ">=5.1" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["pep440", "pre-commit", "pytest", "testpath"] +[[package]] +name = "nbsphinx" +version = "0.9.6" +description = "Jupyter Notebook Tools for Sphinx" +optional = true +python-versions = ">=3.6" +groups = ["main"] +markers = "python_version >= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" +files = [ + {file = "nbsphinx-0.9.6-py3-none-any.whl", hash = "sha256:336b0b557945a7678ec7449b16449f854bc852a435bb53b8a72e6b5dc740d992"}, + {file = "nbsphinx-0.9.6.tar.gz", 
hash = "sha256:c2b28a2d702f1159a95b843831798e86e60a17fc647b9bff9ba1585355de54e3"}, +] + +[package.dependencies] +docutils = ">=0.18.1" +jinja2 = "*" +nbconvert = ">=5.3,<5.4 || >5.4" +nbformat = "*" +sphinx = ">=1.8" +traitlets = ">=5" + [[package]] name = "nbsphinx" version = "0.9.7" @@ -3672,7 +3693,7 @@ description = "Jupyter Notebook Tools for Sphinx" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" +markers = "python_version == \"3.10\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "nbsphinx-0.9.7-py3-none-any.whl", hash = "sha256:7292c3767fea29e405c60743eee5393682a83982ab202ff98f5eb2db02629da8"}, {file = "nbsphinx-0.9.7.tar.gz", hash = "sha256:abd298a686d55fa894ef697c51d44f24e53aa312dadae38e82920f250a5456fe"}, @@ -4729,22 +4750,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "6.33.0" +version = "6.33.1" description = "" optional = true python-versions = ">=3.9" groups = ["main"] files = [ - {file = "protobuf-6.33.0-cp310-abi3-win32.whl", hash = "sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035"}, - {file = "protobuf-6.33.0-cp310-abi3-win_amd64.whl", hash = "sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee"}, - {file = "protobuf-6.33.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:905b07a65f1a4b72412314082c7dbfae91a9e8b68a0cc1577515f8df58ecf455"}, - {file = "protobuf-6.33.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e0697ece353e6239b90ee43a9231318302ad8353c70e6e45499fa52396debf90"}, - {file = "protobuf-6.33.0-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:e0a1715e4f27355afd9570f3ea369735afc853a6c3951a6afe1f80d8569ad298"}, - {file = "protobuf-6.33.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:35be49fd3f4fefa4e6e2aacc35e8b837d6703c37a2168a55ac21e9b1bc7559ef"}, - {file = "protobuf-6.33.0-cp39-cp39-win32.whl", hash = "sha256:cd33a8e38ea3e39df66e1bbc462b076d6e5ba3a4ebbde58219d777223a7873d3"}, - {file 
= "protobuf-6.33.0-cp39-cp39-win_amd64.whl", hash = "sha256:c963e86c3655af3a917962c9619e1a6b9670540351d7af9439d06064e3317cc9"}, - {file = "protobuf-6.33.0-py3-none-any.whl", hash = "sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995"}, - {file = "protobuf-6.33.0.tar.gz", hash = "sha256:140303d5c8d2037730c548f8c7b93b20bb1dc301be280c378b82b8894589c954"}, + {file = "protobuf-6.33.1-cp310-abi3-win32.whl", hash = "sha256:f8d3fdbc966aaab1d05046d0240dd94d40f2a8c62856d41eaa141ff64a79de6b"}, + {file = "protobuf-6.33.1-cp310-abi3-win_amd64.whl", hash = "sha256:923aa6d27a92bf44394f6abf7ea0500f38769d4b07f4be41cb52bd8b1123b9ed"}, + {file = "protobuf-6.33.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:fe34575f2bdde76ac429ec7b570235bf0c788883e70aee90068e9981806f2490"}, + {file = "protobuf-6.33.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:f8adba2e44cde2d7618996b3fc02341f03f5bc3f2748be72dc7b063319276178"}, + {file = "protobuf-6.33.1-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:0f4cf01222c0d959c2b399142deb526de420be8236f22c71356e2a544e153c53"}, + {file = "protobuf-6.33.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:8fd7d5e0eb08cd5b87fd3df49bc193f5cfd778701f47e11d127d0afc6c39f1d1"}, + {file = "protobuf-6.33.1-cp39-cp39-win32.whl", hash = "sha256:023af8449482fa884d88b4563d85e83accab54138ae098924a985bcbb734a213"}, + {file = "protobuf-6.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:df051de4fd7e5e4371334e234c62ba43763f15ab605579e04c7008c05735cd82"}, + {file = "protobuf-6.33.1-py3-none-any.whl", hash = "sha256:d595a9fd694fdeb061a62fbe10eb039cc1e444df81ec9bb70c7fc59ebcb1eafa"}, + {file = "protobuf-6.33.1.tar.gz", hash = "sha256:97f65757e8d09870de6fd973aeddb92f85435607235d20b2dfed93405d00c85b"}, ] [[package]] @@ -5061,19 +5082,19 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "4.0.2" +version = "4.0.3" description = "python code static checker" optional = true python-versions = 
">=3.10.0" groups = ["main"] markers = "extra == \"dev\" or extra == \"docs\"" files = [ - {file = "pylint-4.0.2-py3-none-any.whl", hash = "sha256:9627ccd129893fb8ee8e8010261cb13485daca83e61a6f854a85528ee579502d"}, - {file = "pylint-4.0.2.tar.gz", hash = "sha256:9c22dfa52781d3b79ce86ab2463940f874921a3e5707bcfc98dd0c019945014e"}, + {file = "pylint-4.0.3-py3-none-any.whl", hash = "sha256:896d09afb0e78bbf2e030cd1f3d8dc92771a51f7e46828cbc3948a89cd03433a"}, + {file = "pylint-4.0.3.tar.gz", hash = "sha256:a427fe76e0e5355e9fb9b604fd106c419cafb395886ba7f3cebebb03f30e081d"}, ] [package.dependencies] -astroid = ">=4.0.1,<=4.1.dev0" +astroid = ">=4.0.2,<=4.1.dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -5165,15 +5186,15 @@ test = ["alabaster (==0.7.12)", "attrs (==18.1.0)", "babel (==2.6.0)", "backcall [[package]] name = "pytest" -version = "9.0.0" +version = "9.0.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.10" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "pytest-9.0.0-py3-none-any.whl", hash = "sha256:e5ccdf10b0bac554970ee88fc1a4ad0ee5d221f8ef22321f9b7e4584e19d7f96"}, - {file = "pytest-9.0.0.tar.gz", hash = "sha256:8f44522eafe4137b0f35c9ce3072931a788a21ee40a2ed279e817d3cc16ed21e"}, + {file = "pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad"}, + {file = "pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8"}, ] [package.dependencies] @@ -5663,6 +5684,23 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.1 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +description = "Manipulate well-formed Roman numerals" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = 
"python_version >= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" +files = [ + {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, + {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, +] + +[package.extras] +lint = ["mypy (==1.15.0)", "pyright (==1.1.394)", "ruff (==0.9.7)"] +test = ["pytest (>=8)"] + [[package]] name = "rpds-py" version = "0.28.0" @@ -6447,7 +6485,7 @@ description = "Python documentation generator" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" +markers = "python_version == \"3.10\" and (extra == \"dev\" or extra == \"docs\")" files = [ {file = "sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2"}, {file = "sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927"}, @@ -6477,6 +6515,43 @@ docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=6.0)", "mypy (==1.11.1)", "pyright (==1.1.384)", "pytest (>=6.0)", "ruff (==0.6.9)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.18.0.20240506)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241005)", "types-requests (==2.32.0.20240914)", "types-urllib3 (==1.26.25.14)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] +[[package]] +name = "sphinx" +version = "8.2.3" +description = "Python documentation generator" +optional = true +python-versions = ">=3.11" +groups = ["main"] +markers = "python_version >= \"3.11\" and (extra == \"dev\" or extra == \"docs\")" +files = [ + {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, + {file = 
"sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, +] + +[package.dependencies] +alabaster = ">=0.7.14" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +roman-numerals-py = ">=1.0.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = ">=1.0.7" +sphinxcontrib-devhelp = ">=1.0.6" +sphinxcontrib-htmlhelp = ">=2.0.6" +sphinxcontrib-jsmath = ">=1.0.1" +sphinxcontrib-qthelp = ">=1.0.6" +sphinxcontrib-serializinghtml = ">=1.1.9" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.395)", "pytest (>=8.0)", "ruff (==0.9.9)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "pytest-xdist[psutil] (>=3.4)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + [[package]] name = "sphinx-book-theme" version = "1.1.4" @@ -6831,6 +6906,7 @@ description = "Read and write large, multi-dimensional arrays" optional = true python-versions = ">=3.10" groups = ["main"] +markers = "python_version == \"3.10\"" files = [ {file = "tensorstore-0.1.78-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:f1bc58164ad964d9cc298d20b62ca704ab6241639a21015e47ce6ea5b5cae27f"}, {file = "tensorstore-0.1.78-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1910101ea85b6507958da28628ef53712c5311df19a795f449604f82bae6a24b"}, @@ -6859,6 +6935,42 @@ files = [ ml_dtypes = ">=0.5.0" numpy = ">=1.22.0" +[[package]] +name = "tensorstore" +version = "0.1.79" +description = "Read 
and write large, multi-dimensional arrays" +optional = true +python-versions = ">=3.11" +groups = ["main"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "tensorstore-0.1.79-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:11a2c62694ea9c21770bc5a09938d3d15c4b9662b738ae6e1e513c26ed96251a"}, + {file = "tensorstore-0.1.79-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e152d334bf34fbabdfe8e5bc35b87d1f9947065924ff83c29e659308b36e948"}, + {file = "tensorstore-0.1.79-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4230b8fd29795e88e441f749d881973eca8dadf33c5262b367839fb8891f79b"}, + {file = "tensorstore-0.1.79-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83072ee0e551d6dca582e154b64c8b8066d276ec0759784e3149c28212a61f18"}, + {file = "tensorstore-0.1.79-cp311-cp311-win_amd64.whl", hash = "sha256:6c98c6b74c00e00eba7969292144e471d5c45d67088f0dc08e3a4c60a15ee191"}, + {file = "tensorstore-0.1.79-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:71aa9b45436d888c37b965f7b71195916d15438119b7dccb66a3b0776bfba367"}, + {file = "tensorstore-0.1.79-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:108c0e867aa2c87d4982cc6325a2de0c4f5bd63c2bea18adb193a370c40594ce"}, + {file = "tensorstore-0.1.79-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:debd435042c00be68ba1fb3cf59325a7babb3f4a3cf4744c87dde346802cbbb4"}, + {file = "tensorstore-0.1.79-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:608f7178ec6e4e4a3c26545b0a44f44bf83438d04bf2d960cd0e7699eaa99ef6"}, + {file = "tensorstore-0.1.79-cp312-cp312-win_amd64.whl", hash = "sha256:a071c6c255b7e412957a6aa563bc4250242c7894edad06ae6358e3d30b7d88ce"}, + {file = "tensorstore-0.1.79-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:1e8e2d098829919caac6a62cf568902e34789069ceddb28497d6e36ebcb95c0b"}, + {file = "tensorstore-0.1.79-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:29cf4336153af136ac8ac528e2ed46df19367edae7e14e37bca1a8b7c4848ef2"}, + {file = "tensorstore-0.1.79-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94d8fc9df1721b0287046aca7209fd5040889cad4202e7b73a1fdb77cd9b71c6"}, + {file = "tensorstore-0.1.79-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9f2dc3342e4686af98f6e259dc9fb377f1bf657b649c247bf6647bbe4f98090"}, + {file = "tensorstore-0.1.79-cp313-cp313-win_amd64.whl", hash = "sha256:0fd6165f3df49abc7c9de029b2b72d74bebd2ff2481a5ced003607eb61c56d3e"}, + {file = "tensorstore-0.1.79-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6f8f5a940eab434a951c2dadcc7c0516c7bef6d8b7a7144054f7a0c56152b5f5"}, + {file = "tensorstore-0.1.79-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:97756d2cba3c5ce21e15602c2af5a02521cc0ecda7f9fb6d18da2f3bd51827f4"}, + {file = "tensorstore-0.1.79-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:847982652273fb7b2d694b789205747aaf3e50ae64738c5cb7b5eb03d86a9947"}, + {file = "tensorstore-0.1.79-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7af9422269c2bfcdecf9dd55309060665ab9c2d7f6c892377ed32c032400feea"}, + {file = "tensorstore-0.1.79-cp314-cp314-win_amd64.whl", hash = "sha256:bbd8c1ab7d2e3c03ded3d40bb373ee9a67668e33a564484927865ce43b210386"}, + {file = "tensorstore-0.1.79.tar.gz", hash = "sha256:8dad44a8a7f2952a5d0030a8bd868b3cfdff048bd40ab53e7226f3d8b0881c5e"}, +] + +[package.dependencies] +ml_dtypes = ">=0.5.0" +numpy = ">=1.22.0" + [[package]] name = "terminado" version = "0.18.1" @@ -6895,6 +7007,83 @@ files = [ {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, ] +[[package]] +name = "tidy3d-extras" +version = "2.10.0rc3" +description = "tidy3d-extras is an optional plugin for Tidy3D providing addtional, more advanced local functionality." 
+optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"extras\"" +files = [ + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:77289824c9c6a8839e16086cf2cde827a341895d7fe7e0f88cc7c9afc0fc4c5f"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:050ce4f5f75eeeb8d56ccb3b6e3001662eddae343e26e99b5b6e1fc344fc65f3"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06d5b10b8a1402a3520b0e3333da42f45a956207b2f0542357b633b4406d7b2d"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97fcfae707e564c23ee8060666ab751f0468b5ca9a7befc888936af60d4925cd"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d08287adcb0e03ec1edaca9a7071152e101398fe90af5d358539b8373388550"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0ac7b1696d5df18ce95bf48d2bfead55cd805e66462017779c39505648433f5e"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2fa378e4618b7326a5cb39d30d409272861711bb9385b3389a027b8b6dcba532"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4ef486b3c350dc8237643df42408e442a39674d61b6e756f513e6b12231acd64"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5d8c29b6048b0c975f27695f7dabdae02112f0101d8a8746e91d159f57c84eb8"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f501e44116a64180b175f9fc051bfa1b9f78750d15db10044aa4f718567a4ac6"}, + {file = "tidy3d_extras-2.10.0rc3-cp310-cp310-win_amd64.whl", hash = "sha256:f05e7b949a90d38b933e579c9f36fed9b1a9eee51d60fe27024e64ac4804c0d9"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-macosx_10_15_x86_64.whl", hash = 
"sha256:404f6d2f67bd506811d9ef862e20ff847a4b2ec6e4395449ea62b6e2b41072d0"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de144ed08ea7b539ea9f50a8ce3904e39bac5524d128ebb38b923afc14a8281d"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3af64e1db3e376b0d9efa0277f00e6462b99ad705d57296d01f76369a2553273"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1eeb496d343de81f1f697b24ab00fa5f4f7713188b6e5bf083554d171f573b8"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3e7b5047a763cee035820d4ae727b16607bfde9b21fb1570954fbb8e83ff508"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0bd21969b8aad5b5fa551f0126f5ee579e198b54fcd0c4eb0b4324fd869cedd4"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e5077dec3851995cf784ad036b3a49f5e9989c7ab0a770f5668187c53af622a5"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46246dc548eac2e4ebd176c17b7e8c8df8dcb93c36c2fbd87d4a815e3dde34aa"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:42f162395294ed38559b5e98a94960679bad1e980163ddd9b07baf5b49276165"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc315cc67b40097bcc9ee8688265fe24fe306d2c237f0df39de414a440fcce54"}, + {file = "tidy3d_extras-2.10.0rc3-cp311-cp311-win_amd64.whl", hash = "sha256:bf55d2a1681be552c4b865355baf5891ee5312d8bcde0886195af16b5130d916"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7c325b4ed85f49049edb2995dcd3569a80faeb9a864d250ab2740c756237d7ca"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f0b51ba2f7e3309bc1ff5f9f7821888706410879f12dc5f28d432108dd0fc48e"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9ea3077d9eab4c080a79f614b6cde6b3e86f4ffdf2cfcded2e79b046ac986a"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9306c3838f82bd983ca6d684710fd539a6e6164996ef31ea0e1519de84a8a83"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef04d798692caed9c1690aed606505eee8ccc4461efbc236815aa33168c0310b"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c15b9b7837360ffda8a158b757cc874e0b63bb67e46667bdb0ed97a1c00e5a5a"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdf6fac18d29f7ad7b31a9f3e97c4a1b619af37fa935f82e22fefacdeb718d65"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:043e303e2c6c75810360db3f398ba8550e88f878bd603d7bb0d524c1430501ab"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ac2779391d1e1ec99d71460d7825c1d1761a26f5f07c33bb71825b3ddc2cdb85"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0e906b0e6205c36755ba39d2e97af750b4ad8812a9e64451864f98551e3936c7"}, + {file = "tidy3d_extras-2.10.0rc3-cp312-cp312-win_amd64.whl", hash = "sha256:84c91bbfab7665cfadb1e63842ac02de1153631ed51e6e1916d113c94cdc37bb"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:0852fecb88a8ab7fa405a5b45f5279ed5e3719ae0a3f1a8c8e80a93a70af20d2"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6eb3b9a9c5930924f22d85a8c20aadff12dbfa6407a33d4e86ab0f1bc25c866b"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a7f26b89bb327a4ff8a971357c1ebd795c7fa8cf646ccf09078913955aa742d3"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbd7eb3354f058a5a17f10a74cb50d26c93c3eda3ed490dc3aa6960bf3de4a91"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af14f59ec5197cf5b5d42fc3020a51a925583cdc397ae200cba8037133c1ec6c"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:fd34b6c33a861cf9d5882ad6234ac9cd275d66fd0df25000dd7fe8412e95304b"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:d6357578f8feaf54e86905565b94d994c0b300c71a0183470edf6184e69182ae"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:01d5df1cb05c3c74c58eacbbbba017f306014eeb19031acf41aaf0b26d8dc169"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:27551a8c086749e276920a38988acd5800f01ccc22576d23cbb25e390f2e8772"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b6df841ab328f72d405327b346e769ace6bf82ff76e11ae7516b49002497b13f"}, + {file = "tidy3d_extras-2.10.0rc3-cp313-cp313-win_amd64.whl", hash = "sha256:0ac3dbd4ab9bf9b2a592d0ef39b022fd69ddec1a2594861f5362a0790d85c7f3"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:8c456dc72ba157905a5c9943440b287367e4c5acb32ceca45de9f069eadb7a1f"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8767d9ab1d1d8257ffa4ee912ebbef5718ab707557a44555ac3b95b8db88c4c3"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9e7f18742b24222bd66dc5b777857c8999a2da9ee19b6779606a2eb75b5b2dd"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:65648f7914920e82712fa096e5dc43f8bf78021a6e80a7b7500d6f183dfb64e4"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd811aeecb03f381446beb6e885bc76fca90947fb8a0a32fd31c88dc924801a2"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:855a8f68e23c75a0e7ff03b827ae14cf913805ca1bff79a5ba728b8c1beed214"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f8ac176e8935154839c69573548e1b67ff22544de49a67d57db84df451b6752"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fcef781e77ede69eea9951699bf4732bb4b4bc30c975a6f13ee7d5dfce16cf73"}, + {file = "tidy3d_extras-2.10.0rc3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63c5f59fbe92b9f84d19e84eb8cf7d33a3a190646bcf7a52ab9a11b01b2f8763"}, +] + +[package.dependencies] +numpy = ">=2.0" +tidy3d = "2.10.0rc3" +xarray = ">=2024.6" + +[package.extras] +test = ["pytest (>=7.2)"] + +[package.source] +type = "legacy" +url = "https://flexcompute-625554095313.d.codeartifact.us-east-1.amazonaws.com/pypi/pypi-releases/simple" +reference = "codeartifact" + [[package]] name = "tinycss2" version = "1.4.0" @@ -7019,41 +7208,41 @@ files = [ [[package]] name = "torch" -version = "2.9.0" +version = "2.9.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = true python-versions = ">=3.10" groups = ["main"] markers = "(extra == \"dev\" or extra == \"pytorch\") and sys_platform == \"darwin\"" files = [ - {file = "torch-2.9.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:030bbfe367379ae6a4ae4042b6c44da25383343b8b3c68abaa9c7231efbaf2dd"}, - {file = "torch-2.9.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:51cb63902182a78e90886e8068befd8ea102af4b00e420263591a3d70c7d3c6c"}, - {file = "torch-2.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:3f6aad4d2f0ee2248bac25339d74858ff846c3969b27d14ac235821f055af83d"}, - {file = 
"torch-2.9.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:413e1654c9203733138858780e184d9fc59442f0b3b209e16f39354eb893db9b"}, - {file = "torch-2.9.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c596708b5105d0b199215acf0c9be7c1db5f1680d88eddadf4b75a299259a677"}, - {file = "torch-2.9.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:51de31219c97c51cf4bf2be94d622e3deb5dcc526c6dc00e97c17eaec0fc1d67"}, - {file = "torch-2.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd515c70059afd95f48b8192733764c08ca37a1d19803af6401b5ecad7c8676e"}, - {file = "torch-2.9.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:614a185e4986326d526a91210c8fc1397e76e8cfafa78baf6296a790e53a9eec"}, - {file = "torch-2.9.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e5f7af1dc4c0a7c4a260c2534f41ddaf209714f7c89145e644c44712fbd6b642"}, - {file = "torch-2.9.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:01cff95ecd9a212ea2f141db28acccdceb6a4c54f64e6c51091146f5e2a772c6"}, - {file = "torch-2.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:4582b162f541651f0cb184d3e291c05c2f556c7117c64a9873e2ee158d40062b"}, - {file = "torch-2.9.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:33f58e9a102a91259af289d50525c30323b5c9ae1d31322b6447c0814da68695"}, - {file = "torch-2.9.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c30a17fc83eeab346913e237c64b15b5ba6407fff812f6c541e322e19bc9ea0e"}, - {file = "torch-2.9.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8f25033b8667b57857dfd01458fbf2a9e6a6df1f8def23aef0dc46292f6aa642"}, - {file = "torch-2.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:d037f1b4ffd25013be4a7bf3651a0a910c68554956c7b2c92ebe87c76475dece"}, - {file = "torch-2.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e4e5b5cba837a2a8d1a497ba9a58dae46fa392593eaa13b871c42f71847503a5"}, - {file = "torch-2.9.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:64693568f5dc4dbd5f880a478b1cea0201cc6b510d91d1bc54fea86ac5d1a637"}, - {file = 
"torch-2.9.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:f8ed31ddd7d10bfb3fbe0b9fe01b1243577f13d75e6f4a0839a283915ce3791e"}, - {file = "torch-2.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:eff527d4e4846e6f70d2afd8058b73825761203d66576a7e04ea2ecfebcb4ab8"}, - {file = "torch-2.9.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:f8877779cf56d1ce431a7636703bdb13307f5960bb1af49716d8b179225e0e6a"}, - {file = "torch-2.9.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7e614fae699838038d888729f82b687c03413c5989ce2a9481f9a7e7a396e0bb"}, - {file = "torch-2.9.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:dfb5b8cd310ba3436c7e14e8b7833ef658cf3045e50d2bdaed23c8fc517065eb"}, - {file = "torch-2.9.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:b3d29524993a478e46f5d598b249cd824b7ed98d7fba538bd9c4cde6c803948f"}, - {file = "torch-2.9.0-cp314-cp314-win_amd64.whl", hash = "sha256:71c7578984f5ec0eb645eb4816ac8435fcf3e3e2ae1901bcd2f519a9cafb5125"}, - {file = "torch-2.9.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:71d9309aee457bbe0b164bce2111cd911c4ed4e847e65d5077dbbcd3aba6befc"}, - {file = "torch-2.9.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c08fb654d783899e204a32cca758a7ce8a45b2d78eeb89517cc937088316f78e"}, - {file = "torch-2.9.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ec8feb0099b2daa5728fbc7abb0b05730fd97e0f359ff8bda09865aaa7bd7d4b"}, - {file = "torch-2.9.0-cp314-cp314t-win_amd64.whl", hash = "sha256:695ba920f234ad4170c9c50e28d56c848432f8f530e6bc7f88fcb15ddf338e75"}, + {file = "torch-2.9.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:1cc208435f6c379f9b8fdfd5ceb5be1e3b72a6bdf1cb46c0d2812aa73472db9e"}, + {file = "torch-2.9.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:9fd35c68b3679378c11f5eb73220fdcb4e6f4592295277fbb657d31fd053237c"}, + {file = "torch-2.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:2af70e3be4a13becba4655d6cc07dcfec7ae844db6ac38d6c1dafeb245d17d65"}, + {file = 
"torch-2.9.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a83b0e84cc375e3318a808d032510dde99d696a85fe9473fc8575612b63ae951"}, + {file = "torch-2.9.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:62b3fd888277946918cba4478cf849303da5359f0fb4e3bfb86b0533ba2eaf8d"}, + {file = "torch-2.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d033ff0ac3f5400df862a51bdde9bad83561f3739ea0046e68f5401ebfa67c1b"}, + {file = "torch-2.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:0d06b30a9207b7c3516a9e0102114024755a07045f0c1d2f2a56b1819ac06bcb"}, + {file = "torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:52347912d868653e1528b47cafaf79b285b98be3f4f35d5955389b1b95224475"}, + {file = "torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6"}, + {file = "torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4"}, + {file = "torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083"}, + {file = "torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e"}, + {file = "torch-2.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb"}, + {file = "torch-2.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9"}, + {file = "torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2"}, + {file = "torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e"}, + {file = "torch-2.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a"}, + {file = 
"torch-2.9.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2"}, + {file = "torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db"}, + {file = "torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587"}, + {file = "torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a"}, + {file = "torch-2.9.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:19d144d6b3e29921f1fc70503e9f2fc572cde6a5115c0c0de2f7ca8b1483e8b6"}, + {file = "torch-2.9.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:c432d04376f6d9767a9852ea0def7b47a7bbc8e7af3b16ac9cf9ce02b12851c9"}, + {file = "torch-2.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:d187566a2cdc726fc80138c3cdb260970fab1c27e99f85452721f7759bbd554d"}, + {file = "torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c"}, + {file = "torch-2.9.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:0a2bd769944991c74acf0c4ef23603b9c777fdf7637f115605a4b2d8023110c7"}, + {file = "torch-2.9.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:07c8a9660bc9414c39cac530ac83b1fb1b679d7155824144a40a54f4a47bfa73"}, + {file = "torch-2.9.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c88d3299ddeb2b35dcc31753305612db485ab6f1823e37fb29451c8b2732b87e"}, ] [package.dependencies] @@ -7078,7 +7267,7 @@ nvidia-nvshmem-cu12 = {version = "3.3.20", markers = "platform_system == \"Linux nvidia-nvtx-cu12 = {version = "12.8.90", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} setuptools = {version = "*", markers = "python_version >= \"3.12\""} sympy = ">=1.13.3" -triton = {version = "3.5.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} 
+triton = {version = "3.5.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} typing-extensions = ">=4.10.0" [package.extras] @@ -7088,37 +7277,37 @@ pyyaml = ["pyyaml"] [[package]] name = "torch" -version = "2.9.0+cpu" +version = "2.9.1+cpu" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = true python-versions = ">=3.10" groups = ["main"] markers = "(extra == \"dev\" or extra == \"pytorch\") and sys_platform != \"darwin\"" files = [ - {file = "torch-2.9.0+cpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b224792ea567b52c7f1ce1d789567f6920e06fd3b339fa1e1b05948845f783ad"}, - {file = "torch-2.9.0+cpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:bd2a257e670ede9fc01c6d76dccdc473040913b8e9328169bf177dbdc38e2484"}, - {file = "torch-2.9.0+cpu-cp310-cp310-win_amd64.whl", hash = "sha256:96f3f7aa4eb9e7fc5af8a722eaf1e5e32e3039dbafe817178d7b90a8566be32d"}, - {file = "torch-2.9.0+cpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:da77341ccaba31762d9238b0942c165c4582a26818f3045b052b39cebdd7ad9d"}, - {file = "torch-2.9.0+cpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:add3e93ecc1eeaa6853f6a973ce60ffb3cb14ed2e80f5055e139b09385dce0a7"}, - {file = "torch-2.9.0+cpu-cp311-cp311-win_amd64.whl", hash = "sha256:389e1e0b8083fd355f7caf5ba82356b5e01c318998bd575dbf2285a0d8137089"}, - {file = "torch-2.9.0+cpu-cp311-cp311-win_arm64.whl", hash = "sha256:5ce3d01aef91dc078fbb121814e556d55bc886d303efaf42c4fe67e411f5f9ad"}, - {file = "torch-2.9.0+cpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3a651434ae1248b0568c12b5f9e3acc8942eb28378d9d04a79302938b68c6f24"}, - {file = "torch-2.9.0+cpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:28f6eb31b08180a5c5e98d5bc14eef6909c9f5a1dbff9632c3e02a8773449349"}, - {file = "torch-2.9.0+cpu-cp312-cp312-win_amd64.whl", hash = "sha256:e438061b87ec7dd6018fca9f975219889aa0a3f6cdc3ea10dd0ae2bc7f1c47ce"}, - {file = 
"torch-2.9.0+cpu-cp312-cp312-win_arm64.whl", hash = "sha256:eb13ff1c34e338d722e76a4fd83b8d282782505bd1b99af4b3c32da66eba6eb4"}, - {file = "torch-2.9.0+cpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:be4438d8dad7f0d5a5e54f0feef8a893446894ec87f102bb1d82dcc4518542e4"}, - {file = "torch-2.9.0+cpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6c9b217584400963d5b4daddb3711ec7a3778eab211e18654fba076cce3b8682"}, - {file = "torch-2.9.0+cpu-cp313-cp313-win_amd64.whl", hash = "sha256:728372e3f58c5826445f677746e5311c1935c1a7c59599f73a49ded850e038e8"}, - {file = "torch-2.9.0+cpu-cp313-cp313-win_arm64.whl", hash = "sha256:95e56c26f919fbb98f16e7a0b87af494b893f9da9a65a020f17a01c13e520a81"}, - {file = "torch-2.9.0+cpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:6c777160288b08555820781ae0f3a2c67a59bd24b065e88ca1ec20e2f9dc8ac7"}, - {file = "torch-2.9.0+cpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:528fd338311f31c9fb18038cafd00e6eae0bf5ad5577521701acb62510753d18"}, - {file = "torch-2.9.0+cpu-cp313-cp313t-win_amd64.whl", hash = "sha256:d572863990e7d2762b547735ef589f6350d9eb4e441d38753a1c33636698cf4c"}, - {file = "torch-2.9.0+cpu-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:44aadb735774d4a99525d2ec29126b23016c44a07b02ce6c237dfa61a223dd52"}, - {file = "torch-2.9.0+cpu-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:b355e07b7f0c369cb031adfcbff5c37a609abcea091b918a39886412afd2e07d"}, - {file = "torch-2.9.0+cpu-cp314-cp314-win_amd64.whl", hash = "sha256:c2698999361d73c2d25d7cc8a787130188d49b183abb18b554228daa102e1594"}, - {file = "torch-2.9.0+cpu-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:fa0d1373d04b30ff8f12d542135d292f1a1ddb7c0d852a3d487a320360e5dab9"}, - {file = "torch-2.9.0+cpu-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:2f49bb57a5fe0dc7f8e73ea9e5d36ebda2ea25b8a714a788f0fc2fc47d20a830"}, - {file = "torch-2.9.0+cpu-cp314-cp314t-win_amd64.whl", hash = 
"sha256:3a60d1ecf27a9cce839b3aa665b26f0af1b1007b9c9f1e7f597f6b7bdf107617"}, + {file = "torch-2.9.1+cpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:10866c8a48c4aa5ae3f48538dc8a055b99c57d9c6af2bf5dd715374d9d6ddca3"}, + {file = "torch-2.9.1+cpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7210713b66943fdbfcc237b2e782871b649123ac5d29f548ce8c85be4223ab38"}, + {file = "torch-2.9.1+cpu-cp310-cp310-win_amd64.whl", hash = "sha256:d6e8441453dc27524e3f1037fbf27b90a02644b84e42944b9354b4024cb51cc1"}, + {file = "torch-2.9.1+cpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0e611cfb16724e62252b67d31073bc5c490cb83e92ecdc1192762535e0e44487"}, + {file = "torch-2.9.1+cpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:3de2adb9b4443dc9210ef1f1b16da3647ace53553166d6360bbbd7edd6f16e4d"}, + {file = "torch-2.9.1+cpu-cp311-cp311-win_amd64.whl", hash = "sha256:69b3785d28be5a9c56ab525788ec5000349ec59132a74b7d5e954b905015b992"}, + {file = "torch-2.9.1+cpu-cp311-cp311-win_arm64.whl", hash = "sha256:15b4ae6fe371d96bffb8e1e9af62164797db20a0dc1337345781659cfd0b8bb1"}, + {file = "torch-2.9.1+cpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3bf9b442a51a2948e41216a76d7ab00f0694cfcaaa51b6f9bcab57b7f89843e6"}, + {file = "torch-2.9.1+cpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7417d8c565f219d3455654cb431c6d892a3eb40246055e14d645422de13b9ea1"}, + {file = "torch-2.9.1+cpu-cp312-cp312-win_amd64.whl", hash = "sha256:a4e06b4f441675d26b462123c8a83e77c55f1ec8ebc081203be2db1ea8054add"}, + {file = "torch-2.9.1+cpu-cp312-cp312-win_arm64.whl", hash = "sha256:1abe31f14b560c1f062699e966cb08ef5b67518a1cfac2d8547a3dbcd8387b06"}, + {file = "torch-2.9.1+cpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:3e532e553b37ee859205a9b2d1c7977fd6922f53bbb1b9bfdd5bdc00d1a60ed4"}, + {file = "torch-2.9.1+cpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:39b3dff6d8fba240ae0d1bede4ca11c2531ae3b47329206512d99e17907ff74b"}, + {file = 
"torch-2.9.1+cpu-cp313-cp313-win_amd64.whl", hash = "sha256:404a7ab2fffaf2ca069e662f331eb46313692b2f1630df2720094284f390ccef"}, + {file = "torch-2.9.1+cpu-cp313-cp313-win_arm64.whl", hash = "sha256:161decbff26a33f13cb5ba6d2c8f458bbf56193bcc32ecc70be6dd4c7a3ee79d"}, + {file = "torch-2.9.1+cpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:01b1884f724977a20c7da2f640f1c7b37f4a2c117a7f4a6c1c0424d14cb86322"}, + {file = "torch-2.9.1+cpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:031a597147fa81b1e6d79ccf1ad3ccc7fafa27941d6cf26ff5caaa384fb20e92"}, + {file = "torch-2.9.1+cpu-cp313-cp313t-win_amd64.whl", hash = "sha256:e586ab1363e3f86aa4cc133b7fdcf98deb1d2c13d43a7a6e5a6a18e9c5364893"}, + {file = "torch-2.9.1+cpu-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:65010ab4aacce6c9a1ddfc935f986c003ca8638ded04348fd326c3e74346237c"}, + {file = "torch-2.9.1+cpu-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:88adf5157db5da1d54b1c9fe4a6c1d20ceef00e75d854e206a87dbf69e3037dc"}, + {file = "torch-2.9.1+cpu-cp314-cp314-win_amd64.whl", hash = "sha256:f60e2565f261542efac07e25208fb3fc55c6fe82314a5a9cbee971edb5f27713"}, + {file = "torch-2.9.1+cpu-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3ac2b8df2c55430e836dcda31940d47f1f5f94b8731057b6f20300ebea394dd9"}, + {file = "torch-2.9.1+cpu-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5b688445f928f13563b7418b17c57e97bf955ab559cf73cd8f2b961f8572dbb3"}, + {file = "torch-2.9.1+cpu-cp314-cp314t-win_amd64.whl", hash = "sha256:cf9c3e50b595721ca6b488bdcc326e0f1af73ed28b9b66eff504a96649bb5c96"}, ] [package.dependencies] @@ -7276,27 +7465,27 @@ test-more = ["coveralls", "ezdxf", "ipython", "marimo", "matplotlib", "pymeshlab [[package]] name = "triton" -version = "3.5.0" +version = "3.5.1" description = "A language and compiler for custom Deep Learning operations" optional = true python-versions = "<3.15,>=3.10" groups = ["main"] markers = "(extra == \"dev\" or extra == \"pytorch\") and 
platform_system == \"Linux\" and platform_machine == \"x86_64\" and sys_platform == \"darwin\"" files = [ - {file = "triton-3.5.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6f90de6a6566bb619b4c0adc9855729e1b1b5e26533fca1bf6206e96b6d277a3"}, - {file = "triton-3.5.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5d3b3d480debf24eaa739623c9a42446b0b77f95593d30eb1f64cd2278cc1f0"}, - {file = "triton-3.5.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8457b22148defefdcb7fa8144b05ce211b9faefad650a1ce85b23df488d5549c"}, - {file = "triton-3.5.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f34bfa21c5b3a203c0f0eab28dcc1e49bd1f67d22724e77fb6665a659200a4ec"}, - {file = "triton-3.5.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7da21fccceafc163e3a5e857abe34351ef76345af06cabf9637a914742671f0b"}, - {file = "triton-3.5.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9e71db82261c4ffa3921cd050cd5faa18322d2d405c30eb56084afaff3b0833"}, - {file = "triton-3.5.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:188da5b81fa2f8322c27fec1627703eac24cb9bb7ab0dfbe9925973bc1b070d3"}, - {file = "triton-3.5.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e6bb9aa5519c084a333acdba443789e50012a4b851cd486c54f0b8dc2a8d3a12"}, - {file = "triton-3.5.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03127d9b33aaf979c856676b394bc059ec1d68cb6da68ae03f62dd8ad77a04ae"}, - {file = "triton-3.5.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c83f2343e1a220a716c7b3ab9fccfcbe3ad4020d189549200e2d2e8d5868bed9"}, - {file = "triton-3.5.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:468936651d383f4a6d10068d34a627505e13af55be5d002b9f27b987e7a5f0ac"}, - {file = 
"triton-3.5.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da0fa67ccd76c3dcfb0bffe1b1c57c685136a6bd33d141c24d9655d4185b1289"}, - {file = "triton-3.5.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7ceef21410229ac23173a28eee5cfc0e37c1dfdb8b4bc11ecda2e3ecec7c686"}, - {file = "triton-3.5.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:317fe477ea8fd4524a6a8c499fb0a36984a56d0b75bf9c9cb6133a1c56d5a6e7"}, + {file = "triton-3.5.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f63e34dcb32d7bd3a1d0195f60f30d2aee8b08a69a0424189b71017e23dfc3d2"}, + {file = "triton-3.5.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5fc53d849f879911ea13f4a877243afc513187bc7ee92d1f2c0f1ba3169e3c94"}, + {file = "triton-3.5.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da47169e30a779bade679ce78df4810fca6d78a955843d2ddb11f226adc517dc"}, + {file = "triton-3.5.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61413522a48add32302353fdbaaf92daaaab06f6b5e3229940d21b5207f47579"}, + {file = "triton-3.5.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:275a045b6ed670dd1bd005c3e6c2d61846c74c66f4512d6f33cc027b11de8fd4"}, + {file = "triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232"}, + {file = "triton-3.5.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56765ffe12c554cd560698398b8a268db1f616c120007bfd8829d27139abd24a"}, + {file = "triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba"}, + {file = "triton-3.5.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:02c770856f5e407d24d28ddc66e33cf026e6f4d360dcb8b2fabe6ea1fc758621"}, + {file = "triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8"}, + {file = "triton-3.5.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f617aa7925f9ea9968ec2e1adaf93e87864ff51549c8f04ce658f29bbdb71e2d"}, + {file = "triton-3.5.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0637b1efb1db599a8e9dc960d53ab6e4637db7d4ab6630a0974705d77b14b60"}, + {file = "triton-3.5.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8932391d7f93698dfe5bc9bead77c47a24f97329e9f20c10786bb230a9083f56"}, + {file = "triton-3.5.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bac7f7d959ad0f48c0e97d6643a1cc0fd5786fe61cb1f83b537c6b2d54776478"}, ] [package.extras] @@ -7634,6 +7823,7 @@ files = [ design = ["bayesian-optimization", "pygad", "pyswarms"] dev = ["bayesian-optimization", "cma", "coverage", "devsim", "diff-cover", "dill", "gdstk", "grcwa", "ipython", "ipython", "jinja2", "jupyter", "memory_profiler", "mypy", "myst-parser", "nbconvert", "nbdime", "nbsphinx", "networkx", "openpyxl", "optax", "pre-commit", "psutil", "pydata-sphinx-theme", "pygad", "pylint", "pyswarms", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", "pytest-xdist", "rtree", "ruff", "sax", "scikit-rf", "signac", "sphinx", "sphinx-book-theme", "sphinx-copybutton", "sphinx-design", "sphinx-favicon", "sphinx-notfound-page", "sphinx-sitemap", "sphinx-tabs", "sphinxemoji", "tmm", "torch", "torch", "tox", "trimesh", "vtk", "zizmor"] docs = ["cma", "devsim", "gdstk", "grcwa", "ipython", "jinja2", "jupyter", "myst-parser", "nbconvert", "nbdime", "nbsphinx", "openpyxl", "optax", "pydata-sphinx-theme", "pylint", "sax", "signac", "sphinx", "sphinx-book-theme", "sphinx-copybutton", "sphinx-design", "sphinx-favicon", 
"sphinx-notfound-page", "sphinx-sitemap", "sphinx-tabs", "sphinxemoji", "tmm"] +extras = ["tidy3d-extras"] gdstk = ["gdstk"] heatcharge = ["devsim", "trimesh", "vtk"] pytorch = ["torch", "torch"] @@ -7645,4 +7835,4 @@ vtk = ["vtk"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "9a217bd12d87a82f8fa9c0e1a2047e922714440ccb16c89ddec08773828acfd7" +content-hash = "ef1920fb4451202ac1302c413fbfc3c93085b8a33bc0eb405fde21a2c526835d" diff --git a/pyproject.toml b/pyproject.toml index b69d2d5cad..5979e9d106 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -120,7 +120,7 @@ sphinxemoji = { version = "*", optional = true } devsim = { version = "*", optional = true } cma = { version = "*", optional = true } openpyxl = { version = "*", optional = true } -# tidy3d-extras = { version = "2.10.0rc3", optional = true } +tidy3d-extras = { version = "2.10.0rc3", optional = true } [tool.poetry.extras] dev = [ @@ -217,7 +217,7 @@ heatcharge = ["trimesh", "vtk", "devsim"] # plugins with extra deps pytorch = ["torch"] design = ["bayesian-optimization", "pygad", "pyswarms"] -# extras = ["tidy3d-extras"] +extras = ["tidy3d-extras"] [tool.poetry.scripts] tidy3d = "tidy3d.web.cli:tidy3d_cli" @@ -233,6 +233,11 @@ url = "https://download.pytorch.org/whl/cpu" priority = "explicit" +[[tool.poetry.source]] +name = "codeartifact" +url = "https://flexcompute-625554095313.d.codeartifact.us-east-1.amazonaws.com/pypi/pypi-releases/simple/" +priority = "supplemental" + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From f1cf3f4e21ec8a13bf6828778b322ba7c677d377 Mon Sep 17 00:00:00 2001 From: daquinteroflex Date: Fri, 14 Nov 2025 17:57:43 +0100 Subject: [PATCH 2/3] a --- AGENTS.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index 8e407e0bd6..9bd650e419 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -10,12 +10,23 @@ - Prefix every repo command with `poetry run` to match CI. 
- Re-run `poetry run pytest` locally as part of your development loop; `pyproject.toml` already wires markers, doctests, coverage, and env vars. - The pre-commit hooks you enabled during onboarding run automatically; still run `poetry run pre-commit run --all-files` before opening a PR or when new hooks land so your tree matches `.pre-commit-config.yaml` and the checks in `.github/workflows/tidy3d-python-client-tests.yml` (covers `ruff format`, `ruff check`, doc hooks). +- When editing YAML, Python, or docs, match the surrounding indentation exactly; never re-indent or reformat lines you didn’t otherwise modify. ### Do / Don't - **Do** run `poetry run pre-commit run --all-files` before opening a PR; **don't** skip it even if individual hooks passed earlier. - **Do** stick to `poetry run …` commands; **don't** invoke tools outside Poetry, since that drifts from CI environments. - **Do** reuse `scripts/` utilities; **don't** add new helper modules without checking for an existing script first. +## CI / Tests Workflow Architecture +- **Multi-entry workflow**: `.github/workflows/tidy3d-python-client-tests.yml` listens to `workflow_dispatch`, `workflow_call`, `merge_group`, `pull_request`, and `pull_request_review`, letting release tooling, automation, and reviewers share a single test surface while still supporting manual knobs like `release_tag` and per-suite toggles. +- **determine-test-scope gateway**: A first-class `determine-test-scope` job inspects event metadata, review state, and manual inputs, then emits booleans (e.g., `local_tests`, `remote_tests`, `code_quality_tests`, `pr_review_tests`) that gate every downstream job—only the suites needed for the current context actually run. 
+- **Code-quality slice**: Lint (`ruff` inside an `uv` container), `mypy`, branch/commit lint, `zizmor`, and schema verification all key off `code_quality_tests`; they share fast Ubuntu runners, skip automatically when not required, and push SARIF plus human-readable summaries for traceability. +- **Execution layers**: Heavy pytest coverage runs are split into self-hosted `local-tests` (Ubuntu container on Slurm with `uv` virtualenvs, multi-Python coverage+diff coverage) and hosted `remote-tests` (GitHub runners across {Ubuntu, macOS, Windows} × Python 3.10–3.13, Poetry-managed envs, doctests first), each protected by concurrency groups and optional tag-based checkout for release verification. +- **Specialized workflows**: `develop-cli-tests` and `release-tests` are invoked via `uses: ./.github/workflows/...`, so this workflow orchestrates CLI and schema/submodule checks without duplicating steps; manual dispatch inputs decide whether those paths should run. +- **Validation & merge gating**: `workflow-validation` aggregates the status of every conditional job and fails fast if any mandatory suite errored, while `pr-requirements-pass` focuses on pull-request contexts, ensuring approvals + required suites finish before signaling “✅” back to branch protection rules. +- **Fail-safe if guards**: Terminal enforcement stages add `if: always()` so `workflow-validation` and `pr-requirements-pass` execute even if an upstream job failed or was skipped; each step then re-checks the relevant booleans to surface actionable failure messages instead of silently succeeding. 
+- **Environment scoping**: The workflow leans on explicit env vars—`determine-test-scope` exports `$INPUT_*`, `$REVIEW_STATE`, etc., local/remote pytest runs set `PIP_ONLY_BINARY=gdstk`, `MPLBACKEND=agg`, and a computed `RELEASE_TAG`, while coverage/diff steps use temporary exports like `PYTHONUNBUFFERED` or matrix-specific Python versions—so every job receives deterministic configuration without relying on implicit runner state. + ## Coding Style & Naming - Favor descriptive `snake_case` functions and `PascalCase` classes tied to the physics domain. - Most simulation and monitor classes subclass `Tidy3dBaseModel`; reserve `pydantic.BaseModel` for lightweight helpers (see `tidy3d/updater.py`), and rely on `.updated_copy(...)` plus shared validators in `tidy3d/components/validators.py`. From 8771196d59dc5ed2da24b1da7cfb8db5900f29bd Mon Sep 17 00:00:00 2001 From: daquinteroflex Date: Fri, 14 Nov 2025 18:49:28 +0100 Subject: [PATCH 3/3] final integration --- .../tidy3d-python-client-release-tests.yml | 224 ------------------ .../workflows/tidy3d-python-client-tests.yml | 183 ++++++++++++-- 2 files changed, 169 insertions(+), 238 deletions(-) delete mode 100644 .github/workflows/tidy3d-python-client-release-tests.yml diff --git a/.github/workflows/tidy3d-python-client-release-tests.yml b/.github/workflows/tidy3d-python-client-release-tests.yml deleted file mode 100644 index 8020e28c01..0000000000 --- a/.github/workflows/tidy3d-python-client-release-tests.yml +++ /dev/null @@ -1,224 +0,0 @@ -name: "public/tidy3d/python-client-release-tests" - -on: - workflow_dispatch: - inputs: - version_match_tests: - description: 'Run version consistency checks' - type: boolean - default: true - submodule_tests: - description: 'Run submodule tests' - type: boolean - default: true - release_tag: - description: 'Release Tag to validate (e.g., v2.10.0, v2.10.0rc1)' - required: false - type: string - default: '' - - workflow_call: - inputs: - version_match_tests: - description: 'Run 
version consistency checks' - type: boolean - default: true - submodule_tests: - description: 'Run submodule tests' - type: boolean - default: true - release_tag: - description: 'Release Tag to validate (e.g., v2.10.0, v2.10.0rc1)' - required: false - type: string - default: '' - outputs: - workflow_success: - description: 'Whether all release tests passed' - value: ${{ jobs.workflow-validation.result == 'success' }} - -permissions: - contents: read - -jobs: - verify-version-consistency: - name: verify-version-consistency - runs-on: ubuntu-latest - if: ${{ github.event.inputs.version_match_tests || inputs.version_match_tests }} - steps: - - name: checkout-code - uses: actions/checkout@v4 - with: - ref: ${{ inputs.release_tag || github.ref }} - persist-credentials: false - - - name: check-version-consistency - env: - RELEASE_TAG: ${{ inputs.release_tag }} - run: | - set -e - echo "=== Verifying Version Consistency ===" - echo "" - - # Extract version from pyproject.toml - PYPROJECT_VERSION=$(grep '^version = ' pyproject.toml | head -n 1 | sed 's/version = "\(.*\)"/\1/') - echo "pyproject.toml version: $PYPROJECT_VERSION" - - # Extract version from tidy3d/version.py - VERSION_PY=$(grep '__version__ = ' tidy3d/version.py | sed 's/__version__ = "\(.*\)"/\1/') - echo "tidy3d/version.py version: $VERSION_PY" - echo "" - - # Compare versions - if [[ "$PYPROJECT_VERSION" != "$VERSION_PY" ]]; then - echo "❌ ERROR: Version mismatch detected!" - echo " pyproject.toml: $PYPROJECT_VERSION" - echo " tidy3d/version.py: $VERSION_PY" - echo "" - echo "These versions must match before release." - echo "Please update both files to the same version." 
- exit 1 - fi - - echo "✅ Version consistency check passed: $PYPROJECT_VERSION" - echo "" - - # If release tag provided, validate it matches the version - if [[ -n "$RELEASE_TAG" ]]; then - echo "=== Validating Release Tag ===" - echo "Release tag: $RELEASE_TAG" - - # Strip 'v' prefix from tag if present - TAG_VERSION="${RELEASE_TAG#v}" - echo "Tag version (without 'v'): $TAG_VERSION" - - if [[ "$TAG_VERSION" != "$PYPROJECT_VERSION" ]]; then - echo "❌ ERROR: Release tag does not match package version!" - echo " Release tag: $RELEASE_TAG (version: $TAG_VERSION)" - echo " Package version: $PYPROJECT_VERSION" - echo "" - echo "The release tag should be 'v$PYPROJECT_VERSION'" - exit 1 - fi - - echo "✅ Release tag matches package version" - fi - - echo "" - echo "=== Version Checks Passed ===" - - test-submodules: - name: test-submodules - runs-on: ubuntu-latest - if: ${{ github.event.inputs.submodule_tests || inputs.submodule_tests }} - env: - RELEASE_TAG: ${{ inputs.release_tag }} - steps: - - name: checkout-head - if: ${{ !env.RELEASE_TAG }} - uses: actions/checkout@v4 - with: - submodules: 'recursive' - fetch-depth: 0 - persist-credentials: true - - - name: checkout-tag - if: ${{ env.RELEASE_TAG }} - uses: actions/checkout@v4 - with: - ref: ${{ env.RELEASE_TAG }} - submodules: 'recursive' - fetch-depth: 0 - persist-credentials: true - - - name: initialize-submodules - run: | - git submodule update --init --recursive - - - name: check-submodules-for-multiple-branches - shell: bash - run: | - BRANCHES=("develop" $LATEST_BRANCH) # Add your branches here - - for BRANCH in "${BRANCHES[@]}"; do - echo "Analyzing branch: $BRANCH" - - # Fetch all branches and tags - git fetch --all --verbose - - # Checkout the branch - git checkout $BRANCH - - NOTEBOOKS_PATH=docs/notebooks - FAQ_PATH=docs/faq - - # Checking Notebooks submodule - echo "Checking $NOTEBOOKS_PATH for updates..." 
- cd $NOTEBOOKS_PATH - NOTEBOOKS_CURRENT_COMMIT=$(git rev-parse HEAD) - echo $(git fetch --all --verbose) - echo $(git remote get-url origin) - if git show-ref --verify refs/remotes/origin/$BRANCH; then - echo "Branch $BRANCH exists." - else - echo "::error::Branch $BRANCH does not exist on remote." - exit 1 - fi - NOTEBOOKS_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/${BRANCH}) - echo "NOTEBOOKS_LATEST_COMMIT: $NOTEBOOKS_LATEST_COMMIT" - echo "NOTEBOOKS_CURRENT_COMMIT: $NOTEBOOKS_CURRENT_COMMIT" - - cd ../.. - if [ "$NOTEBOOKS_LATEST_COMMIT" != "$NOTEBOOKS_CURRENT_COMMIT" ]; then - echo "::error::Submodule $NOTEBOOKS_PATH is not up to date with the $BRANCH branch. Please update it." - exit 1 - else - echo "Submodule $NOTEBOOKS_PATH is up to date with the $BRANCH branch." - fi - - # Checking FAQs only on the develop branch - if [[ "$BRANCH" == "develop" ]]; then - echo "Checking $FAQ_PATH for updates..." - cd $FAQ_PATH - FAQ_CURRENT_COMMIT=$(git rev-parse HEAD) - echo $(git fetch --all --verbose) - echo $(git remote get-url origin) - FAQ_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/develop) - echo "FAQ_LATEST_COMMIT: $FAQ_LATEST_COMMIT" - echo "FAQ_CURRENT_COMMIT: $FAQ_CURRENT_COMMIT" - cd ../.. - if [ "$FAQ_LATEST_COMMIT" != "$FAQ_CURRENT_COMMIT" ]; then - echo "::error::Submodule $FAQ_PATH is not up to date. Please update it." - exit 1 - else - echo "Submodule $FAQ_PATH is up to date." - fi - fi - done - - echo "" - echo "=== Submodule Checks Passed ===" - - workflow-validation: - name: workflow-validation - runs-on: ubuntu-latest - if: always() - needs: - - verify-version-consistency - - test-submodules - steps: - - name: check-version-consistency-result - if: ${{ (github.event.inputs.version_match_tests || inputs.version_match_tests) && needs.verify-version-consistency.result != 'success' && needs.verify-version-consistency.result != 'skipped' }} - run: | - echo "❌ Version consistency check failed." 
- exit 1 - - - name: check-submodule-tests-result - if: ${{ (github.event.inputs.submodule_tests || inputs.submodule_tests) && needs.test-submodules.result != 'success' && needs.test-submodules.result != 'skipped' }} - run: | - echo "❌ Submodule tests failed." - exit 1 - - - name: all-checks-passed - if: ${{ success() }} - run: echo "✅ All release tests passed!" diff --git a/.github/workflows/tidy3d-python-client-tests.yml b/.github/workflows/tidy3d-python-client-tests.yml index b851f2264c..74c3e88208 100644 --- a/.github/workflows/tidy3d-python-client-tests.yml +++ b/.github/workflows/tidy3d-python-client-tests.yml @@ -770,17 +770,165 @@ jobs: with: release_tag: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} - release-tests: - name: release-tests + verify-version-consistency: + name: verify-version-consistency + runs-on: ubuntu-latest needs: determine-test-scope - if: | - always() && - (needs.determine-test-scope.outputs.submodule_tests == 'true' || needs.determine-test-scope.outputs.version_match_tests == 'true') - uses: ./.github/workflows/tidy3d-python-client-release-tests.yml - with: - submodule_tests: ${{ needs.determine-test-scope.outputs.submodule_tests == 'true' }} - version_match_tests: ${{ needs.determine-test-scope.outputs.version_match_tests == 'true' }} - release_tag: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + if: needs.determine-test-scope.outputs.version_match_tests == 'true' + steps: + - name: checkout-code + uses: actions/checkout@v4 + with: + ref: ${{ (github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag) || github.ref }} + persist-credentials: false + + - name: check-version-consistency + env: + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + run: | + set -e + echo "=== Verifying Version Consistency ===" + echo 
"" + + # Extract version from pyproject.toml + PYPROJECT_VERSION=$(grep '^version = ' pyproject.toml | head -n 1 | sed 's/version = "\(.*\)"/\1/') + echo "pyproject.toml version: $PYPROJECT_VERSION" + + # Extract version from tidy3d/version.py + VERSION_PY=$(grep '__version__ = ' tidy3d/version.py | sed 's/__version__ = "\(.*\)"/\1/') + echo "tidy3d/version.py version: $VERSION_PY" + echo "" + + # Compare versions + if [[ "$PYPROJECT_VERSION" != "$VERSION_PY" ]]; then + echo "❌ ERROR: Version mismatch detected!" + echo " pyproject.toml: $PYPROJECT_VERSION" + echo " tidy3d/version.py: $VERSION_PY" + echo "" + echo "These versions must match before release." + echo "Please update both files to the same version." + exit 1 + fi + + echo "✅ Version consistency check passed: $PYPROJECT_VERSION" + echo "" + + # If release tag provided, validate it matches the version + if [[ -n "$RELEASE_TAG" ]]; then + echo "=== Validating Release Tag ===" + echo "Release tag: $RELEASE_TAG" + + # Strip 'v' prefix from tag if present + TAG_VERSION="${RELEASE_TAG#v}" + echo "Tag version (without 'v'): $TAG_VERSION" + + if [[ "$TAG_VERSION" != "$PYPROJECT_VERSION" ]]; then + echo "❌ ERROR: Release tag does not match package version!" 
+ echo " Release tag: $RELEASE_TAG (version: $TAG_VERSION)" + echo " Package version: $PYPROJECT_VERSION" + echo "" + echo "The release tag should be 'v$PYPROJECT_VERSION'" + exit 1 + fi + + echo "✅ Release tag matches package version" + fi + + echo "" + echo "=== Version Checks Passed ===" + + test-submodules: + name: test-submodules + runs-on: ubuntu-latest + needs: determine-test-scope + if: ${{ always() && (needs.determine-test-scope.outputs.submodule_tests == 'true') && (github.event.inputs.release_tag || inputs.release_tag) && !contains(github.event.inputs.release_tag || inputs.release_tag, 'rc') }} + env: + RELEASE_TAG: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.release_tag || inputs.release_tag }} + steps: + - name: checkout-head + if: ${{ !env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + submodules: 'recursive' + fetch-depth: 0 + persist-credentials: true + + - name: checkout-tag + if: ${{ env.RELEASE_TAG }} + uses: actions/checkout@v4 + with: + ref: ${{ env.RELEASE_TAG }} + submodules: 'recursive' + fetch-depth: 0 + persist-credentials: true + + - name: initialize-submodules + run: | + git submodule update --init --recursive + + - name: check-submodules-for-multiple-branches + shell: bash + run: | + BRANCHES=("develop" $LATEST_BRANCH) # Add your branches here + + for BRANCH in "${BRANCHES[@]}"; do + echo "Analyzing branch: $BRANCH" + + # Fetch all branches and tags + git fetch --all --verbose + + # Checkout the branch + git checkout $BRANCH + + NOTEBOOKS_PATH=docs/notebooks + FAQ_PATH=docs/faq + + # Checking Notebooks submodule + echo "Checking $NOTEBOOKS_PATH for updates..." + cd $NOTEBOOKS_PATH + NOTEBOOKS_CURRENT_COMMIT=$(git rev-parse HEAD) + echo $(git fetch --all --verbose) + echo $(git remote get-url origin) + if git show-ref --verify refs/remotes/origin/$BRANCH; then + echo "Branch $BRANCH exists." + else + echo "::error::Branch $BRANCH does not exist on remote." 
+ exit 1 + fi + NOTEBOOKS_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/${BRANCH}) + echo "NOTEBOOKS_LATEST_COMMIT: $NOTEBOOKS_LATEST_COMMIT" + echo "NOTEBOOKS_CURRENT_COMMIT: $NOTEBOOKS_CURRENT_COMMIT" + + cd ../.. + if [ "$NOTEBOOKS_LATEST_COMMIT" != "$NOTEBOOKS_CURRENT_COMMIT" ]; then + echo "::error::Submodule $NOTEBOOKS_PATH is not up to date with the $BRANCH branch. Please update it." + exit 1 + else + echo "Submodule $NOTEBOOKS_PATH is up to date with the $BRANCH branch." + fi + + # Checking FAQs only on the develop branch + if [[ "$BRANCH" == "develop" ]]; then + echo "Checking $FAQ_PATH for updates..." + cd $FAQ_PATH + FAQ_CURRENT_COMMIT=$(git rev-parse HEAD) + echo $(git fetch --all --verbose) + echo $(git remote get-url origin) + FAQ_LATEST_COMMIT=$(git rev-parse refs/remotes/origin/develop) + echo "FAQ_LATEST_COMMIT: $FAQ_LATEST_COMMIT" + echo "FAQ_CURRENT_COMMIT: $FAQ_CURRENT_COMMIT" + cd ../.. + if [ "$FAQ_LATEST_COMMIT" != "$FAQ_CURRENT_COMMIT" ]; then + echo "::error::Submodule $FAQ_PATH is not up to date. Please update it." + exit 1 + else + echo "Submodule $FAQ_PATH is up to date." + fi + fi + done + + echo "" + echo "=== Submodule Checks Passed ===" workflow-validation: name: workflow-validation @@ -796,7 +944,8 @@ jobs: - lint-branch-name - zizmor - develop-cli-tests - - release-tests + - verify-version-consistency + - test-submodules runs-on: ubuntu-latest steps: - name: check-linting-result @@ -853,10 +1002,16 @@ jobs: echo "❌ CLI tests failed." 
exit 1 - - name: check-release-tests-result - if: ${{ (needs.determine-test-scope.outputs.submodule_tests == 'true' || needs.determine-test-scope.outputs.version_match_tests == 'true') && needs.release-tests.result != 'success' && needs.release-tests.result != 'skipped' }} + - name: check-version-consistency-result + if: ${{ needs.determine-test-scope.outputs.version_match_tests == 'true' && needs.verify-version-consistency.result != 'success' && needs.verify-version-consistency.result != 'skipped' }} + run: | + echo "❌ Version consistency check failed." + exit 1 + + - name: check-submodule-tests-result + if: ${{ needs.determine-test-scope.outputs.submodule_tests == 'true' && needs.test-submodules.result != 'success' && needs.test-submodules.result != 'skipped' }} run: | - echo "❌ Release tests failed." + echo "❌ Submodule tests failed." exit 1 - name: all-checks-passed