diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..695ee6270c --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,5 @@ +# https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt +# https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view + +# Eslint naming convention enforcement +21be3183a28d91ae8c1ec7c9dffa282ef091c319 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 604da00c69..b1121cea52 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,16 +1,13 @@ # This file is used to auto request reviews for a pull request # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners -* @autodesk/synthesis-devs hunter.barclay@autodesk.com +* @autodesk/synthesis-devs @autodesk/synthesis-admin -/exporter/ @autodesk/fusion hunter.barclay@autodesk.com +/exporter/ @autodesk/fusion @autodesk/synthesis-admin -/fission/src/aps/ @autodesk/fusion hunter.barclay@autodesk.com -/fission/src/mirabuf/ @autodesk/fusion hunter.barclay@autodesk.com -/fission/src/proto/ @autodesk/fusion hunter.barclay@autodesk.com +/fission/src/aps/ @autodesk/fusion @autodesk/synthesis-admin +/fission/src/mirabuf/ @autodesk/fusion @autodesk/synthesis-admin +/fission/src/proto/ @autodesk/fusion @autodesk/synthesis-admin -/fission/src/ui/components/ julian.wright@autodesk.com luca.haverty@autodesk.com hunter.barclay@autodesk.com -/fission/**/*.css julian.wright@autodesk.com luca.haverty@autodesk.com hunter.barclay@autodesk.com - -/fission/ @autodesk/fission hunter.barclay@autodesk.com -/installer/ @autodesk/fusion hunter.barclay@autodesk.com +/fission/ @autodesk/fission @autodesk/synthesis-admin +/installer/ @autodesk/fusion @autodesk/synthesis-admin diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index dd98a21148..86cf0395c4 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,10 +1,47 @@ -### Description -Lorem Ipsum +## Task -### Objectives -- [ ] Lorem Ipsum + -### Testing Done -- Lorem Ipsum +AARD- -[JIRA Issue](https://jira.autodesk.com/browse/AARD-XXXX) + + +## Symptom + + +## Solution + + + +## Verification + + + +--- + +Before merging, ensure the following criteria are met: + +- [ ] All acceptance criteria outlined in the ticket are met. +- [ ] Necessary test cases have been added and updated. +- [ ] A feature toggle or safe disable path has been added (if applicable). +- [ ] User-facing polish: + - Ask: *"Is this ready-looking?"* +- [ ] Cross-linking between Jira and GitHub: + - PR links to the relevant Jira issue. + - Jira ticket has a comment referencing this PR. 
diff --git a/.github/workflows/FissionBiome.yml b/.github/workflows/FissionBiome.yml new file mode 100644 index 0000000000..deeaa84cee --- /dev/null +++ b/.github/workflows/FissionBiome.yml @@ -0,0 +1,48 @@ +name: Fission - Biome Format and Lint Validation + +on: + workflow_dispatch: {} + push: + branches: [prod, dev] + pull_request: + branches: [prod, dev] + +jobs: + runBiomeValidationScript: + name: Biome Validation + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@v4 + - name: Bun Runtime Setup + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + + - name: Cache Dependencies + uses: actions/cache@v3 + with: + key: "${{runner.os}}-npm-fission-${{hashFiles('fission/package.json')}}" + path: "fission/node_modules" + restore-keys: | + ${{runner.os}}-npm-fission- + ${{runner.os}}-npm + + - name: Install Dependencies + run: | + cd fission + bun install + + - name: Lint + id: lint-validation + if: ${{ always() }} + run: | + cd fission + bun run lint --diagnostic-level=error + + - name: Format + id: format-validation + if: ${{ always() }} + run: | + cd fission + bun run fmt diff --git a/.github/workflows/FissionBuild.yml b/.github/workflows/FissionBuild.yml index 7ca377f98c..037c976860 100644 --- a/.github/workflows/FissionBuild.yml +++ b/.github/workflows/FissionBuild.yml @@ -3,7 +3,7 @@ name: Fission - Build on: workflow_dispatch: {} pull_request: - branches: [ prod, dev ] + branches: [prod, dev] jobs: buildFission: @@ -11,18 +11,32 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v2 - - name: JavaScript Setup - uses: actions/setup-node@v2 + uses: actions/checkout@v4 + - name: Bun Runtime Setup + uses: oven-sh/setup-bun@v2 with: - node-version: 20 + bun-version: latest + + - name: Cache Node Dependencies + uses: actions/cache@v3 + with: + key: "${{runner.os}}-npm-fission-${{hashFiles('fission/package.json')}}" + path: "fission/node_modules" + restore-keys: | + ${{runner.os}}-npm-fission- + ${{runner.os}}-npm - name: Install Dependencies run: | cd fission - npm install + bun install - name: Build Fission run: | cd fission - npm run build && echo "Build Passed" || (echo "Build Failed" && exit 1) + bun run build && echo "Build Passed" || (echo "Build Failed" && exit 1) + + - name: Build Electron Version of Fission + run: | + cd fission + bun run electron:make && echo "Build Passed" || (echo "Build Failed" && exit 1) diff --git a/.github/workflows/FissionESLintFormat.yml b/.github/workflows/FissionESLintFormat.yml deleted file mode 100644 index 7ac950f82f..0000000000 --- a/.github/workflows/FissionESLintFormat.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Fission - ES Lint Format Validation - -on: - workflow_dispatch: {} - push: - branches: [ prod, dev ] - pull_request: - branches: [ prod, dev ] - -jobs: - runFormatValidationScript: - name: ESLint Format Validation - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v2 - - name: JavaScript Setup - uses: actions/setup-node@v2 - with: - node-version: 20 - - name: Install Dependencies - run: | - cd fission - npm install - - name: Linter - id: linter-validation - if: ${{ always() }} - run: | - cd fission - npm run lint && echo "ESLint Validation Passed" || (echo "ESLint Validation Failed" && exit 1) - - name: Prettier - id: prettier-validation - if: ${{ always() }} - run: | - cd fission - npx prettier --version - npm run prettier && echo "Prettier Validation Passed" || (echo "Prettier Validation Failed" && exit 1) diff --git 
a/.github/workflows/FissionPackage.yml b/.github/workflows/FissionPackage.yml index e181cf1182..ac0fb45bf6 100644 --- a/.github/workflows/FissionPackage.yml +++ b/.github/workflows/FissionPackage.yml @@ -3,53 +3,62 @@ name: Fission - Package on: workflow_dispatch: {} push: - branches: [ prod, dev ] + branches: [prod, dev] jobs: runUnitTests: name: Package runs-on: ubuntu-latest steps: - - name: Checkout Code - uses: actions/checkout@v2 - - name: JavaScript Setup - uses: actions/setup-node@v2 - with: - node-version: 20 - - - name: Get date - id: date # this is used on variable path - run: | - echo "timestamp=$(date +'%Y-%m-%dT%H-%M-%S')" >> $GITHUB_OUTPUT - - - name: Install Dependencies - run: | - cd fission - npm install - - - name: Get package info - id: info - uses: codex-team/action-nodejs-package-info@v1.1 - with: - path: fission/ - - - name: Build - id: build - run: | - cd fission - npm run build:prod - npm run build:dev - - - name: Upload Artifact - uses: actions/upload-artifact@v4 - id: upload-artifact-prod - with: - name: "${{ steps.info.outputs.name }}@${{ steps.info.outputs.version }}[${{ steps.date.outputs.timestamp }}]" - path: fission/dist/prod/ - - - name: Upload Artifact - uses: actions/upload-artifact@v4 - id: upload-artifact-dev - with: - name: "${{ steps.info.outputs.name }}-dev@${{ steps.info.outputs.version }}[${{ steps.date.outputs.timestamp }}]" - path: fission/dist/dev/ \ No newline at end of file + - name: Checkout Code + uses: actions/checkout@v4 + - name: Bun Runtime Setup + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + + - name: Get date + id: date # this is used on variable path + run: | + echo "timestamp=$(date +'%Y-%m-%dT%H-%M-%S')" >> $GITHUB_OUTPUT + + - name: Cache Dependencies + uses: actions/cache@v3 + with: + key: "${{runner.os}}-npm-fission-${{hashFiles('fission/package.json')}}" + path: "fission/node_modules" + restore-keys: | + ${{runner.os}}-npm-fission- + ${{runner.os}}-npm + + - name: Install Dependencies + run: | + cd fission + bun install + + - name: Get package info + id: info + uses: codex-team/action-nodejs-package-info@v1.1 + with: + path: fission/ + + - name: Build + id: build + run: | + cd fission + bun run build:prod + bun run build:dev + + - name: Upload Artifact + uses: actions/upload-artifact@v4 + id: upload-artifact-prod + with: + name: "${{ steps.info.outputs.name }}@${{ steps.info.outputs.version }}[${{ steps.date.outputs.timestamp }}]" + path: fission/dist/prod/ + + - name: Upload Artifact + uses: actions/upload-artifact@v4 + id: upload-artifact-dev + with: + name: "${{ steps.info.outputs.name }}-dev@${{ steps.info.outputs.version }}[${{ steps.date.outputs.timestamp }}]" + path: fission/dist/dev/ diff --git a/.github/workflows/FissionUnitTest.yml b/.github/workflows/FissionUnitTest.yml index b751de71d4..b730d231e4 100644 --- a/.github/workflows/FissionUnitTest.yml +++ b/.github/workflows/FissionUnitTest.yml @@ -3,73 +3,95 @@ name: Fission - Unit Test on: workflow_dispatch: {} push: - branches: [ prod, dev ] + branches: [prod, dev] pull_request: - branches: [ prod, dev ] + branches: [prod, dev] jobs: runUnitTests: - name: Unit Tests + name: Playwright Unit Tests + container: + image: mcr.microsoft.com/playwright:v1.54.2-noble runs-on: ubuntu-latest + defaults: + run: + working-directory: "fission" steps: - name: Checkout Code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: JavaScript Setup - uses: actions/setup-node@v2 + uses: actions/setup-node@v4 with: node-version: 20 - - name: Cache downloaded assets 
+ - name: Cache Unzipped Synthesis Assets id: cache-assets uses: actions/cache@v3 with: - path: fission/public/ - key: ${{ runner.os }}-assets-v1 - restore-keys: | - ${{ runner.os }}-assets- + path: fission/public/Downloadables + key: ${{ runner.os }}-assets-${{hashFiles('fission/public/assetpack.zip')}} - - name: Download assets if not cached + - name: Download Synthesis assetpack if not cached if: steps.cache-assets.outputs.cache-hit != 'true' run: | - cd fission - curl -o public/assetpack.zip https://synthesis.autodesk.com/downloadables/assetpack.zip && unzip -o public/assetpack.zip -d public/ + cd .. + apt update + apt install git-lfs unzip + git config --global --add safe.directory `pwd` + git lfs pull - - name: Get installed Playwright version - id: playwright-version - run: echo "PLAYWRIGHT_VERSION=$(node -e "console.log(require('./package-lock.json').dependencies['@playwright/test'].version)")" >> $GITHUB_ENV + cd fission + unzip -o public/assetpack.zip -d public/ || echo - - name: Cache downloaded browsers - id: cache-browsers + - name: Cache Node Dependencies uses: actions/cache@v3 with: - path: | - ~/.cache/ms-playwright/ - key: ${{ runner.os }}-assets-playwright-${{ env.PLAYWRIGHT_VERSION }}-v2 + key: "${{runner.os}}-npm-fission-${{hashFiles('fission/package.json')}}" + path: "fission/node_modules" + restore-keys: | + ${{runner.os}}-npm-fission- + ${{runner.os}}-npm - name: Install Dependencies - run: | - cd fission - npm install + run: npm install - - name: Download playwright if not cached - if: steps.cache-browsers.outputs.cache-hit != 'true' - run: | - cd fission - npx playwright install --with-deps - npx playwright install-deps + - name: Run Tests + run: HOME=/root npm run test --bail=1 - - name: Unit Tests - id: unit-tests - run: | - cd fission - npm run test - continue-on-error: true + runAssetpackTests: + name: Assetpack Tests + needs: runUnitTests + container: + image: mcr.microsoft.com/playwright:v1.54.2-noble + runs-on: ubuntu-latest + defaults: + run: + working-directory: "fission" + steps: + - name: Checkout Code + uses: actions/checkout@v4 + - name: JavaScript Setup + uses: actions/setup-node@v4 + with: + node-version: 20 - - name: Check Success - run: | - if [ ${{ steps.unit-tests.outcome }} == "success" ]; then - echo "Format Validation Passed" - else - echo "Format Validation Failed" - exit 1 - fi + - name: Cache Unzipped Synthesis Assets + id: cache-assets + uses: actions/cache@v3 + with: + path: fission/public/Downloadables + key: ${{ runner.os }}-assets-${{hashFiles('fission/public/assetpack.zip')}} + + - name: Cache Node Dependencies + uses: actions/cache@v3 + with: + key: "${{runner.os}}-npm-fission-${{hashFiles('fission/package.json')}}" + path: "fission/node_modules" + restore-keys: | + ${{runner.os}}-npm-fission- + ${{runner.os}}-npm + + - name: Run Assetpack Tests + run: HOME=/root npm run test src/test/mirabuf/DefaultAssets.test.ts --bail=1 + env: + VITE_RUN_ASSETPACK_TEST: true diff --git a/.github/workflows/FusionTyping.yml b/.github/workflows/FusionTyping.yml index 13de49fc2f..e156d71677 100644 --- a/.github/workflows/FusionTyping.yml +++ b/.github/workflows/FusionTyping.yml @@ -18,6 +18,6 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: "3.12" - run: pip install -r requirements-mypy.txt - run: mypy diff --git a/.github/workflows/FusionWebUI.yml b/.github/workflows/FusionWebUI.yml new file mode 100644 index 0000000000..283fd3fcb4 --- /dev/null +++ 
b/.github/workflows/FusionWebUI.yml @@ -0,0 +1,73 @@ +name: Fusion - WebUI Build and Format + +on: + workflow_dispatch: {} + + push: + branches: [ prod, dev ] + paths: + - 'exporter/SynthesisFusionAddin/web/**' + pull_request: + branches: [ prod, dev ] + paths: + - 'exporter/SynthesisFusionAddin/web/**' + + +jobs: + runFormatValidationScript: + defaults: + run: + working-directory: exporter/SynthesisFusionAddin/web + name: Biome Format Validation + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@v4 + - name: Bun Runtime Setup + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + - name: Cache Node Dependencies + uses: actions/cache@v3 + with: + key: "${{runner.os}}-npm-fusion-${{hashFiles('exporter/SynthesisFusionAddin/web/bun.lock')}}" + path: 'exporter/SynthesisFusionAddin/web/node_modules' + restore-keys: | + ${{runner.os}}-npm-fusion- + ${{runner.os}}-npm + + - name: Install Dependencies + run: bun install --frozen-lockfile + + - name: Linter & Formatter + run: | + bunx biome --version + bunx biome ci --error-on-warnings + + runBuildScript: + name: Build + runs-on: ubuntu-latest + defaults: + run: + working-directory: exporter/SynthesisFusionAddin/web + steps: + - name: Checkout Code + uses: actions/checkout@v4 + - name: Bun Runtime Setup + uses: oven-sh/setup-bun@v2 + with: + bun-version: latest + - name: Cache Node Dependencies + uses: actions/cache@v3 + with: + key: "${{runner.os}}-npm-fusion-${{hashFiles('exporter/SynthesisFusionAddin/web/bun.lock')}}" + path: 'exporter/SynthesisFusionAddin/web/node_modules' + restore-keys: | + ${{runner.os}}-npm-fusion- + ${{runner.os}}-npm + + - name: Install Dependencies + run: bun install --frozen-lockfile + + - name: Build + run: bun run build \ No newline at end of file diff --git a/.gitignore b/.gitignore index a57a98b5ac..bdcb57b628 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,10 @@ .vscode/ build/ dist/ +out/ +renderer/ *.log .DS_Store *.pkg *.exe +.idea \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index 8f7b277463..00d28c789f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -3,4 +3,4 @@ url = https://github.com/HiceS/mirabuf.git [submodule "jolt"] path = jolt - url = https://github.com/HunterBarclay/JoltPhysics.js.git + url = https://github.com/azaleacolburn/JoltPhysics.js.git diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3ba8f13e09..849ff17e3f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,50 +2,71 @@ ![Synthesis: An Autodesk Technology](/engine/Assets/Resources/Branding/Synthesis/Synthesis-An-Autodesk-Technology-2023-lockup-Wht-OL-No-Year-stacked.png#gh-dark-mode-only) # Synthesis Contribution Guide -Synthesis is 100% open source and relies on the FIRST community to help make it better. The Synthesis Contribution Guide suggests ways in which you can get involved through development and non-development avenues. + +Synthesis is 100% open source and relies on the FIRST community to help shape its growth. The Synthesis Contribution Guide suggests ways in which you can get involved through development and non-development avenues. + +# How to Contribute + +Before you contribute to this repository, please first discuss the change you wish to make a GitHub issue or reach out through our [community Discord](https://www.discord.gg/hHcF9AVgZA). This way we can ensure that there is no overlap between outside contributors and internal development work. + +When ready to contribute, fork the Synthesis repository, make your changes, and submit a pull request. 
When contributing to Synthesis, please branch from and submit to our `dev` branch. The `prod` branch is intended to be a copy of either exactly what is in production, or what is ready for production. We like to keep changes to the dev branch so they have time to simmer and be distributed via beta releases. + +Be sure to fill out the pull request template accordingly to make reviewing your work as smooth as possible. # Why Contribute? Benefits to Contributing -* Prepare for an internship - share your contributions when applying to the [Synthesis Summer Internship.](https://synthesis.autodesk.com/internship.html) -* Add your contributions to Synthesis: An Autodesk Technology to your portfolio -* Meet other members of the FIRST community -* Get involved and learn more about Autodesk products + +- Prepare for an internship - share your contributions when applying to the [Synthesis Summer Internship.](https://synthesis.autodesk.com/internship.html) +- Add your contributions to Synthesis: An Autodesk Technology to your portfolio +- Meet other members of the FIRST community +- Get involved and learn more about Autodesk products +- Improve a product that you care about - if you use Synthesis and notice a feature you want, get involved! # How to Contribute + ### Found a bug? Have an idea for a feature? + Please [contact us](#Contact-Us) to let us know about the issue or feature! -*A Note to Developers*: When contributing to this repository and making large changes, please first discuss the change you wish to make via issue, email, or any other method with the owners of this repository before making a change. This way, we can ensure that there is no overlap between contributions and internal development work. You may contact us using any of [these methods](#Contact-Us), although email is preferred in this case. +_A Note to Developers_: Please first discuss the change you wish to make via issue, email, or any other method with the owners of this repository before making a change. This way, we can ensure that there is no overlap between contributions and internal development work. You may contact us using any of [these methods](#Contact-Us), although email is preferred in this case. -For smaller changes, just submit a pull request and be sure to include a clear and detailed description of the changes you've made so that we can verify them and eventually merge. +For smaller changes, just submit a pull request and be sure to follow the PR template to create a clear and detailed description of the changes you've made. ### Submit a CAD Model + Submit your team's CAD model to be added to the Synthesis robot and field libraries by emailing your designs to frc@autodesk.com. Please share them in the form of a Fusion360 Share-link. Raw Mirabuf files will not be accepted. ### Write Tutorials, Increase Documentation -We are always interested in ways to make our tutorials and documentation more clear to our end users. If there is content missing or could be refined, please follow our [contribution guidelines](#How-to-Contribute) for submitting a change. + +We are always interested in ways to make our tutorials and documentation clearer for our end users. If there is content missing or could be refined, please follow our [contribution guidelines](#How-to-Contribute) for submitting a change. ### Translate our Tutorials and Documentation + If you or someone you know can read and write in another language, we would like to translate our text-based resources to make them available in more languages. 
Contact frc@autodesk.com for more details. ### Create How-To or Project DIY and Inspiration Guides + Did you add a feature to Synthesis, or learn how to use a specific feature? Write a how-to guide or [share your project with us](#Contact-Us). -### Share a Use Case Story -Hearing how you use Synthesis is valuable feedback to our team. Share your stories by tagging us [@synthesis.adsk](https://www.instagram.com/synthesis.adsk/) on Instagram, posting on [ChiefDelphi](https://www.chiefdelphi.com/), or talking about it on [Discord](https://discord.gg/FuuQ9UGycM). +### Share Your Use Case + +Hearing how you use Synthesis is valuable feedback to our team. Share your use cases by tagging us [@synthesis.adsk](https://www.instagram.com/synthesis.adsk/) on Instagram, posting on [ChiefDelphi](https://www.chiefdelphi.com/), or talking about it on [our Discord](https://discord.gg/FuuQ9UGycM). ### Expand FIRST Support + FIRST control systems and essentials like sensors, cameras, various motors, etc. would greatly increase simulation support. Learn more about [contributing development here](#How-to-Contribute). ### Beta Testing + Help us try and break Synthesis! At the end of summer development, we provide a Synthesis beta for users to test and sometimes in exchange for your time we offer incentives to users. You can stay up-to-date with any Synthesis releases by joining our [Discord server](https://www.discord.gg/hHcF9AVgZA) and/or following us on Instagram [@synthesis.adsk](https://www.instagram.com/synthesis.adsk/). ### Contact Us -| Platform | Link | -| :--- | :---: | -| Discord | [Synthesis Community Discord](https://discord.gg/FuuQ9UGycM) | -| Email | [frc@autodesk.com](mailto:frc@autodesk.com) | -| Instagram | [@synthesis.adsk](https://www.instagram.com/synthesis.adsk/) | -| Reddit | [u/synthesis_adsk](https://www.reddit.com/user/synthesis_adsk/) | + +| Platform | Link | +| :---------- | :--------------------------------------------------------------------: | +| Discord | [Synthesis Community Discord](https://discord.gg/FuuQ9UGycM) | +| Email | [frc@autodesk.com](mailto:frc@autodesk.com) | +| Instagram | [@synthesis.adsk](https://www.instagram.com/synthesis.adsk/) | +| Reddit | [u/synthesis_adsk](https://www.reddit.com/user/synthesis_adsk/) | | ChiefDelphi | [synthesis_adsk](https://www.chiefdelphi.com/u/synthesis_adsk/summary) | To let us know about an issue with Synthesis, you can submit a [GitHub issue](https://github.com/Autodesk/synthesis/issues/new/choose). diff --git a/NOTICE.txt b/NOTICE.txt new file mode 100644 index 0000000000..a1f1b7acef --- /dev/null +++ b/NOTICE.txt @@ -0,0 +1,19 @@ +This product bundles the following third-party sound assets: + +1. Files licensed under the Creative Commons CC0 1.0 Universal Public Domain Dedication: + - fission/src/assets/sound-files/checkdown.wav + - fission/src/assets/sound-files/checkup.wav + - fission/src/assets/sound-files/clickdown.wav + - fission/src/assets/sound-files/clickup.wav + - fission/src/assets/sound-files/DullClick.wav + - fission/src/assets/sound-files/beep.wav + + License: CC0 1.0 Universal (Public Domain Dedication) + URL: https://creativecommons.org/publicdomain/zero/1.0/ + +2. Files licensed from FIRST Robotics: + - fission/src/assets/sound-files/MathStart.wav + - fission/src/assets/sound-files/MatchEnd.wav + - fission/src/assets/sound-files/MatchResume.wav + + For more information, contact FIRST Robotics or refer to your licensing agreement. 
diff --git a/README.md b/README.md index b82638bfa0..879da48e5b 100644 --- a/README.md +++ b/README.md @@ -3,10 +3,11 @@
-[![Fission - Unit Test](https://github.com/Autodesk/synthesis/actions/workflows/FissionUnitTest.yml/badge.svg?branch=prod)](https://github.com/Autodesk/synthesis/actions/workflows/FissionUnitTest.yml) -[![Fission - Packaging](https://github.com/Autodesk/synthesis/actions/workflows/FissionPackage.yml/badge.svg?branch=prod)](https://github.com/Autodesk/synthesis/actions/workflows/FissionPackage.yml) -[![Fission - Lint/Format](https://github.com/Autodesk/synthesis/actions/workflows/FissionESLintFormat.yml/badge.svg?branch=prod)](https://github.com/Autodesk/synthesis/actions/workflows/FissionESLintFormat.yml) -[![Fusion - Format](https://github.com/Autodesk/synthesis/actions/workflows/BlackFormat.yml/badge.svg?branch=prod)](https://github.com/Autodesk/synthesis/actions/workflows/BlackFormat.yml) +![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Autodesk/synthesis/FissionUnitTest.yml?branch=prod&style=for-the-badge&logoSize=auto&label=Fission%20Unit%20Tests&link=https%3A%2F%2Fgithub.com%2FAutodesk%2Fsynthesis%2Factions%2Fworkflows%2FFissionUnitTest.yml) +![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Autodesk/synthesis/FissionPackage.yml?branch=prod&style=for-the-badge&logoSize=auto&label=Fission%20-%20Packaging&link=https%3A%2F%2Fgithub.com%2FAutodesk%2Fsynthesis%2Factions%2Fworkflows%2FFissionPackage.yml) +![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Autodesk/synthesis/FissionBiome.yml?branch=prod&style=for-the-badge&logoSize=auto&label=Fission%20Lint%2FFormat&link=https%3A%2F%2Fgithub.com%2FAutodesk%2Fsynthesis%2Factions%2Fworkflows%2FFissionBiome.yml) +![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Autodesk/synthesis/BlackFormat.yml?branch=prod&style=for-the-badge&logoSize=auto&label=Fusion%20Exporter%20Format&link=https%3A%2F%2Fgithub.com%2FAutodesk%2Fsynthesis%2Factions%2Fworkflows%2FBlackFormat.yml) +![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Autodesk/synthesis/FusionWebUI.yml?branch=prod&style=for-the-badge&logoSize=auto&label=Fusion%20Exporter%20WebUI%20Build&link=https%3A%2F%2Fgithub.com%2FAutodesk%2Fsynthesis%2Factions%2Fworkflows%2FFusionWebUI.yml) Synthesis is a robotics simulator designed by and for [FIRST®](https://www.firstinspires.org/) robotics students to help teams design, strategize, test and practice. Teams have the ability to import their own robots and fields using our [Fusion Exporter](/exporter/) or use the pre-made ones available within Synthesis. @@ -14,90 +15,74 @@ For more information on the product itself or the team, visit [http://synthesis. ## Goals -Synthesis is built with a direct focus on the FIRST® community. Every single one of our developers is a FIRST® student. We've also made the project completely open source in order to better involve the community. This way contributors can help make Synthesis better or modify Synthesis to better suit their team’s needs. +Synthesis is built with a direct focus on the FIRST® community. Every single one of our developers is or was a FIRST® student. We've also made the project completely open source in order to better involve the community. This way contributors can help improve Synthesis broadly or adapt it to their team’s needs. Here are some of our primary goals for Synthesis: -- **Ease of Use**: It's important for us that Synthesis is out of the box ready for teams to use. 
We want to make sure that teams can get up and running with Synthesis as quickly as possible. To that end, Synthesis comes ready with a variety of robots and fields; in addition to the ability to export and import your own. +- **Ease of Use**: It's important for us that Synthesis is out of the box ready for teams to use. We want to make sure that teams can get up and running with Synthesis as quickly as possible. To that end, Synthesis comes ready with a variety of robots and fields in addition to the ability to export and import your own. - **Testing Robot Designs**: Synthesis is designed to be a tool for teams to quickly test their robot designs in a semi-realistic environment. Are you a builder who wants to use some crazy virtual four-bar linkage and your team says it's a waste of time? Well now you can prove them wrong by testing it in Synthesis! - **Exploring the Field Environment**: Every year on kickoff, for both FTC and FRC FIRST® competitions, Synthesis has the newest field available immediately. This allows teams to explore the field through a 3D model, drive a robot around, and begin to strategize for the upcoming season's game. -- **Driver Practice & Strategy**: Not getting enough driver practice or don't have a full field available to you? Synthesis has you covered with the ability to drive your robot around with a gamepad from a first-person view at the driver station; allowing you to get a feel for potential control scheme layouts and any line-of-sight challenges that may arise. This also allows the drive team and the programmers to communicate about what control layouts work best for each driver. +- **Driver Practice & Strategy**: Not getting enough driver practice or don't have a full field available to you? Synthesis has you covered with the ability to play full simulated matches, controlling your robot with a gamepad from a first-person view at the driver station. This allows you to get a feel for potential control scheme layouts and any line-of-sight challenges that may arise. This also allows the drive team and the programmers to communicate about what control layouts work best for each driver. ## Getting Started -If you are a FIRST robotics student who just wants to use Synthesis, you *don't* need this repo. Simply **install the latest release of Synthesis from [synthesis.autodesk.com/download](https://synthesis.autodesk.com/download.html)**. +If you are a FIRST robotics student who just wants to use Synthesis, you _don't_ need this repo. Simply go follow [this link](https://synthesis.autodesk.com/fission) to the simulator and start spawning in robots! -> [!IMPORTANT] -> Moving to [synthesis.autodesk.com](http://synthesis.autodesk.com/). - -If you're a developer who wants to contribute to Synthesis, you're in the right place. Synthesis is comprised of 3 main components that can be developed separately. These components include: +If you're a developer who wants to contribute to Synthesis, you're in the right place. Synthesis is comprised of 2 main components that can be developed separately: - [Fission (Core Web App)](/fission/README.md) - [Fusion Exporter (Fusion exporter to Mirabuf file format)](/exporter/SynthesisFusionAddin/README.md) -- [Fusion Exporter Installer](/installer/) - -Follow the above links to the respective READMEs on how to build and run each component. + -### Compatibility Notes +Follow the above links to the respective READMEs on how to build, run, and test each component. 
-As Fusion is not officially supported on Linux, we do not provide an installer for the Fusion Exporter on Linux. +> [!NOTE] +> As Fusion is not officially supported on Linux, we do not provide an installer for the Fusion Exporter on Linux. ## Contributing -This project welcomes community suggestions and contributions. Synthesis is nearly 100% open source and relies on the FIRST® community to help make it better. The [Synthesis Contribution Guide](/CONTRIBUTING.md) suggests ways in which you can get involved through development and non-development avenues. - -Before you contribute to this repository, please first discuss the change you wish to make via a GitHub issue, email us ([frc@autodesk.com](mailto:frc@autodesk.com)), or reach out through our [community discord](https://www.discord.gg/hHcF9AVgZA). This way we can ensure that there is no overlap between outside contributors and internal development work. - -When ready to contribute, fork the synthesis repository, make your changes, and submit a pull request. When contributing to Synthesis, please branch from our `dev` branch and submit pull requests to that branch. The `prod` branch is intended to be a copy of either exactly what is in production, or what is ready for production. We like to keep changes to the dev branch so they have time to simmer and be distributed via beta releases. Be sure to fill out the pull request template accordingly to make reviewing your work as smooth as possible. Feel free to check out our [contributing guidelines](/CONTRIBUTING.md) to learn more. - -## Code Formatting And Style - -All code is under a configured formatting utility. See each component for more details. +See [CONTRIBUTING.md](/CONTRIBUTING.md) for information on how you can help build synthesis ## Other Components -### Mirabuf - -Mirabuf is a file format we use to store physical data from Fusion to load into the Synthesis simulator (Fission). This is a separate project that is a submodule of Synthesis. [See Mirabuf](https://github.com/HiceS/mirabuf/) - -### Jolt Physics +### [Mirabuf](https://github.com/HiceS/mirabuf/) -Jolt is the core physics engine for our web biased simulator. [See JoltPhysics.js](https://github.com/HunterBarclay/JoltPhysics.js) for more information. +Mirabuf is a file format we use to store physical data from Fusion to load into the Synthesis simulator (Fission). This is a separate project that is a submodule of Synthesis. -### Tutorials +### [Jolt Physics](https://github.com/HunterBarclay/JoltPhysics.js) -Our source code for the tutorials featured on our [Tutorials Page](https://synthesis.autodesk.com/tutorials.html). +Jolt is the core physics engine for our web-based simulator. ### Protocols Additional protobuf files that we use in addition to Mirabuf. [See Protocols](/protocols/README.md) -## Tutorials +## [Tutorials](https://synthesis.autodesk.com/tutorials.html) -We have a variety of tutorials available to help you get started with Synthesis. These tutorials can be found on our [Tutorials Page](https://synthesis.autodesk.com/tutorials.html) on our website. Additionally, you can view these same tutorials as Markdown files in the [tutorials](/tutorials/) directory of this repository. +We have a variety of tutorials available to help you get started with Synthesis. Additionally, you can view these same tutorials as Markdown files in the [tutorials](/tutorials/) directory of this repository. -Updating our tutorials is a ongoing process. 
If you are at all interested in helping, checkout the [Synthesis Contribution Guide](/CONTRIBUTING.md) for more information on how to get started. +Updating our tutorials is an ongoing process. If you are at all interested in helping, check out the [Synthesis Contribution Guide](/CONTRIBUTING.md) for more information on how to get started. -## Immersion Program +## [Immersion Program](https://synthesis.autodesk.com/internship.html) -Annually, since 2014, Autodesk has sponsored the Synthesis Immersion Program for FIRST robotics students to develop Synthesis. The immersion program is a 10 week paid work experience at the Portland, Oregon Autodesk office from June 20th to August 25th. The immersion program focuses on not only developing Synthesis, but also allowing for opportunities to meet and collaborate with other Autodesk employees. For more information about the immersion program, visit our website at [synthesis.autodesk.com/internship](https://synthesis.autodesk.com/internship.html). +Annually, since 2014, Autodesk has sponsored the Synthesis Immersion Program for FIRST robotics students to develop Synthesis. The immersion program is a 10 week paid work experience at the Portland, Oregon Autodesk office from June 16th to August 22nd. The immersion program focuses on not only developing Synthesis, but also allowing for opportunities to meet and collaborate with other Autodesk employees. ### Want To Be A Part Of The Team? If you're a FIRST robotics student who wants to be a part of the Synthesis development team here is some basic information about applying. -Applicants must be: +Applicants must: -- At least 16 years of age -- Been a member of a FIRST Robotics team for at least one full season +- Be at least 16 years of age (at the start of the internship) +- Have been a member of a FIRST Robotics team for at least one full season -Applications open each year during the spring. For more information about applying, exceptions to these requirements or for more info about specific positions offered, please visit the [Synthesis Immersion Program](https://synthesis.autodesk.com/internship.html) website. +Applications open each year during the spring. For more information about applying, exceptions to these requirements or for more info about specific positions offered, please visit [Synthesis Immersion Program](https://synthesis.autodesk.com/internship.html). ## Contact -If you have any questions about Synthesis or the Immersion Program, you can contact us through email ([frc@autodesk.com](mailto:frc@autodesk.com)). Additionally please reach out through our [community discord](https://www.discord.gg/hHcF9AVgZA). It's the best way to get in touch with not only the community, but Synthesis' current development team. +If you have any questions about Synthesis or the Immersion Program, you can contact us through email ([frc@autodesk.com](mailto:frc@autodesk.com)). Additionally, please reach out through our [community Discord](https://www.discord.gg/hHcF9AVgZA). It's the best way to get in touch with not only the community, but Synthesis' current development team. -## License +## [License](/LICENSE.txt) Copyright (c) Autodesk diff --git a/docs/swerve_detection.md b/docs/swerve_detection.md new file mode 100644 index 0000000000..3bf011e372 --- /dev/null +++ b/docs/swerve_detection.md @@ -0,0 +1,116 @@ +# Swerve Module Detection & Configuration + +This document describes the **developer-facing** implementation details for how swerve modules are discovered, paired, +and configured. 
+ +## Overview + +We treat each swerve pod as two abstract drivers: + +* `rotator` for the azimuth (steering) joint +* `driver` for the drive (rolling) joint + +At runtime, we: + +1. Collect all existing drivers for this robot +2. Filter them down to candidate steering joints +3. Collect all drive wheels +4. Pair each wheel with its closest steering joint (by anchor position) +5. Wrap these "driver and rotator" pairs into swerve modules + +With this approach, as long as your CAD export yields one steering and one drive joint per pod, the system will self‑configure. +This also comes with the advantage of adding zero overhead on the robot exporter's side. + +## Prerequisites & Assumptions + +1. **One-to-one pods**: Each physical wheel pod must export exactly one steering joint and one rolling joint. +2. **Azimuth axis orientation**: Steering axes must be nearly horizontal (perpendicular to gravity). +3. **Anchor proximity**: The world‑space `Anchor` of a `WheelDriver` and its matching `RotationalDriver` must be spatially close (co‑located) within a pod. +4. **Driver registration**: The assembly import process must register both driver types with `SimulationManager.Drivers[robotName]`. + +Violating any of these may cause detection to fail; in that case, the system falls back to a drivetrain mode +that does not require rotational drivers (e.g., Tank or Arcade). + +## Detection Algorithm + +### Gathering Drivers + +```csharp +var allDrivers = ... +``` + +This collection contains all joint instances exported from Fusion, created from the imported +Mirabuf assembly. + +### Filtering Azimuth (Steering) Drivers + +```csharp +var potentialAzimuthDrivers = allDrivers + .OfType<RotationalDriver>() // only hinge joints + .Where(d => !d.IsWheel) // exclude any rotational drivers marked as wheels + .Where(d => IsHorizontal(d.Axis)) // axis nearly horizontal + .ToList(); +``` + +* `IsHorizontal()` is implemented roughly as: + + ```csharp + bool IsHorizontal(Vector3 axis) => + (axis - Vector3.Dot(Vector3.up, axis) * Vector3.up).magnitude < 0.05f; + ``` + +* This selects only steering pivots whose hinge axis lies in the horizontal plane. + +### Identifying Wheel Drivers + +```csharp +var wheelDrivers = allDrivers.OfType<WheelDriver>(); +``` + +All `WheelDriver` instances correspond to the actual drive wheels. These are the rotational joints +marked as wheels during the robot export process. + +### Pairing Algorithm (Nearest‑Neighbor) + +```csharp +if (potentialAzimuthDrivers.Count < wheelDrivers.Count) + return; // not enough pods + +var modules = new (RotationalDriver azimuth, WheelDriver drive)[wheelDrivers.Count]; +int i = 0; + +foreach (var wheel in wheelDrivers) { + // find the steering joint whose Anchor is closest to this wheel’s Anchor + var closest = potentialAzimuthDrivers + .OrderBy(d => (d.Anchor - wheel.Anchor).sqrMagnitude) + .First(); + + modules[i++] = (closest, wheel); + potentialAzimuthDrivers.Remove(closest); +} +``` + +A small, standalone sketch of this pairing step is shown after the pitfalls below. + +## Common Pitfalls + +* **Axis tilt**: If the azimuth hinge axis tilts more than \~3°, it may not be detected as horizontal. +* **Mismatched anchors**: CAD pods must export pivot and wheel with matching origin positions. +* **Missing drivers**: Ensure both joints appear in the Mirabuf import. + +Refer to the debug logs for `Failed to switch to 'Swerve'` messages if detection returns false.
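+
+## Worked Example (Illustrative)
+
+The following is a minimal, self-contained sketch of the same greedy nearest-neighbor pairing, written in plain
+Python purely for illustration. It is not the engine code above; the anchor coordinates and joint names are
+made up for the example.
+
+```python
+from math import dist
+
+# Hypothetical world-space anchors for a four-pod drivetrain, as (x, y, z) positions.
+azimuth_anchors = {"azimuthFL": (1.0, 0.0, 1.0), "azimuthFR": (-1.0, 0.0, 1.0),
+                   "azimuthBL": (1.0, 0.0, -1.0), "azimuthBR": (-1.0, 0.0, -1.0)}
+wheel_anchors = {"wheelFL": (1.0, -0.1, 1.0), "wheelFR": (-1.0, -0.1, 1.0),
+                 "wheelBL": (1.0, -0.1, -1.0), "wheelBR": (-1.0, -0.1, -1.0)}
+
+def pair_modules(azimuths, wheels):
+    if len(azimuths) < len(wheels):
+        return None  # not enough steering joints; caller falls back to Tank/Arcade
+    remaining = dict(azimuths)
+    modules = []
+    for wheel, wheel_anchor in wheels.items():
+        # Greedily take the steering joint whose anchor is closest to this wheel's anchor,
+        # then remove it so it cannot be paired with a second wheel.
+        closest = min(remaining, key=lambda name: dist(remaining[name], wheel_anchor))
+        modules.append((closest, wheel))
+        del remaining[closest]
+    return modules
+
+print(pair_modules(azimuth_anchors, wheel_anchors))
+# [('azimuthFL', 'wheelFL'), ('azimuthFR', 'wheelFR'), ('azimuthBL', 'wheelBL'), ('azimuthBR', 'wheelBR')]
+```
+
+Because the pairing is greedy, a badly misplaced anchor can "steal" another pod's steering joint, which is why
+the anchor-proximity assumption above matters.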
+ +## Final Notes + +This implementation was decided upon at the current time to avoid adding additional complexity +to the robot export process. However, this does not mean that this automatic swerve module detection +system cannot also exist alongside a more comprehensive system implemented within the exporter. + +Such exporter additions (if done correctly) have the advantage of making it clearer how swerve +is handled within the simulator. + +For more information, you can find the original implementation of this swerve module detection system here: + +https://github.com/Autodesk/synthesis/blob/636668d534564610eca7e80db856f2eb43fc60e9/engine/Assets/Scripts/SimObjects/RobotSimObject.cs#L540-L579 + +The linked commit corresponds to v6 of Synthesis. + +> *Last updated: 2025‑07‑08* diff --git a/exporter/README.md b/exporter/README.md index edbf238083..d852f3e146 100644 --- a/exporter/README.md +++ b/exporter/README.md @@ -2,7 +2,8 @@ ## Officially Supported Exporters -### SynthesisFusionAddin -An Autodesk® Fusion™ add-in to export assemblies into the [mirabuf](https://github.com/HiceS/mirabuf) format. +### Synthesis Fusion Addin + +An Autodesk® Fusion™ addin to export assemblies into the [Mirabuf](https://github.com/HiceS/mirabuf) format. See [README](/exporter/SynthesisFusionAddin). diff --git a/exporter/SynthesisFusionAddin/.gitignore b/exporter/SynthesisFusionAddin/.gitignore index 9e33b772e7..9d1d52fdc0 100644 --- a/exporter/SynthesisFusionAddin/.gitignore +++ b/exporter/SynthesisFusionAddin/.gitignore @@ -110,3 +110,6 @@ site-packages proto/proto_out .aps_auth + + +src/Resources/**/*.png \ No newline at end of file diff --git a/exporter/SynthesisFusionAddin/README.md b/exporter/SynthesisFusionAddin/README.md index 1e61d0735c..59da6d3d8d 100644 --- a/exporter/SynthesisFusionAddin/README.md +++ b/exporter/SynthesisFusionAddin/README.md @@ -1,6 +1,6 @@ # Synthesis Exporter -This is a Addin for Autodesk® Fusion™ that will export a [Mirabuf](https://github.com/HiceS/mirabuf) usable by the Synthesis simulator. +This is an addin for Autodesk® Fusion™ that will export a [Mirabuf](https://github.com/HiceS/mirabuf) file usable by the Synthesis simulator. ## Features @@ -74,7 +74,7 @@ Most of the runtime for the addin is saved under the `logs` directory in this fo Packaging is mainly for compressing the files into a smaller footprint -Contact us for information on how to use the packaging script to obfuscate all of the files using `pyminifier`. +Contact us for information on how to use the packaging script to obfuscate all the files using `pyminifier`. --- @@ -82,8 +82,8 @@ Contact us for information on how to use the packaging script to obfuscate all o We format using a Python formatter called `black` [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) in conjunction with [`isort`](https://pycqa.github.io/isort/). -- install by `pip3 install black && pip3 install isort` or `pip install black && pip install isort` -- use `isort .` followed by `black .` to format all relevant exporter python files. +- Install by `pip3 install black && pip3 install isort` or `pip install black && pip install isort` +- Use `isort .` followed by `black .` to format all relevant exporter Python files. - or, alternatively, run `python ./tools/format.py` to do this for you!
**Note: black will always ignore files in the proto/proto_out folder since google formats those** diff --git a/exporter/SynthesisFusionAddin/Synthesis.py b/exporter/SynthesisFusionAddin/Synthesis.py index 109460f808..ba748b1424 100644 --- a/exporter/SynthesisFusionAddin/Synthesis.py +++ b/exporter/SynthesisFusionAddin/Synthesis.py @@ -1,3 +1,4 @@ +import importlib import os import sys from typing import Any @@ -50,6 +51,7 @@ def run(_context: dict[str, Any]) -> None: Arguments: **context** *context* -- Fusion context to derive app and UI. """ + reload() # Remove all items prior to start just to make sure unregister_all() @@ -136,3 +138,19 @@ def register_ui() -> None: command=True, ) gm.elements.append(websiteButton) + + +@logFailure +def reload() -> None: + """Reloads the imports of sub modules of the Synthesis package. + + Allows for reloading the package without restarting Fusion. + """ + importlib.reload(HUI) + importlib.reload(Camera) + importlib.reload(ConfigCommand) + importlib.reload(MarkingMenu) + importlib.reload(ShowAPSAuthCommand) + importlib.reload(ShowWebsiteCommand) + + ConfigCommand.reload() diff --git a/exporter/SynthesisFusionAddin/requirements-mypy.txt b/exporter/SynthesisFusionAddin/requirements-mypy.txt index ef8b8dff85..b649c328e7 100644 --- a/exporter/SynthesisFusionAddin/requirements-mypy.txt +++ b/exporter/SynthesisFusionAddin/requirements-mypy.txt @@ -1,3 +1,4 @@ mypy types-protobuf types-requests +urllib3>=2.5.0 # not directly required, pinned by Snyk to avoid a vulnerability diff --git a/exporter/SynthesisFusionAddin/src/APS/APS.py b/exporter/SynthesisFusionAddin/src/APS/APS.py index 65ab01e4f7..16c18df849 100644 --- a/exporter/SynthesisFusionAddin/src/APS/APS.py +++ b/exporter/SynthesisFusionAddin/src/APS/APS.py @@ -12,6 +12,7 @@ import requests from src import ADDIN_PATH, gm +from src.ErrorHandling import Err, ErrorSeverity, Ok, Result from src.Logging import getLogger logger = getLogger() @@ -149,10 +150,10 @@ def refreshAuthToken() -> None: gm.ui.messageBox("Please sign in again.") -def loadUserInfo() -> APSUserInfo | None: +def loadUserInfo() -> Result[APSUserInfo]: global APS_AUTH if not APS_AUTH: - return None + return Err("Aps Authentication is undefined", ErrorSeverity.Fatal) global APS_USER_INFO req = urllib.request.Request("https://api.userprofile.autodesk.com/userinfo") req.add_header(key="Authorization", val=APS_AUTH.access_token) @@ -174,22 +175,19 @@ def loadUserInfo() -> APSUserInfo | None: company=data["company"], picture=data["picture"], ) - return APS_USER_INFO + return Ok(APS_USER_INFO) except urllib.request.HTTPError as e: removeAuth() - logger.error(f"User Info Error:\n{e.code} - {e.reason}") - gm.ui.messageBox("Please sign in again.") - finally: - return None + return Err(f"User Info Error:\n{e.code} - {e.reason}\nPlease sign in again", ErrorSeverity.Fatal) -def getUserInfo() -> APSUserInfo | None: +def getUserInfo() -> Result[APSUserInfo]: if APS_USER_INFO is not None: - return APS_USER_INFO + return Ok(APS_USER_INFO) return loadUserInfo() -def create_folder(auth: str, project_id: str, parent_folder_id: str, folder_display_name: str) -> str | None: +def create_folder(auth: str, project_id: str, parent_folder_id: str, folder_display_name: str) -> Result[str]: """ creates a folder on an APS project @@ -219,18 +217,17 @@ def create_folder(auth: str, project_id: str, parent_folder_id: str, folder_disp f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders", headers=headers, json=data ) if not res.ok: - 
gm.ui.messageBox(f"Failed to create new folder: {res.text}", "ERROR") - return None + return Err(f"Failed to create new folder: {res.text}", ErrorSeverity.Fatal) json: dict[str, Any] = res.json() id: str = json["data"]["id"] - return id + return Ok(id) def file_path_to_file_name(file_path: str) -> str: return file_path.split("/").pop() -def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_contents: str) -> str | None: +def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_contents: str) -> Result[str]: """ uploads mirabuf file to a specific folder in an APS project the folder and project must be created and valid @@ -261,34 +258,37 @@ def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_content # data:create global APS_AUTH if APS_AUTH is None: - gm.ui.messageBox("You must login to upload designs to APS", "USER ERROR") - return None + return Err("You must login to upload designs to APS (USER ERROR)", ErrorSeverity.Fatal) auth = APS_AUTH.access_token # Get token from APS API later new_folder_id = get_item_id(auth, project_id, folder_id, "MirabufDir", "folders") if new_folder_id is None: - created_folder_id = create_folder(auth, project_id, folder_id, "MirabufDir") + created_folder_result = create_folder(auth, project_id, folder_id, "MirabufDir") + if created_folder_result.is_fatal(): + return created_folder_result + else: + created_folder_id = created_folder_result.unwrap() else: created_folder_id = new_folder_id - if created_folder_id is None: - return None - - file_id_data = get_file_id(auth, project_id, created_folder_id, file_name) - if file_id_data is None: - return None + file_id_result = get_file_id(auth, project_id, created_folder_id, file_name) + if file_id_result.is_fatal(): + # Hack to get around different return types + return Err(file_id_result.unwrap_err()[0], ErrorSeverity.Fatal) + file_id_data = file_id_result.unwrap() (lineage_id, file_id, file_version) = file_id_data """ Create APS Storage Location """ - object_id = create_storage_location(auth, project_id, created_folder_id, file_name) - if object_id is None: - gm.ui.messageBox("UPLOAD ERROR", "Object id is none; check create storage location") - return None + object_id_result = create_storage_location(auth, project_id, created_folder_id, file_name) + if object_id_result.is_fatal(): + return object_id_result + object_id = object_id_result.unwrap() + (prefix, object_key) = str(object_id).split("/", 1) bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod @@ -296,25 +296,31 @@ def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_content Create Signed URL For APS Upload """ generate_signed_url_result = generate_signed_url(auth, bucket_key, object_key) - if generate_signed_url_result is None: - return None + if generate_signed_url_result.is_fatal(): + # Hack to get around different Result success types in the err case + return Err(generate_signed_url_result.unwrap_err()[0], ErrorSeverity.Fatal) - (upload_key, signed_url) = generate_signed_url_result - if upload_file(signed_url, file_contents) is None: - return None + (upload_key, signed_url) = generate_signed_url_result.unwrap() + upload_file_result = upload_file(signed_url, file_contents) + if upload_file_result.is_fatal(): + return upload_file_result """ Finish Upload and Initialize File Version """ - if complete_upload(auth, upload_key, object_key, bucket_key) is None: - return None + complete_upload_result = complete_upload(auth, upload_key, object_key, bucket_key) 
+ if complete_upload_result.is_fatal(): + return complete_upload_result + if file_id != "": - update_file_version( + update_file_result = update_file_version( auth, project_id, created_folder_id, lineage_id, file_id, file_name, file_contents, file_version, object_id ) + if update_file_result.is_fatal(): + return update_file_result else: _lineage_info = create_first_file_version(auth, str(object_id), project_id, str(created_folder_id), file_name) - return "" + return Ok("") def get_hub_id(auth: str, hub_name: str) -> str | None: @@ -403,7 +409,7 @@ def update_file_version( file_contents: str, curr_file_version: str, object_id: str, -) -> str | None: +) -> Result[str]: """ updates an existing file in an APS folder @@ -423,22 +429,6 @@ def update_file_version( - file doesn't exist in that position / with that id / name ; fix: get_file_id() or smth - version one of the file hasn't been created ; fix: create_first_file_version() """ - - # object_id = create_storage_location(auth, project_id, folder_id, file_name) - # if object_id is None: - # return None - # - # (prefix, object_key) = str(object_id).split("/", 1) - # bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod - # (upload_key, signed_url) = generate_signed_url(auth, bucket_key, object_key) - # - # if upload_file(signed_url, file_contents) is None: - # return None - - # if complete_upload(auth, upload_key, object_key, bucket_key) is None: - # return None - - # gm.ui.messageBox(f"file_name:{file_name}\nlineage_id:{lineage_id}\nfile_id:{file_id}\ncurr_file_version:{curr_file_version}\nobject_id:{object_id}", "REUPLOAD ARGS") headers = { "Authorization": f"Bearer {auth}", "Content-Type": "application/vnd.api+json", @@ -469,16 +459,15 @@ def update_file_version( f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/versions", headers=headers, json=data ) if not update_res.ok: - gm.ui.messageBox(f"UPLOAD ERROR:\n{update_res.text}", "Updating file to new version failed") - return None + return Err(f"Updating file to new version failed\nUPLOAD ERROR:\n{update_res.text}", ErrorSeverity.Fatal) gm.ui.messageBox( f"Successfully updated file {file_name} to version {int(curr_file_version) + 1} on APS", "UPLOAD SUCCESS" ) new_id: str = update_res.json()["data"]["id"] - return new_id + return Ok(new_id) -def get_file_id(auth: str, project_id: str, folder_id: str, file_name: str) -> tuple[str, str, str] | None: +def get_file_id(auth: str, project_id: str, folder_id: str, file_name: str) -> Result[tuple[str, str, str]]: """ gets the file id given a file name @@ -509,20 +498,19 @@ def get_file_id(auth: str, project_id: str, folder_id: str, file_name: str) -> t params=params, ) if file_res.status_code == 404: - return ("", "", "") + return Ok(("", "", "")) elif not file_res.ok: - gm.ui.messageBox(f"UPLOAD ERROR: {file_res.text}", "Failed to get file") - return None + return Err(f"UPLOAD ERROR: {file_res.text} (Failed to get file)", ErrorSeverity.Fatal) file_json: dict[str, Any] = file_res.json() if len(file_json["data"]) == 0: - return ("", "", "") + return Ok(("", "", "")) id: str = str(file_json["data"][0]["id"]) lineage: str = str(file_json["data"][0]["relationships"]["item"]["data"]["id"]) version: str = str(file_json["data"][0]["attributes"]["versionNumber"]) - return (lineage, id, version) + return Ok((lineage, id, version)) -def create_storage_location(auth: str, project_id: str, folder_id: str, file_name: str) -> str | None: +def create_storage_location(auth: str, project_id: str, folder_id: str, 
file_name: str) -> Result[str]: """ creates a storage location (a bucket) the bucket can be used to upload a file to @@ -560,14 +548,15 @@ def create_storage_location(auth: str, project_id: str, folder_id: str, file_nam f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/storage", json=data, headers=headers ) if not storage_location_res.ok: - gm.ui.messageBox(f"UPLOAD ERROR: {storage_location_res.text}", f"Failed to create storage location") - return None + return Err( + f"UPLOAD ERROR: {storage_location_res.text} (Failed to create storage location)", ErrorSeverity.Fatal + ) storage_location_json: dict[str, Any] = storage_location_res.json() object_id: str = storage_location_json["data"]["id"] - return object_id + return Ok(object_id) -def generate_signed_url(auth: str, bucket_key: str, object_key: str) -> tuple[str, str] | None: +def generate_signed_url(auth: str, bucket_key: str, object_key: str) -> Result[tuple[str, str]]: """ generates a signed_url for a bucket, given a bucket_key and object_key @@ -593,13 +582,12 @@ def generate_signed_url(auth: str, bucket_key: str, object_key: str) -> tuple[st headers=headers, ) if not signed_url_res.ok: - gm.ui.messageBox(f"UPLOAD ERROR: {signed_url_res.text}", "Failed to get signed url") - return None + return Err(f"Failed to get signed URL:\nUPLOAD ERROR: {signed_url_res.text}", ErrorSeverity.Fatal) signed_url_json: dict[str, str] = signed_url_res.json() - return (signed_url_json["uploadKey"], signed_url_json["urls"][0]) + return Ok((signed_url_json["uploadKey"], signed_url_json["urls"][0])) -def upload_file(signed_url: str, file_contents: str) -> str | None: +def upload_file(signed_url: str, file_contents: str) -> Result[str]: """ uploads a file to APS given a signed_url a path to the file on your machine @@ -616,12 +604,11 @@ def upload_file(signed_url: str, file_contents: str) -> str | None: """ upload_response = requests.put(url=signed_url, data=file_contents) if not upload_response.ok: - gm.ui.messageBox("UPLOAD ERROR", f"Failed to upload to signed url: {upload_response.text}") - return None - return "" + return Err(f"Failed to upload to signed url\nUPLOAD ERROR: {upload_response.text}", ErrorSeverity.Fatal) + return Ok("") -def complete_upload(auth: str, upload_key: str, object_key: str, bucket_key: str) -> str | None: +def complete_upload(auth: str, upload_key: str, object_key: str, bucket_key: str) -> Result[str]: """ completes and verifies the APS file upload given the upload_key @@ -647,16 +634,16 @@ def complete_upload(auth: str, upload_key: str, object_key: str, bucket_key: str headers=headers, ) if not completed_res.ok: - gm.ui.messageBox( - f"UPLOAD ERROR: {completed_res.text}\n{completed_res.status_code}", "Failed to complete upload" + return Err( + f"Failed to complete upload\n UPLOAD ERROR: {completed_res.text}\n{completed_res.status_code}", + ErrorSeverity.Fatal, ) - return None - return "" + return Ok("") def create_first_file_version( auth: str, object_id: str, project_id: str, folder_id: str, file_name: str -) -> tuple[str, str] | None: +) -> Result[tuple[str, str]]: """ initializes versioning for a file @@ -720,8 +707,7 @@ def create_first_file_version( f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/items", json=data, headers=headers ) if not first_version_res.ok: - gm.ui.messageBox(f"Failed to create first file version: {first_version_res.text}", "UPLOAD ERROR") - return None + return Err(f"Failed to create first file version:\nUPLOAD ERROR: {first_version_res.text}", ErrorSeverity.Fatal) 
first_version_json: dict[str, Any] = first_version_res.json() lineage_id: str = first_version_json["data"]["id"] @@ -729,4 +715,4 @@ def create_first_file_version( gm.ui.messageBox(f"Successful Upload of {file_name} to APS", "UPLOAD SUCCESS") - return (lineage_id, href) + return Ok((lineage_id, href)) diff --git a/exporter/SynthesisFusionAddin/src/ErrorHandling.py b/exporter/SynthesisFusionAddin/src/ErrorHandling.py new file mode 100644 index 0000000000..b97e4ad827 --- /dev/null +++ b/exporter/SynthesisFusionAddin/src/ErrorHandling.py @@ -0,0 +1,139 @@ +import inspect +from collections.abc import Callable +from enum import Enum +from typing import Generic, TypeVar + +import adsk.core + +from .Logging import getLogger + +logger = getLogger() + + +# NOTE +# Severity refers to to the error's affect on the parser as a whole, rather than on the function itself +# If an error is non-fatal to the function that generated it, it should be declared but not return, which prints it to the screen +class ErrorSeverity(Enum): + Fatal = 50 # Critical Error + Error = 40 # Non-critical Error + Warning = 30 # Warning + + +T = TypeVar("T") + + +class Result(Generic[T]): + """ + Result class for error handling, similar to the Result enum in Rust. + + The `Err` and `Ok` variants are child types, rather than enum variants though. Another difference is that the error variant is necessarily packaged with a message and a severity, rather than being arbitrary. + + Since python3 has no match statements, use the `is_ok()` or `is_err()` function to check the variant, then `unwrap()` or `unwrap_err()` to get the value or error message and severity. + + ## Example + ```py + foo_result = foo() + if foo_result.is_fatal(): + return foo_result + ``` + + Please see the `Ok` and `Err` child class documentation for instructions on instantiating errors and ok-values respectively + """ + + def is_ok(self) -> bool: + return isinstance(self, Ok) + + def is_err(self) -> bool: + return isinstance(self, Err) + + def is_fatal(self) -> bool: + return self.is_err() and self.unwrap_err()[1] == ErrorSeverity.Fatal + + def unwrap(self) -> T: + if self.is_ok(): + return self.value # type: ignore + raise Exception(f"Called unwrap on Err: {self.message}") # type: ignore + + def unwrap_err(self) -> tuple[str, ErrorSeverity]: + + if self.is_err(): + return (self.message, self.severity) # type: ignore + raise Exception(f"Called unwrap_err on Ok: {self.value}") # type: ignore + + +class Ok(Result[T]): + """ + The non-error variant of the Result class. Contains the value of the happy path of the function. Return when the function has executed successfully. + """ + + value: T + + def __init__(self, value: T): + self.value = value + + def __repr__(self) -> str: + return f"Ok({self.value})" + + +class Err(Result[T]): + """ + The error variant of the Result class. + + It contains an error message and severity, which is either Fatal, Error, or Warning, each corresponding to a logger severity level, Critical Error (50) and Warning (30) respectively. + + When an `Err` is instantiated, it is automatically logged in the current synthesis logfile. 
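The automatic logging works because the `ErrorSeverity` values line up with the standard library's logging levels, so the severity's numeric value can be handed straight to `logger.log`. A minimal standalone sketch of that mapping (the names here are illustrative, not the add-in's code):

```python
import logging
from enum import Enum

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger("sketch")


class Severity(Enum):
    Fatal = 50    # numerically equal to logging.CRITICAL
    Error = 40    # logging.ERROR
    Warning = 30  # logging.WARNING


def report(message: str, severity: Severity) -> None:
    # The enum value is used directly as the numeric log level.
    log.log(severity.value, message)


report("Baz not found", Severity.Warning)  # recorded as a WARNING; control flow continues
```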
+ + ## Examples + If an error is fatal to the entire program (or the parent function), it should be returned and marked as Fatal: + ```python + return Err("Foo not found", ErrorSeverity.Fatal) + ``` + + If an error is fatal to the current function, it should be returned and marked as Error, as the parent could recover it: + ```python + return Err("Bar not found", ErrorSeverity.Error) + ``` + + If an error is not fatal to the current function, but ought to be logged, it should be marked as Warning and instantiated but not returned, as to not break control flow: + ```python + _: Err[T] = Err("Baz not found", ErrorSeverity.Warning) + ``` + Note that the lattermost example will raise a warning if not explicitely typed + """ + + message: str + severity: ErrorSeverity + function: str + line: int + + def __init__(self, message: str, severity: ErrorSeverity): + frame = inspect.currentframe() + caller_frame = inspect.getouterframes(frame)[1] + + self.function = caller_frame.function + self.line = caller_frame.lineno + + self.severity = severity + self.message = f"In `{self.function}` on line {self.line}: {message}" + + self.write_error() + + def __repr__(self) -> str: + return f"Err({self.message})" + + def write_error(self) -> None: + logger.log(self.severity.value, self.message) + + +def handle_err_top(func: Callable[..., Result[None]]) -> Callable[..., None]: + + def wrapper(*args, **kwargs): # type: ignore + result = func(*args, **kwargs) + + if result.is_err(): + message, severity = result.unwrap_err() + if severity == ErrorSeverity.Fatal: + app = adsk.core.Application.get() + app.userInterface.messageBox(f"Fatal Error Encountered {message}") + + return wrapper diff --git a/exporter/SynthesisFusionAddin/src/Logging.py b/exporter/SynthesisFusionAddin/src/Logging.py index 3b2bedb68c..85ea4e09b2 100644 --- a/exporter/SynthesisFusionAddin/src/Logging.py +++ b/exporter/SynthesisFusionAddin/src/Logging.py @@ -12,7 +12,7 @@ import adsk.core from src import INTERNAL_ID, IS_RELEASE, SUPPORT_PATH -from src.Util import makeDirectories +from src.lib.Util import makeDirectories MAX_LOG_FILES_TO_KEEP = 10 TIMING_LEVEL = 25 diff --git a/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py b/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py index 306f839cd4..5fe783f96f 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py +++ b/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py @@ -7,12 +7,13 @@ import os import platform from dataclasses import dataclass, field, fields +from typing import Any import adsk.core from adsk.fusion import CalculationAccuracy, TriangleMeshQualityOptions from src import INTERNAL_ID -from src.Logging import logFailure, timed +from src.Logging import getLogger, logFailure, timed from src.Types import ( KG, ExportLocation, @@ -32,7 +33,7 @@ class ExporterOptions: # Python's `os` module can return `None` when attempting to find the home directory if the # user's computer has conflicting configs of some sort. This has happened and should be accounted # for accordingly. 
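As a hedged illustration of the concern described in this comment, a defensive default-location lookup might look like the following; `default_export_location` is a hypothetical helper used only for illustration, not code this PR adds:

```python
import os
import platform
from pathlib import Path


def default_export_location() -> str | None:
    """Best-effort home-directory lookup that tolerates a missing HOME variable."""
    if platform.system() == "Windows":
        home = os.getenv("HOME") or os.getenv("USERPROFILE")
    else:
        home = os.path.expanduser("~")
    return str(Path(home)) if home else None


print(default_export_location())
```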
- fileLocation: str | None = field( + fileLocation: str | os.PathLike[str] | None = field( default=(os.getenv("HOME") if platform.system() == "Windows" else os.path.expanduser("~")) ) name: str | None = field(default=None) @@ -42,6 +43,7 @@ class ExporterOptions: wheels: list[Wheel] = field(default_factory=list) joints: list[Joint] = field(default_factory=list) gamepieces: list[Gamepiece] = field(default_factory=list) + tags: dict[str, str] = field(default_factory=dict) robotWeight: KG = field(default=KG(0.0)) autoCalcRobotWeight: bool = field(default=False) autoCalcGamepieceWeight: bool = field(default=False) @@ -52,7 +54,7 @@ class ExporterOptions: compressOutput: bool = field(default=True) exportAsPart: bool = field(default=False) - exportLocation: ExportLocation = field(default=ExportLocation.UPLOAD) + exportLocation: ExportLocation = field(default=ExportLocation.DOWNLOAD) openSynthesisUponExport: bool = field(default=False) hierarchy: ModelHierarchy = field(default=ModelHierarchy.FusionAssembly) @@ -67,7 +69,19 @@ def readFromDesign(self) -> "ExporterOptions": for field in fields(self): attribute = designAttributes.itemByName(INTERNAL_ID, field.name) if attribute: - attrJsonData = makeObjectFromJson(type(field.type), json.loads(attribute.value)) + attrJsonData = makeObjectFromJson(field.type, json.loads(attribute.value)) + setattr(self, field.name, attrJsonData) + + self.visualQuality = TriangleMeshQualityOptions.LowQualityTriangleMesh + return self + + @logFailure + @timed + def readFromJSON(self, data: dict[str, Any]) -> "ExporterOptions": + for field in fields(self): + attribute = data.get(field.name) + if attribute is not None: + attrJsonData = makeObjectFromJson(field.type, attribute) setattr(self, field.name, attrJsonData) self.visualQuality = TriangleMeshQualityOptions.LowQualityTriangleMesh @@ -80,3 +94,12 @@ def writeToDesign(self) -> None: for field in fields(self): data = json.dumps(getattr(self, field.name), default=encodeNestedObjects, indent=4) designAttributes.add(INTERNAL_ID, field.name, data) + + @logFailure + @timed + def writeToJson(self) -> dict[str, Any]: + out = {} + for field in fields(self): + data = json.dumps(getattr(self, field.name), default=encodeNestedObjects, indent=4) + out[field.name] = json.loads(data) + return out diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Components.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Components.py index 6a78ad7cdf..b26bd54756 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Components.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Components.py @@ -1,8 +1,13 @@ # Contains all of the logic for mapping the Components / Occurrences +from platform import python_build + import adsk.core import adsk.fusion +from google.protobuf.message import Error +from requests.models import parse_header_links -from src.Logging import logFailure +from src.ErrorHandling import Err, ErrorSeverity, Ok, Result, handle_err_top +from src.Logging import getLogger, logFailure from src.Parser.ExporterOptions import ExporterOptions from src.Parser.SynthesisParser import PhysicalProperties from src.Parser.SynthesisParser.PDMessage import PDMessage @@ -14,49 +19,64 @@ from src.Proto import assembly_pb2, joint_pb2, material_pb2, types_pb2 from src.Types import ExportMode -# TODO: Impelement Material overrides - -def _MapAllComponents( +# TODO: Impelement Material overrides +@handle_err_top +def mapAllComponents( design: adsk.fusion.Design, options: ExporterOptions, 
progressDialog: PDMessage, partsData: assembly_pb2.Parts, materials: material_pb2.Materials, -) -> None: +) -> Result[None]: + for component in design.allComponents: adsk.doEvents() if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) progressDialog.addComponent(component.name) comp_ref = guid_component(component) - fill_info(partsData, None) + fill_info_result = fill_info(partsData, None) + if fill_info_result.is_fatal(): + return fill_info_result partDefinition = partsData.part_definitions[comp_ref] - fill_info(partDefinition, component, comp_ref) + fill_info_result = fill_info(partDefinition, component, comp_ref) + if fill_info_result.is_fatal(): + return fill_info_result - PhysicalProperties.GetPhysicalProperties(component, partDefinition.physical_data) + physical_properties_result = PhysicalProperties.getPhysicalProperties(component, partDefinition.physical_data) + if physical_properties_result.is_fatal(): + return physical_properties_result - if options.exportMode == ExportMode.FIELD: - partDefinition.dynamic = False - else: - partDefinition.dynamic = True + partDefinition.dynamic = options.exportMode != ExportMode.FIELD - def processBody(body: adsk.fusion.BRepBody | adsk.fusion.MeshBody) -> None: + def processBody(body: adsk.fusion.BRepBody | adsk.fusion.MeshBody) -> Result[None]: if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) if body.isLightBulbOn: part_body = partDefinition.bodies.add() - fill_info(part_body, body) + + fill_info_result = fill_info(part_body, body) + if fill_info_result.is_fatal(): + return fill_info_result + part_body.part = comp_ref + if body.entityToken in options.tags: + partsData.user_data.data[f"tag_{body.entityToken}"] = options.tags[body.entityToken] + if isinstance(body, adsk.fusion.BRepBody): - _ParseBRep(body, options, part_body.triangle_mesh) + parse_result = parseBRep(body, options, part_body.triangle_mesh) + if parse_result.is_fatal(): + return parse_result else: - _ParseMesh(body, options, part_body.triangle_mesh) + parse_result = parseMesh(body, options, part_body.triangle_mesh) + if parse_result.is_fatal(): + return parse_result appearance_key = "{}_{}".format(body.appearance.name, body.appearance.id) # this should be appearance @@ -65,28 +85,38 @@ def processBody(body: adsk.fusion.BRepBody | adsk.fusion.MeshBody) -> None: else: part_body.appearance_override = "default" - for body in component.bRepBodies: - processBody(body) + return Ok(None) + for body in component.bRepBodies: + process_result = processBody(body) + if process_result.is_fatal(): + return process_result for body in component.meshBodies: - processBody(body) + process_result = processBody(body) + if process_result.is_fatal(): + return process_result + + return Ok(None) -def _ParseComponentRoot( +@handle_err_top +def parseComponentRoot( component: adsk.fusion.Component, progressDialog: PDMessage, options: ExporterOptions, partsData: assembly_pb2.Parts, material_map: dict[str, material_pb2.Appearance], node: types_pb2.Node, -) -> None: +) -> Result[None]: mapConstant = guid_component(component) part = partsData.part_instances[mapConstant] node.value = mapConstant - fill_info(part, component, mapConstant) + fill_info_result = fill_info(part, component, mapConstant) + if fill_info_result.is_fatal(): + return fill_info_result def_map = partsData.part_definitions @@ -95,24 +125,31 @@ def _ParseComponentRoot( for occur 
in component.occurrences: if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) if occur.isLightBulbOn: child_node = types_pb2.Node() - __parseChildOccurrence(occur, progressDialog, options, partsData, material_map, child_node) + + parse_child_result = parseChildOccurrence( + occur, progressDialog, options, partsData, material_map, child_node + ) + if parse_child_result.is_err(): + return parse_child_result + node.children.append(child_node) + return Ok(None) -def __parseChildOccurrence( +def parseChildOccurrence( occurrence: adsk.fusion.Occurrence, progressDialog: PDMessage, options: ExporterOptions, partsData: assembly_pb2.Parts, material_map: dict[str, material_pb2.Appearance], node: types_pb2.Node, -) -> None: +) -> Result[None]: if occurrence.isLightBulbOn is False: - return + return Ok(None) progressDialog.addOccurrence(occurrence.name) @@ -124,7 +161,9 @@ def __parseChildOccurrence( node.value = mapConstant - fill_info(part, occurrence, mapConstant) + fill_info_result = fill_info(part, occurrence, mapConstant) + if fill_info_result.is_fatal(): + return fill_info_result collision_attr = occurrence.attributes.itemByName("synthesis", "collision_off") if collision_attr != None: @@ -134,11 +173,15 @@ def __parseChildOccurrence( try: part.appearance = "{}_{}".format(occurrence.appearance.name, occurrence.appearance.id) except: + _: Err[None] = Err("Failed to format part appearance", ErrorSeverity.Warning) part.appearance = "default" # TODO: Add phyical_material parser + # TODO: I'm fairly sure that this should be a fatal error if occurrence.component.material: part.physical_material = occurrence.component.material.id + else: + __: Err[None] = Err(f"Component Material is None", ErrorSeverity.Warning) def_map = partsData.part_definitions @@ -154,25 +197,32 @@ def __parseChildOccurrence( part.transform.spatial_matrix.extend(occurrence.transform.asArray()) - worldTransform = GetMatrixWorld(occurrence) + worldTransform = getMatrixWorld(occurrence) if worldTransform: part.global_transform.spatial_matrix.extend(worldTransform.asArray()) for occur in occurrence.childOccurrences: if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) if occur.isLightBulbOn: child_node = types_pb2.Node() - __parseChildOccurrence(occur, progressDialog, options, partsData, material_map, child_node) + + parse_child_result = parseChildOccurrence( + occur, progressDialog, options, partsData, material_map, child_node + ) + if parse_child_result.is_err(): + return parse_child_result + node.children.append(child_node) + return Ok(None) # saw online someone used this to get the correct context but oh boy does it look pricey # I think if I can make all parts relative to a parent it should return that parents transform maybe # TESTED AND VERIFIED - but unoptimized -def GetMatrixWorld(occurrence: adsk.fusion.Occurrence) -> adsk.core.Matrix3D: +def getMatrixWorld(occurrence: adsk.fusion.Occurrence) -> adsk.core.Matrix3D: matrix = occurrence.transform2 while occurrence.assemblyContext: matrix.transformBy(occurrence.assemblyContext.transform2) @@ -180,42 +230,52 @@ def GetMatrixWorld(occurrence: adsk.fusion.Occurrence) -> adsk.core.Matrix3D: return matrix -@logFailure -def _ParseBRep( +def parseBRep( body: adsk.fusion.BRepBody, options: ExporterOptions, trimesh: assembly_pb2.TriangleMesh, -) -> None: - meshManager = body.meshManager - calc = 
meshManager.createMeshCalculator() +) -> Result[None]: + + calc = body.meshManager.createMeshCalculator() # Disabling for now. We need the user to be able to adjust this, otherwise it gets locked # into whatever the default was at the time it first creates the export options. # calc.setQuality(options.visualQuality) - calc.setQuality(adsk.fusion.TriangleMeshQualityOptions.LowQualityTriangleMesh) + _ = calc.setQuality(adsk.fusion.TriangleMeshQualityOptions.LowQualityTriangleMesh) # calc.maxNormalDeviation = 3.14159 * (1.0 / 6.0) # calc.surfaceTolerance = 0.5 - mesh = calc.calculate() + try: + mesh = calc.calculate() + except: + return Err(f"Failed to calculate mesh for {body.name}", ErrorSeverity.Error) + + fill_info_result = fill_info(trimesh, body) + if fill_info_result.is_fatal(): + return fill_info_result - fill_info(trimesh, body) trimesh.has_volume = True plainmesh_out = trimesh.mesh - plainmesh_out.verts.extend(mesh.nodeCoordinatesAsFloat) plainmesh_out.normals.extend(mesh.normalVectorsAsFloat) plainmesh_out.indices.extend(mesh.nodeIndices) plainmesh_out.uv.extend(mesh.textureCoordinatesAsFloat) + return Ok(None) + -@logFailure -def _ParseMesh( +def parseMesh( meshBody: adsk.fusion.MeshBody, options: ExporterOptions, trimesh: assembly_pb2.TriangleMesh, -) -> None: +) -> Result[None]: mesh = meshBody.displayMesh + if mesh is None: + return Err("Component Mesh was None", ErrorSeverity.Fatal) + + fill_info_result = fill_info(trimesh, meshBody) + if fill_info_result.is_fatal(): + return fill_info_result - fill_info(trimesh, meshBody) trimesh.has_volume = True plainmesh_out = trimesh.mesh @@ -225,8 +285,10 @@ def _ParseMesh( plainmesh_out.indices.extend(mesh.nodeIndices) plainmesh_out.uv.extend(mesh.textureCoordinatesAsFloat) + return Ok(None) + -def _MapRigidGroups(rootComponent: adsk.fusion.Component, joints: joint_pb2.Joints) -> None: +def mapRigidGroups(rootComponent: adsk.fusion.Component, joints: joint_pb2.Joints) -> None: groups = rootComponent.allRigidGroups for group in groups: mira_group = joint_pb2.RigidGroup() diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/JointHierarchy.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/JointHierarchy.py index 0b5be182c2..9e782c8f42 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/JointHierarchy.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/JointHierarchy.py @@ -1,11 +1,15 @@ import enum +import sys +from logging import ERROR +from os import error from typing import Any, Iterator, cast import adsk.core import adsk.fusion from src import gm -from src.Logging import getLogger, logFailure +from src.ErrorHandling import Err, ErrorSeverity, Ok, Result, handle_err_top +from src.Logging import getLogger from src.Parser.ExporterOptions import ExporterOptions from src.Parser.SynthesisParser.PDMessage import PDMessage from src.Parser.SynthesisParser.Utilities import guid_component, guid_occurrence @@ -188,26 +192,26 @@ class SimulationEdge(GraphEdge): ... class JointParser: grounded: adsk.fusion.Occurrence - @logFailure + # NOTE This function cannot under the value-based error handling system, since it's an __init__ function def __init__(self, design: adsk.fusion.Design) -> None: - # Create hierarchy with just joint assembly - # - Assembly - # - Grounded - # - Axis 1 - # - Axis 2 - # - Axis 3 - - # 1. Find all Dynamic joint items to isolate [o] - # 2. Find the grounded component [x] (possible - not optimized) - # 3. 
Populate tree with all items from each set of joints [x] (done with grounding) - # - 3. a) Each Child element with no joints [x] - # - 3. b) Each Rigid Joint Connection [x] - # 4. Link Joint trees by discovery from root [x] - # 5. Record which trees have no children for creating end effectors [x] (next up) - this kinda already exists - - # Need to investigate creating an additional button for end effector possibly - # It might be possible to have multiple end effectors - # Total Number of final elements + """Create hierarchy with just joint assembly + - Assembly + - Grounded + - Axis 1 + - Axis 2 + - Axis 3 + + 1. Find all Dynamic joint items to isolate [o] + 2. Find the grounded component [x] (possible - not optimized) + 3. Populate tree with all items from each set of joints [x] (done with grounding) + - 3. a) Each Child element with no joints [x] + - 3. b) Each Rigid Joint Connection [x] + 4. Link Joint trees by discovery from root [x] + 5. Record which trees have no children for creating end effectors [x] (next up) - this kinda already exists + + Need to investigate creating an additional button for end effector possibly + It might be possible to have multiple end effectors + Total Number of final elements""" self.current = None self.previousJoint = None @@ -218,8 +222,10 @@ def __init__(self, design: adsk.fusion.Design) -> None: self.grounded = searchForGrounded(design.rootComponent) if self.grounded is None: - gm.ui.messageBox("There is not currently a Grounded Component in the assembly, stopping kinematic export.") - raise RuntimeWarning("There is no grounded component") + message = "There is not a pinned component in this assembly, aborting kinematic export." + # gm.ui.messageBox(message) + _____: Err[None] = Err(message, ErrorSeverity.Fatal) + raise RuntimeError(message) self.currentTraversal: dict[str, DynamicOccurrenceNode | bool] = dict() self.groundedConnections: list[adsk.fusion.Occurrence] = [] @@ -237,46 +243,60 @@ def __init__(self, design: adsk.fusion.Design) -> None: self.__getAllJoints() # dynamic joint node for grounded components and static components - rootNode = self._populateNode(self.grounded, None, None, is_ground=True) + populate_node_result = self._populateNode(self.grounded, None, None, is_ground=True) + if populate_node_result.is_err(): # We need the value to proceed + message = populate_node_result.unwrap_err()[0] + gm.ui.messageBox(message) + ____: Err[None] = Err(message, ErrorSeverity.Fatal) + raise RuntimeError(message) + + rootNode = populate_node_result.unwrap() self.groundSimNode = SimulationNode(rootNode, None, grounded=True) self.simulationNodesRef["GROUND"] = self.groundSimNode # combine all ground prior to this possibly - self._lookForGroundedJoints() + _ = self._lookForGroundedJoints() # creates the axis elements - adds all elements to axisNodes for key, value in self.dynamicJoints.items(): - self._populateAxis(key, value) + populate_axis_result = self._populateAxis(key, value) + if populate_axis_result.is_err(): + message = populate_axis_result.unwrap_err()[0] + gm.ui.messageBox(message) + ___: Err[None] = Err(message, ErrorSeverity.Fatal) + raise RuntimeError() - self._linkAllAxis() + __ = self._linkAllAxis() # self.groundSimNode.printLink() - @logFailure - def __getAllJoints(self) -> None: + def __getAllJoints(self) -> Result[None]: + logger.log(10, "Getting Joints") for joint in list(self.design.rootComponent.allJoints) + list(self.design.rootComponent.allAsBuiltJoints): if joint and joint.occurrenceOne and joint.occurrenceTwo: occurrenceOne 
= joint.occurrenceOne occurrenceTwo = joint.occurrenceTwo else: - return + # Non-fatal since it's recovered in the next two statements + _: Err[None] = Err("Found joint without two occurrences", ErrorSeverity.Warning) if occurrenceOne is None: - try: - occurrenceOne = joint.geometryOrOriginOne.entityOne.assemblyContext - except: - pass + if joint.geometryOrOriginOne.entityOne.assemblyContext is None: + ____: Err[None] = Err( + "occurrenceOne and entityOne's assembly context are None", ErrorSeverity.Fatal + ) + occurrenceOne = joint.geometryOrOriginOne.entityOne.assemblyContext if occurrenceTwo is None: - try: - occurrenceTwo = joint.geometryOrOriginTwo.entityOne.assemblyContext - except: - pass + if joint.geometryOrOriginTwo.entityTwo.assemblyContext is None: + __: Err[None] = Err("occurrenceOne and entityTwo's assembly context are None", ErrorSeverity.Fatal) + occurrenceTwo = joint.geometryOrOriginTwo.entityTwo.assemblyContext oneEntityToken = "" twoEntityToken = "" + # TODO: Fix change to if statement with Result returning try: oneEntityToken = occurrenceOne.entityToken except: @@ -293,124 +313,146 @@ def __getAllJoints(self) -> None: if oneEntityToken not in self.dynamicJoints.keys(): self.dynamicJoints[oneEntityToken] = joint + # TODO: Check if this is fatal or not if occurrenceTwo is None and occurrenceOne is None: - logger.error( - f"Occurrences that connect joints could not be found\n\t1: {occurrenceOne}\n\t2: {occurrenceTwo}" + ___: Err[None] = Err( + f"Occurrences that connect joints could not be found\n\t1: {occurrenceOne}\n\t2: {occurrenceTwo}", + ErrorSeverity.Fatal, ) - return else: if oneEntityToken == self.grounded.entityToken: self.groundedConnections.append(occurrenceTwo) elif twoEntityToken == self.grounded.entityToken: self.groundedConnections.append(occurrenceOne) + return Ok(None) - def _linkAllAxis(self) -> None: + def _linkAllAxis(self) -> Result[None]: # looks through each simulation nood starting with ground and orders them using edges # self.groundSimNode is ground - self._recurseLink(self.groundSimNode) + return self._recurseLink(self.groundSimNode) - def _recurseLink(self, simNode: SimulationNode) -> None: + def _recurseLink(self, simNode: SimulationNode) -> Result[None]: connectedAxisNodes = [ self.simulationNodesRef.get(componentKeys, None) for componentKeys in simNode.data.getConnectedAxisTokens() ] + if any([node is None for node in connectedAxisNodes]): + return Err(f"Found None Connected Access Node", ErrorSeverity.Fatal) + for connectedAxis in connectedAxisNodes: # connected is the occurrence if connectedAxis is not None: edge = SimulationEdge(JointRelationship.GROUND, connectedAxis) simNode.edges.append(edge) - self._recurseLink(connectedAxis) - def _lookForGroundedJoints(self) -> None: - grounded_token = self.grounded.entityToken + recurse_result = self._recurseLink(connectedAxis) + if recurse_result.is_fatal(): + return recurse_result + return Ok(None) + + def _lookForGroundedJoints(self) -> Result[None]: + # grounded_token = self.grounded.entityToken rootDynamicJoint = self.groundSimNode.data + if rootDynamicJoint is None: + return Err("Found None rootDynamicJoint", ErrorSeverity.Fatal) for grounded_connect in self.groundedConnections: self.currentTraversal = dict() - self._populateNode( + _ = self._populateNode( grounded_connect, rootDynamicJoint, OccurrenceRelationship.CONNECTION, is_ground=False, ) + return Ok(None) - def _populateAxis(self, occ_token: str, joint: adsk.fusion.Joint) -> None: + def _populateAxis(self, occ_token: str, joint: 
adsk.fusion.Joint) -> Result[None]: occ = self.design.findEntityByToken(occ_token)[0] - if occ is None: - return + return Ok(None) self.currentTraversal = dict() - rootNode = self._populateNode(occ, None, None) + populate_node_result = self._populateNode(occ, None, None) + if populate_node_result.is_err(): # We need the value to proceed + unwrapped = populate_node_result.unwrap_err() + return Err(unwrapped[0], unwrapped[1]) + rootNode = populate_node_result.unwrap() if rootNode is not None: axisNode = SimulationNode(rootNode, joint) self.simulationNodesRef[occ_token] = axisNode + return Ok(None) + + # TODO: Verify that this works after the Result-refactor :skull: def _populateNode( self, occ: adsk.fusion.Occurrence, prev: DynamicOccurrenceNode | None, relationship: OccurrenceRelationship | None, is_ground: bool = False, - ) -> DynamicOccurrenceNode | None: + ) -> Result[DynamicOccurrenceNode | None]: if occ.isGrounded and not is_ground: - return None + return Ok(None) elif (relationship == OccurrenceRelationship.NEXT) and (prev is not None): node = DynamicOccurrenceNode(occ) edge = DynamicEdge(relationship, node) prev.edges.append(edge) - return None + return Ok(None) elif ((occ.entityToken in self.dynamicJoints.keys()) and (prev is not None)) or self.currentTraversal.get( occ.entityToken ) is not None: - return None + return Ok(None) node = DynamicOccurrenceNode(occ) self.currentTraversal[occ.entityToken] = True for occurrence in occ.childOccurrences: - self._populateNode(occurrence, node, OccurrenceRelationship.TRANSFORM, is_ground=is_ground) + populate_result = self._populateNode( + occurrence, node, OccurrenceRelationship.TRANSFORM, is_ground=is_ground + ) + if populate_result.is_fatal(): + return populate_result # if not is_ground: # THIS IS A BUG - OCCURRENCE ACCESS VIOLATION # this is the current reason for wrapping in try except pass - try: - for joint in occ.joints: - if joint and joint.occurrenceOne and joint.occurrenceTwo: - occurrenceOne = joint.occurrenceOne - occurrenceTwo = joint.occurrenceTwo - connection = None - rigid = joint.jointMotion.jointType == 0 - - if rigid: - if joint.occurrenceOne == occ: - connection = joint.occurrenceTwo - if joint.occurrenceTwo == occ: - connection = joint.occurrenceOne - else: - if joint.occurrenceOne != occ: - connection = joint.occurrenceOne - - if connection is not None: - if prev is None or connection.entityToken != prev.data.entityToken: - self._populateNode( - connection, - node, - (OccurrenceRelationship.CONNECTION if rigid else OccurrenceRelationship.NEXT), - is_ground=is_ground, - ) + for joint in occ.joints: + if joint and joint.occurrenceOne and joint.occurrenceTwo: + occurrenceOne = joint.occurrenceOne + occurrenceTwo = joint.occurrenceTwo + connection = None + rigid = joint.jointMotion.jointType == 0 + + if rigid: + if joint.occurrenceOne == occ: + connection = joint.occurrenceTwo + if joint.occurrenceTwo == occ: + connection = joint.occurrenceOne else: - continue - except: - pass + if joint.occurrenceOne != occ: + connection = joint.occurrenceOne + + if connection is not None: + if prev is None or connection.entityToken != prev.data.entityToken: + populate_result = self._populateNode( + connection, + node, + (OccurrenceRelationship.CONNECTION if rigid else OccurrenceRelationship.NEXT), + is_ground=is_ground, + ) + if populate_result.is_fatal(): + return populate_result + else: + # Check if this joint occurance violation is really a fatal error or just something we should filter on + return Err("Joint without two occurrences", 
ErrorSeverity.Fatal) if prev is not None: edge = DynamicEdge(relationship, node) prev.edges.append(edge) self.currentTraversal[occ.entityToken] = node - return node + return Ok(node) def searchForGrounded( @@ -422,14 +464,13 @@ def searchForGrounded( occ (adsk.fusion.Occurrence): start point Returns: - Union(adsk.fusion.Occurrence, None): Either a grounded part or nothing + adsk.fusion.Occurrence | None: Either a grounded part or nothing """ if occ.objectType == "adsk::fusion::Component": # this makes it possible to search an object twice (unoptimized) collection = occ.allOccurrences # components cannot be grounded technically - else: # Object is an occurrence if occ.isGrounded: return occ @@ -448,13 +489,14 @@ def searchForGrounded( # ________________________ Build implementation ______________________ # -@logFailure -def BuildJointPartHierarchy( +@handle_err_top +def buildJointPartHierarchy( design: adsk.fusion.Design, joints: joint_pb2.Joints, options: ExporterOptions, progressDialog: PDMessage, -) -> None: +) -> Result[None]: + # This try-catch is necessary because the JointParser __init__ functon is fallible and throws a RuntimeWarning (__init__ functions cannot return values) try: progressDialog.currentMessage = f"Constructing Simulation Hierarchy" progressDialog.update() @@ -462,7 +504,9 @@ def BuildJointPartHierarchy( jointParser = JointParser(design) rootSimNode = jointParser.groundSimNode - populateJoint(rootSimNode, joints, progressDialog) + populate_joint_result = populateJoint(rootSimNode, joints, progressDialog) + if populate_joint_result.is_fatal(): + return populate_joint_result # 1. Get Node # 2. Get Transform of current Node @@ -475,15 +519,18 @@ def BuildJointPartHierarchy( # now add each wheel to the root I believe if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) + + return Ok(None) - except Warning: - pass + except RuntimeError as e: + progressDialog.progressDialog.hide() + raise e -def populateJoint(simNode: SimulationNode, joints: joint_pb2.Joints, progressDialog: PDMessage) -> None: +def populateJoint(simNode: SimulationNode, joints: joint_pb2.Joints, progressDialog: PDMessage) -> Result[None]: if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) if not simNode.joint: proto_joint = joints.joint_instances["grounded"] @@ -494,19 +541,23 @@ def populateJoint(simNode: SimulationNode, joints: joint_pb2.Joints, progressDia progressDialog.update() if not proto_joint: - logger.error(f"Could not find protobuf joint for {simNode.name}") - return + return Err(f"Could not find protobuf joint for {simNode.name}", ErrorSeverity.Fatal) root = types_pb2.Node() # construct body tree if possible - createTreeParts(simNode.data, OccurrenceRelationship.CONNECTION, root, progressDialog) + tree_parts_result = createTreeParts(simNode.data, OccurrenceRelationship.CONNECTION, root, progressDialog) + if tree_parts_result.is_fatal(): + return tree_parts_result proto_joint.parts.nodes.append(root) # next in line to be populated for edge in simNode.edges: - populateJoint(cast(SimulationNode, edge.node), joints, progressDialog) + populate_joint_result = populateJoint(cast(SimulationNode, edge.node), joints, progressDialog) + if populate_joint_result.is_fatal(): + return populate_joint_result + return Ok(None) def createTreeParts( @@ -514,34 +565,44 @@ def createTreeParts( relationship: RelationshipBase | None, node: 
types_pb2.Node, progressDialog: PDMessage, -) -> None: +) -> Result[None]: if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) # if it's the next part just exit early for our own sanity + # This shouldn't be fatal nor even an error if relationship == OccurrenceRelationship.NEXT or dynNode.data.isLightBulbOn == False: - return + return Ok(None) # set the occurrence / component id to reference the part - try: - objectType = dynNode.data.objectType - except: + # Fine way to use try-excepts in this language + if dynNode.data.objectType is None: + _: Err[None] = Err("Found None object type", ErrorSeverity.Warning) objectType = "" + else: + objectType = dynNode.data.objectType if objectType == "adsk::fusion::Occurrence": node.value = guid_occurrence(dynNode.data) elif objectType == "adsk::fusion::Component": node.value = guid_component(dynNode.data) else: - try: - node.value = dynNode.data.entityToken - except RuntimeError: + if dynNode.data.entityToken is None: + __: Err[None] = Err("Found None EntityToken", ErrorSeverity.Warning) node.value = dynNode.data.name + else: + node.value = dynNode.data.entityToken # possibly add additional information for the type of connection made # recurse and add all children connections for edge in dynNode.edges: child_node = types_pb2.Node() - createTreeParts(cast(DynamicOccurrenceNode, edge.node), edge.relationship, child_node, progressDialog) + tree_parts_result = createTreeParts( + cast(DynamicOccurrenceNode, edge.node), edge.relationship, child_node, progressDialog + ) + if tree_parts_result.is_fatal(): + return tree_parts_result node.children.append(child_node) + + return Ok(None) diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Joints.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Joints.py index 0577da7a41..e7b7a1e239 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Joints.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Joints.py @@ -29,6 +29,7 @@ import adsk.core import adsk.fusion +from src.ErrorHandling import Err, ErrorSeverity, Ok, Result, handle_err_top from src.Logging import getLogger from src.Parser.ExporterOptions import ExporterOptions from src.Parser.SynthesisParser.PDMessage import PDMessage @@ -69,6 +70,7 @@ # 3. 
connect all instances with graphcontainer +@handle_err_top def populateJoints( design: adsk.fusion.Design, joints: joint_pb2.Joints, @@ -76,20 +78,26 @@ def populateJoints( progressDialog: PDMessage, options: ExporterOptions, assembly: assembly_pb2.Assembly, -) -> None: - fill_info(joints, None) +) -> Result[None]: + info_result = fill_info(joints, None) + if info_result.is_fatal(): + return info_result # This is for creating all of the Joint Definition objects # So we need to iterate through the joints and construct them and add them to the map if not options.joints: - return + return Ok(None) # Add the grounded joints object - TODO: rename some of the protobuf stuff for the love of god joint_definition_ground = joints.joint_definitions["grounded"] - construct_info("grounded", joint_definition_ground) + info_result = construct_info("grounded", joint_definition_ground) + if info_result.is_fatal(): + return info_result joint_instance_ground = joints.joint_instances["grounded"] - construct_info("grounded", joint_instance_ground) + info_result = construct_info("grounded", joint_instance_ground) + if info_result.is_fatal(): + return info_result joint_instance_ground.joint_reference = joint_definition_ground.info.GUID @@ -106,7 +114,8 @@ def populateJoints( if joint.jointMotion.jointType in AcceptedJointTypes: try: - # Fusion has no instances of joints but lets roll with it anyway + # Fusion has no instances of joints but lets roll with it anyway + # ^^^ This majorly confuses me ^^^ # progressDialog.message = f"Exporting Joint configuration {joint.name}" progressDialog.addJoint(joint.name) @@ -122,7 +131,11 @@ def populateJoints( if parse_joints.jointToken == joint.entityToken: guid = str(uuid.uuid4()) signal = signals.signal_map[guid] - construct_info(joint.name, signal, GUID=guid) + + info_result = construct_info(joint.name, signal, GUID=guid) + if info_result.is_fatal(): + return info_result + signal.io = signal_pb2.IOType.OUTPUT # really could just map the enum to a friggin string @@ -133,7 +146,11 @@ def populateJoints( signal.device_type = signal_pb2.DeviceType.PWM motor = joints.motor_definitions[joint.entityToken] - fill_info(motor, joint) + + info_result = fill_info(motor, joint) + if info_result.is_fatal(): + return info_result + simple_motor = motor.simple_motor simple_motor.stall_torque = parse_joints.force simple_motor.max_velocity = parse_joints.speed @@ -144,19 +161,24 @@ def populateJoints( # else: # signals.signal_map.remove(guid) - _addJointInstance(joint, joint_instance, joint_definition, signals, options) + joint_result = _addJointInstance(joint, joint_instance, joint_definition, signals, options) + if joint_result.is_fatal(): + return joint_result # adds information for joint motion and limits _motionFromJoint(joint.jointMotion, joint_definition) except: - logger.error("Failed:\n{}".format(traceback.format_exc())) + # TODO: Figure out how to construct and return this (ie, what actually breaks in this try block) + _: Err[None] = Err("Failed:\n{}".format(traceback.format_exc()), ErrorSeverity.Fatal) continue + return Ok(None) -def _addJoint(joint: adsk.fusion.Joint, joint_definition: joint_pb2.Joint) -> None: - fill_info(joint_definition, joint) - +def _addJoint(joint: adsk.fusion.Joint, joint_definition: joint_pb2.Joint) -> Result[None]: + info_result = fill_info(joint_definition, joint) + if info_result.is_fatal(): + return info_result jointPivotTranslation = _jointOrigin(joint) if jointPivotTranslation: @@ -168,10 +190,13 @@ def _addJoint(joint: adsk.fusion.Joint, 
joint_definition: joint_pb2.Joint) -> No joint_definition.origin.y = 0.0 joint_definition.origin.z = 0.0 - logger.error(f"Cannot find joint origin on joint {joint.name}") + # TODO: We definitely could make this fatal, figure out if we should + _: Err[None] = Err(f"Cannot find joint origin on joint {joint.name}", ErrorSeverity.Warning) joint_definition.break_magnitude = 0.0 + return Ok(None) + def _addJointInstance( joint: adsk.fusion.Joint, @@ -179,8 +204,11 @@ def _addJointInstance( joint_definition: joint_pb2.Joint, signals: signal_pb2.Signals, options: ExporterOptions, -) -> None: - fill_info(joint_instance, joint) +) -> Result[None]: + info_result = fill_info(joint_instance, joint) + if info_result.is_fatal(): + return info_result + # because there is only one and we are using the token - should be the same joint_instance.joint_reference = joint_instance.info.GUID @@ -221,7 +249,11 @@ def _addJointInstance( else: # if not then create it and add the signal type guid = str(uuid.uuid4()) signal = signals.signal_map[guid] - construct_info("joint_signal", signal, GUID=guid) + + info_result = construct_info("joint_signal", signal, GUID=guid) + if info_result.is_fatal(): + return info_result + signal.io = signal_pb2.IOType.OUTPUT joint_instance.signal_reference = signal.info.GUID @@ -232,6 +264,7 @@ def _addJointInstance( signal.device_type = signal_pb2.DeviceType.PWM else: joint_instance.signal_reference = "" + return Ok(None) def _addRigidGroup(joint: adsk.fusion.Joint, assembly: assembly_pb2.Assembly) -> None: @@ -422,7 +455,7 @@ def notImplementedPlaceholder(*argv: Any) -> None: ... def _searchForGrounded( occ: adsk.fusion.Occurrence, -) -> Union[adsk.fusion.Occurrence, None]: +) -> adsk.fusion.Occurrence | None: """Search for a grounded component or occurrence in the assembly Args: @@ -507,12 +540,13 @@ def _jointOrigin(fusionJoint: Union[adsk.fusion.Joint, adsk.fusion.AsBuiltJoint] return adsk.core.Point3D.create(origin.x + offsetX, origin.y + offsetY, origin.z + offsetZ) +@handle_err_top def createJointGraph( suppliedJoints: list[Joint], _wheels: list[Wheel], jointTree: types_pb2.GraphContainer, progressDialog: PDMessage, -) -> None: +) -> Result[None]: # progressDialog.message = f"Building Joint Graph Map from given joints" progressDialog.currentMessage = f"Building Joint Graph Map from given joints" @@ -542,11 +576,16 @@ def createJointGraph( elif nodeMap[suppliedJoint.parent.value] is not None and nodeMap[suppliedJoint.jointToken] is not None: nodeMap[str(suppliedJoint.parent)].children.append(nodeMap[suppliedJoint.jointToken]) else: - logger.error(f"Cannot construct hierarhcy because of detached tree at : {suppliedJoint.jointToken}") + # TODO: This might not need to be fatal + return Err( + f"Cannot construct hierarchy because of detached tree at : {suppliedJoint.jointToken}", + ErrorSeverity.Fatal, + ) for node in nodeMap.values(): # append everything at top level to isolate kinematics jointTree.nodes.append(node) + return Ok(None) def addWheelsToGraph(wheels: list[Wheel], rootNode: types_pb2.Node, jointTree: types_pb2.GraphContainer) -> None: diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Materials.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Materials.py index 08c5e17bb3..40b27ce63f 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Materials.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Materials.py @@ -1,6 +1,6 @@ import adsk.core -from src.Logging import logFailure +from src.ErrorHandling import 
Err, ErrorSeverity, Ok, Result, handle_err_top from src.Parser.ExporterOptions import ExporterOptions from src.Parser.SynthesisParser.PDMessage import PDMessage from src.Parser.SynthesisParser.Utilities import construct_info, fill_info @@ -26,26 +26,37 @@ } -def _MapAllPhysicalMaterials( +@handle_err_top +def mapAllPhysicalMaterials( physicalMaterials: list[material_pb2.PhysicalMaterial], materials: material_pb2.Materials, options: ExporterOptions, progressDialog: PDMessage, -) -> None: - setDefaultMaterial(materials.physicalMaterials["default"], options) +) -> Result[None]: + set_result = setDefaultMaterial(materials.physicalMaterials["default"], options) + if set_result.is_fatal(): + return set_result for material in physicalMaterials: - progressDialog.addMaterial(material.name) + if material.name is None or material.id is None: + return Err("Material missing id or name", ErrorSeverity.Fatal) + progressDialog.addMaterial(material.name) if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) newmaterial = materials.physicalMaterials[material.id] - getPhysicalMaterialData(material, newmaterial, options) + material_result = getPhysicalMaterialData(material, newmaterial, options) + if material_result.is_fatal(): + return material_result + + return Ok(None) -def setDefaultMaterial(physicalMaterial: material_pb2.PhysicalMaterial, options: ExporterOptions) -> None: - construct_info("default", physicalMaterial) +def setDefaultMaterial(physicalMaterial: material_pb2.PhysicalMaterial, options: ExporterOptions) -> Result[None]: + construct_info_result = construct_info("default", physicalMaterial) + if construct_info_result.is_err(): + return construct_info_result physicalMaterial.description = "A default physical material" if options.frictionOverride: @@ -59,11 +70,12 @@ def setDefaultMaterial(physicalMaterial: material_pb2.PhysicalMaterial, options: physicalMaterial.deformable = False physicalMaterial.matType = 0 # type: ignore[assignment] + return Ok(None) + -@logFailure def getPhysicalMaterialData( fusionMaterial: adsk.core.Material, physicalMaterial: material_pb2.PhysicalMaterial, options: ExporterOptions -) -> None: +) -> Result[None]: """Gets the material data and adds it to protobuf Args: @@ -71,7 +83,9 @@ def getPhysicalMaterialData( proto_material (protomaterial): proto material mirabuf options (parseoptions): parse options """ - construct_info("", physicalMaterial, fus_object=fusionMaterial) + construct_info_result = construct_info("", physicalMaterial, fus_object=fusionMaterial) + if construct_info_result.is_err(): + return construct_info_result physicalMaterial.deformable = False physicalMaterial.matType = 0 # type: ignore[assignment] @@ -127,41 +141,65 @@ def getPhysicalMaterialData( mechanicalProperties.density = materialProperties.itemById("structural_Density").value mechanicalProperties.damping_coefficient = materialProperties.itemById("structural_Damping_coefficient").value + missingProperties: list[str] = [ + k for k, v in mechanicalProperties.ListFields() if v is None and not k.startswith("__") + ] # ignore: type + if missingProperties.__len__() > 0: + _: Err[None] = Err(f"Missing Mechanical Properties {missingProperties}", ErrorSeverity.Warning) + """ Strength Properties """ strengthProperties.yield_strength = materialProperties.itemById("structural_Minimum_yield_stress").value strengthProperties.tensile_strength = materialProperties.itemById("structural_Minimum_tensile_strength").value + + 
missingStrengthProperties: list[str] = [k for k, v in strengthProperties.ListFields() if v is None] # ignore: type + if missingStrengthProperties.__len__() > 0: + __: Err[None] = Err(f"Missing Strength Properties {missingProperties}", ErrorSeverity.Warning) + """ strengthProperties.thermal_treatment = materialProperties.itemById( "structural_Thermally_treated" ).value """ + return Ok(None) + -def _MapAllAppearances( +@handle_err_top +def mapAllAppearances( appearances: list[material_pb2.Appearance], materials: material_pb2.Materials, options: ExporterOptions, progressDialog: PDMessage, -) -> None: +) -> Result[None]: # in case there are no appearances on a body # this is just a color tho - setDefaultAppearance(materials.appearances["default"]) + set_default_result = setDefaultAppearance(materials.appearances["default"]) + if set_default_result.is_fatal(): + return set_default_result - fill_info(materials, None) + fill_info_result = fill_info(materials, None) + if fill_info_result.is_err(): + return fill_info_result for appearance in appearances: progressDialog.addAppearance(appearance.name) + # NOTE I'm not sure if this should be integrated with the error handling system or not, since it's fully intentional and immediantly aborts, which is the desired behavior + # TODO Talk to Brandon about this if progressDialog.wasCancelled(): - raise RuntimeError("User canceled export") + return Err("User canceled export", ErrorSeverity.Fatal) material = materials.appearances["{}_{}".format(appearance.name, appearance.id)] - getMaterialAppearance(appearance, options, material) + material_result = getMaterialAppearance(appearance, options, material) + if material_result.is_fatal(): + return material_result + return Ok(None) -def setDefaultAppearance(appearance: material_pb2.Appearance) -> None: + +def setDefaultAppearance(appearance: material_pb2.Appearance) -> Result[None]: """Get a default color for the appearance Returns: @@ -169,7 +207,10 @@ def setDefaultAppearance(appearance: material_pb2.Appearance) -> None: """ # add info - construct_info("default", appearance) + # TODO: Check if appearance actually can be passed in here in place of an assembly or smth + construct_info_result = construct_info("default", appearance) + if construct_info_result.is_err(): + return construct_info_result appearance.roughness = 0.5 appearance.metallic = 0.5 @@ -181,18 +222,22 @@ def setDefaultAppearance(appearance: material_pb2.Appearance) -> None: color.B = 127 color.A = 255 + return Ok(None) + def getMaterialAppearance( fusionAppearance: adsk.core.Appearance, options: ExporterOptions, appearance: material_pb2.Appearance, -) -> None: +) -> Result[None]: """Takes in a Fusion Mesh and converts it to a usable unity mesh Args: fusionAppearance (adsk.core.Appearance): Fusion appearance material """ - construct_info("", appearance, fus_object=fusionAppearance) + construct_info_result = construct_info("", appearance, fus_object=fusionAppearance) + if construct_info_result.is_err(): + return construct_info_result appearance.roughness = 0.9 appearance.metallic = 0.3 @@ -206,6 +251,8 @@ def getMaterialAppearance( color.A = 127 properties = fusionAppearance.appearanceProperties + if properties is None: + return Err("Apperarance Properties were None", ErrorSeverity.Fatal) roughnessProp = properties.itemById("surface_roughness") if roughnessProp: @@ -260,3 +307,4 @@ def getMaterialAppearance( color.B = baseColor.blue color.A = baseColor.opacity break + return Ok(None) diff --git 
a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py index b085b48d32..3776bfa100 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py @@ -3,10 +3,10 @@ import adsk.core import adsk.fusion -from google.protobuf.json_format import MessageToJson from src import gm from src.APS.APS import getAuth, upload_mirabuf +from src.ErrorHandling import Err, ErrorSeverity, Result from src.Logging import getLogger, logFailure, timed from src.Parser.ExporterOptions import ExporterOptions from src.Parser.SynthesisParser import ( @@ -24,6 +24,16 @@ logger = getLogger() +def reload() -> None: + """Reloads the Parser module""" + import importlib + + importlib.reload(Components) + importlib.reload(Joints) + importlib.reload(Materials) + importlib.reload(PDMessage) + + class Parser: def __init__(self, options: ExporterOptions): """Creates a new parser with the supplied options @@ -33,9 +43,9 @@ def __init__(self, options: ExporterOptions): """ self.exporterOptions = options - @logFailure(messageBox=True) @timed def export(self) -> None: + getLogger().info(f"Exporting with options {self.exporterOptions}") app = adsk.core.Application.get() design: adsk.fusion.Design = app.activeDocument.design @@ -44,10 +54,13 @@ def export(self) -> None: return assembly_out = assembly_pb2.Assembly() - fill_info( - assembly_out, - design.rootComponent, - override_guid=design.parentDocument.name, + # This can't use the wrapper because there are lower level calls of this utility function + handle_err_top( + fill_info( + assembly_out, + design.rootComponent, + override_guid=design.parentDocument.name, + ) ) # set int to 0 in dropdown selection for dynamic @@ -77,21 +90,21 @@ def export(self) -> None: progressDialog, ) - Materials._MapAllAppearances( + Materials.mapAllAppearances( design.appearances, assembly_out.data.materials, self.exporterOptions, self.pdMessage, ) - Materials._MapAllPhysicalMaterials( + Materials.mapAllPhysicalMaterials( design.materials, assembly_out.data.materials, self.exporterOptions, self.pdMessage, ) - Components._MapAllComponents( + Components.mapAllComponents( design, self.exporterOptions, self.pdMessage, @@ -101,7 +114,7 @@ def export(self) -> None: rootNode = types_pb2.Node() - Components._ParseComponentRoot( + Components.parseComponentRoot( design.rootComponent, self.pdMessage, self.exporterOptions, @@ -110,7 +123,7 @@ def export(self) -> None: rootNode, ) - Components._MapRigidGroups(design.rootComponent, assembly_out.data.joints) + Components.mapRigidGroups(design.rootComponent, assembly_out.data.joints) assembly_out.design_hierarchy.nodes.append(rootNode) @@ -135,7 +148,12 @@ def export(self) -> None: self.pdMessage, ) - JointHierarchy.BuildJointPartHierarchy(design, assembly_out.data.joints, self.exporterOptions, self.pdMessage) + try: + JointHierarchy.buildJointPartHierarchy( + design, assembly_out.data.joints, self.exporterOptions, self.pdMessage + ) + except RuntimeError as e: + raise e # These don't have an effect, I forgot how this is suppose to work # progressDialog.message = "Taking Photo for thumbnail..." 
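This file ends up using two flavors of top-level error handling: the `@handle_err_top` decorator from `ErrorHandling.py` for `Result`-returning entry points, and a local helper that is handed an already-computed `Result` where a return value is still needed. A standalone sketch of the decorator flavor, with `print` standing in for the Fusion message box and `MiniResult` standing in for the add-in's `Result` classes:

```python
from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class MiniResult:
    error: str | None = None  # None means the call succeeded
    fatal: bool = False


def handle_top(func: Callable[..., MiniResult]) -> Callable[..., None]:
    def wrapper(*args, **kwargs) -> None:
        result = func(*args, **kwargs)
        if result.error is not None and result.fatal:
            # The add-in shows a Fusion messageBox here; print() keeps the sketch runnable.
            print(f"Fatal Error Encountered: {result.error}")
    return wrapper


@handle_top
def export_step() -> MiniResult:
    return MiniResult(error="User canceled export", fatal=True)


export_step()  # reports the fatal error instead of raising
```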
@@ -179,12 +197,18 @@ def export(self) -> None: logger.debug("Uploading file to APS") project = app.data.activeProject if not project.isValid: - raise RuntimeError("Project is invalid") + app.userInterface.messageBox(f"Project is invalid") + return project_id = project.id folder_id = project.rootFolder.id file_name = f"{self.exporterOptions.fileLocation}.mira" - if upload_mirabuf(project_id, folder_id, file_name, assembly_out.SerializeToString()) is None: - raise RuntimeError("Could not upload to APS") + + # Can't use decorator because it returns a value + upload_result = upload_mirabuf(project_id, folder_id, file_name, assembly_out.SerializeToString()) + if upload_result.is_err(): + message = upload_result.unwrap_err()[0] + app.userInterface.messageBox(f"Fatal Error Encountered: {message}") + return else: assert self.exporterOptions.exportLocation == ExportLocation.DOWNLOAD # check if entire path exists and create if not since gzip doesn't do that. @@ -195,10 +219,10 @@ def export(self) -> None: if self.exporterOptions.compressOutput: logger.debug("Compressing file") with gzip.open(str(self.exporterOptions.fileLocation), "wb", 9) as f: - f.write(assembly_out.SerializeToString()) + _ = f.write(assembly_out.SerializeToString()) else: with open(str(self.exporterOptions.fileLocation), "wb") as f: - f.write(assembly_out.SerializeToString()) + _ = f.write(assembly_out.SerializeToString()) _ = progressDialog.hide() @@ -249,3 +273,11 @@ def export(self) -> None: ) logger.debug(debug_output.strip()) + + +def handle_err_top[T](result: Result[T]) -> None: + if result.is_err(): + message, severity = result.unwrap_err() + if severity == ErrorSeverity.Fatal: + app = adsk.core.Application.get() + app.userInterface.messageBox(f"Fatal Error Encountered: {message}") diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/PhysicalProperties.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/PhysicalProperties.py index 53af57dfe5..42f933a951 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/PhysicalProperties.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/PhysicalProperties.py @@ -1,43 +1,52 @@ -""" Gets the Physical Data associated with a given item - - Takes: - - BRepBody - - Occurrence - - Component - - Adds: - - Density - - Mass - - Volume - - COM - - X - - Y - - Z +"""Gets the Physical Data associated with a given item + +Takes: + - BRepBody + - Occurrence + - Component + +Adds: + - Density + - Mass + - Volume + - COM + - X + - Y + - Z """ -from typing import Union +from typing import Any import adsk +from src.ErrorHandling import Err, ErrorSeverity, Ok, Result from src.Logging import logFailure from src.Proto import types_pb2 -@logFailure -def GetPhysicalProperties( - fusionObject: Union[adsk.fusion.BRepBody, adsk.fusion.Occurrence, adsk.fusion.Component], +def getPhysicalProperties( + fusionObject: adsk.fusion.BRepBody | adsk.fusion.Occurrence | adsk.fusion.Component, physicalProperties: types_pb2.PhysicalProperties, level: int = 1, -) -> None: +) -> Result[None]: """Will populate a physical properties section of an exported file Args: - fusionObject (Union[adsk.fusion.BRepBody, adsk.fusion.Occurrence, adsk.fusion.Component]): The base fusion object + fusionObject (adsk.fusion.BRepBody | adsk.fusion.Occurrence, adsk.fusion.Component): The base fusion object physicalProperties (any): Unity Joint object for now level (int): Level of accurracy """ physical = fusionObject.getPhysicalProperties(level) + if physical is None: + return 
Err("Physical properties object is None", ErrorSeverity.Error) + + missing_properties_bools: list[bool] = [ + value is None for prop, value in vars(physical).items() if not prop.startswith("__") + ] + if any(prop for prop in missing_properties_bools): + # missing_properties: list[Any] = [physical[i] for i, prop in enumerate(missing_properties_bools) if prop] + _: Err[None] = Err("Missing some physical properties", ErrorSeverity.Warning) physicalProperties.density = physical.density physicalProperties.mass = physical.mass @@ -51,3 +60,7 @@ def GetPhysicalProperties( _com.x = com.x _com.y = com.y _com.z = com.z + else: + __: Err[None] = Err("com is None", ErrorSeverity.Warning) + + return Ok(None) diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/RigidGroup.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/RigidGroup.py index a5e389beda..afcba2114a 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/RigidGroup.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/RigidGroup.py @@ -1,19 +1,17 @@ -""" Module to define static methods to extract rigidGroup information from a given occurrence or component +"""Module to define static methods to extract rigidGroup information from a given occurrence or component - NOT CURRENTLY IN USE BECAUSE OF BUG + NOT CURRENTLY IN USE BECAUSE OF BUG - - Will directly add it to the given Assembly Message +- Will directly add it to the given Assembly Message - Takes: - - Occurrence - - Component +Takes: + - Occurrence + - Component - Returns: - - Success +Returns: + - Success """ -from typing import Union - import adsk.core import adsk.fusion @@ -25,8 +23,8 @@ # According to the type errors I'm getting here this code would have never compiled. # Should be removed later @logFailure -def ExportRigidGroups( - fus_occ: Union[adsk.fusion.Occurrence, adsk.fusion.Component], +def exportRigidGroups( + fus_occ: adsk.fusion.Occurrence | adsk.fusion.Component, hel_occ: assembly_pb2.Occurrence, # type: ignore[name-defined] ) -> None: """Takes a Fusion and Protobuf Occurrence and will assign Rigidbody data per the occurrence if any exist and are not surpressed. 
diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Utilities.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Utilities.py index d8f38d921f..b3b23db4bd 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Utilities.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Utilities.py @@ -1,10 +1,12 @@ import math import uuid +from typing import Never import adsk.core import adsk.fusion -from src.Proto import assembly_pb2 +from src.ErrorHandling import Err, ErrorSeverity, Ok, Result +from src.Proto import assembly_pb2, material_pb2, types_pb2 def guid_component(comp: adsk.fusion.Component) -> str: @@ -19,17 +21,22 @@ def guid_none(_: None) -> str: return str(uuid.uuid4()) -def fill_info(proto_obj: assembly_pb2.Assembly, fus_object: adsk.core.Base, override_guid: str | None = None) -> None: - construct_info("", proto_obj, fus_object=fus_object, GUID=override_guid) +def fill_info( + proto_obj: assembly_pb2.Assembly | material_pb2.Materials, + fus_object: adsk.core.Base, + override_guid: str | None = None, +) -> Result[None]: + return construct_info("", proto_obj, fus_object=fus_object, GUID=override_guid) def construct_info( name: str, - proto_obj: assembly_pb2.Assembly, + proto_obj: assembly_pb2.Assembly | material_pb2.Materials | material_pb2.PhysicalMaterial, version: int = 5, fus_object: adsk.core.Base | None = None, GUID: str | None = None, -) -> None: +) -> Result[None]: + # TODO Fix out of date documentation """Constructs a info object from either a name or a fus_object Args: @@ -49,7 +56,7 @@ def construct_info( if fus_object is not None: proto_obj.info.name = fus_object.name - else: + elif name != "": proto_obj.info.name = name if GUID is not None: @@ -59,153 +66,4 @@ def construct_info( else: proto_obj.info.GUID = str(uuid.uuid4()) - -# Transition: AARD-1765 -# Will likely be removed later as this is no longer used. Avoiding adding typing for now. 
-# My previous function was alot more optimized however now I realize the bug was this doesn't work well with degrees -def euler_to_quaternion(r): # type: ignore - (yaw, pitch, roll) = (r[0], r[1], r[2]) - qx = math.sin(roll / 2) * math.cos(pitch / 2) * math.cos(yaw / 2) - math.cos(roll / 2) * math.sin( - pitch / 2 - ) * math.sin(yaw / 2) - qy = math.cos(roll / 2) * math.sin(pitch / 2) * math.cos(yaw / 2) + math.sin(roll / 2) * math.cos( - pitch / 2 - ) * math.sin(yaw / 2) - qz = math.cos(roll / 2) * math.cos(pitch / 2) * math.sin(yaw / 2) - math.sin(roll / 2) * math.sin( - pitch / 2 - ) * math.cos(yaw / 2) - qw = math.cos(roll / 2) * math.cos(pitch / 2) * math.cos(yaw / 2) + math.sin(roll / 2) * math.sin( - pitch / 2 - ) * math.sin(yaw / 2) - return [qx, qy, qz, qw] - - -def rad_to_deg(rad): # type: ignore - """Very simple method to convert Radians to degrees - - Args: - rad (float): radians unit - - Returns: - float: degrees - """ - return (rad * 180) / math.pi - - -def quaternion_to_euler(qx, qy, qz, qw): # type: ignore - """Takes in quat values and converts to degrees - - - roll is x axis - atan2(2(qwqy + qzqw), 1-2(qy^2 + qz^2)) - - pitch is y axis - asin(2(qxqz - qwqy)) - - yaw is z axis - atan2(2(qxqw + qyqz), 1-2(qz^2+qw^3)) - - Args: - qx (float): quat_x - qy (float): quat_y - qz (float): quat_z - qw (float): quat_w - - Returns: - roll: x value in degrees - pitch: y value in degrees - yaw: z value in degrees - """ - # roll - sr_cp = 2 * ((qw * qx) + (qy * qz)) - cr_cp = 1 - (2 * ((qx * qx) + (qy * qy))) - roll = math.atan2(sr_cp, cr_cp) - # pitch - sp = 2 * ((qw * qy) - (qz * qx)) - if abs(sp) >= 1: - pitch = math.copysign(math.pi / 2, sp) - else: - pitch = math.asin(sp) - # yaw - sy_cp = 2 * ((qw * qz) + (qx * qy)) - cy_cp = 1 - (2 * ((qy * qy) + (qz * qz))) - yaw = math.atan2(sy_cp, cy_cp) - # convert to degrees - roll = rad_to_deg(roll) - pitch = rad_to_deg(pitch) - yaw = rad_to_deg(yaw) - # round and return - return round(roll, 4), round(pitch, 4), round(yaw, 4) - - -def throwZero(): # type: ignore - """Simple function to report incorrect quat values - - Raises: - RuntimeError: Error describing the issue - """ - raise RuntimeError("While computing the quaternion the trace was reported as 0 which is invalid") - - -def spatial_to_quaternion(mat): # type: ignore - """Takes a 1D Spatial Transform Matrix and derives rotational quaternion - - I wrote this however it is difficult to extensibly test so use with caution - Args: - mat (list): spatial transform matrix - - Raises: - RuntimeError: matrix is not of the correct size - - Returns: - x, y, z, w: float representation of quaternions - """ - if len(mat) > 15: - trace = mat[0] + mat[5] + mat[10] - if trace > 0: - s = math.sqrt(trace + 1.0) * 2 - if s == 0: - throwZero() - qw = 0.25 * s - qx = (mat[9] - mat[6]) / s - qy = (mat[2] - mat[8]) / s - qz = (mat[4] - mat[1]) / s - elif (mat[0] > mat[5]) and (mat[0] > mat[8]): - s = math.sqrt(1.0 + mat[0] - mat[5] - mat[10]) * 2.0 - if s == 0: - throwZero() - qw = (mat[9] - mat[6]) / s - qx = 0.25 * s - qy = (mat[1] + mat[4]) / s - qz = (mat[2] + mat[8]) / s - elif mat[5] > mat[10]: - s = math.sqrt(1.0 + mat[5] - mat[0] - mat[10]) * 2.0 - if s == 0: - throwZero() - qw = (mat[2] - mat[8]) / s - qx = (mat[1] + mat[4]) / s - qy = 0.25 * s - qz = (mat[6] + mat[9]) / s - else: - s = math.sqrt(1.0 + mat[10] - mat[0] - mat[5]) * 2.0 - if s == 0: - throwZero() - qw = (mat[4] - mat[1]) / s - qx = (mat[2] + mat[8]) / s - qy = (mat[6] + mat[9]) / s - qz = 0.25 * s - - # normalizes the value - as 
demanded by unity - qx, qy, qz, qw = normalize_quaternion(qx, qy, qz, qw) - - # So these quat values need to be reversed? I have no idea why at the moment - return round(qx, 13), round(-qy, 13), round(-qz, 13), round(qw, 13) - - else: - raise RuntimeError("Supplied matrix to spatial_to_quaternion is not a 1D spatial matrix in size.") - - -def normalize_quaternion(x, y, z, w): # type: ignore - f = 1.0 / math.sqrt((x * x) + (y * y) + (z * z) + (w * w)) - return x * f, y * f, z * f, w * f - - -def _getAngleTo(vec_origin: list, vec_current: adsk.core.Vector3D) -> int: # type: ignore - origin = adsk.core.Vector3D.create(vec_origin[0], vec_origin[1], vec_origin[2]) - val = origin.angleTo(vec_current) - deg = val * (180 / math.pi) - return val # type: ignore + return Ok(None) diff --git a/exporter/SynthesisFusionAddin/src/Resources/HTML/info.html b/exporter/SynthesisFusionAddin/src/Resources/HTML/info.html index a44b45f115..759ed2a48d 100644 --- a/exporter/SynthesisFusionAddin/src/Resources/HTML/info.html +++ b/exporter/SynthesisFusionAddin/src/Resources/HTML/info.html @@ -2,7 +2,7 @@