diff --git a/.github/workflows/build-pr.yml b/.github/workflows/build-pr.yml
deleted file mode 100644
index ba0ec61be6..0000000000
--- a/.github/workflows/build-pr.yml
+++ /dev/null
@@ -1,346 +0,0 @@
-name: Build Branch
-run-name: Build «${{ github.ref_name }}» (${{ github.actor}})
-on:
-  push:
-    branches-ignore:
-      - main
-      - release/*
-    tags-ignore:
-      - '*'
-  workflow_call:
-    inputs:
-      is_called_workflow:
-        required: true
-        default: false
-        type: boolean
-        description: |
-          Should be set to true. This is used to determine if this workflow was called directly or
-          from another workflow.
-      do_instrumentation:
-        required: false
-        default: false
-        type: boolean
-        description: |
-          Set to true to enable instrumentation of Igor code. The execution will be approx. 30%
-          slower.
-    secrets:
-      GHA_MIES_CERTIFICATE_PIN:
-        required: true
-      GHA_MIES_FTP_ARTEFACT_STORAGE_USER:
-        required: true
-      GHA_MIES_FTP_ARTEFACT_STORAGE_PWD:
-        required: true
-defaults:
-  run:
-    shell: bash
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-jobs:
-
-  Ipt:
-    name: 🔎 IPT checks
-    runs-on: [ self-hosted, Windows ]
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      # no need to checkout submodules or the full history
-      - name: Download ipt binary from server
-        run: curl -o ipt.exe "https://byte-physics.de/public-downloads/aistorage/transfer/ipt/0.9.0/ipt.exe"
-      - name: IPT version
-        run: ./ipt.exe --version
-      - name: Format and lint code
-        run: tools/run-ipt.sh
-      - name: Check for changed files
-        run: git diff --name-only --ignore-submodules; git diff --ignore-submodules --quiet
-      - name: Create patch
-        if: ${{ failure() }}
-        run: git diff > format-changes.patch
-      - name: upload artifacts
-        uses: actions/upload-artifact@v4
-        if: ${{ failure() }}
-        with:
-          name: IPT-assets
-          path: |
-            format-changes.patch
-          if-no-files-found: warn
-
-  BuildInstaller:
-    name: 🏗 Build installer
-    runs-on: [ self-hosted, Windows, Certificate ]
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          submodules: recursive
-          fetch-depth: 0 # load all commits
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Create release package and installer
-        run: tools/create-installer.sh unelevated
-      - name: Sign installer
-        run: tools/sign-installer.sh -p '${{ secrets.GHA_MIES_CERTIFICATE_PIN }}'
-      - name: upload artifacts
-        uses: actions/upload-artifact@v4
-        if: ${{ always() }}
-        with:
-          name: BuildInstaller-assets
-          path: |
-            *.zip
-            tools/installer/MIES-*.exe
-          if-no-files-found: error
-
-  Linting:
-    name: 🔎 Linting
-    runs-on: [ self-hosted, Linux, Docker ]
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      - name: Code Checks
-        run: tools/check-code.sh
-      # straight from the documentation, see https://pre-commit.com/#github-actions-example
-      - name: set PY
-        run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
-      - uses: actions/cache@v4
-        with:
-          path: ~/.cache/pre-commit
-          key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Pre commit
-        run: tools/pre-commit/run.sh
-
-  CompilationTest:
-    name: 👩🏾‍🔬 Compilation of ${{ matrix.source }} ${{ fromJSON('["","with XOP"]')[matrix.hardware] }} v${{ matrix.version }}
-    needs:
-      - BuildInstaller
-    strategy:
-      matrix:
-        source: [ git, installer ]
-        hardware: [ false, true ]
-        version: [9, 10]
-        exclude:
-          # the git source is always checked with hardware support
-          - source: git
-            hardware: false
-    uses: ./.github/workflows/test-igor-workflow.yml
-    with:
-      job_name: 👩🏾‍🔬 Compilation of ${{ matrix.source }} ${{ fromJSON('["","with XOP"]')[matrix.hardware] }}
-      overwrite_job_name: ${{ inputs.is_called_workflow || false }}
-      experiment: Packages/tests/Compilation/CompilationTester.pxp
-      installer_artifact_name: BuildInstaller-assets
-      installer_flags: ${{ fromJSON('["-x skipHardwareXOPs",""]')[matrix.hardware] }} -s ${{ matrix.source }}
-      artifact_name: CompilationTest-${{ matrix.source }}-${{ fromJSON('["no-hardware","hardware"]')[matrix.hardware] }}-v${{ matrix.version }}-assets
-      timeout_minutes: 60
-      major_igorpro_version: ${{ matrix.version }}
-
-  CompilationEachCommitTest:
-    name: 👩🏾‍🔬 Compilation of each commit
-    if: ${{ !inputs.is_called_workflow }}
-    uses: ./.github/workflows/test-igor-rebase-exec-workflow.yml
-    with:
-      job_name: 👩🏾‍🔬 Compilation of each commit
-      overwrite_job_name: ${{ inputs.is_called_workflow || false }}
-      experiment: Packages/tests/Compilation/CompilationTester.pxp
-      installer_flags: "-s git"
-      artifact_name: Compilation-Each-Commit-assets
-      timeout_minutes: 180
-
-  Documentation:
-    name: 👷 Documentation
-    runs-on: [ self-hosted, Linux, Docker ]
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          submodules: recursive
-          fetch-depth: 0 # load all commits
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Build documentation
-        run: tools/documentation/run.sh
-      - name: upload artifacts
-        uses: actions/upload-artifact@v4
-        if: ${{ always() }}
-        with:
-          name: Documentation-assets
-          path: |
-            Packages/doc/*.zip
-          if-no-files-found: error
-
-  TestWithoutHardware:
-    name: 🧪 Test ${{ matrix.name }} v${{ matrix.version }}
-    needs:
-      - BuildInstaller
-      - Linting
-      - CompilationTest
-    strategy:
-      matrix:
-        version:
-          - 9
-          - 10
-        name:
-          - Basic
-          - PAPlot
-          - HistoricData
-        include:
-          - experiment: Packages/tests/Basic/Basic.pxp
-            name: Basic
-          - experiment: Packages/tests/PAPlot/PAPlot.pxp
-            name: PAPlot
-          - experiment: Packages/tests/HistoricData/HistoricData.pxp
-            name: HistoricData
-        exclude:
-          - version: 10
-            name: PAPlot
-          - version: 10
-            name: HistoricData
-    uses: ./.github/workflows/test-igor-workflow.yml
-    with:
-      job_name: 🧪 Test ${{ matrix.name }}
-      overwrite_job_name: ${{ inputs.is_called_workflow || false }}
-      experiment: ${{ matrix.experiment }}
-      artifact_name: TestWithoutHardware-${{ matrix.name }}-v${{ matrix.version }}-assets
-      expensive_checks: "1"
-      instrument_tests: ${{ fromJson('["0", "1"]')[inputs.do_instrumentation] }}
-      timeout_minutes: 60
-      major_igorpro_version: ${{ matrix.version }}
-
-  TestNI:
-    name: 🧪 Test NI ${{ matrix.name }}
-    needs:
-      - BuildInstaller
-      - CompilationTest
-      - TestWithoutHardware
-    strategy:
-      matrix:
-        include:
-          - experiment: Packages/tests/HardwareBasic/HardwareBasic-NI.pxp
-            name: HardwareBasic
-          - experiment: Packages/tests/HardwareAnalysisFunctions/HardwareAnalysisFunctions-NI.pxp
-            name: HardwareAnalysisFunctions
-    uses: ./.github/workflows/test-igor-workflow.yml
-    with:
-      job_name: 🧪 Test NI ${{ matrix.name }}
-      overwrite_job_name: ${{ inputs.is_called_workflow || false }}
-      experiment: ${{ matrix.experiment }}
-      target: "[ 'self-hosted', 'Windows', 'IgorPro', 'NI' ]"
-      artifact_name: TestNI-${{ matrix.name }}-assets
-      expensive_checks: "1"
-      instrument_tests: ${{ fromJson('["0", "1"]')[inputs.do_instrumentation] }}
-      timeout_minutes: 180
-
-  TestITC18:
-    name: 🧪 Test ITC18-USB ${{ matrix.name }}
-    needs:
-      - BuildInstaller
-      - CompilationTest
-      - TestWithoutHardware
-    strategy:
-      matrix:
-        include:
-          - experiment: Packages/tests/HardwareBasic/HardwareBasic.pxp
-            name: HardwareBasic
-          - experiment: Packages/tests/HardwareAnalysisFunctions/HardwareAnalysisFunctions.pxp
-            name: HardwareAnalysisFunctions
-    uses: ./.github/workflows/test-igor-workflow.yml
-    with:
-      job_name: 🧪 Test ITC18-USB ${{ matrix.name }}
-      overwrite_job_name: ${{ inputs.is_called_workflow || false }}
-      experiment: ${{ matrix.experiment }}
-      target: "[ 'self-hosted', 'Windows', 'IgorPro', 'ITC' ]"
-      artifact_name: TestITC18-${{ matrix.name }}-assets
-      expensive_checks: "1"
-      instrument_tests: ${{ fromJson('["0", "1"]')[inputs.do_instrumentation] }}
-      timeout_minutes: 180
-
-  TestITC1600:
-    name: 🧪 Test ITC1600 ${{ matrix.name }}
-    needs:
-      - BuildInstaller
-      - CompilationTest
-      - TestWithoutHardware
-    strategy:
-      matrix:
-        include:
-          - experiment: Packages/tests/HardwareBasic/HardwareBasic-ITC1600.pxp
-            name: HardwareBasic
-          - experiment: Packages/tests/HardwareAnalysisFunctions/HardwareAnalysisFunctions-ITC1600.pxp
-            name: HardwareAnalysisFunctions
-    uses: ./.github/workflows/test-igor-workflow.yml
-    with:
-      job_name: 🧪 Test ITC1600 ${{ matrix.name }}
-      overwrite_job_name: ${{ inputs.is_called_workflow || false }}
-      experiment: ${{ matrix.experiment }}
-      target: "[ 'self-hosted', 'Windows', 'IgorPro', 'ITC1600' ]"
-      artifact_name: TestITC1600-${{ matrix.name }}-assets
-      expensive_checks: "1"
-      instrument_tests: ${{ fromJson('["0", "1"]')[inputs.do_instrumentation] }}
-      timeout_minutes: 180
-
-  ValidateNwb:
-    name: 👮🏼 Validate NWBv2
-    runs-on: [ self-hosted, Linux, Docker ]
-    needs:
-      - TestITC18
-      - TestITC1600
-      - TestNI
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          submodules: recursive
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download ITC18-USB artifacts
-        uses: actions/download-artifact@v4
-        with:
-          pattern: TestITC18-*
-      - name: Download ITC1600 artifacts
-        uses: actions/download-artifact@v4
-        with:
-          pattern: TestITC1600-*
-      - name: Download NI artifacts
-        uses: actions/download-artifact@v4
-        with:
-          pattern: TestNI-*
-      - name: Validate and read NWBv2 files
-        run: tools/nwb-read-tests/run.sh
-
-  FTPUpload:
-    name: 📦 FTP Upload Artifacts
-    runs-on: [ self-hosted, Linux, Docker ]
-    if: ${{ !cancelled() && !inputs.is_called_workflow}}
-    needs:
-      # All jobs that create new artifacts
-      - Ipt
-      - BuildInstaller
-      - CompilationEachCommitTest
-      - CompilationTest
-      - Documentation
-      - TestWithoutHardware
-      - TestNI
-      - TestITC18
-      - TestITC1600
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download all artifacts
-        id: download
-        uses: actions/download-artifact@v4
-        with:
-          path: artifacts
-      - name: Flatten artifact structure
-        run: |
-          find "${{ steps.download.outputs.download-path }}" -mindepth 1 -maxdepth 1 -type d |\
-            xargs -I {} tools/ftp-upload/flatten-files.sh "{}"
-      - name: Compress some artifacts
-        run: tools/ftp-upload/compress-files.sh "${{ steps.download.outputs.download-path }}"
-      - name: Upload artifacts using FTP
-        run: |
-          tools/ftp-upload/upload-files.sh \
-            -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \
-            -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \
-            -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \
-            -d "${{ steps.download.outputs.download-path }}" \
-            -t "branch/$(echo "${{ github.ref_name }}" | sed "s@/@_@g")/${{ github.sha }}/${{ github.run_attempt }}"
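How the matrices in the deleted workflow above expand may not be obvious: GitHub Actions builds the cartesian product of the axes, drops `exclude` entries, and the `fromJSON('["","with XOP"]')[matrix.hardware]` expression picks a label by indexing a two-element JSON array with the boolean `hardware` value. A hypothetical Python sketch (not part of the PR) of the CompilationTest expansion:

```python
# Hypothetical sketch of the CompilationTest matrix expansion; GitHub Actions
# itself does this, the code only illustrates the semantics.
from itertools import product

sources = ["git", "installer"]
hardware_values = [False, True]
versions = [9, 10]

jobs = [
    {"source": s, "hardware": h, "version": v}
    for s, h, v in product(sources, hardware_values, versions)
    if not (s == "git" and h is False)  # the exclude entry above
]

for job in jobs:
    # bool indexes the list exactly like fromJSON('["","with XOP"]')[...]
    label = ["", "with XOP"][job["hardware"]]
    print(job["source"], label, f"v{job['version']}")
```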
diff --git a/.github/workflows/build-release.yml b/.github/workflows/build-release.yml
deleted file mode 100644
index 174e9f2a1e..0000000000
--- a/.github/workflows/build-release.yml
+++ /dev/null
@@ -1,154 +0,0 @@
-name: Build Release
-run-name: Build ${{ github.ref_name }} 🚀
-on:
-  push:
-    branches:
-      - main
-      - release/*
-defaults:
-  run:
-    shell: bash
-jobs:
-
-  CallPR:
-    name: ⚙ Run PR Workflow
-    uses: ./.github/workflows/build-pr.yml
-    with:
-      is_called_workflow: true
-      do_instrumentation: ${{ github.ref_name == 'main' }}
-    secrets: inherit
-
-  DeployDocumentation:
-    name: ✈️ Deploy documentation
-    runs-on: [ self-hosted, Linux, Docker ]
-    if: github.ref_name == 'main'
-    permissions: write-all
-    needs:
-      - CallPR
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          submodules: recursive
-          fetch-depth: 0 # load all commits
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download artifacts
-        uses: actions/download-artifact@v4
-        with:
-          name: Documentation-assets
-      - name: Deploy documentation to github pages
-        run: tools/publish-docs-on-github.sh
-
-  DeployInstaller:
-    name: ✈️ Deploy Installer
-    runs-on: [ self-hosted, Linux, Docker ]
-    permissions: write-all
-    needs:
-      - CallPR
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          submodules: recursive
-          fetch-depth: 0 # load all commits
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download artifacts
-        uses: actions/download-artifact@v4
-        with:
-          name: BuildInstaller-assets
-      - name: Deploy release assets to github
-        run: tools/upload-github-release-asset.sh "${{ github.token }}"
-
-  GenerateReport:
-    name: 📊 Generate Reports
-    runs-on: [ self-hosted, Linux, Docker ]
-    if: github.ref_name == 'main'
-    needs:
-      - CallPR
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download all artifacts
-        id: download
-        uses: actions/download-artifact@v4
-        with:
-          path: artifacts
-      - name: 📥 Download report cache from FTP
-        id: cache-download
-        run: |
-          tools/ftp-upload/download-files.sh \
-            -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \
-            -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \
-            -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \
-            -d "history" \
-            -t "cache/coverage-history"
-      - name: 📊 Generate Reports
-        id: gen
-        run: |
-          tools/report-generator/build.sh \
-            -s "$(pwd)" \
-            -d "${{ steps.download.outputs.download-path }}" \
-            -h "${{ steps.cache-download.outputs.data }}" \
-            -o report \
-            -l "${{ secrets.GHA_MIES_REPORTGENERATOR_LICENSE }}"
-      - name: 📤 Upload latest report to FTP
-        run: |
-          tools/ftp-upload/upload-files.sh \
-            -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \
-            -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \
-            -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \
-            -d "${{ steps.gen.outputs.report }}" \
-            -t "report/coverage"
-      - name: 📤 Upload report cache to FTP
-        run: |
-          tools/ftp-upload/upload-files.sh \
-            -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \
-            -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \
-            -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \
-            -d "${{ steps.gen.outputs.history }}" \
-            -t "cache/coverage-history"
-      - name: upload artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: report-artifacts
-          path: |
-            ${{ steps.gen.outputs.report }}
-          if-no-files-found: warn
-
-  FTPUpload:
-    name: 📦 FTP Upload Artifacts
-    runs-on: [ self-hosted, Linux, Docker ]
-    if: ${{ !cancelled() }}
-    needs:
-      # All jobs that create new artifacts
-      - CallPR
-      - GenerateReport
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download all artifacts
-        id: download
-        uses: actions/download-artifact@v4
-        with:
-          path: artifacts
-      - name: Flatten artifact structure
-        run: |
-          find "${{ steps.download.outputs.download-path }}" -mindepth 1 -maxdepth 1 -type d |\
-            xargs -I {} tools/ftp-upload/flatten-files.sh "{}"
-      - name: Compress some artifacts
-        run: tools/ftp-upload/compress-files.sh "${{ steps.download.outputs.download-path }}"
-      - name: Upload artifacts using FTP
-        run: |
-          tools/ftp-upload/upload-files.sh \
-            -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \
-            -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \
-            -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \
-            -d "${{ steps.download.outputs.download-path }}" \
-            -t "deploy/$(echo "${{ github.ref_name }}" | sed "s@/@_@g")/${{ github.sha }}/${{ github.run_attempt }}"
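Both FTP upload jobs (here and in build-pr.yml above) derive the remote directory from the branch name; `sed "s@/@_@g"` flattens slashes (e.g. in `release/*` branches) so the branch maps to a single path component. A hypothetical Python equivalent of the target paths, for illustration only:

```python
# Hypothetical mirror of the -t arguments built in the FTP upload steps above.
def upload_target(prefix: str, ref_name: str, sha: str, run_attempt: str) -> str:
    # sed "s@/@_@g": every slash in the branch name becomes an underscore
    return f"{prefix}/{ref_name.replace('/', '_')}/{sha}/{run_attempt}"

assert upload_target("deploy", "release/2.9", "0bd8bf6", "1") == "deploy/release_2.9/0bd8bf6/1"
```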
diff --git a/.github/workflows/monthly-scheduled.yml b/.github/workflows/monthly-scheduled.yml
deleted file mode 100644
index aeedd49315..0000000000
--- a/.github/workflows/monthly-scheduled.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-name: Monthly scheduled jobs
-run-name: Build «${{ github.ref_name }}» (${{ github.actor}})
-on:
-  schedule:
-    # on every 1st of each month at 2:30 UTC
-    - cron: '30 2 1 * *'
-defaults:
-  run:
-    shell: bash
-
-jobs:
-  GitGarbageCollection:
-    name: 🗑 Git garbage collection
-    strategy:
-      matrix:
-        runner: [Linux_I, Linux_II, Linux_III, Linux_IV, Linux_V, Windows_I, Windows_II, Windows_Cert, Windows_ITC, Windows_NI]
-    runs-on: [ self-hosted, "${{ matrix.runner }}" ]
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      - name: Apply configuration and run gc in auto mode
-        run: |
-          git config gc.auto 1024
-          git config gc.packLimit 10
-          git gc --auto --no-detach
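The garbage-collection step is plain git; a hypothetical local equivalent, should one want to apply the same settings to a runner checkout by hand (the repo path and function name are illustrative, not part of the PR):

```python
# Hypothetical sketch of the gc step above: tighten gc.auto and gc.packLimit,
# then let git decide whether collection is needed via --auto.
import subprocess

def run_gc(repo_path: str) -> None:
    for key, value in (("gc.auto", "1024"), ("gc.packLimit", "10")):
        subprocess.run(["git", "-C", repo_path, "config", key, value], check=True)
    subprocess.run(["git", "-C", repo_path, "gc", "--auto", "--no-detach"], check=True)
```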
"s@/@_@g")/${{ github.sha }}/${{ github.run_attempt }}" diff --git a/.github/workflows/build-release.yml b/.github/workflows/build-release.yml deleted file mode 100644 index 174e9f2a1e..0000000000 --- a/.github/workflows/build-release.yml +++ /dev/null @@ -1,154 +0,0 @@ -name: Build Release -run-name: Build ${{ github.ref_name }} ๐Ÿš€ -on: - push: - branches: - - main - - release/* -defaults: - run: - shell: bash -jobs: - - CallPR: - name: โš™ Run PR Workflow - uses: ./.github/workflows/build-pr.yml - with: - is_called_workflow: true - do_instrumentation: ${{ github.ref_name == 'main' }} - secrets: inherit - - DeployDocumentation: - name: โœˆ๏ธ Deploy documentation - runs-on: [ self-hosted, Linux, Docker ] - if: github.ref_name == 'main' - permissions: write-all - needs: - - CallPR - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 # load all commits - - name: Initial repo config - run: tools/initial-repo-config.sh - - name: Download artifacts - uses: actions/download-artifact@v4 - with: - name: Documentation-assets - - name: Deploy documentation to github pages - run: tools/publish-docs-on-github.sh - - DeployInstaller: - name: โœˆ๏ธ Deploy Installer - runs-on: [ self-hosted, Linux, Docker ] - permissions: write-all - needs: - - CallPR - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 # load all commits - - name: Initial repo config - run: tools/initial-repo-config.sh - - name: Download artifacts - uses: actions/download-artifact@v4 - with: - name: BuildInstaller-assets - - name: Deploy release assets to github - run: tools/upload-github-release-asset.sh "${{ github.token }}" - - GenerateReport: - name: ๐Ÿ“Š Generate Reports - runs-on: [ self-hosted, Linux, Docker ] - if: github.ref_name == 'main' - needs: - - CallPR - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Initial repo config - run: tools/initial-repo-config.sh - - name: Download all artifacts - id: download - uses: actions/download-artifact@v4 - with: - path: artifacts - - name: ๐Ÿ“ฅ Download report cache from FTP - id: cache-download - run: | - tools/ftp-upload/download-files.sh \ - -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \ - -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \ - -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \ - -d "history" \ - -t "cache/coverage-history" - - name: ๐Ÿ“Š Generate Reports - id: gen - run: | - tools/report-generator/build.sh \ - -s "$(pwd)" \ - -d "${{ steps.download.outputs.download-path }}" \ - -h "${{ steps.cache-download.outputs.data }}" \ - -o report \ - -l "${{ secrets.GHA_MIES_REPORTGENERATOR_LICENSE }}" - - name: ๐Ÿ“ค Upload latest report to FTP - run: | - tools/ftp-upload/upload-files.sh \ - -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \ - -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \ - -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \ - -d "${{ steps.gen.outputs.report }}" \ - -t "report/coverage" - - name: ๐Ÿ“ค Upload report cache to FTP - run: | - tools/ftp-upload/upload-files.sh \ - -s "${{ vars.GHA_MIES_FTP_ARTEFACT_STORAGE_DNS }}" \ - -u "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_USER }}" \ - -p "${{ secrets.GHA_MIES_FTP_ARTEFACT_STORAGE_PWD }}" \ - -d "${{ steps.gen.outputs.history }}" \ - -t "cache/coverage-history" - - name: upload artifacts - uses: actions/upload-artifact@v4 - if: always() - with: - name: report-artifacts - path: | - ${{ steps.gen.outputs.report }} - 
diff --git a/.github/workflows/test-igor-workflow.yml b/.github/workflows/test-igor-workflow.yml
deleted file mode 100644
index 8a72c8c20b..0000000000
--- a/.github/workflows/test-igor-workflow.yml
+++ /dev/null
@@ -1,110 +0,0 @@
-name: Test Igor Workflow
-run-name: Test Igor Workflow
-env:
-  # if this environment variable is set it will use the igor version from
-  # C:\Program Files\WaveMetrics\Igor Pro ${VERSION} Folder\IgorBinaries_x64_${CI_IGOR${VERSION}_REVISION}
-  CI_IGOR9_REVISION: "r56565"
-  CI_IGOR10_REVISION: "r29303"
-on:
-  workflow_call:
-    inputs:
-      job_name:
-        required: false
-        type: string
-        default: Run
-        description: The name of the main job
-      overwrite_job_name:
-        required: false
-        type: boolean
-        default: false
-        description: |
-          If set to true it will replace the name of the main job with the content of job_name
-      experiment:
-        required: true
-        type: string
-        description: The path to the Igor experiment file
-      target:
-        required: false
-        default: "['self-hosted', 'Windows', 'IgorPro']"
-        type: string
-        description: A JSON string with the tags for the target runner
-      installer_artifact_name:
-        required: false
-        default: BuildInstaller-assets
-        type: string
-        description: The name of the artifact that holds the installer
-      installer_flags:
-        required: false
-        default: "-s installer"
-        type: string
-        description: The flags that should be used for the installer
-      artifact_name:
-        required: true
-        type: string
-        description: The name of the artifact that will be uploaded
-      expensive_checks:
-        required: false
-        default: "0"
-        type: string
-        description: Sets environment variable CI_EXPENSIVE_CHECKS
-      instrument_tests:
-        required: false
-        default: "0"
-        type: string
-        description: Sets environment variable CI_INSTRUMENT_TESTS
-      timeout_minutes:
-        required: false
-        default: 360
-        type: number
-        description: Defines the job timeout in minutes
-      major_igorpro_version:
-        required: false
-        default: 9
-        type: number
-        description: The major Igor Pro version to use
-defaults:
-  run:
-    shell: bash
-jobs:
-
-  Test:
-    name: ${{ inputs.overwrite_job_name && inputs.job_name || 'Run' }}
-    runs-on: ${{ fromJson(inputs.target) }}
-    timeout-minutes: ${{ inputs.timeout_minutes }}
-    env:
-      CI_EXPENSIVE_CHECKS: ${{ inputs.expensive_checks }}
-      CI_INSTRUMENT_TESTS: ${{ inputs.instrument_tests }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-        with:
-          submodules: recursive
-      - name: Initial repo config
-        run: tools/initial-repo-config.sh
-      - name: Download artifacts
-        if: inputs.installer_artifact_name != ''
-        uses: actions/download-artifact@v4
-        continue-on-error: false
-        with:
-          name: ${{ inputs.installer_artifact_name }}
-      - name: Clean MIES installation
-        run: tools/clean_mies_installation.sh ${{ inputs.installer_flags }}
-      - name: Test experiment ${{ inputs.experiment }}
-        run: tools/autorun-test.sh -p ${{ inputs.experiment }} -v IP_${{ inputs.major_igorpro_version }}_64
-      - name: Gather log files and crash dumps
-        if: always()
-        run: tools/gather-logfiles-and-crashdumps.sh
-      - name: upload artifacts
-        uses: actions/upload-artifact@v4
-        if: ${{ always() }}
-        with:
-          name: ${{ inputs.artifact_name }}
-          path: |
-            Packages/tests/**/*-V2.nwb
-            Packages/tests/**/JU_*.xml
-            Packages/tests/**/Cobertura_*.xml
-            Packages/tests/**/*.log
-            Diagnostics
-            **/*.jsonl
-            !Packages/tests/**/input
-          if-no-files-found: error
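The `CI_IGOR*_REVISION` comment in both reusable workflows describes a directory convention on the Windows runners. A hypothetical helper spelling it out (the path layout comes from the comment; the function itself is invented):

```python
# Hypothetical: resolve the pinned Igor Pro binaries directory that the
# comment on CI_IGOR9_REVISION/CI_IGOR10_REVISION describes.
import os

def igor_binaries_dir(major_version: int) -> str | None:
    revision = os.environ.get(f"CI_IGOR{major_version}_REVISION")
    if revision is None:
        return None  # unset: the runner falls back to its default Igor install
    return (rf"C:\Program Files\WaveMetrics\Igor Pro {major_version} Folder"
            rf"\IgorBinaries_x64_{revision}")
```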
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2a90b2b208..4c8b448ead 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -35,3 +35,8 @@ repos:
   rev: 0.4.0
   hooks:
     - id: forbid-bidi-controls
+- repo: https://github.com/psf/black-pre-commit-mirror
+  rev: 25.1.0
+  hooks:
+    - id: black
+      language_version: python3.11
diff --git a/Packages/MIES/MIES_Python.ipf b/Packages/MIES/MIES_Python.ipf
new file mode 100644
index 0000000000..c9bda4b2ad
--- /dev/null
+++ b/Packages/MIES/MIES_Python.ipf
@@ -0,0 +1,41 @@
+#pragma TextEncoding = "UTF-8"
+#pragma rtGlobals = 3 // Use modern global access method and strict wave access.
+#pragma rtFunctionErrors = 1
+
+#ifdef AUTOMATED_TESTING
+#pragma ModuleName = MIES_PY
+#endif // AUTOMATED_TESTING
+
+Function/WAVE PY_CallSpikeExtractor(string device, variable sweepNo, variable channelType, variable channelNumber)
+
+    string code, resultWaveName
+
+    DFREF deviceDFR = GetDeviceDataPath(device)
+    DFREF sweepDFR = GetSingleSweepFolder(deviceDFR, sweepNo)
+
+    WAVE numericalValues = GetLBNumericalValues(device)
+    WAVE textualValues = GetLBTextualValues(device)
+
+    WAVE/Z single_v = GetDAQDataSingleColumnWaveNG(numericalValues, textualValues, sweepNo, sweepDFR, channelType, channelNumber)
+    ASSERT(WaveExists(single_v), "Missing wave")
+    ASSERT(!cmpstr(WaveUnits(single_v, -1), "mV"), "Unexpected AD Unit")
+
+    DFREF dfr = GetUniqueTempPath()
+    Make/N=(DimSize(single_v, ROWS)) dfr:single_t/WAVE=single_t
+    single_t[] = DimOffset(single_v, ROWS) + DimDelta(single_v, ROWS) * p
+
+    Python execute="import ipfx_helpers as ih"
+    Python execute="import importlib"
+
+    sprintf code, "importlib.reload(ih); result = ih.extract_spikes('%s', '%s', '%s')", GetWavesDataFolder(single_t, 2), GetWavesDataFolder(single_v, 2), GetDataFolder(1, dfr)
+    Python execute=code, var={"result", resultWaveName}
+
+    if(!IsEmpty(resultWaveName))
+        WAVE/SDFR=dfr result = $resultWaveName
+        MakeWaveFree(result)
+    endif
+
+    KillOrMoveToTrash(dfr = dfr)
+
+    return result
+End
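The `single_t` wave above is derived purely from the voltage wave's row scaling; in numpy terms the wave assignment is a scaled `arange` (a sketch for illustration, not MIES code):

```python
# Hypothetical numpy analogue of:
#   single_t[] = DimOffset(single_v, ROWS) + DimDelta(single_v, ROWS) * p
# p is the row index; DimOffset/DimDelta are start and step of the time axis.
import numpy as np

def time_axis(offset: float, delta: float, n_rows: int) -> np.ndarray:
    return offset + delta * np.arange(n_rows)
```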
diff --git a/Packages/MIES_Include.ipf b/Packages/MIES_Include.ipf
index 28b3c8959d..d4cf400bf6 100644
--- a/Packages/MIES_Include.ipf
+++ b/Packages/MIES_Include.ipf
@@ -264,6 +264,7 @@ End
 #include "MIES_ProgrammaticGuiControl"
 #include "MIES_Publish"
 #include "MIES_PulseAveraging"
+#include "MIES_Python"
 #include "MIES_RepeatedAcquisition"
 #include "MIES_SamplingInterval"
 #include "MIES_StimsetAPI"
diff --git a/Packages/Python/ipfx_helpers.py b/Packages/Python/ipfx_helpers.py
new file mode 100644
index 0000000000..5e3b00e70f
--- /dev/null
+++ b/Packages/Python/ipfx_helpers.py
@@ -0,0 +1,41 @@
+import igorpro
+
+# File needs to be copied into "Python Scripts", reported as #7232
+
+import ipfx
+
+from ipfx.feature_extractor import SpikeFeatureExtractor
+
+
+def extract_spikes(t_path: str, v_path: str, out_folder: str):
+
+    t_ip = igorpro.wave(t_path)
+    v_ip = igorpro.wave(v_path)
+
+    t = t_ip.asarray()
+    v = v_ip.asarray()
+
+    folder = igorpro.folder(out_folder)
+
+    ext = SpikeFeatureExtractor()
+    spikes = ext.process(t, v, None)
+
+    # empty lists are not handled correctly, reported as #7234
+    if spikes.empty:
+        return ""
+
+    # creating a 2D text wave throws, reported as #7225
+    resultName = "spikes_output"
+    spikes_text = igorpro.wave.create(
+        resultName, 0, "", igorpro.WaveType.text, folder, True
+    )
+
+    # written with wrong shape, already reported to WaveMetrics as #7227, use transpose as workaround
+    spikes_text.set_data(spikes.to_numpy().transpose().tolist())
+
+    # spikes.columns.array hangs in IP, reported as #7230
+    col_labels = spikes.columns.tolist()
+    for i in range(len(col_labels)):
+        spikes_text.set_label("y", i, col_labels[i])
+
+    return resultName
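The core of `extract_spikes()` can be exercised without the Igor bridge; a hypothetical standalone smoke test of the same `SpikeFeatureExtractor` call (the synthetic trace and its units are assumptions, only the API usage mirrors the helper above):

```python
# Hypothetical smoke test: feed a synthetic trace through the same ipfx call
# as extract_spikes(); an empty DataFrame means no spikes were detected.
import numpy as np
from ipfx.feature_extractor import SpikeFeatureExtractor

t = np.arange(0.0, 1.0, 5e-5)   # time axis
v = np.full(t.size, -70.0)      # resting baseline
for center in (0.2, 0.5, 0.8):  # three narrow spike-like bumps
    v += 110.0 * np.exp(-((t - center) ** 2) / (2 * 0.0005**2))

spikes = SpikeFeatureExtractor().process(t, v, None)
print(spikes.empty, spikes.columns.tolist())
```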
""" - branchString = Popen('git rev-parse --abbrev-ref HEAD', stdout = PIPE, shell = True).stdout.read().rstrip() - revString = Popen('git describe --always --tags --match "Release_*"', stdout = PIPE, shell = True).stdout.read().rstrip() + branchString = ( + Popen("git rev-parse --abbrev-ref HEAD", stdout=PIPE, shell=True) + .stdout.read() + .rstrip() + ) + revString = ( + Popen( + 'git describe --always --tags --match "Release_*"', stdout=PIPE, shell=True + ) + .stdout.read() + .rstrip() + ) + + return "({branch}) {version}".format( + branch=branchString.decode("ascii"), version=revString.decode("ascii") + ) - return "({branch}) {version}".format(branch=branchString.decode('ascii'), version=revString.decode('ascii')) # sphinx config -extensions = ['sphinx.ext.mathjax', 'sphinx.ext.todo', 'breathe', \ - 'sphinxcontrib.fulltoc', 'sphinxcontrib.images', \ - 'sphinxcontrib.youtube'] +extensions = [ + "sphinx.ext.mathjax", + "sphinx.ext.todo", + "breathe", + "sphinxcontrib.fulltoc", + "sphinxcontrib.images", + "sphinxcontrib.youtube", +] master_doc = "index" -project= "MIES Igor" +project = "MIES Igor" -html_static_path = ['_static'] +html_static_path = ["_static"] html_copy_source = False html_show_sourcelink = False -exclude_patterns = [ 'releasenotes_template.rst', 'IPNWB/specifications'] +exclude_patterns = ["releasenotes_template.rst", "IPNWB/specifications"] -cpp_id_attributes = [ 'threadsafe' ] +cpp_id_attributes = ["threadsafe"] version = get_version() release = version @@ -38,18 +58,18 @@ def get_version(): # theming html_theme = "classic" html_theme_options = { - "bodyfont" : "Helvetica, Arial, sans-serif", - "headfont" : "Helvetica, Arial, sans-serif" - } + "bodyfont": "Helvetica, Arial, sans-serif", + "headfont": "Helvetica, Arial, sans-serif", +} # pygments options highlight_language = "text" -pygments_style = "igor" +pygments_style = "igor" # breathe -breathe_projects = { "MIES": "xml" } -breathe_default_project = "MIES" -breathe_domain_by_extension = { "ipf" : "cpp" } -breathe_default_members = ('members', 'undoc-members') +breathe_projects = {"MIES": "xml"} +breathe_default_project = "MIES" +breathe_domain_by_extension = {"ipf": "cpp"} +breathe_default_members = ("members", "undoc-members") images_config = {"override_image_directive": True} diff --git a/tools/nwb-read-tests/nwbv2-read-test.py b/tools/nwb-read-tests/nwbv2-read-test.py index 8b9b67380a..e04eee9015 100755 --- a/tools/nwb-read-tests/nwbv2-read-test.py +++ b/tools/nwb-read-tests/nwbv2-read-test.py @@ -12,12 +12,14 @@ print("Unsupported python version: {}".format(vers), file=sys.stderr) sys.exit(1) + def to_str(s): if isinstance(s, bytes): - return s.decode('utf-8') + return s.decode("utf-8") else: return s + def checkFile(path): if not os.path.isfile(path): @@ -25,8 +27,13 @@ def checkFile(path): return 1 # 1.) pynwb Validation - comp = run(["pynwb-validate", path], - stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=120) + comp = run( + ["pynwb-validate", path], + stdout=PIPE, + stderr=STDOUT, + universal_newlines=True, + timeout=120, + ) if comp.returncode != 0: print(f"pynwb validation output: {comp.stdout}", file=sys.stderr) @@ -35,8 +42,13 @@ def checkFile(path): print(f"pynwb validation output: {comp.stdout}", file=sys.stdout) # 2.) 
diff --git a/tools/nwb-read-tests/nwbv2-read-test.py b/tools/nwb-read-tests/nwbv2-read-test.py
index 8b9b67380a..e04eee9015 100755
--- a/tools/nwb-read-tests/nwbv2-read-test.py
+++ b/tools/nwb-read-tests/nwbv2-read-test.py
@@ -12,12 +12,14 @@
     print("Unsupported python version: {}".format(vers), file=sys.stderr)
     sys.exit(1)
 
+
 def to_str(s):
     if isinstance(s, bytes):
-        return s.decode('utf-8')
+        return s.decode("utf-8")
     else:
         return s
 
+
 def checkFile(path):
 
     if not os.path.isfile(path):
@@ -25,8 +27,13 @@ def checkFile(path):
         return 1
 
     # 1.) pynwb Validation
-    comp = run(["pynwb-validate", path],
-               stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=120)
+    comp = run(
+        ["pynwb-validate", path],
+        stdout=PIPE,
+        stderr=STDOUT,
+        universal_newlines=True,
+        timeout=120,
+    )
 
     if comp.returncode != 0:
         print(f"pynwb validation output: {comp.stdout}", file=sys.stderr)
@@ -35,8 +42,13 @@ def checkFile(path):
     print(f"pynwb validation output: {comp.stdout}", file=sys.stdout)
 
     # 2.) dandi Validation
-    comp = run(["dandi", "validate", "--ignore", "(NWBI|DANDI)", path],
-               stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=120)
+    comp = run(
+        ["dandi", "validate", "--ignore", "(NWBI|DANDI)", path],
+        stdout=PIPE,
+        stderr=STDOUT,
+        universal_newlines=True,
+        timeout=120,
+    )
 
     if comp.returncode != 0:
         print(f"dandi validation output: {comp.stdout}", file=sys.stderr)
@@ -45,7 +57,7 @@ def checkFile(path):
     print(f"dandi validation output: {comp.stdout}", file=sys.stdout)
 
     # 3.) Read test
-    with NWBHDF5IO(path, mode='r', load_namespaces=True) as io:
+    with NWBHDF5IO(path, mode="r", load_namespaces=True) as io:
         nwbfile = io.read()
         print(f"nwbfile: {nwbfile}")
@@ -67,11 +79,14 @@ def checkFile(path):
         print(f"epochs.timeseries: {nwbfile.epochs[:, 'timeseries']}")
 
     # 4. Check that pynwb/hdmf can read our object IDs
-    with h5py.File(path, 'r') as f:
+    with h5py.File(path, "r") as f:
         root_object_id_hdf5 = to_str(f["/"].attrs["object_id"])
 
         if root_object_id_hdf5 not in object_ids:
-            print(f"object IDs don't match as {root_object_id_hdf5} could not be found.", file=sys.stderr)
+            print(
+                f"object IDs don't match as {root_object_id_hdf5} could not be found.",
+                file=sys.stderr,
+            )
             return 1
 
     return 0
@@ -80,21 +95,20 @@ def checkFile(path):
 
 def main():
     parser = ArgumentParser(description="Validate and read an NWB file")
-    parser.add_argument("paths", type=str, nargs='+', help="NWB file paths")
+    parser.add_argument("paths", type=str, nargs="+", help="NWB file paths")
 
     args = parser.parse_args()
 
     ret = 0
     for path in args.paths:
         ret = ret or checkFile(path)
-
    if ret == 0:
         print("Success!")
 
     return ret
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     try:
         sys.exit(main())