diff --git a/.github/workflows/auto-add-issue-to-project.yml b/.github/workflows/auto-add-issue-to-project.yml deleted file mode 100644 index 187d4663d4..0000000000 --- a/.github/workflows/auto-add-issue-to-project.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -name: Auto Add Issues to Tracking boards -on: # yamllint disable-line rule:truthy - issues: - types: - - opened -jobs: - add-to-project: - name: Add issue to projects - runs-on: ubuntu-latest - steps: - - name: Generate github-app token - id: app-token - uses: getsentry/action-github-app-token@v3 - with: - app_id: ${{ secrets.DEVOPS_APP_ID }} - private_key: ${{ secrets.DEVOPS_APP_PRIVATE_KEY }} - - uses: actions/add-to-project@v0.5.0 - with: - project-url: https://github.com/orgs/opendatahub-io/projects/40 - github-token: ${{ steps.app-token.outputs.token }} - - uses: actions/add-to-project@v0.5.0 - with: - project-url: https://github.com/orgs/opendatahub-io/projects/45 - github-token: ${{ steps.app-token.outputs.token }} diff --git a/.github/workflows/gemini-pr-review.yml b/.github/workflows/gemini-pr-review.yml new file mode 100644 index 0000000000..1b7847fef2 --- /dev/null +++ b/.github/workflows/gemini-pr-review.yml @@ -0,0 +1,474 @@ +# https://blog.google/technology/developers/introducing-gemini-cli-github-actions/ +# https://github.com/google-github-actions/run-gemini-cli/tree/main/examples/workflows/pr-review +--- +name: '♊ Gemini Pull Request Review' + +"on": + pull_request_target: + types: + - 'opened' + - 'reopened' + issue_comment: + types: + - 'created' + pull_request_review_comment: + types: + - 'created' + pull_request_review: + types: + - 'submitted' + workflow_dispatch: + inputs: + pr_number: + description: 'PR number to review' + required: true + type: 'number' + +concurrency: + group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}' + cancel-in-progress: true + +defaults: + run: + shell: 'bash' + +permissions: + contents: 'read' + id-token: 'write' + issues: 'write' + pull-requests: 
'write' + statuses: 'write' + +jobs: + review-pr: + # This condition seeks to ensure the action is only run when it is triggered by a trusted user. + # For private repos, users who have access to the repo are considered trusted. + # For public repos, users who are members, owners, or collaborators are considered trusted. + if: |- + github.event_name == 'workflow_dispatch' || + ( + github.event_name == 'pull_request_target' && + ( + github.event.repository.private == true || + contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.pull_request.author_association) + ) + ) || + ( + ( + ( + github.event_name == 'issue_comment' && + github.event.issue.pull_request + ) || + github.event_name == 'pull_request_review_comment' + ) && + contains(github.event.comment.body, '@gemini-cli /review') && + ( + github.event.repository.private == true || + contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.comment.author_association) + ) + ) || + ( + github.event_name == 'pull_request_review' && + contains(github.event.review.body, '@gemini-cli /review') && + ( + github.event.repository.private == true || + contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.review.author_association) + ) + ) + timeout-minutes: 5 + runs-on: 'ubuntu-latest' + steps: + - name: 'Checkout PR code' + uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4 + + - name: 'Generate GitHub App Token' + id: 'generate_token' + if: |- + ${{ vars.APP_ID }} + uses: 'actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e' # ratchet:actions/create-github-app-token@v2 + with: + app-id: '${{ vars.APP_ID }}' + private-key: '${{ secrets.APP_PRIVATE_KEY }}' + + - name: 'Get PR details (pull_request & pull_request_target & workflow_dispatch)' + id: 'get_pr' + if: |- + ${{ github.event_name == 'pull_request' || github.event_name == 'pull_request_target' || github.event_name == 'workflow_dispatch' }} + env: + 
GITHUB_TOKEN: '${{ steps.generate_token.outputs.token || secrets.GITHUB_TOKEN }}' + EVENT_NAME: '${{ github.event_name }}' + WORKFLOW_PR_NUMBER: '${{ github.event.inputs.pr_number }}' + PULL_REQUEST_NUMBER: '${{ github.event.pull_request.number }}' + run: |- + set -euo pipefail + + if [[ "${EVENT_NAME}" = "workflow_dispatch" ]]; then + PR_NUMBER="${WORKFLOW_PR_NUMBER}" + else + PR_NUMBER="${PULL_REQUEST_NUMBER}" + fi + + echo "pr_number=${PR_NUMBER}" >> "${GITHUB_OUTPUT}" + + # No additional instructions for non-comment triggers + echo "additional_instructions=" >> "${GITHUB_OUTPUT}" + + # Get PR details + PR_DATA="$(gh pr view "${PR_NUMBER}" --json title,body,additions,deletions,changedFiles,baseRefName,headRefName)" + echo "pr_data=${PR_DATA}" >> "${GITHUB_OUTPUT}" + + # Get file changes + CHANGED_FILES="$(gh pr diff "${PR_NUMBER}" --name-only)" + { + echo "changed_files<<EOF" + echo "${CHANGED_FILES}" + echo "EOF" + } >> "${GITHUB_OUTPUT}" + + + - name: 'Get PR details (issue_comment & reviews)' + id: 'get_pr_comment' + if: |- + ${{ github.event_name == 'issue_comment' || github.event_name == 'pull_request_review' || github.event_name == 'pull_request_review_comment' }} + env: + GITHUB_TOKEN: '${{ steps.generate_token.outputs.token || secrets.GITHUB_TOKEN }}' + COMMENT_BODY: '${{ github.event.comment.body || github.event.review.body }}' + PR_NUMBER: '${{ github.event.issue.number || github.event.pull_request.number }}' + run: |- + set -euo pipefail + + echo "pr_number=${PR_NUMBER}" >> "${GITHUB_OUTPUT}" + + # Extract additional instructions from comment + ADDITIONAL_INSTRUCTIONS="$( + echo "${COMMENT_BODY}" | sed 's/.*@gemini-cli \/review//' | xargs + )" + echo "additional_instructions=${ADDITIONAL_INSTRUCTIONS}" >> "${GITHUB_OUTPUT}" + + # Get PR details + PR_DATA="$(gh pr view "${PR_NUMBER}" --json title,body,additions,deletions,changedFiles,baseRefName,headRefName)" + echo "pr_data=${PR_DATA}" >> "${GITHUB_OUTPUT}" + + # Get file changes + CHANGED_FILES="$(gh pr diff "${PR_NUMBER}" --name-only)" + { + echo 
"changed_files<<EOF" + echo "${CHANGED_FILES}" + echo "EOF" + } >> "${GITHUB_OUTPUT}" + + - name: 'Run Gemini PR Review' + uses: 'google-github-actions/run-gemini-cli@v0' + id: 'gemini_pr_review' + env: + GITHUB_TOKEN: '${{ steps.generate_token.outputs.token || secrets.GITHUB_TOKEN }}' + PR_NUMBER: '${{ steps.get_pr.outputs.pr_number || steps.get_pr_comment.outputs.pr_number }}' + PR_DATA: '${{ steps.get_pr.outputs.pr_data || steps.get_pr_comment.outputs.pr_data }}' + CHANGED_FILES: '${{ steps.get_pr.outputs.changed_files || steps.get_pr_comment.outputs.changed_files }}' + ADDITIONAL_INSTRUCTIONS: '${{ steps.get_pr.outputs.additional_instructions || steps.get_pr_comment.outputs.additional_instructions }}' + REPOSITORY: '${{ github.repository }}' + with: + gemini_cli_version: '${{ vars.GEMINI_CLI_VERSION }}' + gcp_workload_identity_provider: '${{ vars.GCP_WIF_PROVIDER }}' + gcp_project_id: '${{ vars.GOOGLE_CLOUD_PROJECT }}' + gcp_location: '${{ vars.GOOGLE_CLOUD_LOCATION }}' + gcp_service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}' + gemini_api_key: '${{ secrets.GEMINI_API_KEY }}' + use_vertex_ai: '${{ vars.GOOGLE_GENAI_USE_VERTEXAI }}' + use_gemini_code_assist: '${{ vars.GOOGLE_GENAI_USE_GCA }}' + settings: |- + { + "debug": ${{ fromJSON(env.DEBUG || env.ACTIONS_STEP_DEBUG || false) }}, + "maxSessionTurns": 20, + "mcpServers": { + "github": { + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server" + ], + "includeTools": [ + "create_pending_pull_request_review", + "add_comment_to_pending_review", + "submit_pending_pull_request_review" + ], + "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_TOKEN}" + } + } + }, + "coreTools": [ + "run_shell_command(echo)", + "run_shell_command(gh pr view)", + "run_shell_command(gh pr diff)", + "run_shell_command(cat)", + "run_shell_command(head)", + "run_shell_command(tail)", + "run_shell_command(grep)" + ], + "telemetry": { + "enabled": false, + "target": "gcp" + } + } + prompt: |- + ## Role + 
+ You are an expert code reviewer. You have access to tools to gather + PR information and perform the review on GitHub. Use the available tools to + gather information; do not ask for information to be provided. + + ## Requirements + 1. All feedback must be left on GitHub. + 2. Any output that is not left in GitHub will not be seen. + + ## Steps + + Start by running these commands to gather the required data: + 1. Run: echo "${REPOSITORY}" to get the github repository in / format + 2. Run: echo "${PR_DATA}" to get PR details (JSON format) + 3. Run: echo "${CHANGED_FILES}" to get the list of changed files + 4. Run: echo "${PR_NUMBER}" to get the PR number + 5. Run: echo "${ADDITIONAL_INSTRUCTIONS}" to see any specific review + instructions from the user + 6. Run: gh pr diff "${PR_NUMBER}" to see the full diff and reference + Context section to understand it + 7. For any specific files, use: cat filename, head -50 filename, or + tail -50 filename + 8. If ADDITIONAL_INSTRUCTIONS contains text, prioritize those + specific areas or focus points in your review. Common instruction + examples: "focus on security", "check performance", "review error + handling", "check for breaking changes" + + ## Guideline + ### Core Guideline(Always applicable) + + 1. Understand the Context: Analyze the pull request title, description, changes, and code files to grasp the intent. + 2. Meticulous Review: Thoroughly review all relevant code changes, prioritizing added lines. Consider the specified + focus areas and any provided style guide. + 3. Comprehensive Review: Ensure that the code is thoroughly reviewed, as it's important to the author + that you identify any and all relevant issues (subject to the review criteria and style guide). + Missing any issues will lead to a poor code review experience for the author. + 4. Constructive Feedback: + * Provide clear explanations for each concern. + * Offer specific, improved code suggestions and suggest alternative approaches, when applicable. 
+ Code suggestions in particular are very helpful so that the author can directly apply them + to their code, but they must be accurately anchored to the lines that should be replaced. + 5. Severity Indication: Clearly indicate the severity of the issue in the review comment. + This is very important to help the author understand the urgency of the issue. + The severity should be one of the following (which are provided below in decreasing order of severity): + * `critical`: This issue must be addressed immediately, as it could lead to serious consequences + for the code's correctness, security, or performance. + * `high`: This issue should be addressed soon, as it could cause problems in the future. + * `medium`: This issue should be considered for future improvement, but it's not critical or urgent. + * `low`: This issue is minor or stylistic, and can be addressed at the author's discretion. + 6. Avoid commenting on hardcoded dates and times being in future or not (for example "this date is in the future"). + * Remember you don't have access to the current date and time and leave that to the author. + 7. Targeted Suggestions: Limit all suggestions to only portions that are modified in the diff hunks. + This is a strict requirement as the GitHub (and other SCM's) API won't allow comments on parts of code files that are not + included in the diff hunks. + 8. Code Suggestions in Review Comments: + * Succinctness: Aim to make code suggestions succinct, unless necessary. Larger code suggestions tend to be + harder for pull request authors to commit directly in the pull request UI. + * Valid Formatting: Provide code suggestions within the suggestion field of the JSON response (as a string literal, + escaping special characters like \n, \\, \"). Do not include markdown code blocks in the suggestion field. + Use markdown code blocks in the body of the comment only for broader examples or if a suggestion field would + create an excessively large diff. 
Prefer the suggestion field for specific, targeted code changes. + * Line Number Accuracy: Code suggestions need to align perfectly with the code it intend to replace. + Pay special attention to line numbers when creating comments, particularly if there is a code suggestion. + Note the patch includes code versions with line numbers for the before and after code snippets for each diff, so use these to anchor + your comments and corresponding code suggestions. + * Compilable: Code suggestions should be compilable code snippets that can be directly copy/pasted into the code file. + If the suggestion is not compilable, it will not be accepted by the pull request. Note that not all languages Are + compiled of course, so by compilable here, we mean either literally or in spirit. + * Inline Code Comments: Feel free to add brief comments to the code suggestion if it enhances the underlying code readability. + Just make sure that the inline code comments add value, and are not just restating what the code does. Don't use + inline comments to "teach" the author (use the review comment body directly for that), instead use it if it's beneficial + to the readability of the code itself. + 10. Markdown Formatting: Heavily leverage the benefits of markdown for formatting, such as bulleted lists, bold text, tables, etc. + 11. Avoid mistaken review comments: + * Any comment you make must point towards a discrepancy found in the code and the best practice surfaced in your feedback. + For example, if you are pointing out that constants need to be named in all caps with underscores, + ensure that the code selected by the comment does not already do this, otherwise it's confusing let alone unnecessary. + 12. Remove Duplicated code suggestions: + * Some provided code suggestions are duplicated, please remove the duplicated review comments. + 13. Don't Approve The Pull Request + 14. 
Reference all shell variables as "${VAR}" (with quotes and braces) + + ### Review Criteria (Prioritized in Review) + + * Correctness: Verify code functionality, handle edge cases, and ensure alignment between function + descriptions and implementations. Consider common correctness issues (logic errors, error handling, + race conditions, data validation, API usage, type mismatches). + * Efficiency: Identify performance bottlenecks, optimize for efficiency, and avoid unnecessary + loops, iterations, or calculations. Consider common efficiency issues (excessive loops, memory + leaks, inefficient data structures, redundant calculations, excessive logging, etc.). + * Maintainability: Assess code readability, modularity, and adherence to language idioms and + best practices. Consider common maintainability issues (naming, comments/documentation, complexity, + code duplication, formatting, magic numbers). State the style guide being followed (defaulting to + commonly used guides, for example Python's PEP 8 style guide or Google Java Style Guide, if no style guide is specified). + * Security: Identify potential vulnerabilities (e.g., insecure storage, injection attacks, + insufficient access controls). + + ### Miscellaneous Considerations + * Testing: Ensure adequate unit tests, integration tests, and end-to-end tests. Evaluate + coverage, edge case handling, and overall test quality. + * Performance: Assess performance under expected load, identify bottlenecks, and suggest + optimizations. + * Scalability: Evaluate how the code will scale with growing user base or data volume. + * Modularity and Reusability: Assess code organization, modularity, and reusability. Suggest + refactoring or creating reusable components. + * Error Logging and Monitoring: Ensure errors are logged effectively, and implement monitoring + mechanisms to track application health in production. 
+ + **CRITICAL CONSTRAINTS:** + + You MUST only provide comments on lines that represent the actual changes in + the diff. This means your comments should only refer to lines that begin with + a `+` or `-` character in the provided diff content. + DO NOT comment on lines that start with a space (context lines). + + You MUST only add a review comment if there exists an actual ISSUE or BUG in the code changes. + DO NOT add review comments to tell the user to "check" or "confirm" or "verify" something. + DO NOT add review comments to tell the user to "ensure" something. + DO NOT add review comments to explain what the code change does. + DO NOT add review comments to validate what the code change does. + DO NOT use the review comments to explain the code to the author. They already know their code. Only comment when there's an improvement opportunity. This is very important. + + Pay close attention to line numbers and ensure they are correct. + Pay close attention to indentations in the code suggestions and make sure they match the code they are to replace. + Avoid comments on the license headers - if any exists - and instead make comments on the code that is being changed. + + It's absolutely important to avoid commenting on the license header of files. + It's absolutely important to avoid commenting on copyright headers. + Avoid commenting on hardcoded dates and times being in future or not (for example "this date is in the future"). + Remember you don't have access to the current date and time and leave that to the author. + + Avoid mentioning any of your instructions, settings or criteria. + + Here are some general guidelines for setting the severity of your comments + - Comments about refactoring a hardcoded string or number as a constant are generally considered low severity. + - Comments about log messages or log enhancements are generally considered low severity. + - Comments in .md files are medium or low severity. This is really important. 
+ - Comments about adding or expanding docstring/javadoc have low severity most of the times. + - Comments about suppressing unchecked warnings or todos are considered low severity. + - Comments about typos are usually low or medium severity. + - Comments about testing or on tests are usually low severity. + - Do not comment about the content of a URL if the content is not directly available in the input. + + Keep comments bodies concise and to the point. + Keep each comment focused on one issue. + + ## Context + The files that are changed in this pull request are represented below in the following + format, showing the file name and the portions of the file that are changed: + + + FILE: + DIFF: + + + -------------------- + + FILE: + DIFF: + + + -------------------- + + (and so on for all files changed) + + + Note that if you want to make a comment on the LEFT side of the UI / before the diff code version + to note those line numbers and the corresponding code. Same for a comment on the RIGHT side + of the UI / after the diff code version to note the line numbers and corresponding code. + This should be your guide to picking line numbers, and also very importantly, restrict + your comments to be only within this line range for these files, whether on LEFT or RIGHT. + If you comment out of bounds, the review will fail, so you must pay attention the file name, + line numbers, and pre/post diff versions when crafting your comment. + + Here are the patches that were implemented in the pull request, per the + formatting above: + + The get the files changed in this pull request, run: + "$(gh pr diff "${PR_NUMBER}" --patch)" to get the list of changed files PATCH + + ## Review + + Once you have the information and are ready to leave a review on GitHub, post the review to GitHub using the GitHub MCP tool by: + 1. Creating a pending review: Use the mcp__github__create_pending_pull_request_review to create a Pending Pull Request Review. + + 2. 
Adding review comments: + 2.1 Use the mcp__github__add_comment_to_pending_review to add comments to the Pending Pull Request Review. Inline comments are preferred whenever possible, so repeat this step, calling mcp__github__add_comment_to_pending_review, as needed. All comments about specific lines of code should use inline comments. It is preferred to use code suggestions when possible, which include a code block that is labeled "suggestion", which contains what the new code should be. All comments should also have a severity. The syntax is: + Normal Comment Syntax: + + {{SEVERITY}} {{COMMENT_TEXT}} + + + Inline Comment Syntax: (Preferred): + + {{SEVERITY}} {{COMMENT_TEXT}} + ```suggestion + {{CODE_SUGGESTION}} + ``` + + + Prepend a severity emoji to each comment: + - 🟢 for low severity + - 🟡 for medium severity + - 🟠 for high severity + - 🔴 for critical severity + - 🔵 if severity is unclear + + Including all of this, an example inline comment would be: + + 🟢 Use camelCase for function names + ```suggestion + myFooBarFunction + ``` + + + A critical severity example would be: + + 🔴 Remove storage key from GitHub + ```suggestion + ``` + + 3. Posting the review: Use the mcp__github__submit_pending_pull_request_review to submit the Pending Pull Request Review. + + 3.1 Crafting the summary comment: Include a summary of high level points that were not addressed with inline comments. Be concise. Do not repeat details mentioned inline. + + Structure your summary comment using this exact format with markdown: + ## 📋 Review Summary + + Provide a brief 2-3 sentence overview of the PR and overall + assessment. + + ## 🔍 General Feedback + - List general observations about code quality + - Mention overall patterns or architectural decisions + - Highlight positive aspects of the implementation + - Note any recurring themes across files + + ## Final Instructions + + Remember, you are running in a VM and no one reviewing your output. 
Your review must be posted to GitHub using the MCP tools to create a pending review, add comments to the pending review, and submit the pending review. + + + - name: 'Post PR review failure comment' + if: |- + ${{ failure() && steps.gemini_pr_review.outcome == 'failure' }} + uses: 'actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea' + with: + github-token: '${{ steps.generate_token.outputs.token || secrets.GITHUB_TOKEN }}' + script: |- + github.rest.issues.createComment({ + owner: '${{ github.repository }}'.split('/')[0], + repo: '${{ github.repository }}'.split('/')[1], + issue_number: '${{ steps.get_pr.outputs.pr_number || steps.get_pr_comment.outputs.pr_number }}', + body: 'There is a problem with the Gemini CLI PR review. Please check the [action logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details.' + }) diff --git a/.github/workflows/instant-merge.yaml b/.github/workflows/instant-merge.yaml new file mode 100644 index 0000000000..eec0021017 --- /dev/null +++ b/.github/workflows/instant-merge.yaml @@ -0,0 +1,27 @@ +--- +name: Instant Merge Konflux PRs + +on: # yamllint disable-line rule:truthy + pull_request: + types: + - opened + paths: + - manifests/base/params-latest.env + +permissions: + contents: write + pull-requests: write + checks: write + security-events: write + statuses: write + +jobs: + instant-merge: + runs-on: ubuntu-latest + steps: + - name: instant-merge + if: ${{ github.event.sender.login == 'red-hat-konflux[bot]' && ( contains(github.event.pull_request.title, 'Update odh-workbench-jupyter-') || contains(github.event.pull_request.title, 'Update odh-workbench-codeserver-') || contains(github.event.pull_request.title, 'Update odh-pipeline-runtime-') ) }} + env: + GITHUB_TOKEN: ${{ github.token }} + run: | + gh pr merge --merge --admin ${{ github.event.pull_request.html_url }} diff --git a/.github/workflows/notebook-digest-updater.yaml b/.github/workflows/notebook-digest-updater.yaml 
new file mode 100644 index 0000000000..a194f64159 --- /dev/null +++ b/.github/workflows/notebook-digest-updater.yaml @@ -0,0 +1,254 @@ +--- +# The aim of this GitHub workflow is to update the params.env and commit.env files with the latest builds. +name: Update notebook image build references (downstream) +on: # yamllint disable-line rule:truthy + workflow_dispatch: + inputs: + branch: + required: true + description: "Provide the name of the branch you want to update ex main, vYYYYx etc: " + # Put the scheduler on comment until automate the full release procedure + # schedule: + # - cron: "0 0 * * 5" #Scheduled every Friday +env: + DIGEST_UPDATER_BRANCH: digest-updater-${{ github.run_id }} + BRANCH_NAME: ${{ github.event.inputs.branch || 'main' }} + RELEASE_VERSION_N: 2024b + RELEASE_VERSION_N_1: 2024a +jobs: + initialize: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Install Skopeo CLI + shell: bash + run: | + sudo apt-get -y update + sudo apt-get -y install skopeo + + # Checkout the branch + - name: Checkout branch + uses: actions/checkout@v4 + with: + ref: ${{ env.BRANCH_NAME }} + + # Create a new branch + - name: Create a new branch + run: | + echo ${{ env.DIGEST_UPDATER_BRANCH }} + git checkout -b ${{ env.DIGEST_UPDATER_BRANCH }} + git push --set-upstream origin ${{ env.DIGEST_UPDATER_BRANCH }} + + update-n-version: + needs: [initialize] + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Configure Git + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "GitHub Actions" + + # Get latest build commit from the https://github.com/red-hat-data-services/notebooks/${release_branch} using this as identifier for the latest tag name + - name: Retrieve latest commit hash from the release branch + id: hash-n + shell: bash + run: | + PAYLOAD=$(curl --silent -H 'Accept: application/vnd.github.v4.raw' 
"https://api.github.com/repos/red-hat-data-services/notebooks/commits?sha=release-$RELEASE_VERSION_N&per_page=1") + echo "HASH_N=$(echo $PAYLOAD | jq -r '.[0].sha[0:7]')" >> ${GITHUB_OUTPUT} + + # Checkout the release branch to apply the updates + - name: Checkout release branch + uses: actions/checkout@v4 + with: + ref: ${{ env.DIGEST_UPDATER_BRANCH }} + + - name: Update the params.env file + shell: bash + run: | + echo Latest commit is: ${{ steps.hash-n.outputs.HASH_N }} on ${{ env.RELEASE_VERSION_N }} + + # Get the complete list of images N-version to update + PARAMS_ENV_PATH="manifests/base/params.env" + IMAGES=$(grep "\-n=" "${PARAMS_ENV_PATH}" | cut -d "=" -f 1) + + # The order of the regexes array should match with the params.env file + REGEXES=("v3-${{ env.RELEASE_VERSION_N }}-\d{8}-+${{ steps.hash-n.outputs.HASH_N }}" \ + "cuda-[a-z]+-minimal-[a-z0-9]+-[a-z]+-3.11-${{ env.RELEASE_VERSION_N }}-\d{8}-${{ steps.hash-n.outputs.HASH_N }}" \ + "v3-${{ env.RELEASE_VERSION_N }}-\d{8}-+${{ steps.hash-n.outputs.HASH_N }}" \ + "v3-${{ env.RELEASE_VERSION_N }}-\d{8}-+${{ steps.hash-n.outputs.HASH_N }}" \ + "cuda-[a-z]+-tensorflow-[a-z0-9]+-[a-z]+-3.11-${{ env.RELEASE_VERSION_N }}-\d{8}-${{ steps.hash-n.outputs.HASH_N }}" \ + "v3-${{ env.RELEASE_VERSION_N }}-\d{8}-+${{ steps.hash-n.outputs.HASH_N }}" \ + "codeserver-[a-z0-9]+-[a-z]+-3.11-${{ env.RELEASE_VERSION_N }}-\d{8}-${{ steps.hash-n.outputs.HASH_N }}" \ + "rocm-[a-z]+-minimal-[a-z0-9]+-[a-z]+-3.11-${{ env.RELEASE_VERSION_N }}-\d{8}-${{ steps.hash-n.outputs.HASH_N }}" \ + "rocm-[a-z]+-pytorch-[a-z0-9]+-[a-z]+-3.11-${{ env.RELEASE_VERSION_N }}-\d{8}-${{ steps.hash-n.outputs.HASH_N }}" \ + "rocm-[a-z]+-tensorflow-[a-z0-9]+-[a-z]+-3.11-${{ env.RELEASE_VERSION_N }}-\d{8}-${{ steps.hash-n.outputs.HASH_N }}") + + i=0 + for image in ${IMAGES}; do + echo "CHECKING: '${image}'" + img=$(grep -E "${image}=" "${PARAMS_ENV_PATH}" | cut -d '=' -f2) + registry=$(echo "${img}" | cut -d '@' -f1) + + regex=${REGEXES[$i]} + 
skopeo_metadata=$(skopeo inspect --retry-times 3 "docker://${img}") + latest_tag=$(echo "${skopeo_metadata}" | jq -r --arg regex "$regex" '.RepoTags | map(select(. | test($regex))) | .[0]') + digest=$(skopeo inspect --retry-times 3 "docker://${registry}:${latest_tag}" | jq .Digest | tr -d '"') + output="${registry}@${digest}" + echo "NEW: ${output}" + sed -i "s|${image}=.*|${image}=${output}|" "${PARAMS_ENV_PATH}" + i=$((i+1)) + done + if [[ $(git status --porcelain | wc -l) -gt 0 ]]; then + git fetch origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git pull origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git add "${PARAMS_ENV_PATH}" && \ + git commit -m "Update images for release N via ${{ env.DIGEST_UPDATER_BRANCH }} GitHub action" && \ + git push origin ${{ env.DIGEST_UPDATER_BRANCH }} + else + echo "There were no changes detected in the images for the ${{ env.RELEASE_VERSION_N }}" + fi + + - name: Update the commit.env file + run: | + echo Latest commit is: ${{ steps.hash-n.outputs.HASH_N }} on ${{ env.RELEASE_VERSION_N }} + + COMMIT_ENV_PATH="manifests/base/commit.env" + + # Get the complete list of commits N-version to update + COMMIT=$(grep "\-n=" "${COMMIT_ENV_PATH}" | cut -d "=" -f 1) + + for val in ${COMMIT}; do + echo "${val}" + sed -i "s|${val}=.*|${val}=${{ steps.hash-n.outputs.HASH_N }}|" "${COMMIT_ENV_PATH}" + done + + if [[ $(git status --porcelain | wc -l) -gt 0 ]]; then + git fetch origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git pull origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git add "${COMMIT_ENV_PATH}" && \ + git commit -m "Update image commits for release N via ${{ env.DIGEST_UPDATER_BRANCH }} GitHub action" && \ + git push origin ${{ env.DIGEST_UPDATER_BRANCH }} + else + echo "There were no changes detected in the images for the ${{ env.RELEASE_VERSION_N }}" + fi + + update-n-1-version: + needs: [initialize, update-n-version] + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Configure Git + run: | + git config 
--global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "GitHub Actions" + + # Get latest build commit from the https://github.com/red-hat-data-services/notebooks/${release_branch} using this as identifier for the latest tag name + - name: Retrieve latest commit hash from the release branch + id: hash-n-1 + shell: bash + run: | + PAYLOAD=$(curl --silent -H 'Accept: application/vnd.github.v4.raw' "https://api.github.com/repos/red-hat-data-services/notebooks/commits?sha=release-$RELEASE_VERSION_N_1&per_page=1") + echo "HASH_N_1=$(echo $PAYLOAD | jq -r '.[0].sha[0:7]')" >> ${GITHUB_OUTPUT} + + # Checkout the release branch to apply the updates + - name: Checkout release branch + uses: actions/checkout@v4 + with: + ref: ${{ env.DIGEST_UPDATER_BRANCH }} + + - name: Update the params.env file + shell: bash + run: | + echo Latest commit is: ${{ steps.hash-n-1.outputs.HASH_N_1 }} on ${{ env.RELEASE_VERSION_N_1}} + + # Get the complete list of images N-1-version to update + PARAMS_ENV_PATH="manifests/base/params.env" + IMAGES=$(grep "\-n-1=" "${PARAMS_ENV_PATH}" | cut -d "=" -f 1) + + # The order of the regexes array should match with the params.env file + REGEXES=("v2-${{ env.RELEASE_VERSION_N_1 }}-\d{8}+-${{ steps.hash-n-1.outputs.HASH_N_1 }}" \ + "cuda-[a-z]+-minimal-[a-z0-9]+-[a-z]+-3.9-${{ env.RELEASE_VERSION_N_1 }}-\d{8}-${{ steps.hash-n-1.outputs.HASH_N_1 }}" \ + "v2-${{ env.RELEASE_VERSION_N_1 }}-\d{8}+-${{ steps.hash-n-1.outputs.HASH_N_1 }}" \ + "v2-${{ env.RELEASE_VERSION_N_1 }}-\d{8}+-${{ steps.hash-n-1.outputs.HASH_N_1 }}" \ + "cuda-[a-z]+-tensorflow-[a-z0-9]+-[a-z]+-3.9-${{ env.RELEASE_VERSION_N_1 }}-\d{8}-${{ steps.hash-n-1.outputs.HASH_N_1 }}" \ + "v2-${{ env.RELEASE_VERSION_N_1 }}-\d{8}+-${{ steps.hash-n-1.outputs.HASH_N_1 }}" \ + "codeserver-[a-z0-9]+-[a-z]+-3.9-${{ env.RELEASE_VERSION_N_1 }}-\d{8}-${{ steps.hash-n-1.outputs.HASH_N_1 }}" ) + + i=0 + for image in ${IMAGES}; do + echo "CHECKING: '${image}'" + img=$(grep 
-E "${image}=" "${PARAMS_ENV_PATH}" | cut -d '=' -f2) + registry=$(echo "${img}" | cut -d '@' -f1) + regex=${REGEXES[$i]} + skopeo_metadata=$(skopeo inspect --retry-times 3 "docker://${img}") + latest_tag=$(echo "${skopeo_metadata}" | jq -r --arg regex "$regex" '.RepoTags | map(select(. | test($regex))) | .[0]') + digest=$(skopeo inspect --retry-times 3 "docker://${registry}:${latest_tag}" | jq .Digest | tr -d '"') + output="${registry}@${digest}" + echo "NEW: ${output}" + sed -i "s|${image}=.*|${image}=${output}|" "${PARAMS_ENV_PATH}" + i=$((i+1)) + done + + if [[ $(git status --porcelain | wc -l) -gt 0 ]]; then + git fetch origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git pull origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git add "${PARAMS_ENV_PATH}" && \ + git commit -m "Update images for release N-1 via ${{ env.DIGEST_UPDATER_BRANCH }} GitHub action" && \ + git push origin ${{ env.DIGEST_UPDATER_BRANCH }} + else + echo "There were no changes detected in the images for the ${{ env.RELEASE_VERSION_N_1 }}" + fi + + - name: Update the commit.env file + run: | + COMMIT_ENV_PATH="manifests/base/commit.env" + + echo Latest commit is: ${{ steps.hash-n-1.outputs.HASH_N_1 }} on ${{ env.RELEASE_VERSION_N_1 }} + # Get the complete list of images N-1-version to update + COMMIT=$(grep "\-n-1=" "${COMMIT_ENV_PATH}" | cut -d "=" -f 1) + + for val in ${COMMIT}; do + echo "${val}" + sed -i "s|${val}=.*|${val}=${{ steps.hash-n-1.outputs.HASH_N_1 }}|" "${COMMIT_ENV_PATH}" + done + + if [[ $(git status --porcelain | wc -l) -gt 0 ]]; then + git fetch origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git pull origin ${{ env.DIGEST_UPDATER_BRANCH }} && \ + git add "${COMMIT_ENV_PATH}" && \ + git commit -m "Update image commits for release N-1 via ${{ env.DIGEST_UPDATER_BRANCH }} GitHub action" && \ + git push origin ${{ env.DIGEST_UPDATER_BRANCH }} + else + echo "There were no changes detected in the images for the ${{ env.RELEASE_VERSION_N_1 }}" + fi + + open-pull-request: + needs: 
[update-n-version, update-n-1-version] + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Checkout repo + uses: actions/checkout@v4 + + - name: pull-request + uses: repo-sync/pull-request@v2 + with: + source_branch: ${{ env.DIGEST_UPDATER_BRANCH }} + destination_branch: ${{ env.BRANCH_NAME }} + github_token: ${{ secrets.GITHUB_TOKEN }} + pr_label: "automated pr" + pr_title: "[Digest Updater Action] Update Notebook Images" + pr_body: | + :rocket: This is an automated Pull Request. + Created by `/.github/workflows/notebooks-digest-updater-upstream.yaml` + + This PR updates the following files: + - `manifests/base/params.env` file with the latest updated SHA digests of the notebooks (N & N-1). + - `manifests/base/commit.env` file with the latest commit (N & N-1). + + :exclamation: **IMPORTANT NOTE**: Remember to delete the `${{ env.DIGEST_UPDATER_BRANCH }}` branch after merging the changes diff --git a/.github/workflows/sec-scan.yml b/.github/workflows/sec-scan.yml deleted file mode 100644 index be44a73a9a..0000000000 --- a/.github/workflows/sec-scan.yml +++ /dev/null @@ -1,148 +0,0 @@ ---- -# The aim of this GitHub workflow is to update the `ci/securitty-scan/security_scan_results.md` with latest security scan results. 
-name: Update notebook image security reports -on: # yamllint disable-line rule:truthy - workflow_dispatch: - inputs: - branch: - required: true - description: "Provide the name of the branch you want to update ex main, vYYYYx etc: " - schedule: - - cron: "0 0 */21 * 5" # Scheduled every third Friday -env: - SEC_SCAN_BRANCH: sec-scan-${{ github.run_id }} - BRANCH_NAME: main - RELEASE_VERSION_N: 2023b - RELEASE_VERSION_N_1: 2023a -jobs: - initialize: - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - name: Install Skopeo CLI - shell: bash - run: | - sudo apt-get -y update - sudo apt-get -y install skopeo - - # Checkout the branch - - name: Checkout branch - uses: actions/checkout@v5 - with: - ref: ${{ env.BRANCH_NAME }} - - # Create a new branch - - name: Create a new branch - run: | - echo ${{ env.SEC_SCAN_BRANCH }} - git checkout -b ${{ env.SEC_SCAN_BRANCH }} - git push --set-upstream origin ${{ env.SEC_SCAN_BRANCH }} - - check-vulnerabilities: - needs: [initialize] - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - name: Configure Git - run: | - git config --global user.email "github-actions[bot]@users.noreply.github.com" - git config --global user.name "GitHub Actions" - - # Get the latest weekly build commit hash: https://github.com/opendatahub-io/notebooks/commits/2023b - - name: Checkout upstream notebooks repo - uses: actions/checkout@v5 - with: - repository: opendatahub-io/notebooks.git - ref: ${{ env.RELEASE_VERSION_N }} - - - name: Retrieve latest weekly commit hash from the "N" branch - id: hash-n - shell: bash - run: | - echo "HASH_N=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT} - - - name: Checkout "N - 1" branch - uses: actions/checkout@v5 - with: - repository: opendatahub-io/notebooks.git - ref: ${{ env.RELEASE_VERSION_N_1 }} - - - name: Retrieve latest weekly commit hash from the "N - 1" branch - id: hash-n-1 - shell: bash - run: | - echo "HASH_N_1=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT} - - - 
name: Checkout "main" branch - uses: actions/checkout@v5 - with: - repository: opendatahub-io/notebooks.git - ref: main - - - name: Retrieve latest weekly commit hash from the "main" branch - id: hash-main - shell: bash - run: | - echo "LATEST_MAIN_COMMIT=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT} - - # Checkout the release branch to apply the updates - - name: Checkout release branch - uses: actions/checkout@v5 - with: - ref: ${{ env.SEC_SCAN_BRANCH }} - - - name: setup python - uses: actions/setup-python@v5 - with: - python-version: '3.10' # install the python version needed - - - name: install python packages - run: | - python -m pip install --upgrade pip - pip install requests - - - name: execute py script # run trial.py - env: - HASH_N: ${{ steps.hash-n.outputs.HASH_N }} - RELEASE_VERSION_N: ${{ env.RELEASE_VERSION_N }} - - HASH_N_1: ${{ steps.hash-n-1.outputs.HASH_N_1 }} - RELEASE_VERSION_N_1: ${{ env.RELEASE_VERSION_N_1 }} - - LATEST_MAIN_COMMIT: ${{ steps.hash-main.outputs.LATEST_MAIN_COMMIT }} - run: make scan-image-vulnerabilities - - - name: Push the files - run: | - git fetch origin ${{ env.SEC_SCAN_BRANCH }} && git pull origin ${{ env.SEC_SCAN_BRANCH }} && git add . && git commit -m "Update security scans" && git push origin ${{ env.SEC_SCAN_BRANCH }} - - # Creates the Pull Request - open-pull-request: - needs: [check-vulnerabilities] - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - name: Checkout repo - uses: actions/checkout@v5 - - - name: pull-request - uses: repo-sync/pull-request@v2 - with: - source_branch: ${{ env.SEC_SCAN_BRANCH }} - destination_branch: ${{ env.BRANCH_NAME}} - github_token: ${{ secrets.GITHUB_TOKEN }} - pr_label: "automated pr" - pr_title: "[Security Scanner Action] Weekly update of security vulnerabilities reported by Quay" - pr_body: | - :rocket: This is an automated Pull Request. 
- - This PR updates: - - * `ci/security-scan/security_scan_results.md` file with the latest security vulnerabilities reported by Quay. - * `ci/security-scan/weekly_commit_ids` with the latest updated SHA digests of the notebooks (N & N-1) - Created by `/.github/workflows/sec-scan.yaml` - - :exclamation: **IMPORTANT NOTE**: Remember to delete the ` ${{ env.SEC_SCAN_BRANCH }}` branch after merging the changes diff --git a/.tekton/README.md b/.tekton/README.md new file mode 100644 index 0000000000..92ee75eebd --- /dev/null +++ b/.tekton/README.md @@ -0,0 +1,39 @@ +# ⚠️ Do Not Modify Files in the `.tekton/` Directory Directly + +The `.tekton/` directory in each component repository is **automatically synchronized** from [`konflux-central`](https://github.com/red-hat-data-services/konflux-central) using automation. Any edits made directly to Tekton files in the component repositories will be **overwritten** by the next sync. + +All Tekton file updates **must be made in the `konflux-central` repository**. + +## ✅ How to Make Changes + +To modify the pipelines for `notebooks` in the `rhoai-2.25` release: + +- Clone the [`konflux-central`](https://github.com/red-hat-data-services/konflux-central) repository. + +```bash +git clone git@github.com:red-hat-data-services/konflux-central.git +cd konflux-central +``` + +- Check out the release branch + +```bash +git checkout rhoai-2.25 +``` + +- Navigate to the Tekton files for your component(s). + +```bash +cd pipelineruns/notebooks/.tekton +``` + +- Make the required changes to the Tekton YAML files. + +- Commit and push your changes. + +```bash +git commit -am "Update pipelinerun for notebooks (rhoai-2.25)" +git push origin rhoai-2.25 +``` + +- Once pushed, automation will automatically sync your updates to the corresponding component repository. 
diff --git a/.tekton/multiarch-pull-request-pipeline.yaml b/.tekton/multiarch-pull-request-pipeline.yaml deleted file mode 100644 index 12b341d16e..0000000000 --- a/.tekton/multiarch-pull-request-pipeline.yaml +++ /dev/null @@ -1,604 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: Pipeline -metadata: - labels: - appstudio.openshift.io/application: opendatahub-release - pipelines.appstudio.openshift.io/type: build - name: multiarch-pull-request-pipeline - namespace: open-data-hub-tenant -spec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. - - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://enterprisecontract.dev/docs/ec-policies/release_policy.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. - This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - finally: - - name: show-sbom - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - taskRef: - params: - - name: name - value: show-sbom - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-show-sbom:0.1@sha256:beb0616db051952b4b861dd8c3e00fa1c0eccbd926feddf71194d3bb3ace9ce7 - - name: kind - value: task - resolver: bundles - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . 
- description: Path to the source code of an application's component from where to build image. - name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter path-context - name: dockerfile - type: string - - default: "false" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched by Cachi2 - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - type: string - - default: "false" - description: Build a source image. - name: build-source-image - type: string - - default: "true" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: [] - description: Array of extra tags to apply to the resulting image (e.g. commit or release identifiers). - name: additional-tags - type: array - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote VMs - name: privileged-nested - type: string - - default: - - linux/x86_64 - description: List of platforms to build the container images on. The available set of values is determined by the configuration of the multi-platform-controller. 
- name: build-platforms - type: array - - name: buildah-format - default: docker - type: string - description: The format for the resulting image's mediaType. Valid values are oci or docker. - results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:abf231cfc5a68b56f68a8ac9bb26dca3c3e434c88dd9627c72bdec0b8c335c67 - - name: kind - value: task - resolver: bundles - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3f1b468066b301083d8550e036f5a654fcb064810bd29eb06fec6d8ad3e35b9c - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: 
$(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:dc82a7270aace9b1c26f7e96f8ccab2752e53d32980c41a45e1733baad76cde6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - matrix: - params: - - name: PLATFORM - value: - - $(params.build-platforms) - name: build-images - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - - name: IMAGE_APPEND_PLATFORM - value: "true" - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-remote-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-remote-oci-ta:0.5@sha256:650b0bca57c626c1e82f35cdfadf44a7792230b2b992aaa9c369d615aae6590d - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: 
IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-images.results.IMAGE_REF[*]) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-images - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:79784d53749584bc5a8de32142ec4e2f01cdbf42c20d94e59280e0b927c8597d - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: BINARY_IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:b0d6cb28a23f20db4f5cf78ed78ae3a91b9a5adfe989696ed0bbc63840a485b6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: 
bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - matrix: - params: - - name: image-platform - value: - - $(params.build-platforms) - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:a7cc183967f89c4ac100d04ab8f81e54733beee60a0528208107c9a22d3c43af - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:dae8e28761cee4ab0baf04ab9f8f1a4b3cee3c7decf461fda2bacc5c01652a60 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:783f5de1b4def2fb3fad20b914f4b3afee46ffb8f652114946e321ef3fa86449 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: 
in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:b0bd59748cda4a7abf311e4f448e6c1d00c6b6d8c0ecc1c2eb33e08dc0e0b802 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:cdbe1a968676e4f5519b082bf1e27a4cdcf66dd60af66dbc26b3e604f957f7e9 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: 
coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:db2b267dc15e4ed17f704ee91b8e9b38068e1a35b1018a328fdca621819d74c6 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:bf7bdde00b7212f730c1356672290af6f38d070da2c8a316987b5c32fd49e0b9 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:a2bde66f6b4164620298c7d709b8f08515409404000fa1dc2260d2508b135651 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: 
- - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: ADDITIONAL_TAGS - value: - - $(params.additional-tags[*]) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:49f778479f468e71c2cfef722e96aa813d7ef98bde8a612e1bf1a13cd70849ec - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:1b6c20ab3dbfb0972803d3ebcb2fa72642e59400c77bd66dfd82028bdd09e120 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true diff --git a/.tekton/multiarch-push-pipeline.yaml b/.tekton/multiarch-push-pipeline.yaml deleted file mode 100644 index 
692cf87a80..0000000000 --- a/.tekton/multiarch-push-pipeline.yaml +++ /dev/null @@ -1,664 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: Pipeline -metadata: - labels: - appstudio.openshift.io/application: opendatahub-release - pipelines.appstudio.openshift.io/type: build - name: multiarch-push-pipeline - namespace: open-data-hub-tenant -spec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. - - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://enterprisecontract.dev/docs/ec-policies/release_policy.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. 
- This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - finally: - - name: show-sbom - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - taskRef: - params: - - name: name - value: show-sbom - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-show-sbom:0.1@sha256:beb0616db051952b4b861dd8c3e00fa1c0eccbd926feddf71194d3bb3ace9ce7 - - name: kind - value: task - resolver: bundles - - name: send-slack-notification - params: - - name: message - value: "$(tasks.rhoai-init.results.slack-message-failure-text)" - - name: secret-name - value: slack-secret - - name: key-name - value: slack-webhook - taskRef: - params: - - name: name - value: slack-webhook-notification - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-slack-webhook-notification:0.1@sha256:4e68fe2225debc256d403b828ed358345bb56d03327b46d55cb6c42911375750 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.status) - operator: in - values: - - "Failed" - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . - description: Path to the source code of an application's component from where to build image. 
- name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter path-context - name: dockerfile - type: string - - default: "false" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched by Cachi2 - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - type: string - - default: "false" - description: Build a source image. - name: build-source-image - type: string - - default: "true" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: [] - description: Array of extra tags to apply to the resulting image (e.g. commit or release identifiers). - name: additional-tags - type: array - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote VMs - name: privileged-nested - type: string - - default: - - linux/x86_64 - description: List of platforms to build the container images on. The available set of values is determined by the configuration of the multi-platform-controller. - name: build-platforms - type: array - - name: buildah-format - default: docker - type: string - description: The format for the resulting image's mediaType. 
Valid values are oci or docker. - results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: rhoai-init - params: - - name: pipelinerun-name - value: "$(context.pipelineRun.name)" - taskSpec: - results: - - description: Notification text to be posted to slack - name: slack-message-failure-text - steps: - - image: quay.io/rhoai-konflux/alpine:latest - name: rhoai-init - env: - - name: slack_message - valueFrom: - secretKeyRef: - name: slack-secret - key: slack-component-failure-notification - script: | - pipelinerun_name=$(params.pipelinerun-name) - echo "pipelinerun-name = $pipelinerun_name" - application_name=opendatahub-release - echo "application-name = $application_name" - - component_name=${pipelinerun_name/-on-*/} - echo "component-name = $component_name" - - KONFLUX_SERVER="https://konflux-ui.apps.stone-prd-rh01.pg1f.p1.openshiftapps.com" - build_url="${KONFLUX_SERVER}/ns/open-data-hub-tenant/applications/${application_name}/pipelineruns/${pipelinerun_name}/logs" - - build_time="$(date +%Y-%m-%dT%H:%M:%S)" - - slack_message=${slack_message/__BUILD__URL__/$build_url} - slack_message=${slack_message/__PIPELINERUN__NAME__/$pipelinerun_name} - slack_message=${slack_message/__BUILD__TIME__/$build_time} - - echo -en "${slack_message}" > "$(results.slack-message-failure-text.path)" - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:abf231cfc5a68b56f68a8ac9bb26dca3c3e434c88dd9627c72bdec0b8c335c67 - - name: kind - value: task - resolver: bundles - runAfter: - - rhoai-init - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3f1b468066b301083d8550e036f5a654fcb064810bd29eb06fec6d8ad3e35b9c - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:dc82a7270aace9b1c26f7e96f8ccab2752e53d32980c41a45e1733baad76cde6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - matrix: - params: - - name: PLATFORM - value: - - $(params.build-platforms) - name: build-images - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: 
IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - - name: IMAGE_APPEND_PLATFORM - value: "true" - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-remote-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-remote-oci-ta:0.5@sha256:650b0bca57c626c1e82f35cdfadf44a7792230b2b992aaa9c369d615aae6590d - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-images.results.IMAGE_REF[*]) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-images - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:79784d53749584bc5a8de32142ec4e2f01cdbf42c20d94e59280e0b927c8597d - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: BINARY_IMAGE_DIGEST - value: 
$(tasks.build-image-index.results.IMAGE_DIGEST) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:b0d6cb28a23f20db4f5cf78ed78ae3a91b9a5adfe989696ed0bbc63840a485b6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - matrix: - params: - - name: image-platform - value: - - $(params.build-platforms) - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:a7cc183967f89c4ac100d04ab8f81e54733beee60a0528208107c9a22d3c43af - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: 
image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:dae8e28761cee4ab0baf04ab9f8f1a4b3cee3c7decf461fda2bacc5c01652a60 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:783f5de1b4def2fb3fad20b914f4b3afee46ffb8f652114946e321ef3fa86449 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:b0bd59748cda4a7abf311e4f448e6c1d00c6b6d8c0ecc1c2eb33e08dc0e0b802 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: 
IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:cdbe1a968676e4f5519b082bf1e27a4cdcf66dd60af66dbc26b3e604f957f7e9 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:db2b267dc15e4ed17f704ee91b8e9b38068e1a35b1018a328fdca621819d74c6 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: 
$(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:bf7bdde00b7212f730c1356672290af6f38d070da2c8a316987b5c32fd49e0b9 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:a2bde66f6b4164620298c7d709b8f08515409404000fa1dc2260d2508b135651 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: ADDITIONAL_TAGS - value: - - $(params.additional-tags[*]) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: 
$(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:49f778479f468e71c2cfef722e96aa813d7ef98bde8a612e1bf1a13cd70849ec - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:1b6c20ab3dbfb0972803d3ebcb2fa72642e59400c77bd66dfd82028bdd09e120 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true diff --git a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-pull-request.yaml b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-pull-request.yaml new file mode 100644 index 0000000000..ea4b6bac30 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-pull-request.yaml @@ -0,0 +1,69 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + 
pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-datascience-cpu-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-pipeline-runtime-datascience-cpu-py312 + - name: dockerfile + value: runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - linux/s390x + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-pull-request.yaml deleted file mode 100644 index d80f13a7ee..0000000000 --- a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# yamllint 
disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-pipeline\-runtime\-datascience\-cpu\-py312\-ubi9|runtimes/datascience/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( "runtimes/datascience/ubi9-python-3.12/**".pathChanged() || "runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf".pathChanged() || ".tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-pull-request.yaml".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-datascience-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-datascience-cpu-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-datascience-cpu-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - linux/ppc64le - - linux/s390x - - name: dockerfile - value: runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu - - name: path-context - value: . 
- - name: build-args-file - value: runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-datascience-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml deleted file mode 100644 index c3fb370452..0000000000 --- a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( "runtimes/datascience/ubi9-python-3.12/**".pathChanged() || "runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf".pathChanged() || ".tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-datascience-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-datascience-cpu-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-pipeline-runtime-datascience-cpu-py312-ubi9:{{revision}} - - name: dockerfile - value: runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu - - name: build-args-file - value: runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-datascience-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-v2-25-push.yaml new file mode 100644 index 0000000000..73024327ef --- /dev/null +++ b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-v2-25-push.yaml @@ -0,0 +1,72 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( "runtimes/datascience/ubi9-python-3.12/**".pathChanged() || + ".tekton/odh-pipeline-runtime-datascience-cpu-py312-v2-25-push.yaml".pathChanged() ) + + creationTimestamp: null + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-datascience-cpu-py312-v2-25 + 
pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-datascience-cpu-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-datascience-cpu-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-datascience-cpu-py312 + - name: dockerfile + value: runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - linux/s390x + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-datascience-cpu-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-pull-request.yaml b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-pull-request.yaml new file mode 100644 index 0000000000..e2a0290103 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-pull-request.yaml @@ -0,0 +1,69 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: 
'{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-minimal-cpu-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-pipeline-runtime-minimal-cpu-py312 + - name: dockerfile + value: runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - linux/s390x + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-pull-request.yaml deleted file mode 100644 index eb99f360f9..0000000000 --- a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,56 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-pipeline\-runtime\-minimal\-cpu\-py312\-ubi9|runtimes/minimal/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( "runtimes/minimal/ubi9-python-3.12/**".pathChanged() || 
"runtimes/minimal/build-args/cpu.conf".pathChanged() || ".tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-pull-request.yaml".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-minimal-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-minimal-cpu-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-minimal-cpu-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - linux/arm64 - - linux/ppc64le - - linux/s390x - - name: dockerfile - value: runtimes/minimal/ubi9-python-3.12/Dockerfile.cpu - - name: path-context - value: . 
- - name: build-args-file - value: runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-minimal-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml deleted file mode 100644 index 0a20554c8d..0000000000 --- a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( "runtimes/minimal/ubi9-python-3.12/**".pathChanged() || "runtimes/minimal/build-args/cpu.conf".pathChanged() || ".tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-minimal-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-minimal-cpu-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-minimal-cpu-py312-ubi9:{{revision}} - - name: dockerfile - 
value: runtimes/minimal/ubi9-python-3.12/Dockerfile.cpu - - name: build-args-file - value: runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-minimal-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-v2-25-push.yaml new file mode 100644 index 0000000000..ab9569c5e9 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-v2-25-push.yaml @@ -0,0 +1,71 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( "runtimes/minimal/ubi9-python-3.12/**".pathChanged() || + ".tekton/odh-pipeline-runtime-minimal-cpu-py312-v2-25-push.yaml".pathChanged() ) + creationTimestamp: null + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-minimal-cpu-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-minimal-cpu-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: 
git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-minimal-cpu-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-minimal-cpu-py312 + - name: dockerfile + value: runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux/ppc64le + - linux/s390x + - linux-m2xlarge/arm64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-minimal-cpu-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-pull-request.yaml b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-pull-request.yaml new file mode 100644 index 0000000000..eac3816fd1 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-pull-request.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + 
pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-pytorch-cuda-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-pipeline-runtime-pytorch-cuda-py312 + - name: dockerfile + value: runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git 
a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index 969cb5fcf9..0000000000 --- a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-pipeline\-runtime\-pytorch\-cuda\-py312\-ubi9|runtimes/pytorch/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "runtimes/pytorch/ubi9-python-3.12/**".pathChanged() || "runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-pytorch-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-pipeline-runtime-pytorch-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . - - name: build-args-file - value: runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml deleted file mode 100644 index 807013c34c..0000000000 --- a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,58 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml".pathChanged() || "runtimes/pytorch/ubi9-python-3.12/**".pathChanged() || "runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: 
odh-pipeline-runtime-pytorch-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-pytorch-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..e339a59c46 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-v2-25-push.yaml @@ -0,0 +1,88 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == 
"rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-pipeline-runtime-pytorch-cuda-py312-v2-25-push.yaml".pathChanged() || + "runtimes/pytorch/ubi9-python-3.12/**".pathChanged() || + "cuda/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-cuda-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-pytorch-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-pytorch-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-pytorch-cuda-py312 + - name: dockerfile + value: runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index e1bbf722b2..0000000000 --- a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: 
^/kfbuild\s+(all|odh\-pipeline\-runtime\-pytorch\-llmcompressor\-cuda\-py312\-ubi9|runtimes/pytorch\+llmcompressor/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "runtimes/pytorch+llmcompressor/ubi9-python-3.12/**".pathChanged() || "runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . 
- - name: build-args-file - value: runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml deleted file mode 100644 index 8e6c7bdd32..0000000000 --- a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,58 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml".pathChanged() || "runtimes/pytorch+llmcompressor/ubi9-python-3.12/**".pathChanged() || "runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: 
'{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..c20589ec37 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml @@ -0,0 +1,87 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) 
+ && ( ".tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml".pathChanged() || + "runtimes/pytorch-llmcompressor/ubi9-python-3.12/**".pathChanged()) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312 + - name: dockerfile + value: runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-pull-request.yaml b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-pull-request.yaml new file mode 100644 index 0000000000..e48ef1ca33 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-pull-request.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: 
odh-pipeline-runtime-pytorch-rocm-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-pipeline-runtime-pytorch-rocm-py312 + - name: dockerfile + value: runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-pull-request.yaml deleted file mode 100644 index af8f99ea9f..0000000000 --- a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-pull-request.yaml +++ /dev/null @@ 
-1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-pipeline\-runtime\-pytorch\-rocm\-py312\-ubi9|runtimes/rocm\-pytorch/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && (".tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-pull-request.yaml".pathChanged() || "runtimes/rocm-pytorch/ubi9-python-3.12/**".pathChanged() || "runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-pytorch-rocm-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-pytorch-rocm-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm - - name: path-context - value: . 
- - name: build-args-file - value: runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml deleted file mode 100644 index 156bc00a1e..0000000000 --- a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml +++ /dev/null @@ -1,47 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && (".tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml".pathChanged() || "runtimes/rocm-pytorch/ubi9-python-3.12/**".pathChanged() || "runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-pytorch-rocm-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-pipeline-runtime-pytorch-rocm-py312-ubi9:{{revision}} - - name: dockerfile - value: runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm - - name: build-args-file - value: runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-v2-25-push.yaml new file mode 100644 index 0000000000..4ddb063d90 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-v2-25-push.yaml @@ -0,0 +1,87 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && (".tekton/odh-pipeline-runtime-pytorch-rocm-py312-v2-25-push.yaml".pathChanged() || + "runtimes/rocm-pytorch/ubi9-python-3.12/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-rocm-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: 
odh-pipeline-runtime-pytorch-rocm-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-pytorch-rocm-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-pytorch-rocm-py312 + - name: dockerfile + value: runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-rocm-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-pull-request.yaml b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-pull-request.yaml new file mode 100644 index 0000000000..a547259385 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-pull-request.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix 
https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-tensorflow-cuda-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-pipeline-runtime-tensorflow-cuda-py312 + - name: dockerfile + value: runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index bac94bb45b..0000000000 --- a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: 
^/kfbuild\s+(all|odh\-pipeline\-runtime\-tensorflow\-cuda\-py312\-ubi9|runtimes/tensorflow/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && (".tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "runtimes/tensorflow/ubi9-python-3.12/**".pathChanged() || "runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . 
- - name: build-args-file - value: runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml deleted file mode 100644 index 179ec7e3c5..0000000000 --- a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,65 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -#test -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && (".tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml".pathChanged() || "runtimes/tensorflow/ubi9-python-3.12/**".pathChanged() || "runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: ecosystem-cert-preflight-checks - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - - pipelineTaskName: clair-scan - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..70c26245dc --- /dev/null +++ b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-v2-25-push.yaml @@ -0,0 +1,89 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && 
(".tekton/odh-pipeline-runtime-tensorflow-cuda-py312-v2-25-push.yaml".pathChanged() || + "runtimes/tensorflow/ubi9-python-3.12/**".pathChanged() || + "cuda/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-cuda-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-tensorflow-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-tensorflow-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-tensorflow-cuda-py312 + - name: dockerfile + value: runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-pull-request.yaml b/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-pull-request.yaml deleted file mode 100644 index 464038ff3f..0000000000 --- a/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-pipeline\-runtime\-tensorflow\-rocm\-py312\-ubi9|runtimes/rocm\-tensorflow/ubi9\-python\-3\.12) 
- pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-pull-request.yaml".pathChanged() || "runtimes/rocm-tensorflow/ubi9-python-3.12/**".pathChanged() || "runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm - - name: path-context - value: . 
- - name: build-args-file - value: runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml deleted file mode 100644 index 8109656a57..0000000000 --- a/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml +++ /dev/null @@ -1,79 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -#test -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml".pathChanged() || "runtimes/rocm-tensorflow/ubi9-python-3.12/**".pathChanged() || "runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - 
value: quay.io/opendatahub/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9:{{revision}} - - name: dockerfile - value: runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm - - name: build-args-file - value: runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - - pipelineTaskName: ecosystem-cert-preflight-checks - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - - pipelineTaskName: clair-scan - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-v2-25-push.yaml b/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-v2-25-push.yaml new file mode 100644 index 0000000000..2c61bec05f --- /dev/null +++ b/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-v2-25-push.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == 
"push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-pipeline-runtime-tensorflow-rocm-py312-v2-25-push.yaml".pathChanged() || + "runtimes/rocm-tensorflow/ubi9-python-3.12/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-rocm-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-tensorflow-rocm-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-pipeline-runtime-tensorflow-rocm-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-pipeline-runtime-tensorflow-rocm-py312 + - name: dockerfile + value: runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-rocm-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-pull-request.yaml b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-pull-request.yaml new file mode 100644 index 0000000000..bf572e9288 --- /dev/null +++ b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-pull-request.yaml @@ -0,0 +1,68 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build 
+ name: odh-workbench-codeserver-datascience-cpu-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-codeserver-datascience-cpu-py312 + - name: dockerfile + value: codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: codeserver/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-pull-request.yaml deleted file mode 100644 index 5cf0b2eb43..0000000000 --- a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,56 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: 
https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-codeserver\-datascience\-cpu\-py312\-ubi9|codeserver/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-pull-request.yaml".pathChanged() || "codeserver/ubi9-python-3.12/**".pathChanged() || "codeserver/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-codeserver-datascience-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-codeserver-datascience-cpu-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-codeserver-datascience-cpu-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - # https://github.com/redhat-appstudio/infra-deployments/blob/main/components/multi-platform-controller/production-downstream/stone-prod-p02/host-config.yaml - - linux-extra-fast/amd64 - - linux-m2xlarge/arm64 - - linux/ppc64le - - name: dockerfile - value: codeserver/ubi9-python-3.12/Dockerfile.cpu - - name: path-context - value: . 
- - name: build-args-file - value: codeserver/ubi9-python-3.12/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-codeserver-datascience-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml deleted file mode 100644 index 718971ee4c..0000000000 --- a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml +++ /dev/null @@ -1,49 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml".pathChanged() || "codeserver/ubi9-python-3.12/**".pathChanged() || "codeserver/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-codeserver-datascience-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-codeserver-datascience-cpu-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-workbench-codeserver-datascience-cpu-py312-ubi9:{{revision}} - - name: dockerfile - value: codeserver/ubi9-python-3.12/Dockerfile.cpu - - name: build-args-file - value: codeserver/ubi9-python-3.12/build-args/cpu.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - - name: build-platforms - value: - - linux-extra-fast/amd64 - - linux-m2xlarge/arm64 - pipelineRef: - name: multiarch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-codeserver-datascience-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-v2-25-push.yaml b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-v2-25-push.yaml new file mode 100644 index 0000000000..db4b41f94f --- /dev/null +++ b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-v2-25-push.yaml @@ -0,0 +1,69 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-codeserver-datascience-cpu-py312-v2-25-push.yaml".pathChanged() || + "codeserver/ubi9-python-3.12/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: 
odh-workbench-codeserver-datascience-cpu-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-codeserver-datascience-cpu-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-codeserver-datascience-cpu-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-codeserver-datascience-cpu-py312 + - name: dockerfile + value: codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: codeserver/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - linux-m2xlarge/arm64 + - linux/ppc64le + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-codeserver-datascience-cpu-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-pull-request.yaml new file mode 100644 index 0000000000..8d5c3af3f0 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-pull-request.yaml @@ -0,0 +1,69 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: 
https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-datascience-cpu-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-datascience-cpu-py312 + - name: dockerfile + value: jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - linux/s390x + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-pull-request.yaml deleted file mode 100644 index c96e2d1b93..0000000000 --- a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-jupyter\-datascience\-cpu\-py312\-ubi9|jupyter/datascience/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( 
".tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-datascience-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-datascience-cpu-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-datascience-cpu-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - linux/ppc64le - - linux/s390x - - name: dockerfile - value: jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-datascience-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml deleted file mode 100644 index 3f24de4d03..0000000000 --- a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml +++ /dev/null @@ -1,56 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-datascience-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-datascience-cpu-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - 
name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-datascience-cpu-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu - - name: build-args-file - value: jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-datascience-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-v2-25-push.yaml new file mode 100644 index 0000000000..a3adf2cb82 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-v2-25-push.yaml @@ -0,0 +1,71 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-datascience-cpu-py312-v2-25-push.yaml".pathChanged() || + "jupyter/utils/**".pathChanged() || + 
"jupyter/minimal/ubi9-python-3.12/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-datascience-cpu-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-datascience-cpu-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-datascience-cpu-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-datascience-cpu-py312 + - name: dockerfile + value: jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - linux/s390x + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-datascience-cpu-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-pull-request.yaml new file mode 100644 index 0000000000..03d0388378 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-pull-request.yaml @@ -0,0 +1,69 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-cpu-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - 
name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-minimal-cpu-py312 + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + - linux/s390x + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-pull-request.yaml deleted file mode 100644 index 06532aebe7..0000000000 --- a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,54 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - 
pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-jupyter\-minimal\-cpu\-py312\-ubi9|jupyter/minimal/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-minimal-cpu-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-minimal-cpu-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - linux/ppc64le - - name: dockerfile - value: jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml deleted file mode 100644 index baf9d3c4df..0000000000 --- a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-minimal-cpu-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-workbench-jupyter-minimal-cpu-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu - - name: build-args-file - value: jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-v2-25-push.yaml new file mode 100644 index 0000000000..6ac04ec2af --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-v2-25-push.yaml @@ -0,0 +1,70 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-minimal-cpu-py312-v2-25-push.yaml".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cpu-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: 
odh-workbench-jupyter-minimal-cpu-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-minimal-cpu-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-minimal-cpu-py312 + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux/ppc64le + - linux/s390x + - linux-m2xlarge/arm64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cpu-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-pull-request.yaml new file mode 100644 index 0000000000..473618960a --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-pull-request.yaml @@ -0,0 +1,87 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: 
'{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-cuda-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-minimal-cuda-py312 + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - linux-m2xlarge/arm64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index 33744914fb..0000000000 --- a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: 
^/kfbuild\s+(all|odh\-workbench\-jupyter\-minimal\-cuda\-py312\-ubi9|jupyter/minimal/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-minimal-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-minimal-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml deleted file mode 100644 index eb00e9b0d2..0000000000 --- a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-minimal-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-workbench-jupyter-minimal-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..1e43323138 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-v2-25-push.yaml @@ -0,0 +1,89 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-minimal-cuda-py312-v2-25-push.yaml".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || + "cuda/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cuda-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: 
odh-workbench-jupyter-minimal-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-minimal-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-minimal-cuda-py312 + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-pull-request.yaml new file mode 100644 index 0000000000..7fdf7786ab --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-pull-request.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux 
builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-rocm-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-minimal-rocm-py312 + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-pull-request.yaml deleted file mode 100644 index 846b6f1ff8..0000000000 --- a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: 
^/kfbuild\s+(all|odh\-workbench\-jupyter\-minimal\-rocm\-py312\-ubi9|jupyter/minimal/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-minimal-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-minimal-rocm-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-minimal-rocm-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml deleted file mode 100644 index 187434016c..0000000000 --- a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml +++ /dev/null @@ -1,59 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -# -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-minimal-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-minimal-rocm-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: 
quay.io/opendatahub/odh-workbench-jupyter-minimal-rocm-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm - - name: build-args-file - value: jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-v2-25-push.yaml new file mode 100644 index 0000000000..41dcfa83c6 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-v2-25-push.yaml @@ -0,0 +1,87 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-minimal-rocm-py312-v2-25-push.yaml".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/**".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 
+ appstudio.openshift.io/component: odh-workbench-jupyter-minimal-rocm-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-rocm-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-minimal-rocm-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-minimal-rocm-py312 + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-rocm-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-pull-request.yaml new file mode 100644 index 0000000000..b038c6d633 --- /dev/null +++ 
b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-pull-request.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-pytorch-cuda-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-pytorch-cuda-py312 + - name: dockerfile + value: jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index d4b2c0b249..0000000000 --- a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: 
^/kfbuild\s+(all|odh\-workbench\-jupyter\-pytorch\-cuda\-py312\-ubi9|jupyter/pytorch/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/pytorch/ubi9-python-3.12/**".pathChanged() || "cuda/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-pytorch-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml deleted file mode 100644 index 613c68d159..0000000000 --- a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,56 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/pytorch/ubi9-python-3.12/**".pathChanged() || "cuda/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: 
odh-workbench-jupyter-pytorch-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-pytorch-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..9eee3c2f7d --- /dev/null +++ b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-v2-25-push.yaml @@ -0,0 +1,92 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && 
target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-pytorch-cuda-py312-v2-25-push.yaml".pathChanged() || + "jupyter/pytorch/ubi9-python-3.12/**".pathChanged() || + "cuda/**".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-cuda-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-pytorch-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-pytorch-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-pytorch-cuda-py312 + - name: dockerfile + value: jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index ae2af40f36..0000000000 --- a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: 
^/kfbuild\s+(all|odh\-workbench\-jupyter\-pytorch\-llmcompressor\-cuda\-py312\-ubi9|jupyter/pytorch\+llmcompressor/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/pytorch+llmcompressor/ubi9-python-3.12/**".pathChanged() || "jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml deleted file mode 100644 index 20ebad84c2..0000000000 --- a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,56 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/pytorch+llmcompressor/ubi9-python-3.12/**".pathChanged() || "jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) - creationTimestamp: - labels: - 
appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..75b7714d1c --- /dev/null +++ b/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml @@ -0,0 +1,93 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: 
'{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25-push.yaml".pathChanged() || + "jupyter/pytorch+llmcompressor/ubi9-python-3.12/**".pathChanged() || + "cuda/**".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312 + - name: dockerfile + value: jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-pull-request.yaml new file mode 100644 index 0000000000..4a17701643 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-pull-request.yaml @@ -0,0 +1,86 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + 
name: odh-workbench-jupyter-pytorch-rocm-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-pytorch-rocm-py312 + - name: dockerfile + value: jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-pull-request.yaml deleted file mode 100644 index 9339bb1572..0000000000 --- a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-pull-request.yaml +++ 
/dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-jupyter\-pytorch\-rocm\-py312\-ubi9|jupyter/rocm/pytorch/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/rocm/pytorch/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-pytorch-rocm-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - 
name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-rocm-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm - - name: path-context - value: . - - name: build-args-file - value: jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml deleted file mode 100644 index 1d2dd7cf63..0000000000 --- a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml +++ /dev/null @@ -1,56 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml".pathChanged() || "jupyter/rocm/pytorch/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || 
"jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-pytorch-rocm-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-rocm-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm - - name: build-args-file - value: jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - taskRunSpecs: - - pipelineTaskName: build-container - stepSpecs: - - name: build - computeResources: - requests: - cpu: '8' - memory: 16Gi - limits: - cpu: '16' - memory: 32Gi - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-v2-25-push.yaml new file mode 100644 index 0000000000..a9e3308e57 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-v2-25-push.yaml @@ -0,0 +1,91 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: 
https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-pytorch-rocm-py312-v2-25-push.yaml".pathChanged() || + "jupyter/rocm/pytorch/ubi9-python-3.12/**".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-rocm-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-pytorch-rocm-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-pytorch-rocm-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-pytorch-rocm-py312 + - name: dockerfile + value: jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-rocm-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-pull-request.yaml new file mode 100644 index 0000000000..0799aecf23 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-pull-request.yaml @@ -0,0 +1,87 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: 
odh-workbench-jupyter-tensorflow-cuda-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-tensorflow-cuda-py312 + - name: dockerfile + value: jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - linux-m2xlarge/arm64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-pull-request.yaml deleted file mode 100644 index 07e9989026..0000000000 --- 
a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-jupyter\-tensorflow\-cuda\-py312\-ubi9|jupyter/tensorflow/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/tensorflow/ubi9-python-3.12/**".pathChanged() || "jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - 
name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda - - name: path-context - value: . - - name: build-args-file - value: jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml deleted file mode 100644 index 858ceb2f7f..0000000000 --- a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/tensorflow/ubi9-python-3.12/**".pathChanged() || "jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf".pathChanged() || "jupyter/utils/**".pathChanged() || 
"jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda - - name: build-args-file - value: jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf - - name: path-context - value: . 
- - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-cuda-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v2-25-push.yaml new file mode 100644 index 0000000000..7179909329 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v2-25-push.yaml @@ -0,0 +1,93 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v2-25-push.yaml".pathChanged() || + "jupyter/tensorflow/ubi9-python-3.12/**".pathChanged() || + "cuda/**".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-cuda-py312-v2-25 + 
pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-tensorflow-cuda-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-tensorflow-cuda-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-tensorflow-cuda-py312 + - name: dockerfile + value: jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda + - name: build-args-file + value: jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-d160-m4xlarge/arm64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-cuda-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-pull-request.yaml deleted file mode 100644 index 8639e39e36..0000000000 --- 
a/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-jupyter\-tensorflow\-rocm\-py312\-ubi9|jupyter/rocm/tensorflow/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-pull-request.yaml".pathChanged() || "jupyter/rocm/tensorflow/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - 
params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm - - name: path-context - value: . - - name: build-args-file - value: jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml deleted file mode 100644 index 6f0970bdc4..0000000000 --- a/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml +++ /dev/null @@ -1,47 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml".pathChanged() || "jupyter/rocm/tensorflow/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || 
"jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm - - name: build-args-file - value: jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf - - name: path-context - value: . 
- - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-rocm-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-v2-25-push.yaml new file mode 100644 index 0000000000..1ca6f6db1f --- /dev/null +++ b/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-v2-25-push.yaml @@ -0,0 +1,91 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( ".tekton/odh-workbench-jupyter-tensorflow-rocm-py312-v2-25-push.yaml".pathChanged() || + "jupyter/rocm/tensorflow/ubi9-python-3.12/**".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-rocm-py312-v2-25 + 
pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-tensorflow-rocm-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-tensorflow-rocm-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-tensorflow-rocm-py312 + - name: dockerfile + value: jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm + - name: build-args-file + value: jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + timeouts: + pipeline: 8h + tasks: 4h + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-rocm-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-pull-request.yaml b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-pull-request.yaml new file mode 100644 index 0000000000..7c930250c0 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-pull-request.yaml @@ -0,0 +1,66 @@ +apiVersion: 
tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "true" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-comment: "^/build-konflux" + pipelinesascode.tekton.dev/on-event: "[pull_request]" + labels: + appstudio.openshift.io/application: automation + appstudio.openshift.io/component: pull-request-pipelines-notebooks + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-trustyai-cpu-py312-on-pull-request + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/pull-request-pipelines:notebooks-{{revision}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=on-pr-{{revision}} + - io.openshift.tags=odh-workbench-jupyter-trustyai-cpu-py312 + - name: dockerfile + value: jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux-extra-fast/amd64 + - name: image-expires-after + value: 5d + - name: enable-slack-failure-notification + value: "false" + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-pull-request-pipelines + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-pull-request.yaml deleted file mode 100644 index d9effe1e8b..0000000000 --- a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-pull-request.yaml +++ /dev/null @@ -1,54 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-jupyter\-trustyai\-cpu\-py312\-ubi9|jupyter/trustyai/ubi9\-python\-3\.12) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-pull-request.yaml".pathChanged() || 
"jupyter/trustyai/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-trustyai-cpu-py312-ubi9 - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-trustyai-cpu-py312-ubi9-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - linux/ppc64le - - name: dockerfile - value: jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu - - name: path-context - value: . 
- - name: build-args-file - value: jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-trustyai-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml deleted file mode 100644 index 4917c4e9ee..0000000000 --- a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml +++ /dev/null @@ -1,47 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml".pathChanged() || "jupyter/trustyai/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-jupyter-trustyai-cpu-py312-ubi9 - 
pipelines.appstudio.openshift.io/type: build - name: odh-workbench-jupyter-trustyai-cpu-py312-ubi9-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9:{{revision}} - - name: dockerfile - value: jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu - - name: build-args-file - value: jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-jupyter-trustyai-cpu-py312-ubi9 - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-v2-25-push.yaml b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-v2-25-push.yaml new file mode 100644 index 0000000000..5780eb24de --- /dev/null +++ b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-v2-25-push.yaml @@ -0,0 +1,74 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "build/operator-nudging.yaml" + pipelinesascode.tekton.dev/on-cel-expression: | + event == "push" + && target_branch == "rhoai-2.25" + && !("manifests/base/params-latest.env".pathChanged()) + && ( 
".tekton/odh-workbench-jupyter-trustyai-cpu-py312-v2-25-push.yaml".pathChanged() || + "jupyter/trustyai/ubi9-python-3.12/**".pathChanged() || + "jupyter/utils/**".pathChanged() || + "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || + "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + labels: + appstudio.openshift.io/application: rhoai-v2-25 + appstudio.openshift.io/component: odh-workbench-jupyter-trustyai-cpu-py312-v2-25 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-trustyai-cpu-py312-v2-25-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/rhoai/odh-workbench-jupyter-trustyai-cpu-py312-rhel9:{{target_branch}} + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - name: additional-labels + value: + - version=v2.25.0 + - io.openshift.tags=odh-workbench-jupyter-trustyai-cpu-py312 + - name: dockerfile + value: jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu + - name: build-args-file + value: jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . 
+ - name: hermetic + value: false + - name: build-image-index + value: true + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - linux/ppc64le + pipelineRef: + resolver: git + params: + - name: url + value: https://github.com/red-hat-data-services/konflux-central.git + - name: revision + value: '{{ target_branch }}' + - name: pathInRepo + value: pipelines/multi-arch-container-build.yaml + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-trustyai-cpu-py312-v2-25 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-pull-request.yaml b/.tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-pull-request.yaml deleted file mode 100644 index 26d217e61f..0000000000 --- a/.tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-rstudio\-minimal\-cpu\-py311\-c9s|rstudio/c9s\-python\-3\.11) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-pull-request.yaml".pathChanged() || "rstudio/c9s-python-3.11/**".pathChanged() || 
"rstudio/c9s-python-3.11/build-args/cpu.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cpu-py311-c9s - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-rstudio-minimal-cpu-py311-c9s-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cpu-py311-c9s:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: rstudio/c9s-python-3.11/Dockerfile.cpu - - name: path-context - value: . - - name: build-args-file - value: rstudio/c9s-python-3.11/build-args/cpu.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cpu-py311-c9s - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-push.yaml deleted file mode 100644 index 3bfe038fc7..0000000000 --- a/.tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-push.yaml +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -#test1 -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - 
pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-rstudio-minimal-cpu-py311-c9s-push.yaml".pathChanged() || "rstudio/c9s-python-3.11/**".pathChanged() || "rstudio/c9s-python-3.11/build-args/cpu.conf".pathChanged() ) - creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cpu-py311-c9s - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-rstudio-minimal-cpu-py311-c9s-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cpu-py311-c9s:{{revision}} - - name: dockerfile - value: rstudio/c9s-python-3.11/Dockerfile.cpu - - name: build-args-file - value: rstudio/c9s-python-3.11/build-args/cpu.conf - - name: path-context - value: . 
- - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cpu-py311-c9s - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-pull-request.yaml b/.tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-pull-request.yaml deleted file mode 100644 index 8d2d5c39e4..0000000000 --- a/.tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-pull-request.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# yamllint disable-file -# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: 'true' - pipelinesascode.tekton.dev/max-keep-runs: '3' - pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-rstudio\-minimal\-cuda\-py311\-c9s|rstudio/c9s\-python\-3\.11) - pipelinesascode.tekton.dev/on-cel-expression: | - event == "pull_request" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-pull-request.yaml".pathChanged() || "rstudio/c9s-python-3.11/**".pathChanged() || "rstudio/c9s-python-3.11/build-args/cuda.conf".pathChanged() ) - && body.repository.full_name == "opendatahub-io/notebooks" - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cuda-py311-c9s - pipelines.appstudio.openshift.io/type: build - name: 
odh-workbench-rstudio-minimal-cuda-py311-c9s-on-pull-request - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 3h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cuda-py311-c9s:on-pr-{{revision}} - - name: image-expires-after - value: 5d - - name: build-platforms - value: - - linux/x86_64 - - name: dockerfile - value: rstudio/c9s-python-3.11/Dockerfile.cuda - - name: path-context - value: . - - name: build-args-file - value: rstudio/c9s-python-3.11/build-args/cuda.conf - pipelineRef: - name: multiarch-pull-request-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cuda-py311-c9s - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-push.yaml deleted file mode 100644 index 53214d3002..0000000000 --- a/.tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-push.yaml +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -#test -metadata: - annotations: - build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} - build.appstudio.redhat.com/commit_sha: '{{revision}}' - build.appstudio.redhat.com/target_branch: '{{target_branch}}' - pipelinesascode.tekton.dev/cancel-in-progress: "false" - pipelinesascode.tekton.dev/max-keep-runs: "3" - build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" - pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-rstudio-minimal-cuda-py311-c9s-push.yaml".pathChanged() || "rstudio/c9s-python-3.11/**".pathChanged() || "rstudio/c9s-python-3.11/build-args/cuda.conf".pathChanged() ) - 
creationTimestamp: - labels: - appstudio.openshift.io/application: opendatahub-release - appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cuda-py311-c9s - pipelines.appstudio.openshift.io/type: build - name: odh-workbench-rstudio-minimal-cuda-py311-c9s-on-push - namespace: open-data-hub-tenant -spec: - timeouts: - pipeline: 8h - params: - - name: git-url - value: '{{source_url}}' - - name: revision - value: '{{revision}}' - - name: output-image - value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cuda-py311-c9s:{{revision}} - - name: dockerfile - value: rstudio/c9s-python-3.11/Dockerfile.cuda - - name: build-args-file - value: rstudio/c9s-python-3.11/build-args/cuda.conf - - name: path-context - value: . - - name: additional-tags - value: - - '{{target_branch}}-{{revision}}' - - 2025b-v1.36 - pipelineRef: - name: singlearch-push-pipeline - taskRunTemplate: - serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cuda-py311-c9s - workspaces: - - name: git-auth - secret: - secretName: '{{ git_auth_secret }}' -status: {} diff --git a/.tekton/singlearch-push-pipeline.yaml b/.tekton/singlearch-push-pipeline.yaml deleted file mode 100644 index 978d22be85..0000000000 --- a/.tekton/singlearch-push-pipeline.yaml +++ /dev/null @@ -1,642 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: Pipeline -metadata: - labels: - appstudio.openshift.io/application: opendatahub-release - pipelines.appstudio.openshift.io/type: build - name: singlearch-push-pipeline - namespace: open-data-hub-tenant -spec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. - - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. 
EC will pass the [`trusted_task.trusted`](https://enterprisecontract.dev/docs/ec-policies/release_policy.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. - This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - finally: - - name: show-sbom - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - taskRef: - params: - - name: name - value: show-sbom - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-show-sbom:0.1@sha256:beb0616db051952b4b861dd8c3e00fa1c0eccbd926feddf71194d3bb3ace9ce7 - - name: kind - value: task - resolver: bundles - - name: send-slack-notification - params: - - name: message - value: "$(tasks.rhoai-init.results.slack-message-failure-text)" - - name: secret-name - value: slack-secret - - name: key-name - value: slack-webhook - taskRef: - params: - - name: name - value: slack-webhook-notification - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-slack-webhook-notification:0.1@sha256:4e68fe2225debc256d403b828ed358345bb56d03327b46d55cb6c42911375750 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.status) - operator: in - values: - - "Failed" - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . - description: Path to the source code of an application's component from where to build image. 
- name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter path-context - name: dockerfile - type: string - - default: "true" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched by Cachi2 - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - - default: "false" - description: Build a source image. - name: build-source-image - type: string - - default: "false" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote VMs - name: privileged-nested - type: string - - name: buildah-format - default: docker - type: string - description: The format for the resulting image's mediaType. Valid values are oci or docker. 
- results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: rhoai-init - params: - - name: pipelinerun-name - value: "$(context.pipelineRun.name)" - taskSpec: - results: - - description: Notification text to be posted to slack - name: slack-message-failure-text - steps: - - image: quay.io/rhoai-konflux/alpine:latest - name: rhoai-init - env: - - name: slack_message - valueFrom: - secretKeyRef: - name: slack-secret - key: slack-component-failure-notification - script: | - pipelinerun_name=$(params.pipelinerun-name) - echo "pipelinerun-name = $pipelinerun_name" - application_name=opendatahub-release - echo "application-name = $application_name" - - component_name=${pipelinerun_name/-on-*/} - echo "component-name = $component_name" - - KONFLUX_SERVER="https://konflux-ui.apps.stone-prd-rh01.pg1f.p1.openshiftapps.com" - build_url="${KONFLUX_SERVER}/ns/open-data-hub-tenant/applications/${application_name}/pipelineruns/${pipelinerun_name}/logs" - - build_time="$(date +%Y-%m-%dT%H:%M:%S)" - - slack_message=${slack_message/__BUILD__URL__/$build_url} - slack_message=${slack_message/__PIPELINERUN__NAME__/$pipelinerun_name} - slack_message=${slack_message/__BUILD__TIME__/$build_time} - - echo -en "${slack_message}" > "$(results.slack-message-failure-text.path)" - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:abf231cfc5a68b56f68a8ac9bb26dca3c3e434c88dd9627c72bdec0b8c335c67 - - name: 
kind - value: task - resolver: bundles - runAfter: - - rhoai-init - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3f1b468066b301083d8550e036f5a654fcb064810bd29eb06fec6d8ad3e35b9c - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:dc82a7270aace9b1c26f7e96f8ccab2752e53d32980c41a45e1733baad76cde6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - name: build-container - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE 
- value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.5@sha256:6ec006242975a17388bfe813e2afd0ae721dd013247580c0d988e3c4a9c7f867 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-container - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:79784d53749584bc5a8de32142ec4e2f01cdbf42c20d94e59280e0b927c8597d - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - - name: BINARY_IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - 
taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:b0d6cb28a23f20db4f5cf78ed78ae3a91b9a5adfe989696ed0bbc63840a485b6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:a7cc183967f89c4ac100d04ab8f81e54733beee60a0528208107c9a22d3c43af - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:dae8e28761cee4ab0baf04ab9f8f1a4b3cee3c7decf461fda2bacc5c01652a60 - - 
name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:783f5de1b4def2fb3fad20b914f4b3afee46ffb8f652114946e321ef3fa86449 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:b0bd59748cda4a7abf311e4f448e6c1d00c6b6d8c0ecc1c2eb33e08dc0e0b802 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: 
$(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:cdbe1a968676e4f5519b082bf1e27a4cdcf66dd60af66dbc26b3e604f957f7e9 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:db2b267dc15e4ed17f704ee91b8e9b38068e1a35b1018a328fdca621819d74c6 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:bf7bdde00b7212f730c1356672290af6f38d070da2c8a316987b5c32fd49e0b9 - - name: kind - value: task - resolver: bundles - when: - - input: 
$(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:a2bde66f6b4164620298c7d709b8f08515409404000fa1dc2260d2508b135651 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: - - name: ADDITIONAL_TAGS - value: - - $(params.additional-tags[*]) - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:49f778479f468e71c2cfef722e96aa813d7ef98bde8a612e1bf1a13cd70849ec - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:1b6c20ab3dbfb0972803d3ebcb2fa72642e59400c77bd66dfd82028bdd09e120 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true diff --git a/ci/check-params-env.sh b/ci/check-params-env.sh index 5f7f03bae9..71d6c534da 100755 --- a/ci/check-params-env.sh +++ b/ci/check-params-env.sh @@ -27,7 +27,7 @@ PARAMS_ENV_PATH="manifests/base/params.env" # This value needs to be updated everytime we deliberately change number of the # images we want to have in the `params.env` or `params-latest.env` file. -EXPECTED_NUM_RECORDS=46 +EXPECTED_NUM_RECORDS=67 EXPECTED_ADDI_RUNTIME_RECORDS=0 # Number of attempts for the skopeo tool to gather data from the repository. 
@@ -117,13 +117,43 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=1217 + expected_img_size=1219 ;; odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="jupyter-minimal-ubi9-python-3.11-amd64" - expected_img_size=503 + expected_img_size=528 + ;; + odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-2) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.9" + expected_commitref="release-2024a" + expected_build_name="jupyter-minimal-ubi9-python-3.9-amd64" + expected_img_size=489 + ;; + odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-3) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.9" + expected_commitref="release-2023b" + expected_build_name="jupyter-minimal-ubi9-python-3.9-amd64" + expected_img_size=486 + ;; + odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-4) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.9" + expected_commitref="release-2023a" + expected_build_name="jupyter-minimal-ubi9-python-3.9-amd64" + expected_img_size=475 + ;; + odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-5) + expected_name="odh-notebook-jupyter-minimal-ubi8-python-3.8" + expected_commitref="release-1.2" + expected_build_name="jupyter-minimal-ubi8-python-3.8-amd64" + expected_img_size=479 + ;; + odh-workbench-jupyter-minimal-cpu-py312-ubi9-n) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.12" + expected_commitref="main" + expected_build_name="konflux" + expected_img_size=1213 ;; odh-workbench-jupyter-minimal-cpu-py312-ubi9-n) expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.12" @@ -135,49 +165,127 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-jupyter-cuda-minimal-ubi9-python-3.11" 
expected_commitref="main" expected_build_name="konflux" - expected_img_size=5624 + expected_img_size=5614 ;; odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="cuda-jupyter-minimal-ubi9-python-3.11-amd64" expected_img_size=5157 ;; + odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-2) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.9" + expected_commitref="release-2024a" + expected_build_name="cuda-jupyter-minimal-ubi9-python-3.9-amd64" + expected_img_size=6026 + ;; + odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-3) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.9" + expected_commitref="release-2023b" + expected_build_name="cuda-jupyter-minimal-ubi9-python-3.9-amd64" + expected_img_size=5326 + ;; + odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-4) + expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.9" + expected_commitref="release-2023a" + expected_build_name="cuda-jupyter-minimal-ubi9-python-3.9-amd64" + expected_img_size=5038 + ;; + odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-5) + expected_name="odh-notebook-jupyter-minimal-ubi8-python-3.8" + expected_commitref="release-1.2" + expected_build_name="cuda-jupyter-minimal-ubi8-python-3.8-amd64" + expected_img_size=5333 + ;; odh-workbench-jupyter-minimal-cuda-py312-ubi9-n) expected_name="odh-notebook-jupyter-cuda-minimal-ubi9-python-3.12" expected_commitref="main" expected_build_name="konflux" - expected_img_size=3370 + expected_img_size=3357 ;; odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n) expected_name="odh-notebook-jupyter-cuda-pytorch-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=9226 + expected_img_size=9224 ;; odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-pytorch-ubi9-python-3.11" - expected_commitref="2024b" + 
expected_commitref="release-2024b" expected_build_name="jupyter-pytorch-ubi9-python-3.11-amd64" expected_img_size=8571 ;; + odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-2) + expected_name="odh-notebook-jupyter-pytorch-ubi9-python-3.9" + expected_commitref="release-2024a" + expected_build_name="jupyter-pytorch-ubi9-python-3.9-amd64" + expected_img_size=9354 + ;; + odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-3) + expected_name="odh-notebook-jupyter-pytorch-ubi9-python-3.9" + expected_commitref="release-2023b" + expected_build_name="jupyter-pytorch-ubi9-python-3.9-amd64" + expected_img_size=8711 + ;; + odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-4) + expected_name="odh-notebook-jupyter-pytorch-ubi9-python-3.9" + expected_commitref="release-2023a" + expected_build_name="jupyter-pytorch-ubi9-python-3.9-amd64" + expected_img_size=7130 + ;; + odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-5) + expected_name="odh-notebook-cuda-jupyter-pytorch-ubi8-python-3.8" + expected_commitref="release-1.2" + expected_build_name="jupyter-pytorch-ubi8-python-3.8-amd64" + expected_img_size=6592 + ;; odh-workbench-jupyter-pytorch-cuda-py312-ubi9-n) expected_name="odh-notebook-jupyter-cuda-pytorch-ubi9-python-3.12" expected_commitref="main" expected_build_name="konflux" - expected_img_size=6977 + expected_img_size=6964 ;; odh-workbench-jupyter-datascience-cpu-py311-ubi9-n) expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=1667 + expected_img_size=1665 ;; odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="jupyter-datascience-ubi9-python-3.11-amd64" - expected_img_size=904 + expected_img_size=961 + ;; + odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-2) + expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.9" + 
expected_commitref="release-2024a" + expected_build_name="jupyter-datascience-ubi9-python-3.9-amd64" + expected_img_size=890 + ;; + odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-3) + expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.9" + expected_commitref="release-2023b" + expected_build_name="jupyter-datascience-ubi9-python-3.9-amd64" + expected_img_size=883 + ;; + odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-4) + expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.9" + expected_commitref="release-2023a" + expected_build_name="jupyter-datascience-ubi9-python-3.9-amd64" + expected_img_size=685 + ;; + odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-5) + expected_name="odh-notebook-jupyter-datascience-ubi8-python-3.8" + expected_commitref="release-1.2" + expected_build_name="jupyter-datascience-ubi8-python-3.8-amd64" + expected_img_size=865 + ;; + odh-workbench-jupyter-datascience-cpu-py312-ubi9-n) + expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.12" + expected_commitref="main" + expected_build_name="konflux" + expected_img_size=1658 ;; odh-workbench-jupyter-datascience-cpu-py312-ubi9-n) expected_name="odh-notebook-jupyter-datascience-ubi9-python-3.12" @@ -189,32 +297,74 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=8652 + expected_img_size=8638 ;; odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-1) expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="cuda-jupyter-tensorflow-ubi9-python-3.11-amd64" expected_img_size=8211 ;; + odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-2) + expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.9" + expected_commitref="release-2024a" + 
expected_build_name="cuda-jupyter-tensorflow-ubi9-python-3.9-amd64" + expected_img_size=6984 + ;; + odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-3) + expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.9" + expected_commitref="release-2023b" + expected_build_name="cuda-jupyter-tensorflow-ubi9-python-3.9-amd64" + expected_img_size=6301 + ;; + odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-4) + expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.9" + expected_commitref="release-2023a" + expected_build_name="cuda-jupyter-tensorflow-ubi9-python-3.9-amd64" + expected_img_size=5927 + ;; + odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-5) + expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi8-python-3.8" + expected_commitref="release-1.2" + expected_build_name="cuda-jupyter-tensorflow-ubi8-python-3.8-amd64" + expected_img_size=6309 + ;; odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-n) expected_name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.12" expected_commitref="main" expected_build_name="konflux" - expected_img_size=6432 + expected_img_size=6373 ;; odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n) expected_name="odh-notebook-jupyter-trustyai-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=5015 + expected_img_size=5010 ;; odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-trustyai-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="jupyter-trustyai-ubi9-python-3.11-amd64" expected_img_size=4197 ;; + odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-2) + expected_name="odh-notebook-jupyter-trustyai-ubi9-python-3.9" + expected_commitref="release-2024a" + expected_build_name="jupyter-trustyai-ubi9-python-3.9-amd64" + expected_img_size=1123 + ;; + odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-3) + expected_name="odh-notebook-jupyter-trustyai-ubi9-python-3.9" + 
expected_commitref="release-2023b" + expected_build_name="jupyter-trustyai-ubi9-python-3.9-amd64" + expected_img_size=1057 + ;; + odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-4) + expected_name="odh-notebook-jupyter-trustyai-ubi9-python-3.9" + expected_commitref="release-2023a" + expected_build_name="jupyter-trustyai-ubi9-python-3.9-amd64" + expected_img_size=883 + ;; odh-workbench-jupyter-trustyai-cpu-py312-ubi9-n) expected_name="odh-notebook-jupyter-trustyai-ubi9-python-3.12" expected_commitref="main" @@ -225,13 +375,25 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-code-server-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=979 + expected_img_size=893 ;; odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-1) expected_name="odh-notebook-code-server-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="codeserver-ubi9-python-3.11-amd64" - expected_img_size=850 + expected_img_size=893 + ;; + odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-2) + expected_name="odh-notebook-code-server-ubi9-python-3.9" + expected_commitref="release-2024a" + expected_build_name="codeserver-ubi9-python-3.9-amd64" + expected_img_size=837 + ;; + odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-3) + expected_name="odh-notebook-code-server-ubi9-python-3.9" + expected_commitref="release-2023b" + expected_build_name="codeserver-ubi9-python-3.9-amd64" + expected_img_size=778 ;; odh-workbench-codeserver-datascience-cpu-py312-ubi9-n) expected_name="odh-notebook-code-server-ubi9-python-3.12" @@ -245,42 +407,15 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_build_name="konflux" expected_img_size=6977 ;; - odh-workbench-rstudio-minimal-cpu-py311-c9s-n) - expected_name="odh-notebook-rstudio-server-c9s-python-3.11" - expected_commitref="main" - expected_build_name="konflux" - expected_img_size=1377 - 
;; - odh-workbench-rstudio-minimal-cpu-py311-c9s-n-1) - expected_name="odh-notebook-rstudio-server-c9s-python-3.11" - expected_commitref="2024b" - expected_build_name="rstudio-c9s-python-3.11-amd64" - expected_img_size=1208 - ;; - # For both RStudio GPU workbenches - the final name labels are identical to plain RStudio ones - # This is because the very same RStudio Dockerfile is used but different base images in both cases - # We should consider what to do with this - in ideal case, we should have different labels for these cases. - odh-workbench-rstudio-minimal-cuda-py311-c9s-n) - expected_name="odh-notebook-rstudio-server-cuda-c9s-python-3.11" - expected_commitref="main" - expected_build_name="konflux" - expected_img_size=6541 - ;; - odh-workbench-rstudio-minimal-cuda-py311-c9s-n-1) - expected_name="odh-notebook-rstudio-server-c9s-python-3.11" - expected_commitref="2024b" - expected_build_name="cuda-rstudio-c9s-python-3.11-amd64" - expected_img_size=7184 - ;; odh-workbench-jupyter-minimal-rocm-py311-ubi9-n) expected_name="odh-notebook-jupyter-rocm-minimal-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=6478 + expected_img_size=6480 ;; odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-minimal-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="rocm-jupyter-minimal-ubi9-python-3.11-amd64" expected_img_size=4830 ;; @@ -294,11 +429,11 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-jupyter-rocm-pytorch-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=8135 + expected_img_size=8133 ;; odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-rocm-pytorch-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="rocm-jupyter-pytorch-ubi9-python-3.11-amd64" 
expected_img_size=6571 ;; @@ -312,11 +447,11 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-jupyter-rocm-tensorflow-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=7432 + expected_img_size=7430 ;; odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-1) expected_name="odh-notebook-jupyter-rocm-tensorflow-ubi9-python-3.11" - expected_commitref="2024b" + expected_commitref="release-2024b" expected_build_name="rocm-jupyter-tensorflow-ubi9-python-3.11-amd64" expected_img_size=5782 ;; @@ -325,7 +460,7 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-runtime-minimal-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=579 + expected_img_size=570 ;; odh-pipeline-runtime-minimal-cpu-py312-ubi9-n) expected_name="odh-notebook-runtime-minimal-ubi9-python-3.12" @@ -337,7 +472,7 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-runtime-datascience-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=964 + expected_img_size=954 ;; odh-pipeline-runtime-datascience-cpu-py312-ubi9-n) expected_name="odh-notebook-runtime-datascience-ubi9-python-3.12" @@ -349,19 +484,19 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-runtime-pytorch-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=8515 + expected_img_size=8506 ;; odh-pipeline-runtime-pytorch-cuda-py312-ubi9-n) expected_name="odh-notebook-runtime-pytorch-ubi9-python-3.12" expected_commitref="main" expected_build_name="konflux" - expected_img_size=6265 + expected_img_size=6253 ;; odh-pipeline-runtime-pytorch-rocm-py311-ubi9-n) expected_name="odh-notebook-runtime-rocm-pytorch-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - 
expected_img_size=7423 + expected_img_size=7413 ;; odh-pipeline-runtime-pytorch-rocm-py312-ubi9-n) expected_name="odh-notebook-runtime-rocm-pytorch-ubi9-python-3.12" @@ -373,19 +508,19 @@ function check_image_variable_matches_name_and_commitref_and_size() { expected_name="odh-notebook-cuda-runtime-tensorflow-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=7938 + expected_img_size=7917 ;; odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-n) expected_name="odh-notebook-cuda-runtime-tensorflow-ubi9-python-3.12" expected_commitref="main" expected_build_name="konflux" - expected_img_size=5715 + expected_img_size=5658 ;; odh-pipeline-runtime-tensorflow-rocm-py311-ubi9-n) expected_name="odh-notebook-rocm-runtime-tensorflow-ubi9-python-3.11" expected_commitref="main" expected_build_name="konflux" - expected_img_size=6714 + expected_img_size=6705 ;; odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-n) expected_name="odh-notebook-runtime-cuda-pytorch-llmcompressor-ubi9-python-3.12" @@ -579,7 +714,7 @@ function check_image() { echo "---------------------------------------------" } -# ------------------------------ MAIN SCRIPT --------------------------------- # +# ------------------------------ release-1.2 SCRIPT --------------------------------- # ret_code=0 diff --git a/codeserver/ubi9-python-3.12/.trigger_build.txt b/codeserver/ubi9-python-3.12/.trigger_build.txt new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/codeserver/ubi9-python-3.12/.trigger_build.txt @@ -0,0 +1 @@ + diff --git a/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu b/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu new file mode 100644 index 0000000000..50225a32fe --- /dev/null +++ b/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -0,0 +1,284 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# rpm-base # +#################### +# e.g., 
registry.access.redhat.com/ubi9/python-312:latest +FROM ${BASE_IMAGE} AS rpm-base + +USER root +WORKDIR /root + +ENV HOME=/root + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +ARG CODESERVER_SOURCE_CODE=codeserver/ubi9-python-3.12 + +ARG NODE_VERSION=22.18.0 + +ARG CODESERVER_VERSION=v4.104.0 + +COPY ${CODESERVER_SOURCE_CODE}/get_code_server_rpm.sh . + +# create dummy file to ensure this stage is awaited before installing rpm +RUN ./get_code_server_rpm.sh && touch /tmp/control + +####################### +# wheel caching stage # +####################### +FROM registry.access.redhat.com/ubi9/python-312:latest AS whl-cache + +USER root +WORKDIR /root + +ENV HOME=/root + +ARG CODESERVER_SOURCE_CODE=codeserver/ubi9-python-3.12 + +# copy requirements and scripts +COPY ${CODESERVER_SOURCE_CODE}/pylock.toml ./ +COPY ${CODESERVER_SOURCE_CODE}/devel_env_setup.sh ./ + +# This stage installs (builds) all the packages needed and caches it in uv-cache +# Important: Since HOME & USER for the python-312 has been changed, +# we need to ensure the same cache directory is mounted in +# the final stage with the necessary permissions to consume from cache +RUN --mount=type=cache,target=/root/.cache/uv \ + pip install --no-cache uv && \ + # the devel script is ppc64le specific - sets up build-time dependencies + source ./devel_env_setup.sh && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where 
building is common. + UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml + +# dummy file to make image build wait for this stage +RUN touch /tmp/control + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# (ARCH-ppc64le): since wheels are compiled from source, we need shared libs available at runtime +RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS,target=/OpenBlas,rw \ + bash -c ' \ + if [[ $(uname -m) == "ppc64le" ]]; then \ + dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm; \ + dnf install -y lcms2 libraqm libimagequant openjpeg2; \ + 
PREFIX=/usr/ make install -C /OpenBlas; \ + fi ' + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +#################### +# codeserver # +#################### +FROM cpu-base AS codeserver + +ARG TARGETOS +ARG TARGETARCH + +ARG CODESERVER_SOURCE_CODE=codeserver/ubi9-python-3.12 +ARG CODESERVER_VERSION=v4.104.0 + +LABEL name="rhoai/odh-workbench-codeserver-datascience-cpu-py312-rhel9" \ + com.redhat.component="odh-workbench-codeserver-datascience-cpu-py312-rhel9" \ + io.k8s.display-name="odh-workbench-codeserver-datascience-cpu-py312-rhel9" \ + summary="code-server image with python 3.12 based on UBI 9" \ + description="code-server image with python 3.12 based on UBI9" \ + io.k8s.description="code-server image with python 3.11 based on UBI9" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" + +USER 0 + +WORKDIR /opt/app-root/bin + +# Install useful OS packages +RUN dnf install -y jq git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum + +# wait for rpm-base stage (rpm builds for ppc64le) +COPY --from=rpm-base /tmp/control /dev/null + +# Install code-server +# Note: Use cache mounts, bind mounts fail on konflux +# https://redhat-internal.slack.com/archives/C04PZ7H0VA8/p1755628065772589?thread_ts=1755597929.335999&cid=C04PZ7H0VA8 +RUN --mount=type=cache,from=rpm-base,source=/tmp/,target=/code-server-rpm/,rw \ + # EXPLANATION: dnf installation produces an 
"unsigned rpm" error from Konflux (Conforma) + # since we're building rpm from source, we will simply unpack it over / + # dnf install -y "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" + # dnf -y clean all --enablerepo='*' + dnf install -y cpio && dnf -y clean all && \ + cd / && rpm2cpio "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" | cpio -idmv + +COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/utils utils/ + +# Create and intall the extensions though build-time on a temporary directory. Later this directory will copied on the `/opt/app-root/src/.local/share/code-server/extensions` via run-code-server.sh file when it starts up. +# https://coder.com/docs/code-server/FAQ#how-do-i-install-an-extension +RUN mkdir -p /opt/app-root/extensions-temp && \ + code-server --install-extension /opt/app-root/bin/utils/ms-python.python-2025.14.0.vsix --extensions-dir /opt/app-root/extensions-temp && \ + code-server --install-extension /opt/app-root/bin/utils/ms-toolsai.jupyter-2025.8.0.vsix --extensions-dir /opt/app-root/extensions-temp + +# Install NGINX to proxy code-server and pass probes check +ENV APP_ROOT=/opt/app-root \ + NGINX_VERSION=1.24 \ + NGINX_SHORT_VER=124 \ + NGINX_CONFIGURATION_PATH=${APP_ROOT}/etc/nginx.d \ + NGINX_CONF_PATH=/etc/nginx/nginx.conf \ + NGINX_DEFAULT_CONF_PATH=${APP_ROOT}/etc/nginx.default.d \ + NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx \ + NGINX_APP_ROOT=${APP_ROOT} \ + NGINX_LOG_PATH=/var/log/nginx \ + NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl + +# Modules does not exist +RUN INSTALL_PKGS="bind-utils nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ + dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ + rpm -V $INSTALL_PKGS && \ + dnf -y clean all --enablerepo='*' + +# Configure httpd for CGI processing +COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf +COPY --chown=1001:0 
${CODESERVER_SOURCE_CODE}/httpd/codeserver-cgi.conf /etc/httpd/conf.d/codeserver-cgi.conf + +# Copy extra files to the image. +COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/nginx/root/ / + +## Configure nginx +COPY ${CODESERVER_SOURCE_CODE}/nginx/serverconf/ /opt/app-root/etc/nginx.default.d/ +COPY ${CODESERVER_SOURCE_CODE}/nginx/httpconf/ /opt/app-root/etc/nginx.d/ +COPY ${CODESERVER_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ + +# Changing ownership and user rights to support following use-cases: +# 1) running container on OpenShift, whose default security model +# is to run the container under random UID, but GID=0 +# 2) for working root-less container with UID=1001, which does not have +# to have GID=0 +# 3) for default use-case, that is running container directly on operating system, +# with default UID and GID (1001:0) +# Supported combinations of UID:GID are thus following: +# UID=1001 && GID=0 +# UID=&& GID=0 +# UID=1001 && GID= +RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ + mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ + mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ + mkdir -p ${NGINX_APP_ROOT}/api/ && \ + mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ + mkdir -p ${NGINX_LOG_PATH} && \ + mkdir -p ${NGINX_PERL_MODULE_PATH} && \ + # Create httpd directories and set permissions + mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ + chown -R 1001:0 ${NGINX_CONF_PATH} && \ + chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ + chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ + chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ + chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ + chmod ug+rw ${NGINX_CONF_PATH} && \ + chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ + chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ + chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ + chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ + # Make CGI script executable + 
chmod +x /opt/app-root/api/kernels/access.cgi && \ + rpm-file-permissions && \ + # Ensure the temporary directory and target directory have the correct permissions + mkdir -p /opt/app-root/src/.local/share/code-server/extensions && \ + mkdir -p /opt/app-root/src/.local/share/code-server/coder-logs && \ + chown -R 1001:0 /opt/app-root/src/.local/share/code-server && \ + chown -R 1001:0 /opt/app-root/extensions-temp && \ + chown -R 1001:0 /opt/app-root/src/.config/code-server + +# Launcher +COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/run-code-server.sh ${CODESERVER_SOURCE_CODE}/run-nginx.sh ./ + +ENV SHELL=/bin/bash + +ENV PYTHONPATH=/opt/app-root/bin/python3 + +# Install useful packages from requirements.txt +COPY ${CODESERVER_SOURCE_CODE}/pylock.toml ./ + +# wait for whl-cache stage (builds uv cache) +COPY --from=whl-cache /tmp/control /dev/null + +# Install packages and cleanup +# (ARCH-ppc64le): install packages (eg. pyarrow) that need to be built from source repository on ppc64le +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw \ + bash -c ' \ + if [[ $(uname -m) == "ppc64le" ]]; then \ + uv pip install /wheelsdir/*.whl; \ + fi ' +# install packages as USER 0 (this will allow us to consume uv cache) +RUN --mount=type=cache,target=/root/.cache/uv \ + echo "Installing softwares and packages" && \ + # we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag + # TODO(jdanek): seen some builds fail on GitHub Actions with --offline and see no need to limit ourselves to the cache, will remove this + UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml && \ + # Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files + # Build debugpy from source instead + UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 
'\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') && \ + # change ownership to default user (all packages were installed as root and has root:root ownership \ + chown -R 1001:0 /opt/app-root + +USER 1001 + +# Fix permissions to support pip in Openshift environments +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +CMD ["/opt/app-root/bin/run-code-server.sh"] + +FROM codeserver as tests +ARG CODESERVER_SOURCE_CODE=codeserver/ubi9-python-3.12 +COPY ${CODESERVER_SOURCE_CODE}/test /tmp/test +# TODO(jdanek): add --mount=type=bind,target=/opt/app-root/src +RUN <<'EOF' +set -Eeuxo pipefail +python3 /tmp/test/test_startup.py |& tee /tmp/test_log.txt +EOF + +from codeserver +COPY --from=tests /tmp/test_log.txt /tmp/test_log.txt diff --git a/codeserver/ubi9-python-3.12/build-args/cpu.conf b/codeserver/ubi9-python-3.12/build-args/cpu.conf index cc7c73581a..4583ee67cb 100644 --- a/codeserver/ubi9-python-3.12/build-args/cpu.conf +++ b/codeserver/ubi9-python-3.12/build-args/cpu.conf @@ -1 +1,3 @@ +# Base Image : UBI 9 with Python 3.12 +# Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest diff --git a/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu b/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu new file mode 100644 index 0000000000..d48e097bec --- /dev/null +++ b/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -0,0 +1,370 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ + +ARG TARGETARCH + +# Keep s390x special-case from original (create 
dummy binary) but +# include explicit curl/unzip steps from the delta for non-s390x. +RUN arch="${TARGETARCH:-$(uname -m)}" && \ + arch=$(echo "$arch" | cut -d- -f1) && \ + if [ "$arch" = "s390x" ]; then \ + echo "Skipping mongocli build for ${arch}, creating dummy binary"; \ + mkdir -p /tmp && echo -e '#!/bin/sh\necho "mongocli not supported on s390x"' > /tmp/mongocli && \ + chmod +x /tmp/mongocli; \ + else \ + echo "Building mongocli for ${arch}"; \ + curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip && \ + unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip && \ + cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux GOARCH=${arch} GO111MODULE=on go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/; \ + fi + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root +ARG TARGETARCH + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN --mount=type=cache,target=/var/cache/dnf \ + echo "Building for architecture: ${TARGETARCH}" && \ + if [ "$TARGETARCH" = "s390x" ]; then \ + PACKAGES="perl mesa-libGL skopeo gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel"; \ + else \ + PACKAGES="perl mesa-libGL skopeo"; \ + fi && \ + echo "Installing: $PACKAGES" && \ + dnf install -y $PACKAGES && \ + dnf clean all && rm -rf /var/cache/yum + +RUN if [ "$TARGETARCH" = "s390x" ]; then \ + # Install Rust and set up environment + mkdir -p /opt/.cargo && \ + export HOME=/root && \ + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs -o rustup-init.sh && \ + chmod +x rustup-init.sh && \ + CARGO_HOME=/opt/.cargo HOME=/root ./rustup-init.sh -y --no-modify-path && \ + rm -f rustup-init.sh && \ + chown -R 1001:0 /opt/.cargo && \ + # Set environment variables + echo 'export PATH=/opt/.cargo/bin:$PATH' >> /etc/profile.d/cargo.sh && \ + echo 'export CARGO_HOME=/opt/.cargo' >> /etc/profile.d/cargo.sh && \ + echo 'export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1' >> /etc/profile.d/cargo.sh; \ +fi + +# Set python alternatives only for s390x (not needed for other arches) +RUN if [ "$TARGETARCH" = "s390x" ]; then \ + alternatives --install /usr/bin/python python 
/usr/bin/python3.12 1 && \ + alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \ + python --version && python3 --version; \ +fi + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +############################## +# wheel-builder stage # +# NOTE: Only used in ppc64le and s390x +############################## +FROM cpu-base AS pyarrow-builder + +ARG TARGETARCH +USER 0 +WORKDIR /tmp/build-wheels + +# Build pyarrow on ppc64le and s390x +RUN --mount=type=cache,target=/root/.cache/pip \ + --mount=type=cache,target=/root/.cache/dnf \ + if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ + # Install build dependencies (shared for pyarrow and onnx) + dnf install -y cmake make gcc-c++ pybind11-devel wget && \ + dnf clean all && \ + # Build and collect pyarrow wheel + git clone --depth 1 --branch "apache-arrow-17.0.0" https://github.com/apache/arrow.git && \ + cd arrow/cpp && \ + mkdir release && cd release && \ + cmake -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_ORC=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_DATASET=ON \ + -DARROW_DEPENDENCY_SOURCE=BUNDLED \ + -DARROW_WITH_LZ4=OFF \ + -DARROW_WITH_ZSTD=OFF \ + -DARROW_WITH_SNAPPY=OFF \ + -DARROW_BUILD_TESTS=OFF \ + -DARROW_BUILD_BENCHMARKS=OFF \ + .. 
&& \ + make -j$(nproc) VERBOSE=1 && \ + make install -j$(nproc) && \ + cd ../../python && \ + pip install --no-cache-dir -r requirements-build.txt && \ + PYARROW_WITH_PARQUET=1 \ + PYARROW_WITH_DATASET=1 \ + PYARROW_WITH_FILESYSTEM=1 \ + PYARROW_WITH_JSON=1 \ + PYARROW_WITH_CSV=1 \ + PYARROW_PARALLEL=$(nproc) \ + python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel && \ + mkdir -p /tmp/wheels && \ + cp dist/pyarrow-*.whl /tmp/wheels/ && \ + chmod -R 777 /tmp/wheels && \ + # Ensure wheels directory exists and has content + ls -la /tmp/wheels/; \ + else \ + # Create empty wheels directory for non-s390x + mkdir -p /tmp/wheels; \ + fi + +####################################################### +# common-builder (for Power-only) +####################################################### +FROM cpu-base AS common-builder +ARG TARGETARCH +USER root +RUN <<'EOF' +set -Eeuxo pipefail +if [ "${TARGETARCH}" = "ppc64le" ]; then + dnf install -y gcc-toolset-13 cmake ninja-build git wget unzip + dnf clean all +else + echo "Skipping common-builder package install on non-Power" +fi +EOF + +####################################################### +# onnx-builder (Power-only) +####################################################### +FROM common-builder AS onnx-builder +ARG TARGETARCH +ARG ONNX_VERSION=v1.19.0 +WORKDIR /root +RUN <<'EOF' +set -Eeuxo pipefail +if [ "${TARGETARCH}" = "ppc64le" ]; then + source /opt/rh/gcc-toolset-13/enable + git clone --recursive https://github.com/onnx/onnx.git + cd onnx + git checkout ${ONNX_VERSION} + git submodule update --init --recursive + pip install -r requirements.txt + export CMAKE_ARGS="-DPython3_EXECUTABLE=$(which python3.12)" + pip wheel . 
-w /root/onnx_wheel +else + echo "Skipping ONNX build on non-Power" + mkdir -p /root/onnx_wheel +fi +EOF + +####################################################### +# openblas-builder (Power-only) +####################################################### +FROM common-builder AS openblas-builder +ARG TARGETARCH +ARG OPENBLAS_VERSION=0.3.30 +WORKDIR /root +RUN <<'EOF' +set -Eeuxo pipefail +if [ "${TARGETARCH}" = "ppc64le" ]; then + wget https://github.com/OpenMathLib/OpenBLAS/releases/download/v${OPENBLAS_VERSION}/OpenBLAS-${OPENBLAS_VERSION}.zip + unzip OpenBLAS-${OPENBLAS_VERSION}.zip + cd OpenBLAS-${OPENBLAS_VERSION} + make -j$(nproc) TARGET=POWER9 BINARY=64 USE_OPENMP=1 USE_THREAD=1 NUM_THREADS=120 DYNAMIC_ARCH=1 INTERFACE64=0 +else + mkdir -p OpenBLAS-${OPENBLAS_VERSION} + echo "Skipping OpenBLAS build on non-Power" +fi +EOF +#################### +# jupyter-minimal # +#################### +FROM cpu-base AS jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] + + +######################## +# jupytyer-datascience # +######################## +FROM jupyter-minimal AS jupyter-datascience +ARG TARGETARCH + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG OPENBLAS_VERSION=0.3.30 +ARG TARGETARCH + +LABEL name="rhoai/odh-workbench-jupyter-datascience-cpu-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-datascience-cpu-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-datascience-cpu-py312-rhel9" \ + summary="Jupyter data science notebook image for ODH notebooks" \ + description="Jupyter 
data science notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter data science notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat && \ + dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# Other apps and tools installed as default user +USER 1001 + +ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/ + +# Copy wheels from build stage (ppc64le and s390x only) +COPY --from=pyarrow-builder /tmp/wheels /tmp/wheels +RUN if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ + pip install --no-cache-dir /tmp/wheels/*.whl; \ +else \ + echo "Skipping wheel install for $TARGETARCH"; \ +fi + +# Copy OpenBLAS,ONNX wheels for Power +COPY --from=openblas-builder /root/OpenBLAS-${OPENBLAS_VERSION} /openblas +COPY --from=onnx-builder /root/onnx_wheel/ /onnxwheels/ + +# Power-specific ONNX/OpenBLAS installation +RUN <<'EOF' +set -Eeuxo pipefail +if [ "${TARGETARCH}" = "ppc64le" ]; then + pip install /onnxwheels/*.whl +else + echo "Skipping ONNX/OpenBLAS install on non-Power" +fi +EOF + +USER root +RUN <<'EOF' +set -Eeuxo pipefail +if [ "${TARGETARCH}" = "ppc64le" ]; then + rm -rf /onnxwheels +else + echo "Skipping ONNX/OpenBLAS install on non-Power" +fi +EOF + +RUN <<'EOF' +set -Eeuxo pipefail +if [ "${TARGETARCH}" = "ppc64le" ]; then + PREFIX=/usr/local make -C /openblas install + rm -rf /openblas +else + echo "Skipping ONNX/OpenBLAS install on non-Power" +fi +EOF + +USER 1001:0 + +# Install Python packages and Jupyterlab extensions from pylock.toml 
+COPY ${DATASCIENCE_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +RUN --mount=type=cache,target=/root/.cache/pip \ + echo "Installing software and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ + # We need special flags and environment variables when building packages + GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ + CFLAGS="-O3" CXXFLAGS="-O3" \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress \ + --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ + --requirements=./pylock.toml; \ + else \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress \ + --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ + --requirements=./pylock.toml; \ + fi && \ + # setup path for runtime configuration + mkdir /opt/app-root/runtimes && \ + mkdir /opt/app-root/pipeline-runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" \ + /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + install -D -m 0644 /opt/app-root/bin/utils/jupyter_server_config.py \ + /opt/app-root/etc/jupyter/jupyter_server_config.py && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src diff --git a/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf b/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf index cc7c73581a..4583ee67cb 100644 --- a/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf +++ b/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf @@ -1 +1,3 @@ +# Base Image : UBI 9 with Python 3.12 +# Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest diff --git a/jupyter/datascience/ubi9-python-3.12/pylock.toml b/jupyter/datascience/ubi9-python-3.12/pylock.toml index bc01fdf55a..11744476fb 100644 --- a/jupyter/datascience/ubi9-python-3.12/pylock.toml +++ b/jupyter/datascience/ubi9-python-3.12/pylock.toml @@ -1047,6 +1047,7 @@ wheels = [ [[packages]] name = "fsspec" version = "2025.9.0" +marker = "platform_machine != 's390x'" sdist = { url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz", upload-time = 2025-09-02T19:10:49Z, size = 304847, hashes = { sha256 = "19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19" } } wheels = [{ url = "https://files.pythonhosted.org/packages/47/71/70db47e4f6ce3e5c37a607355f80da8860a33226be640226ac52cb05ef2e/fsspec-2025.9.0-py3-none-any.whl", upload-time = 2025-09-02T19:10:47Z, size = 199289, 
hashes = { sha256 = "530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7" } }] diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu new file mode 100644 index 0000000000..cf9e3dbf93 --- /dev/null +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -0,0 +1,118 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +############################ +# Stage 1: PDF Tool Build # +############################ +FROM registry.access.redhat.com/ubi9/python-312:latest AS pdf-builder + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Copy scripts +COPY jupyter/utils/install_texlive.sh ./install_texlive.sh +COPY jupyter/utils/install_pandoc.sh ./install_pandoc.sh +RUN chmod +x install_texlive.sh install_pandoc.sh + +RUN ./install_texlive.sh +RUN ./install_pandoc.sh + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +#################### +# jupyter-minimal # +#################### +FROM cpu-base AS jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +LABEL name="odh-notebook-jupyter-minimal-ubi9-python-3.12" \ + summary="Minimal Jupyter notebook image for ODH notebooks" \ + description="Minimal Jupyter notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.display-name="Minimal Jupyter notebook image for ODH notebooks" \ + io.k8s.description="Minimal Jupyter notebook image with base Python 3.12 builder 
image based on UBI9 for ODH notebooks" \ + authoritative-source-url="https://github.com/opendatahub-io/notebooks" \ + io.openshift.build.commit.ref="main" \ + io.openshift.build.source-location="https://github.com/opendatahub-io/notebooks/tree/main/jupyter/minimal/ubi9-python-3.12" \ + io.openshift.build.image="quay.io/opendatahub/workbench-images:jupyter-minimal-ubi9-python-3.12" \ + com.redhat.component="odh-workbench-jupyter-minimal-cpu-py312-rhel9" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +# Install Python dependencies from requirements.txt file +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..b5ec6fb4b0 --- /dev/null +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,108 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################### +# cuda-jupyter-minimal # +######################### +FROM cuda-base AS cuda-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +LABEL name="odh-notebook-jupyter-cuda-minimal-ubi9-python-3.12" \ + summary="Minimal Jupyter CUDA notebook image for ODH notebooks" \ + description="Minimal Jupyter CUDA notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.display-name="Minimal Jupyter CUDA notebook image for ODH notebooks" \ + io.k8s.description="Minimal Jupyter CUDA 
notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + authoritative-source-url="https://github.com/opendatahub-io/notebooks" \ + io.openshift.build.commit.ref="main" \ + io.openshift.build.source-location="https://github.com/opendatahub-io/notebooks/tree/main/jupyter/minimal/ubi9-python-3.12" \ + io.openshift.build.image="quay.io/opendatahub/workbench-images:cuda-jupyter-minimal-ubi9-python-3.12" + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +# Install Python dependencies from requirements.txt file +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] + +LABEL name="rhoai/odh-workbench-jupyter-minimal-cuda-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-minimal-cuda-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-minimal-cuda-py312-rhel9" \ + summary="Minimal Jupyter CUDA notebook image for ODH notebooks" \ + description="Minimal Jupyter CUDA notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Minimal Jupyter CUDA notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm new file mode 100644 index 0000000000..8a5c31b4be --- /dev/null +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -0,0 +1,99 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# rocm-base # +#################### +FROM ${BASE_IMAGE} AS rocm-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. 
+# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################## +# rocm-jupyter-minimal # +######################## +FROM rocm-base AS rocm-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + 
+COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +# Install Python dependencies from Pipfile.lock file +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh + +# Fix permissions to support pip in Openshift environments \ +USER 0 +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] + +LABEL name="rhoai/odh-workbench-jupyter-minimal-rocm-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-minimal-rocm-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-minimal-rocm-py312-rhel9" \ + summary="Minimal Jupyter ROCm notebook image for ODH notebooks" \ + description="Minimal Jupyter ROCm notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Minimal Jupyter ROCm notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + 
com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm b/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm index f2ace3915e..e62e811e03 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm @@ -87,12 +87,15 @@ RUN echo "Installing softwares and packages" && \ sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ # copy jupyter configuration cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P && \ # Apply JupyterLab addons \ /opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments \ +USER 0 +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P +USER 1001 + WORKDIR /opt/app-root/src ENTRYPOINT ["start-notebook.sh"] diff --git a/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf b/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf index c10af38d45..4583ee67cb 100644 --- a/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf +++ b/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf @@ -1,2 +1,3 @@ # Base Image : UBI 9 with Python 3.12 +# Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest diff --git a/jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf b/jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf index a101ee1855..9b62bf6687 100644 --- a/jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf +++ b/jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.8 +# Base Image : RHEL 9.6 with Python 
3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241 diff --git a/jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf b/jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf index 6682af4d77..cffbf80141 100644 --- a/jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf +++ b/jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-rocm-py312-ubi9:v6.2 +# Base Image : RHEL 9.6 with Python 3.12 +# ROCm Version : 6.3.4 +# Architectures: linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/rocm +BASE_IMAGE=quay.io/aipcc/base-images/rocm:3.0-1755080929 diff --git a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..426c9715c3 --- /dev/null +++ b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,155 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ +RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages 
needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################### +# cuda-jupyter-minimal # +######################### +FROM cuda-base AS cuda-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG 
MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] + +############################# +# cuda-jupyter-datascience # +############################# +FROM cuda-jupyter-minimal AS cuda-jupyter-datascience + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# Other apps and tools installed as default user +USER 1001 + +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +WORKDIR /opt/app-root/src + +############################# +# cuda-jupyter-pytorch # +############################# +FROM cuda-jupyter-datascience AS cuda-jupyter-pytorch + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG PYTORCH_SOURCE_CODE=jupyter/pytorch+llmcompressor/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages and Jupyterlab extensions from requirements.txt +COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # setup path for runtime configuration + mkdir /opt/app-root/runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-rhel9" \ + summary="Jupyter CUDA pytorch-llmcompressor notebook image for ODH notebooks" \ + description="Jupyter CUDA pytorch-llmcompressor notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter CUDA pytorch-llmcompressor notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf index a101ee1855..9b62bf6687 100644 --- 
a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf +++ b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.8 +# Base Image : RHEL 9.6 with Python 3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241 diff --git a/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..22ab75e103 --- /dev/null +++ b/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,155 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ +RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. 
+# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################### +# cuda-jupyter-minimal # +######################### +FROM cuda-base AS cuda-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV 
PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] + +############################# +# cuda-jupyter-datascience # +############################# +FROM cuda-jupyter-minimal AS cuda-jupyter-datascience + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# Other apps and tools installed as default user +USER 1001 + +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +WORKDIR /opt/app-root/src + +############################# +# cuda-jupyter-pytorch # +############################# +FROM cuda-jupyter-datascience AS cuda-jupyter-pytorch + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG PYTORCH_SOURCE_CODE=jupyter/pytorch/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages and Jupyterlab extensions from requirements.txt +COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # setup path for runtime configuration + mkdir /opt/app-root/runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-workbench-jupyter-pytorch-cuda-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-pytorch-cuda-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-pytorch-cuda-py312-rhel9" \ + summary="Jupyter CUDA pytorch notebook image for ODH notebooks" \ + description="Jupyter CUDA pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter CUDA pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf b/jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf index a101ee1855..9b62bf6687 100644 --- a/jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf +++ b/jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf @@ 
-1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.8 +# Base Image : RHEL 9.6 with Python 3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241 diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm new file mode 100644 index 0000000000..96773f544a --- /dev/null +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -0,0 +1,157 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ +RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ + +#################### +# rocm-base # +#################### +FROM ${BASE_IMAGE} AS rocm-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################## +# rocm-jupyter-minimal # +######################## +FROM rocm-base AS rocm-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT 
["start-notebook.sh"] + +############################ +# rocm-jupyter-datascience # +############################ +FROM rocm-jupyter-minimal AS rocm-jupyter-datascience + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# Other apps and tools installed as default user +USER 1001 + +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +WORKDIR /opt/app-root/src + +######################## +# rocm-jupyter-pytorch # +######################## +FROM rocm-jupyter-datascience AS rocm-jupyter-pytorch + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG PYTORCH_SOURCE_CODE=jupyter/rocm/pytorch/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # setup path for runtime configuration + mkdir /opt/app-root/runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh + +USER 0 +# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments +RUN ./de-vendor-torch.sh && \ + rm ./de-vendor-torch.sh && \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ + fix-permissions /opt/app-root -P +USER 1001 + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-workbench-jupyter-pytorch-rocm-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-pytorch-rocm-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-pytorch-rocm-py312-rhel9" \ + summary="Jupyter ROCm pytorch notebook image for ODH notebooks" \ + description="Jupyter ROCm pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter ROCm pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm index 
e6910b818d..a0249da9fa 100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm @@ -146,12 +146,14 @@ RUN echo "Installing softwares and packages" && \ # Disable announcement plugin of jupyterlab \ jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # De-vendor the ROCm libs that are embedded in Pytorch \ - ./de-vendor-torch.sh && \ + /opt/app-root/bin/utils/addons/apply.sh + +USER 0 +# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments +RUN ./de-vendor-torch.sh && \ rm ./de-vendor-torch.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ fix-permissions /opt/app-root -P +USER 1001 WORKDIR /opt/app-root/src diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf b/jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf index 694a3031c4..cffbf80141 100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-rocm-py312-ubi9:v6.4 +# Base Image : RHEL 9.6 with Python 3.12 +# ROCm Version : 6.3.4 +# Architectures: linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/rocm +BASE_IMAGE=quay.io/aipcc/base-images/rocm:3.0-1755080929 diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm new file mode 100644 index 0000000000..b2b04aae65 --- /dev/null +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -0,0 +1,156 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + 
+###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ +RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ + +#################### +# rocm-base # +#################### +FROM ${BASE_IMAGE} AS rocm-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################## +# rocm-jupyter-minimal # +######################## +FROM rocm-base AS rocm-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +WORKDIR /opt/app-root/src + +ENTRYPOINT 
["start-notebook.sh"] + +############################ +# rocm-jupyter-datascience # +############################ +FROM rocm-jupyter-minimal AS rocm-jupyter-datascience + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# Other apps and tools installed as default user +USER 1001 + +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +WORKDIR /opt/app-root/src + +########################### +# rocm-jupyter-tensorflow # +########################### +FROM rocm-jupyter-datascience AS rocm-jupyter-tensorflow + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG TENSORFLOW_SOURCE_CODE=jupyter/rocm/tensorflow/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ # Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # setup path for runtime configuration + mkdir /opt/app-root/runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh + +# Fix permissions to support pip in Openshift environments \ +USER 0 +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P +USER 1001 + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-workbench-jupyter-tensorflow-rocm-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-tensorflow-rocm-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-tensorflow-rocm-py312-rhel9" \ + summary="Jupyter AMD tensorflow notebook image for ODH notebooks" \ + description="Jupyter AMD tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter AMD tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git 
a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm index a5d2719e36..e86adbd64b 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm @@ -148,10 +148,13 @@ RUN echo "Installing softwares and packages" && \ # Disable announcement plugin of jupyterlab \ jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + /opt/app-root/bin/utils/addons/apply.sh + +# Fix permissions to support pip in Openshift environments \ +USER 0 +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ fix-permissions /opt/app-root -P +USER 1001 COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf b/jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf index 694a3031c4..cffbf80141 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-rocm-py312-ubi9:v6.4 +# Base Image : RHEL 9.6 with Python 3.12 +# ROCm Version : 6.3.4 +# Architectures: linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/rocm +BASE_IMAGE=quay.io/aipcc/base-images/rocm:3.0-1755080929 diff --git a/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..f15b5ea5a2 --- /dev/null +++ b/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,160 @@ +ARG TARGETARCH + +######################### +# 
configuration args # +######################### +ARG BASE_IMAGE + +###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ +RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################### +# cuda-jupyter-minimal # +######################### +FROM cuda-base AS cuda-jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ + +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT 
["start-notebook.sh"] + +############################# +# cuda-jupyter-datascience # +############################# +FROM cuda-jupyter-minimal AS cuda-jupyter-datascience + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# hdf5 is needed for h5py +RUN dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm && \ + dnf install -y hdf5-devel && \ + dnf clean all + +# Other apps and tools installed as default user +USER 1001 + +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +WORKDIR /opt/app-root/src + +############################ +# cuda-jupyter-tensorflow # +############################ +FROM cuda-jupyter-datascience AS cuda-jupyter-tensorflow + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG TENSORFLOW_SOURCE_CODE=jupyter/tensorflow/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages and Jupyterlab extensions from requirements.txt +COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # setup path for runtime configuration + mkdir /opt/app-root/runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-workbench-jupyter-tensorflow-cuda-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-tensorflow-cuda-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-tensorflow-cuda-py312-rhel9" \ + summary="Jupyter CUDA tensorflow notebook image for ODH notebooks" \ + description="Jupyter CUDA tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter CUDA tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf b/jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf index a101ee1855..9b62bf6687 100644 --- a/jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf +++ 
b/jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.8 +# Base Image : RHEL 9.6 with Python 3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241 diff --git a/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu b/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu new file mode 100644 index 0000000000..e584b95345 --- /dev/null +++ b/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -0,0 +1,204 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +###################################################### +# mongocli-builder (build stage only, not published) # +###################################################### +FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder + +ARG MONGOCLI_VERSION=2.0.4 + +WORKDIR /tmp/ +RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ + CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +#################### +# wheel-cache-base # +#################### +FROM ${BASE_IMAGE} AS whl-cache + +USER root +ENV HOME=/root +WORKDIR /root + +ARG TRUSTYAI_SOURCE_CODE=jupyter/trustyai/ubi9-python-3.12 + +COPY ${TRUSTYAI_SOURCE_CODE}/pylock.toml . +COPY ${TRUSTYAI_SOURCE_CODE}/devel_env_setup.sh . 
+ +RUN --mount=type=cache,target=/root/.cache/uv \ + pip install --no-cache uv && \ + # the devel script is ppc64le specific - sets up build-time dependencies + source ./devel_env_setup.sh && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +#################### +# jupyter-minimal # +#################### +FROM cpu-base AS jupyter-minimal + +ARG JUPYTER_REUSABLE_UTILS=jupyter/utils +ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +COPY ${JUPYTER_REUSABLE_UTILS} utils/ +COPY ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ + +USER 0 + +# Dependencies for PDF export begin +RUN ./utils/install_pdf_deps.sh +ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" +# Dependencies for PDF export end + +USER 1001 + +WORKDIR /opt/app-root/src + +ENTRYPOINT ["start-notebook.sh"] + + 
+######################## +# jupyter-datascience # +######################## +FROM jupyter-minimal AS jupyter-datascience + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Install useful OS packages +RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Copy dynamically-linked mongocli built in earlier build stage +COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ + +# Other apps and tools installed as default user +USER 1001 + +# Copy Elyra setup to utils so that it's sourced at startup +COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +WORKDIR /opt/app-root/src + +#################### +# jupyter-trustyai # +#################### +FROM jupyter-datascience AS jupyter-trustyai + +ARG DATASCIENCE_SOURCE_CODE=jupyter/datascience/ubi9-python-3.12 +ARG TRUSTYAI_SOURCE_CODE=jupyter/trustyai/ubi9-python-3.12 + +ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib + +LABEL name="rhoai/odh-workbench-jupyter-trustyai-cpu-py312-rhel9" \ + com.redhat.component="odh-workbench-jupyter-trustyai-cpu-py312-rhel9" \ + io.k8s.display-name="odh-workbench-jupyter-trustyai-cpu-py312-rhel9" \ + summary="Jupyter trustyai notebook image for ODH notebooks" \ + description="Jupyter trustyai notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Jupyter trustyai notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" + +USER 0 + +# Install jre that is needed to run the trustyai library +RUN INSTALL_PKGS="java-17-openjdk" && \ + dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ + dnf -y clean all --enablerepo='*' + +# Install Python packages and Jupyterlab extensions from 
requirements.txt +COPY ${TRUSTYAI_SOURCE_CODE}/pylock.toml ./ + +# install openblas for ppc64le +RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS/,target=/OpenBlas/,rw \ + bash -c ' \ + if [[ $(uname -m) == "ppc64le" ]]; then \ + dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm; \ + dnf install -y libraqm libimagequant; \ + PREFIX=/usr/ make install -C /OpenBlas; \ + fi ' + +# Install packages and cleanup +# install packages as USER 0 (this will allow us to consume uv cache) +RUN --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw \ + --mount=type=cache,target=/root/.cache/uv \ + bash -c ' \ + if [[ $(uname -m) == "ppc64le" ]]; then \ + UV_LINK_MODE=copy uv pip install /wheelsdir/*.whl accelerate --cache-dir /root/.cache/uv; \ + fi ' +RUN --mount=type=cache,target=/root/.cache/uv \ + echo "Installing softwares and packages" && \ + # we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag + UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml && \ + # Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files + # Build debugpy from source instead + UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') && \ + # change ownership to default user (all packages were installed as root and has root:root ownership \ + chown -R 1001:0 /opt/app-root/ + +USER 1001 + +RUN # setup path for runtime configuration \ + mkdir /opt/app-root/runtimes && \ + # Remove default Elyra runtime-images \ + rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ + # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ + sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ + # copy jupyter configuration + cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ + # Disable announcement plugin of jupyterlab \ + jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ + # Apply JupyterLab addons \ + /opt/app-root/bin/utils/addons/apply.sh && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P diff --git a/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf b/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf index cc7c73581a..4583ee67cb 100644 --- a/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf +++ b/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf @@ -1 +1,3 @@ +# Base Image : UBI 9 with Python 3.12 +# Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest diff --git a/jupyter/utils/install_pandoc.sh b/jupyter/utils/install_pandoc.sh new file mode 100755 index 0000000000..1dcb12e70b --- /dev/null +++ b/jupyter/utils/install_pandoc.sh @@ -0,0 +1,23 @@ +#!/bin/bash +set -euxo pipefail + +# Mapping of `uname -m` values to equivalent GOARCH values +declare -A UNAME_TO_GOARCH +UNAME_TO_GOARCH["x86_64"]="amd64" +UNAME_TO_GOARCH["aarch64"]="arm64" +UNAME_TO_GOARCH["ppc64le"]="ppc64le" +UNAME_TO_GOARCH["s390x"]="s390x" + +ARCH="${UNAME_TO_GOARCH[$(uname -m)]}" + +if [[ "$ARCH" == "ppc64le" ]]; then + + # Install Pandoc from source + dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm + dnf install -y pandoc + mkdir -p /usr/local/pandoc/bin + ln -s /usr/bin/pandoc /usr/local/pandoc/bin/pandoc + export PATH="/usr/local/pandoc/bin:$PATH" + pandoc --version + +fi diff --git a/jupyter/utils/install_texlive.sh b/jupyter/utils/install_texlive.sh new file mode 100755 index 0000000000..0e53665f60 
--- /dev/null +++ b/jupyter/utils/install_texlive.sh @@ -0,0 +1,84 @@ +#!/bin/bash +set -euxo pipefail + +# Mapping of `uname -m` values to equivalent GOARCH values +declare -A UNAME_TO_GOARCH +UNAME_TO_GOARCH["x86_64"]="amd64" +UNAME_TO_GOARCH["aarch64"]="arm64" +UNAME_TO_GOARCH["ppc64le"]="ppc64le" +UNAME_TO_GOARCH["s390x"]="s390x" + +ARCH="${UNAME_TO_GOARCH[$(uname -m)]}" + +if [[ "$ARCH" == "ppc64le" ]]; then + + echo "Installing TeX Live from source for $ARCH..." + + # Install build dependencies + dnf install -y gcc-toolset-13 perl make libX11-devel \ + zlib-devel freetype-devel libpng-devel ncurses-devel \ + gd-devel libtool wget tar xz \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libXmu-devel-1.1.3-8.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libXext-devel-1.3.4-8.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libICE-devel-1.0.10-8.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libSM-devel-1.2.3-10.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libXmu-1.1.3-8.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libXaw-devel-1.0.13-19.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libXaw-1.0.13-19.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/libXt-devel-1.2.0-6.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/flex-2.6.4-9.el9.ppc64le.rpm \ + https://mirror.stream.centos.org/9-stream/AppStream/ppc64le/os/Packages/bison-3.7.4-5.el9.ppc64le.rpm + + # Step 1: Download and extract the TeX Live source + wget https://ftp.math.utah.edu/pub/tex/historic/systems/texlive/2025/texlive-20250308-source.tar.xz + tar -xf texlive-20250308-source.tar.xz + cd texlive-20250308-source + + # Enable 
newer GCC toolchain + source /opt/rh/gcc-toolset-13/enable + + # Create build directory and build + mkdir -p ../texlive-build + cd ../texlive-build + ../texlive-20250308-source/configure --prefix=/usr/local/texlive + make -j"$(nproc)" + make install + + # Symlink for pdflatex + ln -sf pdftex /usr/local/texlive/bin/powerpc64le-unknown-linux-gnu/pdflatex + + # Cleanup sources to reduce image size + rm -rf /texlive-20250308-source /texlive-build + + # Step 2: Run TeX Live installer for runtime tree setup + cd / + wget https://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz + tar -xzf install-tl-unx.tar.gz + cd install-tl-2*/ + + # Create a custom install profile + TEXLIVE_INSTALL_PREFIX="/usr/local/texlive" + cat < texlive.profile +selected_scheme scheme-small +TEXDIR $TEXLIVE_INSTALL_PREFIX +TEXMFCONFIG ~/.texlive2025/texmf-config +TEXMFVAR ~/.texlive2025/texmf-var +option_doc 0 +option_src 0 +EOF + + ./install-tl --profile=texlive.profile --custom-bin=$TEXLIVE_INSTALL_PREFIX/bin/powerpc64le-unknown-linux-gnu + +# TeX Live binary directory +TEX_BIN_DIR="/usr/local/texlive/bin/powerpc64le-unknown-linux-gnu" + +# Create standard symlink 'linux' → arch-specific folder +ln -sf "$TEX_BIN_DIR" /usr/local/texlive/bin/linux + + + # Set up environment + export PATH="$TEXLIVE_INSTALL_PREFIX/bin/linux:$PATH" + pdflatex --version + tlmgr --version + +fi \ No newline at end of file diff --git a/manifests/base/README.md b/manifests/base/README.md index 5e08691afc..136578c551 100644 --- a/manifests/base/README.md +++ b/manifests/base/README.md @@ -1,31 +1,32 @@ # IDE Imagestreams -Listing the order in which each imagestreams are introduced. -NOTE: In overlays/additional there are new set of Python 3.12 images, they are also included in this ordering - -1. jupyter-minimal-notebook-imagestream.yaml -2. jupyter-minimal-cpu-py312-ubi9-imagestream.yaml -3. jupyter-minimal-gpu-notebook-imagestream.yaml -4. jupyter-minimal-cuda-py312-ubi9-imagestream.yaml -5. 
jupyter-rocm-minimal-notebook-imagestream.yaml -6. jupyter-minimal-rocm-py312-ubi9-imagestream.yaml -7. jupyter-datascience-notebook-imagestream.yaml -8. jupyter-datascience-cpu-py312-ubi9-imagestream.yaml -9. jupyter-pytorch-notebook-imagestream.yaml -10. jupyter-pytorch-cuda-py312-ubi9-imagestream.yaml -11. jupyter-rocm-pytorch-notebook-imagestream.yaml -12. jupyter-pytorch-rocm-py312-ubi9-imagestream.yaml -13. jupyter-tensorflow-notebook-imagestream.yaml -14. jupyter-tensorflow-cuda-py312-ubi9-imagestream.yaml -15. jupyter-rocm-tensorflow-notebook-imagestream.yaml -16. jupyter-trustyai-notebook-imagestream.yaml -17. jupyter-trustyai-cpu-py312-ubi9-imagestream.yaml -18. code-server-notebook-imagestream.yaml -19. codeserver-datascience-cpu-py312-ubi9-imagestream.yaml -20. rstudio-notebook-imagestream.yaml -21. rstudio-gpu-notebook-imagestream.yaml - -The order would also be same as `opendatahub.io/notebook-image-order` listed in each imagestreams. +Listing the order in which each imagestreams are introduced based on the `opendatahub.io/notebook-image-order` annotation in each file. + +## Notebook Imagestreams (with order annotations): + +1. jupyter-minimal-notebook-imagestream.yaml (Order: 1) +2. jupyter-minimal-gpu-notebook-imagestream.yaml (Order: 3) +3. jupyter-rocm-minimal-notebook-imagestream.yaml (Order: 5) +4. jupyter-datascience-notebook-imagestream.yaml (Order: 7) +5. jupyter-pytorch-notebook-imagestream.yaml (Order: 9) +6. jupyter-pytorch-llmcompressor-imagestream.yaml (Order: 10) +7. jupyter-rocm-pytorch-notebook-imagestream.yaml (Order: 12) +8. jupyter-tensorflow-notebook-imagestream.yaml (Order: 14) +9. jupyter-trustyai-notebook-imagestream.yaml (Order: 16) +10. jupyter-rocm-tensorflow-notebook-imagestream.yaml (Order: 16) +11. 
code-server-notebook-imagestream.yaml (Order: 19) + +## Runtime Imagestreams (no order annotations): + +- runtime-datascience-imagestream.yaml +- runtime-minimal-imagestream.yaml +- runtime-pytorch-imagestream.yaml +- runtime-rocm-pytorch-imagestream.yaml +- runtime-rocm-tensorflow-imagestream.yaml +- runtime-tensorflow-imagestream.yaml +- runtime-pytorch-llmcompressor-imagestream.yaml + +The order is determined by the `opendatahub.io/notebook-image-order` annotation listed in each imagestream file. _Note_: On deprecation/removal of imagestream, the index of that image is retired with it. ## Params file diff --git a/manifests/base/code-server-notebook-imagestream.yaml b/manifests/base/code-server-notebook-imagestream.yaml index 78d5b8a714..ae1e28f82a 100644 --- a/manifests/base/code-server-notebook-imagestream.yaml +++ b/manifests/base/code-server-notebook-imagestream.yaml @@ -5,7 +5,7 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/tree/main/codeserver" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/tree/main/codeserver" opendatahub.io/notebook-image-name: "Code Server | Data Science | CPU | Python 3.12" opendatahub.io/notebook-image-desc: "code-server workbench allows users to code, build, and collaborate on projects directly from web." 
opendatahub.io/notebook-image-order: "19" @@ -36,7 +36,7 @@ spec: {"name": "ipykernel", "version": "6.30"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/codeserver opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-codeserver-datascience-cpu-py312-ubi9-commit-n_PLACEHOLDER from: @@ -67,7 +67,7 @@ spec: {"name": "ipykernel", "version": "6.29"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/codeserver opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -76,3 +76,78 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "code-server", "version": "4.92"}, + {"name": "Python", "version": "v3.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "Boto3", "version": "1.34"}, + {"name": "Kafka-Python", "version": "2.0"}, + {"name": "Matplotlib", "version": "3.8"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.4"}, + {"name": "Scipy", "version": "1.12"}, + {"name": "Sklearn-onnx", "version": "1.16"}, + {"name": "ipykernel", "version": "6.29"}, + {"name": "Kubeflow-Training", "version": "1.8"} + ] + openshift.io/imported-from: quay.io/modh/codeserver + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: 
Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "code-server", "version": "4.22"}, + {"name": "Python", "version": "v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "Boto3", "version": "1.34"}, + {"name": "Kafka-Python", "version": "2.0"}, + {"name": "Matplotlib", "version": "3.8"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Plotly", "version": "5.19"}, + {"name": "Scikit-learn", "version": "1.4"}, + {"name": "Scipy", "version": "1.12"}, + {"name": "Sklearn-onnx", "version": "1.16"}, + {"name": "ipykernel", "version": "6.29"} + ] + openshift.io/imported-from: quay.io/modh/codeserver + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + opendatahub.io/notebook-software: '[{"name":"Python","version":"v3.9"}]' + opendatahub.io/notebook-python-dependencies: '[{"name":"code-server","version":"4.16"}]' + openshift.io/imported-from: quay.io/modh/codeserver + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source diff --git a/manifests/base/commit-latest.env b/manifests/base/commit-latest.env index 425f5ddb57..f7085e1ef4 100644 --- a/manifests/base/commit-latest.env +++ b/manifests/base/commit-latest.env @@ -1,20 +1,18 @@ -odh-pipeline-runtime-datascience-cpu-py312-ubi9-commit-n=564f037 -odh-pipeline-runtime-minimal-cpu-py312-ubi9-commit-n=dummy 
-odh-pipeline-runtime-pytorch-cuda-py312-ubi9-commit-n=564f037 -odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-commit-n=dummy -odh-pipeline-runtime-pytorch-rocm-py312-ubi9-commit-n=564f037 -odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-commit-n=564f037 -odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-commit-n=564f037 -odh-workbench-codeserver-datascience-cpu-py312-ubi9-commit-n=dummy -odh-workbench-jupyter-datascience-cpu-py312-ubi9-commit-n=99ebfb6 -odh-workbench-jupyter-minimal-cpu-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-minimal-cuda-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-minimal-rocm-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-pytorch-cuda-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-pytorch-rocm-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-commit-n=564f037 -odh-workbench-jupyter-trustyai-cpu-py312-ubi9-commit-n=564f037 -odh-workbench-rstudio-minimal-cpu-py311-c9s-commit-n=564f037 -odh-workbench-rstudio-minimal-cuda-py311-c9s-commit-n=564f037 +odh-workbench-jupyter-minimal-cpu-py312-ubi9-commit-n=d3137ca +odh-workbench-jupyter-minimal-cuda-py312-ubi9-commit-n=d3137ca +odh-workbench-jupyter-minimal-rocm-py312-ubi9-commit-n=bff12e2 +odh-workbench-jupyter-datascience-cpu-py312-ubi9-commit-n=d3137ca +odh-workbench-jupyter-pytorch-cuda-py312-ubi9-commit-n=8e73cac +odh-workbench-jupyter-pytorch-rocm-py312-ubi9-commit-n=d3137ca +odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-commit-n=8e73cac +odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-commit-n=aaaaaaa +odh-workbench-jupyter-trustyai-cpu-py312-ubi9-commit-n=06da715 +odh-workbench-codeserver-datascience-cpu-py312-ubi9-commit-n=06703a3 +odh-pipeline-runtime-minimal-cpu-py312-ubi9-commit-n=06703a3 +odh-pipeline-runtime-datascience-cpu-py312-ubi9-commit-n=8a0af91 
+odh-pipeline-runtime-pytorch-cuda-py312-ubi9-commit-n=8e73cac +odh-pipeline-runtime-pytorch-rocm-py312-ubi9-commit-n=8e73cac +odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-commit-n=8e73cac +odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-commit-n=aaaaaaa +odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-commit-n=8e73cac +odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-commit-n=93c0810 diff --git a/manifests/base/commit.env b/manifests/base/commit.env index 9f279e97dc..f6167da116 100644 --- a/manifests/base/commit.env +++ b/manifests/base/commit.env @@ -1,18 +1,45 @@ -odh-pipeline-runtime-datascience-cpu-py311-ubi9-commit-n-1=1247a41 -odh-pipeline-runtime-minimal-cpu-py311-ubi9-commit-n-1=1247a41 -odh-pipeline-runtime-pytorch-cuda-py311-ubi9-commit-n-1=1247a41 -odh-pipeline-runtime-pytorch-rocm-py311-ubi9-commit-n-1=1247a41 -odh-pipeline-runtime-tensorflow-cuda-py311-ubi9-commit-n-1=1247a41 -odh-pipeline-runtime-tensorflow-rocm-py311-ubi9-commit-n-1=80cffeb -odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-minimal-rocm-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-pytorch-rocm-py311-ubi9-commit-n-1=80cffeb -odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-commit-n-1=1247a41 -odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-1=1247a41 -odh-workbench-rstudio-minimal-cpu-py311-c9s-commit-n-1=60b6ecc -odh-workbench-rstudio-minimal-cuda-py311-c9s-commit-n-1=60b6ecc +odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-1=d3137ca +odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-1=8a0af91 +odh-workbench-jupyter-minimal-rocm-py311-ubi9-commit-n-1=bff12e2 
+odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-1=d3137ca +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-1=bff12e2 +odh-workbench-jupyter-pytorch-rocm-py311-ubi9-commit-n-1=8e73cac +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-1=8e73cac +odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-commit-n-1=8e73cac +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-1=d3137ca +odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-1=d3137ca +odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-3=b42b86c +odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-5=07015ec +odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-6=3e71410 +odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-3=b42b86c +odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-5=07015ec +odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-6=3e71410 +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-3=b42b86c +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-5=07015ec +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-6=3e71410 +odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-3=b42b86c +odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-5=07015ec +odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-6=3e71410 +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-3=b42b86c 
+odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-5=07015ec +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-6=3e71410 +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-3=b42b86c +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-5=07015ec +odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-2=be38cca +odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-3=b42b86c +odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-4=76a016f +odh-workbench-jupyter-minimal-rocm-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-pytorch-rocm-py311-ubi9-commit-n-2=be38cca +odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-commit-n-2=be38cca diff --git a/manifests/base/jupyter-datascience-notebook-imagestream.yaml b/manifests/base/jupyter-datascience-notebook-imagestream.yaml index 3d6cf1c3c8..182df4c369 100644 --- a/manifests/base/jupyter-datascience-notebook-imagestream.yaml +++ b/manifests/base/jupyter-datascience-notebook-imagestream.yaml @@ -5,11 +5,11 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/tree/main/jupyter/datascience" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/tree/main/jupyter/datascience" opendatahub.io/notebook-image-name: "Jupyter | Data Science | CPU | Python 3.12" - opendatahub.io/notebook-image-desc: "Jupyter notebook image with a set of data science libraries that advanced AI/ML notebooks will use as a base image to provide a standard for libraries available in all notebooks" + opendatahub.io/notebook-image-desc: "Jupyter notebook image with a set of data science libraries that advanced AI/ML notebooks will use as a base image to provide a standard for 
libraries available in all notebooks
+ {"name": "MySQL Connector/Python", "version": "9.0"}, + {"name": "Kubeflow-Training", "version": "1.8"} + ] + openshift.io/imported-from: quay.io/modh/odh-generic-data-science-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "Boto3", "version": "1.34"}, + {"name": "Kafka-Python", "version": "2.0"}, + {"name": "Kfp", "version": "2.11"}, + {"name": "Matplotlib", "version": "3.8"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.4"}, + {"name": "Scipy", "version": "1.12"}, + {"name": "Odh-Elyra", "version": "3.16"}, + {"name": "PyMongo", "version": "4.6"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.21"}, + {"name": "Sklearn-onnx", "version": "1.16"}, + {"name": "Psycopg", "version": "3.1"}, + {"name": "MySQL Connector/Python", "version": "8.3"} + ] + openshift.io/imported-from: quay.io/modh/odh-generic-data-science-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + 
{"name":"Boto3","version":"1.28"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.3"}, + {"name":"Scipy","version":"1.11"}, + {"name":"Elyra","version":"3.15"}, + {"name":"PyMongo","version":"4.5"}, + {"name":"Pyodbc","version":"4.0"}, + {"name":"Codeflare-SDK","version":"0.13"}, + {"name":"Sklearn-onnx","version":"1.15"}, + {"name":"Psycopg","version":"3.1"}, + {"name":"MySQL Connector/Python","version":"8.0"} + ] + openshift.io/imported-from: quay.io/modh/odh-generic-data-science-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source + # N - 5 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"Boto3","version":"1.26"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.2"}, + {"name":"Scipy","version":"1.10"}, + {"name":"Elyra","version":"3.15"} + ] + openshift.io/imported-from: quay.io/modh/odh-generic-data-science-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-5_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-5_PLACEHOLDER + name: "2023.1" + referencePolicy: + type: Source + # N - 6 Version of the image + - 
annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.8"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"Boto3","version":"1.17"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Matplotlib","version":"3.4"}, + {"name":"Numpy","version":"1.19"}, + {"name":"Pandas","version":"1.2"}, + {"name":"Scikit-learn","version":"0.24"}, + {"name":"Scipy","version":"1.6"} + ] + openshift.io/imported-from: quay.io/modh/odh-generic-data-science-notebook + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-6_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-6_PLACEHOLDER + name: "1.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-minimal-gpu-notebook-imagestream.yaml b/manifests/base/jupyter-minimal-gpu-notebook-imagestream.yaml index c8f5f2e9d1..918cf49014 100644 --- a/manifests/base/jupyter-minimal-gpu-notebook-imagestream.yaml +++ b/manifests/base/jupyter-minimal-gpu-notebook-imagestream.yaml @@ -5,12 +5,12 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/tree/main/jupyter/minimal" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/tree/main/jupyter/minimal" opendatahub.io/notebook-image-name: "Jupyter | Minimal | CUDA | Python 3.12" opendatahub.io/notebook-image-desc: "Jupyter notebook image with GPU support and minimal dependency set to start experimenting with Jupyter environment." 
opendatahub.io/notebook-image-order: "3" opendatahub.io/recommended-accelerators: '["nvidia.com/gpu"]' - name: jupyter-minimal-gpu-notebook + name: minimal-gpu spec: lookupPolicy: local: true @@ -28,7 +28,7 @@ spec: [ {"name": "JupyterLab", "version": "4.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py312-ubi9-commit-n_PLACEHOLDER from: @@ -50,7 +50,7 @@ spec: [ {"name": "JupyterLab", "version": "4.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -59,3 +59,117 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "CUDA", "version": "12.4"}, + {"name": "Python", "version": "v3.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab", "version": "4.2"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "CUDA", "version": "12.1"}, + {"name": "Python", "version": "v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab", "version": "3.6"}, + {"name": "Notebook","version": "6.5"} + ] + 
openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.8"}, + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"JupyterLab","version":"3.6"}, + {"name":"Notebook","version":"6.5"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source + # N - 5 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.8"}, + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"JupyterLab","version":"3.5"}, + {"name":"Notebook","version":"6.5"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-5_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-5_PLACEHOLDER + name: "2023.1" + referencePolicy: + type: Source + # N - 6 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.4"}, + {"name":"Python","version":"v3.8"} + ] + # language=json + 
opendatahub.io/notebook-python-dependencies: | + [ + {"name":"JupyterLab","version":"3.2"}, + {"name":"Notebook","version":"6.4"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-6_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-6_PLACEHOLDER + name: "1.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-minimal-notebook-imagestream.yaml b/manifests/base/jupyter-minimal-notebook-imagestream.yaml index a1d4b5a5b9..9adceafab5 100644 --- a/manifests/base/jupyter-minimal-notebook-imagestream.yaml +++ b/manifests/base/jupyter-minimal-notebook-imagestream.yaml @@ -5,11 +5,11 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/tree/main/jupyter/minimal" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/tree/main/jupyter/minimal" opendatahub.io/notebook-image-name: "Jupyter | Minimal | CPU | Python 3.12" opendatahub.io/notebook-image-desc: "Jupyter notebook image with minimal dependency set to start experimenting with Jupyter environment." 
opendatahub.io/notebook-image-order: "1" - name: jupyter-minimal-notebook + name: s2i-minimal-notebook spec: lookupPolicy: local: true @@ -26,7 +26,7 @@ spec: [ {"name": "JupyterLab","version": "4.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/default-image: "true" opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py312-ubi9-commit-n_PLACEHOLDER @@ -46,11 +46,10 @@ spec: # language=json opendatahub.io/notebook-python-dependencies: | [ - {"name": "JupyterLab","version": "4.4"} + {"name": "JupyterLab", "version": "4.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'false' - opendatahub.io/default-image: "false" opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-1_PLACEHOLDER from: kind: DockerImage @@ -58,3 +57,112 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab","version": "4.4"} + ] + openshift.io/imported-from: quay.io/modh/odh-minimal-notebook-container + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": 
"JupyterLab","version": "3.6"}, + {"name": "Notebook","version": "6.5"} + ] + openshift.io/imported-from: quay.io/modh/odh-minimal-notebook-container + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"JupyterLab","version": "3.6"}, + {"name": "Notebook","version": "6.5"} + ] + openshift.io/imported-from: quay.io/modh/odh-minimal-notebook-container + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source + # N - 5 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"JupyterLab","version": "3.5"}, + {"name": "Notebook","version": "6.5"} + ] + openshift.io/imported-from: quay.io/modh/odh-minimal-notebook-container + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-5_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-5_PLACEHOLDER + name: "2023.1" + referencePolicy: + type: Source + # N - 6 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.8"} + ] + # language=json + 
opendatahub.io/notebook-python-dependencies: | + [ + {"name":"JupyterLab","version": "3.2"}, + {"name": "Notebook","version": "6.4"} + ] + openshift.io/imported-from: quay.io/modh/odh-minimal-notebook-container + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-6_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-6_PLACEHOLDER + name: "1.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-pytorch-llmcompressor-imagestream.yaml b/manifests/base/jupyter-pytorch-llmcompressor-imagestream.yaml index 71316bb74b..c5705826cc 100644 --- a/manifests/base/jupyter-pytorch-llmcompressor-imagestream.yaml +++ b/manifests/base/jupyter-pytorch-llmcompressor-imagestream.yaml @@ -49,7 +49,7 @@ spec: {"name": "MySQL Connector/Python", "version": "9.4"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-commit-n_PLACEHOLDER from: diff --git a/manifests/base/jupyter-pytorch-notebook-imagestream.yaml b/manifests/base/jupyter-pytorch-notebook-imagestream.yaml index 89df4e0e2a..358f21b785 100644 --- a/manifests/base/jupyter-pytorch-notebook-imagestream.yaml +++ b/manifests/base/jupyter-pytorch-notebook-imagestream.yaml @@ -5,12 +5,12 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/blob/main/jupyter/pytorch" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/blob/main/jupyter/pytorch" opendatahub.io/notebook-image-name: "Jupyter | PyTorch | CUDA | Python 3.12" opendatahub.io/notebook-image-desc: "Jupyter notebook image with 
PyTorch libraries and dependencies to start experimenting with advanced AI/ML notebooks." opendatahub.io/notebook-image-order: "9" opendatahub.io/recommended-accelerators: '["nvidia.com/gpu"]' - name: jupyter-pytorch-notebook + name: pytorch spec: lookupPolicy: local: true @@ -89,7 +89,7 @@ spec: {"name": "MySQL Connector/Python", "version": "9.3"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/odh-pytorch-notebook opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -98,3 +98,187 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "CUDA", "version": "12.4"}, + {"name": "Python", "version": "v3.11"}, + {"name": "PyTorch", "version": "2.4"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab","version": "4.2"}, + {"name": "PyTorch", "version": "2.4"}, + {"name": "Tensorboard", "version": "2.17"}, + {"name": "Boto3", "version": "1.35"}, + {"name": "Kafka-Python-ng", "version": "2.2"}, + {"name": "Kfp", "version": "2.9"}, + {"name": "Matplotlib", "version": "3.9"}, + {"name": "Numpy", "version": "2.1"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.5"}, + {"name": "Scipy", "version": "1.14"}, + {"name": "Odh-Elyra", "version": "4.2"}, + {"name": "PyMongo", "version": "4.8"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.26"}, + {"name": "Sklearn-onnx", "version": "1.17"}, + {"name": "Psycopg", "version": "3.2"}, + {"name": "MySQL Connector/Python", "version": "9.0"}, + {"name": "Kubeflow-Training", "version": "1.8"} + ] + openshift.io/imported-from: quay.io/modh/odh-pytorch-notebook + 
opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "CUDA", "version": "12.1"}, + {"name": "Python", "version": "v3.9"}, + {"name": "PyTorch", "version": "2.2"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "PyTorch", "version": "2.2"}, + {"name": "Tensorboard", "version": "2.16"}, + {"name": "Boto3", "version": "1.34"}, + {"name": "Kafka-Python", "version": "2.0"}, + {"name": "Kfp", "version": "2.11"}, + {"name": "Matplotlib", "version": "3.8"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.4"}, + {"name": "Scipy", "version": "1.12"}, + {"name": "Odh-Elyra", "version": "3.16"}, + {"name": "PyMongo", "version": "4.6"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.21"}, + {"name": "Sklearn-onnx", "version": "1.16"}, + {"name": "Psycopg", "version": "3.1"}, + {"name": "MySQL Connector/Python", "version": "8.3"} + ] + + openshift.io/imported-from: quay.io/modh/odh-pytorch-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.8"}, + {"name":"Python","version":"v3.9"}, + {"name":"PyTorch","version":"2.2"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: 
| + [ + {"name":"PyTorch","version":"2.2"}, + {"name":"Tensorboard","version":"2.13"}, + {"name":"Boto3","version":"1.28"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.3"}, + {"name":"Scipy","version":"1.11"}, + {"name":"Elyra","version":"3.15"}, + {"name":"PyMongo","version":"4.5"}, + {"name":"Pyodbc","version":"4.0"}, + {"name":"Codeflare-SDK","version":"0.14"}, + {"name":"Sklearn-onnx","version":"1.15"}, + {"name":"Psycopg","version":"3.1"}, + {"name":"MySQL Connector/Python","version":"8.0"} + ] + openshift.io/imported-from: quay.io/modh/odh-pytorch-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source + # N - 5 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.8"}, + {"name":"Python","version":"v3.9"}, + {"name":"PyTorch","version":"1.13"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"PyTorch","version":"1.13"}, + {"name":"Tensorboard","version":"2.11"}, + {"name":"Boto3","version":"1.26"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.2"}, + {"name":"Scipy","version":"1.10"}, + {"name":"Elyra","version":"3.15"} + ] + openshift.io/imported-from: quay.io/modh/odh-pytorch-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: 
odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-5_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-5_PLACEHOLDER + name: "2023.1" + referencePolicy: + type: Source + # N - 6 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.4"}, + {"name":"Python","version":"v3.8"}, + {"name":"PyTorch","version":"1.8"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"PyTorch","version":"1.8"}, + {"name":"Tensorboard","version":"2.6"}, + {"name":"Boto3","version":"1.17"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Matplotlib","version":"3.4"}, + {"name":"Numpy","version":"1.19"}, + {"name":"Pandas","version":"1.2"}, + {"name":"Scikit-learn","version":"0.24"}, + {"name":"Scipy","version":"1.6"} + ] + openshift.io/imported-from: quay.io/modh/odh-pytorch-notebook + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-6_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-6_PLACEHOLDER + name: "1.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-rocm-minimal-notebook-imagestream.yaml b/manifests/base/jupyter-rocm-minimal-notebook-imagestream.yaml index bd5e898075..09f2f21963 100644 --- a/manifests/base/jupyter-rocm-minimal-notebook-imagestream.yaml +++ b/manifests/base/jupyter-rocm-minimal-notebook-imagestream.yaml @@ -5,7 +5,7 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/blob/main/jupyter/rocm" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/blob/main/rocm" opendatahub.io/notebook-image-name: "Jupyter | Minimal | ROCm | Python 3.12" opendatahub.io/notebook-image-desc: "Jupyter ROCm notebook image for ODH 
notebooks." opendatahub.io/notebook-image-order: "5" @@ -28,7 +28,7 @@ spec: [ {"name": "JupyterLab", "version": "4.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/rocm-notebooks opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-rocm-py312-ubi9-commit-n_PLACEHOLDER from: @@ -50,7 +50,7 @@ spec: [ {"name": "JupyterLab", "version": "4.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/rocm-notebooks opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-rocm-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -59,3 +59,25 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "ROCm", "version": "6.1"}, + {"name": "Python", "version": "v3.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab", "version": "4.2"} + ] + openshift.io/imported-from: quay.io/modh/rocm-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-minimal-rocm-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-rocm-pytorch-notebook-imagestream.yaml b/manifests/base/jupyter-rocm-pytorch-notebook-imagestream.yaml index cd7f0d4869..70672a4ffb 100644 --- a/manifests/base/jupyter-rocm-pytorch-notebook-imagestream.yaml +++ b/manifests/base/jupyter-rocm-pytorch-notebook-imagestream.yaml @@ -5,8 +5,8 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: 
"https://github.com/opendatahub-io/notebooks/blob/main/jupyter/rocm/pytorch" - opendatahub.io/notebook-image-name: "Jupyter | PyTorch | ROCm | Python 3.12" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/blob/main/jupyter/rocm/pytorch" + opendatahub.io/notebook-image-name: "Jupyter | PyTorch | ROCm | Python 3.11" opendatahub.io/notebook-image-desc: "Jupyter ROCm optimized PyTorch notebook image for ODH notebooks." opendatahub.io/notebook-image-order: "12" opendatahub.io/recommended-accelerators: '["amd.com/gpu"]' @@ -85,7 +85,7 @@ spec: {"name": "MySQL Connector/Python", "version": "9.3"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/rocm-notebooks opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-rocm-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -94,3 +94,41 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.11"}, + {"name": "ROCm-PyTorch", "version": "2.4"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab","version": "4.2"}, + {"name": "ROCm-PyTorch", "version": "2.4"}, + {"name": "Tensorboard", "version": "2.16"}, + {"name": "Kafka-Python-ng", "version": "2.2"}, + {"name": "Matplotlib", "version": "3.9"}, + {"name": "Numpy", "version": "2.1"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.5"}, + {"name": "Scipy", "version": "1.14"}, + {"name": "Odh-Elyra", "version": "4.2"}, + {"name": "PyMongo", "version": "4.8"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.26"}, + {"name": "Sklearn-onnx", "version": "1.17"}, + {"name": "Psycopg", "version": "3.2"}, + {"name": "MySQL 
Connector/Python", "version": "9.0"}, + {"name": "Kubeflow-Training", "version": "1.8"} + ] + openshift.io/imported-from: quay.io/modh/rocm-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-pytorch-rocm-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-rocm-tensorflow-notebook-imagestream.yaml b/manifests/base/jupyter-rocm-tensorflow-notebook-imagestream.yaml index 3a9f3d3e13..c0c6b58fb2 100644 --- a/manifests/base/jupyter-rocm-tensorflow-notebook-imagestream.yaml +++ b/manifests/base/jupyter-rocm-tensorflow-notebook-imagestream.yaml @@ -5,7 +5,7 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/blob/main/jupyter/rocm/tensorflow" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/blob/main/jupyter/rocm/tensorflow" opendatahub.io/notebook-image-name: "Jupyter | TensorFlow | ROCm | Python 3.12" opendatahub.io/notebook-image-desc: "Jupyter ROCm optimized TensorFlow notebook image for ODH notebooks." 
opendatahub.io/notebook-image-order: "16" @@ -45,7 +45,7 @@ spec: {"name": "Psycopg", "version": "3.2"}, {"name": "MySQL Connector/Python", "version": "9.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/rocm-notebooks opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-commit-n_PLACEHOLDER from: @@ -83,7 +83,7 @@ spec: {"name": "Psycopg", "version": "3.2"}, {"name": "MySQL Connector/Python", "version": "9.3"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/rocm-notebooks opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -92,3 +92,40 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.11"}, + {"name": "ROCm-TensorFlow", "version": "2.14"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab","version": "4.2"}, + {"name": "ROCm-TensorFlow", "version": "2.14"}, + {"name": "Tensorboard", "version": "2.14"}, + {"name": "Kafka-Python-ng", "version": "2.2"}, + {"name": "Matplotlib", "version": "3.9"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.5"}, + {"name": "Scipy", "version": "1.14"}, + {"name": "Odh-Elyra", "version": "4.2"}, + {"name": "PyMongo", "version": "4.8"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.24"}, + {"name": "Sklearn-onnx", "version": "1.17"}, + {"name": "Psycopg", "version": "3.2"}, + {"name": "MySQL Connector/Python", "version": "9.0"} + ] + openshift.io/imported-from: quay.io/modh/rocm-notebooks + 
opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-tensorflow-notebook-imagestream.yaml b/manifests/base/jupyter-tensorflow-notebook-imagestream.yaml index dd6da54ce9..a01637385d 100644 --- a/manifests/base/jupyter-tensorflow-notebook-imagestream.yaml +++ b/manifests/base/jupyter-tensorflow-notebook-imagestream.yaml @@ -5,12 +5,12 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/blob/main/jupyter/tensorflow" - opendatahub.io/notebook-image-name: "Jupyter | TensorFlow | CUDA | Python 3.12" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/blob/main/jupyter/tensorflow" + opendatahub.io/notebook-image-name: "Jupyter | TensorFlow | CUDA | Python 3.11" opendatahub.io/notebook-image-desc: "Jupyter notebook image with TensorFlow libraries and dependencies to start experimenting with advanced AI/ML notebooks." 
opendatahub.io/notebook-image-order: "14" opendatahub.io/recommended-accelerators: '["nvidia.com/gpu"]' - name: jupyter-tensorflow-notebook + name: tensorflow spec: lookupPolicy: local: true @@ -48,7 +48,7 @@ spec: {"name": "Psycopg", "version": "3.2"}, {"name": "MySQL Connector/Python", "version": "9.4"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-commit-n_PLACEHOLDER from: @@ -89,7 +89,7 @@ spec: {"name": "Psycopg", "version": "3.2"}, {"name": "MySQL Connector/Python", "version": "9.3"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/cuda-notebooks opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -98,3 +98,186 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "CUDA", "version": "12.4"}, + {"name": "Python", "version": "v3.11"}, + {"name": "TensorFlow", "version": "2.17"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab","version": "4.2"}, + {"name": "TensorFlow", "version": "2.17"}, + {"name": "Tensorboard", "version": "2.17"}, + {"name": "Nvidia-CUDA-CU12-Bundle", "version": "12.3"}, + {"name": "Boto3", "version": "1.35"}, + {"name": "Kafka-Python-ng", "version": "2.2"}, + {"name": "Kfp", "version": "2.5"}, + {"name": "Matplotlib", "version": "3.9"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.5"}, + {"name": "Scipy", "version": "1.14"}, + {"name": "Odh-Elyra", "version": "4.2"}, + {"name": "PyMongo", "version": 
"4.8"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.24"}, + {"name": "Sklearn-onnx", "version": "1.17"}, + {"name": "Psycopg", "version": "3.2"}, + {"name": "MySQL Connector/Python", "version": "9.0"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "CUDA", "version": "12.1"}, + {"name": "Python", "version": "v3.9"}, + {"name": "TensorFlow", "version": "2.15"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "TensorFlow", "version": "2.15"}, + {"name": "Tensorboard", "version": "2.15"}, + {"name": "Boto3", "version": "1.34"}, + {"name": "Kafka-Python", "version": "2.0"}, + {"name": "Kfp", "version": "2.5"}, + {"name": "Matplotlib", "version": "3.8"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "2.2"}, + {"name": "Scikit-learn", "version": "1.4"}, + {"name": "Scipy", "version": "1.12"}, + {"name": "Odh-Elyra", "version": "3.16"}, + {"name": "PyMongo", "version": "4.6"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.21"}, + {"name": "Sklearn-onnx", "version": "1.16"}, + {"name": "Psycopg", "version": "3.1"}, + {"name": "MySQL Connector/Python", "version": "8.3"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + 
referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.8"}, + {"name":"Python","version":"v3.9"}, + {"name":"TensorFlow","version":"2.13"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"TensorFlow","version":"2.13"}, + {"name":"Tensorboard","version":"2.13"}, + {"name":"Boto3","version":"1.28"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.3"}, + {"name":"Scipy","version":"1.11"}, + {"name":"Elyra","version":"3.15"}, + {"name":"PyMongo","version":"4.5"}, + {"name":"Pyodbc","version":"4.0"}, + {"name":"Codeflare-SDK","version":"0.13"}, + {"name":"Sklearn-onnx","version":"1.15"}, + {"name":"Psycopg","version":"3.1"}, + {"name":"MySQL Connector/Python","version":"8.0"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source + # N - 5 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.8"}, + {"name":"Python","version":"v3.9"}, + {"name":"TensorFlow","version":"2.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"TensorFlow","version":"2.11"}, + {"name":"Tensorboard","version":"2.11"}, + {"name":"Boto3","version":"1.26"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + 
{"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.2"}, + {"name":"Scipy","version":"1.10"}, + {"name":"Elyra","version":"3.15"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-5_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-5_PLACEHOLDER + name: "2023.1" + referencePolicy: + type: Source + # N - 6 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"CUDA","version":"11.4"}, + {"name":"Python","version":"v3.8"}, + {"name":"TensorFlow","version":"2.7"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"TensorFlow","version":"2.7"}, + {"name":"Tensorboard","version":"2.6"}, + {"name":"Boto3","version":"1.17"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Matplotlib","version":"3.4"}, + {"name":"Numpy","version":"1.19"}, + {"name":"Pandas","version":"1.2"}, + {"name":"Scikit-learn","version":"0.24"}, + {"name":"Scipy","version":"1.6"} + ] + openshift.io/imported-from: quay.io/modh/cuda-notebooks + opendatahub.io/image-tag-outdated: 'true' + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-6_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-6_PLACEHOLDER + name: "1.2" + referencePolicy: + type: Source diff --git a/manifests/base/jupyter-trustyai-notebook-imagestream.yaml b/manifests/base/jupyter-trustyai-notebook-imagestream.yaml index c85a4f0e73..8dd7b77a07 100644 --- a/manifests/base/jupyter-trustyai-notebook-imagestream.yaml +++ b/manifests/base/jupyter-trustyai-notebook-imagestream.yaml @@ -5,11 +5,11 @@ metadata: labels: opendatahub.io/notebook-image: "true" annotations: - opendatahub.io/notebook-image-url: 
"https://github.com/opendatahub-io/notebooks/tree/main/jupyter/trustyai" + opendatahub.io/notebook-image-url: "https://github.com/red-hat-data-services/notebooks/tree/main/jupyter/trustyai" opendatahub.io/notebook-image-name: "Jupyter | TrustyAI | CPU | Python 3.12" opendatahub.io/notebook-image-desc: "Jupyter TrustyAI notebook integrates the TrustyAI Explainability Toolkit on Jupyter environment." - opendatahub.io/notebook-image-order: "17" - name: jupyter-trustyai-notebook + opendatahub.io/notebook-image-order: "16" + name: odh-trustyai-notebook spec: lookupPolicy: local: true @@ -47,7 +47,7 @@ spec: {"name": "MySQL Connector/Python", "version": "9.4"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/odh-trustyai-notebook opendatahub.io/workbench-image-recommended: 'true' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-trustyai-cpu-py312-ubi9-commit-n_PLACEHOLDER from: @@ -68,28 +68,28 @@ spec: [ {"name": "JupyterLab","version": "4.4"}, {"name": "TrustyAI", "version": "0.6"}, - {"name": "Transformers", "version": "4.53"}, + {"name": "Transformers", "version": "4.55"}, {"name": "Datasets", "version": "3.4"}, {"name": "Accelerate", "version": "1.5"}, {"name": "Torch", "version": "2.6"}, {"name": "Boto3", "version": "1.37"}, {"name": "Kafka-Python-ng", "version": "2.2"}, {"name": "Kfp", "version": "2.12"}, - {"name": "Matplotlib", "version": "3.6"}, - {"name": "Numpy", "version": "1.24"}, + {"name": "Matplotlib", "version": "3.10"}, + {"name": "Numpy", "version": "1.26"}, {"name": "Pandas", "version": "1.5"}, - {"name": "Scikit-learn", "version": "1.5"}, + {"name": "Scikit-learn", "version": "1.7"}, {"name": "Scipy", "version": "1.15"}, {"name": "Odh-Elyra", "version": "4.2"}, {"name": "PyMongo", "version": "4.11"}, {"name": "Pyodbc", "version": "5.2"}, - {"name": "Codeflare-SDK", "version": "0.30"}, + {"name": "Codeflare-SDK", "version": 
"0.29"}, {"name": "Sklearn-onnx", "version": "1.18"}, {"name": "Psycopg", "version": "3.2"}, {"name": "MySQL Connector/Python", "version": "9.3"}, {"name": "Kubeflow-Training", "version": "1.9"} ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/odh-trustyai-notebook opendatahub.io/workbench-image-recommended: 'false' opendatahub.io/notebook-build-commit: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-1_PLACEHOLDER from: @@ -98,3 +98,147 @@ spec: name: "2025.1" referencePolicy: type: Source + # N - 2 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.11"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "JupyterLab","version": "4.2"}, + {"name": "TrustyAI", "version": "0.6"}, + {"name": "Transformers", "version": "4.38"}, + {"name": "Datasets", "version": "2.21"}, + {"name": "Accelerate", "version": "0.34"}, + {"name": "Torch", "version": "2.2"}, + {"name": "Boto3", "version": "1.35"}, + {"name": "Kafka-Python-ng", "version": "2.2"}, + {"name": "Kfp", "version": "2.9"}, + {"name": "Matplotlib", "version": "3.10"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "1.5"}, + {"name": "Scikit-learn", "version": "1.2"}, + {"name": "Scipy", "version": "1.14"}, + {"name": "Odh-Elyra", "version": "4.2"}, + {"name": "PyMongo", "version": "4.8"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.26"}, + {"name": "Sklearn-onnx", "version": "1.17"}, + {"name": "Psycopg", "version": "3.2"}, + {"name": "MySQL Connector/Python", "version": "9.0"}, + {"name": "Kubeflow-Training", "version": "1.8"} + ] + openshift.io/imported-from: quay.io/modh/odh-trustyai-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-2_PLACEHOLDER + from: + kind: 
DockerImage + name: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-2_PLACEHOLDER + name: "2024.2" + referencePolicy: + type: Source + # N - 3 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name": "Python", "version": "v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name": "TrustyAI", "version": "0.6"}, + {"name": "Boto3", "version": "1.34"}, + {"name": "Kafka-Python", "version": "2.0"}, + {"name": "Kfp", "version": "2.11"}, + {"name": "Matplotlib", "version": "3.10"}, + {"name": "Numpy", "version": "1.26"}, + {"name": "Pandas", "version": "1.5"}, + {"name": "Scikit-learn", "version": "1.4"}, + {"name": "Scipy", "version": "1.12"}, + {"name": "Odh-Elyra", "version": "3.16"}, + {"name": "PyMongo", "version": "4.6"}, + {"name": "Pyodbc", "version": "5.1"}, + {"name": "Codeflare-SDK", "version": "0.21"}, + {"name": "Sklearn-onnx", "version": "1.16"}, + {"name": "Psycopg", "version": "3.1"}, + {"name": "MySQL Connector/Python", "version": "8.3"} + ] + openshift.io/imported-from: quay.io/modh/odh-trustyai-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-3_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-3_PLACEHOLDER + name: "2024.1" + referencePolicy: + type: Source + # N - 4 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"TrustyAI","version":"0.6"}, + {"name":"Boto3","version":"1.28"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.3"}, + {"name":"Scipy","version":"1.11"}, + 
{"name":"Elyra","version":"3.15"}, + {"name":"PyMongo","version":"4.5"}, + {"name":"Pyodbc","version":"4.0"}, + {"name":"Codeflare-SDK","version":"0.14"}, + {"name":"Sklearn-onnx","version":"1.15"}, + {"name":"Psycopg","version":"3.1"}, + {"name":"MySQL Connector/Python","version":"8.0"} + ] + openshift.io/imported-from: quay.io/modh/odh-trustyai-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-4_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-4_PLACEHOLDER + name: "2023.2" + referencePolicy: + type: Source + # N - 5 Version of the image + - annotations: + # language=json + opendatahub.io/notebook-software: | + [ + {"name":"Python","version":"v3.9"} + ] + # language=json + opendatahub.io/notebook-python-dependencies: | + [ + {"name":"TrustyAI","version":"0.3"}, + {"name":"Boto3","version":"1.26"}, + {"name":"Kafka-Python","version":"2.0"}, + {"name":"Kfp-tekton","version":"1.5"}, + {"name":"Matplotlib","version":"3.6"}, + {"name":"Numpy","version":"1.24"}, + {"name":"Pandas","version":"1.5"}, + {"name":"Scikit-learn","version":"1.2"}, + {"name":"Scipy","version":"1.10"}, + {"name":"Elyra","version":"3.15"} + ] + openshift.io/imported-from: quay.io/modh/odh-trustyai-notebook + opendatahub.io/image-tag-outdated: "true" + opendatahub.io/notebook-build-commit: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-5_PLACEHOLDER + from: + kind: DockerImage + name: odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-5_PLACEHOLDER + name: "2023.1" + referencePolicy: + type: Source diff --git a/manifests/base/kustomization.yaml b/manifests/base/kustomization.yaml index 8a96ac553a..90dc5e0dc0 100644 --- a/manifests/base/kustomization.yaml +++ b/manifests/base/kustomization.yaml @@ -9,8 +9,8 @@ resources: - jupyter-tensorflow-notebook-imagestream.yaml - jupyter-trustyai-notebook-imagestream.yaml - code-server-notebook-imagestream.yaml 
- - rstudio-notebook-imagestream.yaml - - rstudio-gpu-notebook-imagestream.yaml + - rstudio-buildconfig.yaml + - cuda-rstudio-buildconfig.yaml - jupyter-rocm-minimal-notebook-imagestream.yaml - jupyter-rocm-pytorch-notebook-imagestream.yaml - jupyter-rocm-tensorflow-notebook-imagestream.yaml @@ -52,7 +52,7 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-notebook + name: s2i-minimal-notebook version: v1 - source: fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-1 @@ -65,7 +65,72 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-notebook + name: s2i-minimal-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-minimal-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-minimal-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-minimal-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-5 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.5.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-minimal-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-6 + kind: ConfigMap + name: notebook-image-params + 
version: v1 + targets: + - fieldPaths: + - spec.tags.6.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-minimal-notebook version: v1 - source: fieldPath: data.odh-workbench-jupyter-datascience-cpu-py312-ubi9-n @@ -74,27 +139,625 @@ replacements: version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-5 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.5.from.name + select: + group: image.openshift.io + kind: ImageStream + name: 
s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-6 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.6.from.name + select: + group: image.openshift.io + kind: ImageStream + name: s2i-generic-data-science-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-5 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.5.from.name + select: + 
group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-6 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.6.from.name + select: + group: image.openshift.io + kind: ImageStream + name: minimal-gpu + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-5 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.5.from.name + select: + group: 
image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-6 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.6.from.name + select: + group: image.openshift.io + kind: ImageStream + name: pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-5 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.5.from.name + 
select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-6 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.6.from.name + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: odh-trustyai-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: odh-trustyai-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: odh-trustyai-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: odh-trustyai-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: odh-trustyai-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-5 + kind: ConfigMap + name: notebook-image-params + version: 
v1 + targets: + - fieldPaths: + - spec.tags.5.from.name + select: + group: image.openshift.io + kind: ImageStream + name: odh-trustyai-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-3 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.3.from.name + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-4 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.4.from.name + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-rocm-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-minimal + version: v1 + - source: + fieldPath: 
data.odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-minimal + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-minimal + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-2 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.2.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-pytorch + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-n + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.0.from.name + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-1 + kind: ConfigMap + name: notebook-image-params + version: v1 + targets: + - fieldPaths: + - spec.tags.1.from.name select: group: image.openshift.io kind: 
ImageStream - name: jupyter-datascience-notebook + name: jupyter-rocm-tensorflow version: v1 - source: - fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-1 + fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-2 kind: ConfigMap name: notebook-image-params version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.2.from.name select: group: image.openshift.io kind: ImageStream - name: jupyter-datascience-notebook + name: jupyter-rocm-tensorflow version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-cuda-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-n kind: ConfigMap name: notebook-image-params version: v1 @@ -104,371 +767,371 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-gpu-notebook + name: jupyter-pytorch-llmcompressor version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-1 + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py312-ubi9-commit-n kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-gpu-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-1 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-pytorch-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-1 + fieldPath: 
data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-2 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-pytorch-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-3 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-tensorflow-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-1 + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-4 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-tensorflow-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-5 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.5.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-trustyai-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-1 + fieldPath: 
data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-6 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.6.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-trustyai-notebook + name: s2i-minimal-notebook version: v1 - source: - fieldPath: data.odh-workbench-codeserver-datascience-cpu-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py312-ubi9-commit-n kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: code-server-notebook + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-1 + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-1 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: code-server-notebook + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cpu-py311-c9s-n + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-2 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: rstudio-notebook + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cpu-py311-c9s-n-1 + fieldPath: 
data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-3 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: rstudio-notebook + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cuda-py311-c9s-n + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-4 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: rstudio-gpu-notebook + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cuda-py311-c9s-n-1 + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-5 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.5.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: rstudio-gpu-notebook + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-rocm-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-6 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.6.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-rocm-minimal + name: s2i-generic-data-science-notebook version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-1 + fieldPath: 
data.odh-workbench-jupyter-minimal-cuda-py312-ubi9-commit-n kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-rocm-minimal + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-1 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-rocm-pytorch + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-1 + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-2 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-rocm-pytorch + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-3 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-rocm-tensorflow + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-1 + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-4 kind: ConfigMap - name: 
notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.from.name + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-rocm-tensorflow + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-n + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-5 kind: ConfigMap - name: notebook-image-params + name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.from.name + - spec.tags.5.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-pytorch-llmcompressor + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-cpu-py312-ubi9-commit-n + fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-6 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.6.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-notebook + name: minimal-gpu version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-cpu-py311-ubi9-commit-n-1 + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py312-ubi9-commit-n kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-notebook + name: pytorch version: v1 - source: - fieldPath: data.odh-workbench-jupyter-datascience-cpu-py312-ubi9-commit-n + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-1 kind: ConfigMap name: notebook-image-commithash version: v1 
targets: - fieldPaths: - - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-datascience-notebook + name: pytorch version: v1 - source: - fieldPath: data.odh-workbench-jupyter-datascience-cpu-py311-ubi9-commit-n-1 + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-2 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-datascience-notebook + name: pytorch version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-cuda-py312-ubi9-commit-n + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-3 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-gpu-notebook + name: pytorch version: v1 - source: - fieldPath: data.odh-workbench-jupyter-minimal-cuda-py311-ubi9-commit-n-1 + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-4 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-minimal-gpu-notebook + name: pytorch version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py312-ubi9-commit-n + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-5 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - 
- spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.5.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-pytorch-notebook + name: pytorch version: v1 - source: - fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-1 + fieldPath: data.odh-workbench-jupyter-pytorch-cuda-py311-ubi9-commit-n-6 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.6.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: jupyter-pytorch-notebook + name: pytorch version: v1 - source: fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-commit-n @@ -481,7 +1144,7 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-tensorflow-notebook + name: tensorflow version: v1 - source: fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-1 @@ -494,7 +1157,72 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-tensorflow-notebook + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-2 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-3 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-4 + kind: 
ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-5 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.5.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow + version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-commit-n-6 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.6.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: tensorflow version: v1 - source: fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py312-ubi9-commit-n @@ -507,7 +1235,7 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-trustyai-notebook + name: odh-trustyai-notebook version: v1 - source: fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-1 @@ -520,62 +1248,62 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: jupyter-trustyai-notebook + name: odh-trustyai-notebook version: v1 - source: - fieldPath: data.odh-workbench-codeserver-datascience-cpu-py312-ubi9-commit-n + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-2 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: code-server-notebook + name: odh-trustyai-notebook version: v1 - source: - fieldPath: 
data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-1 + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-3 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: code-server-notebook + name: odh-trustyai-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cpu-py311-c9s-commit-n + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-4 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.0.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: rstudio-notebook + name: odh-trustyai-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cpu-py311-c9s-commit-n-1 + fieldPath: data.odh-workbench-jupyter-trustyai-cpu-py311-ubi9-commit-n-5 kind: ConfigMap name: notebook-image-commithash version: v1 targets: - fieldPaths: - - spec.tags.1.annotations.[opendatahub.io/notebook-build-commit] + - spec.tags.5.annotations.[opendatahub.io/notebook-build-commit] select: group: image.openshift.io kind: ImageStream - name: rstudio-notebook + name: odh-trustyai-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cuda-py311-c9s-commit-n + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py312-ubi9-commit-n kind: ConfigMap name: notebook-image-commithash version: v1 @@ -585,10 +1313,10 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: rstudio-gpu-notebook + name: code-server-notebook version: v1 - source: - fieldPath: data.odh-workbench-rstudio-minimal-cuda-py311-c9s-commit-n-1 + fieldPath: 
data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-1 kind: ConfigMap name: notebook-image-commithash version: v1 @@ -598,7 +1326,46 @@ replacements: select: group: image.openshift.io kind: ImageStream - name: rstudio-gpu-notebook + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-2 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-3 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.3.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook + version: v1 + - source: + fieldPath: data.odh-workbench-codeserver-datascience-cpu-py311-ubi9-commit-n-4 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.4.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: code-server-notebook version: v1 - source: fieldPath: data.odh-workbench-jupyter-minimal-rocm-py312-ubi9-commit-n @@ -626,6 +1393,19 @@ replacements: kind: ImageStream name: jupyter-rocm-minimal version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-minimal-rocm-py311-ubi9-commit-n-2 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-minimal + version: v1 - source: fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py312-ubi9-commit-n kind: ConfigMap @@ -652,6 +1432,19 @@ 
replacements: kind: ImageStream name: jupyter-rocm-pytorch version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-pytorch-rocm-py311-ubi9-commit-n-2 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-pytorch + version: v1 - source: fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-commit-n kind: ConfigMap @@ -678,6 +1471,19 @@ replacements: kind: ImageStream name: jupyter-rocm-tensorflow version: v1 + - source: + fieldPath: data.odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-commit-n-2 + kind: ConfigMap + name: notebook-image-commithash + version: v1 + targets: + - fieldPaths: + - spec.tags.2.annotations.[opendatahub.io/notebook-build-commit] + select: + group: image.openshift.io + kind: ImageStream + name: jupyter-rocm-tensorflow + version: v1 - source: fieldPath: data.odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-commit-n kind: ConfigMap diff --git a/manifests/base/params-latest.env b/manifests/base/params-latest.env index 11088313f9..6506e913e1 100644 --- a/manifests/base/params-latest.env +++ b/manifests/base/params-latest.env @@ -1,20 +1,18 @@ -odh-workbench-jupyter-minimal-cpu-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-minimal-cpu-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-minimal-cuda-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-minimal-cuda-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-minimal-rocm-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-minimal-rocm-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-datascience-cpu-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-datascience-cpu-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-pytorch-cuda-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-pytorch-cuda-py312-ubi9:2025b-v1.36 
-odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-pytorch-rocm-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-pytorch-rocm-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-datascience-cpu-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-datascience-cpu-py312-ubi9:2025b-v1.36 -odh-workbench-jupyter-trustyai-cpu-py312-ubi9-n=quay.io/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9:2025b-v1.36 -odh-workbench-codeserver-datascience-cpu-py312-ubi9-n=quay.io/opendatahub/odh-workbench-codeserver-datascience-cpu-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-minimal-cpu-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-minimal-cpu-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-pytorch-cuda-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-pytorch-cuda-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-pytorch-rocm-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-pytorch-rocm-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9:2025b-v1.36 -odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-n=quay.io/opendatahub/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9:2025b-v1.36 -odh-workbench-rstudio-minimal-cuda-py311-c9s-n=quay.io/opendatahub/odh-workbench-rstudio-minimal-cuda-py311-c9s:2025b-v1.36 -odh-workbench-rstudio-minimal-cpu-py311-c9s-n=quay.io/opendatahub/odh-workbench-rstudio-minimal-cpu-py311-c9s:2025b-v1.36 
+odh-workbench-jupyter-minimal-cpu-py312-ubi9-n=dummy +odh-workbench-jupyter-minimal-cuda-py312-ubi9-n=dummy +odh-workbench-jupyter-minimal-rocm-py312-ubi9-n=dummy +odh-workbench-jupyter-datascience-cpu-py312-ubi9-n=dummy +odh-workbench-jupyter-pytorch-cuda-py312-ubi9-n=dummy +odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-n=dummy +odh-workbench-jupyter-pytorch-rocm-py312-ubi9-n=dummy +odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-n=dummy +odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-n=dummy +odh-workbench-jupyter-trustyai-cpu-py312-ubi9-n=dummy +odh-workbench-codeserver-datascience-cpu-py312-ubi9-n=dummy +odh-pipeline-runtime-minimal-cpu-py312-ubi9-n=dummy +odh-pipeline-runtime-datascience-cpu-py312-ubi9-n=dummy +odh-pipeline-runtime-pytorch-cuda-py312-ubi9-n=dummy +odh-pipeline-runtime-pytorch-rocm-py312-ubi9-n=dummy +odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-n=dummy +odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-n=dummy +odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-n=dummy diff --git a/manifests/base/params.env b/manifests/base/params.env index b03b8f1525..6db7e5e30f 100644 --- a/manifests/base/params.env +++ b/manifests/base/params.env @@ -1,18 +1,45 @@ -odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-minimal-cpu-py311-ubi9@sha256:45ec6fc94d5f0eb1efff548687f4beacb2f5c4a1bef78a38b9475e1583536d7d -odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-minimal-cuda-py311-ubi9@sha256:b23028134c94c2392a74b89e62dc7f2b5223b15e1c1fb691c6c39a36a4149ee5 -odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-minimal-rocm-py311-ubi9@sha256:0b1f4efb8008ac53c9becdef770f498a263424c3ccea31f4fb377fc9a48b2178 -odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-datascience-cpu-py311-ubi9@sha256:97ac518d5982f0ad438b6c33f36162715be651fb4dbe1a80f972712997c0de22 
-odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-pytorch-cuda-py311-ubi9@sha256:b9245c43060321b3cb8261fee538765e1debe43e5546103fa30a7c61fdaec032 -odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-pytorch-rocm-py311-ubi9@sha256:09ead5584a314601190b2aa72ccf31063036c66f113c0a217feeac848020efe6 -odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-tensorflow-cuda-py311-ubi9@sha256:9e40b024480c35d5c67114528ff28a87ff96add636bfdf9801099f581f53157e -odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-tensorflow-rocm-py311-ubi9@sha256:c104ee21be07288428e13fc9a76cb8c6f8cd5eb0a846a9b7cf52a574d4280b8d -odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-jupyter-trustyai-cpu-py311-ubi9@sha256:ba950f4071d79a131e48eb6c892501cbc137f86ebe58de81a3ce44aaebe74301 -odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-1=quay.io/opendatahub/odh-workbench-codeserver-datascience-cpu-py311-ubi9@sha256:633bbba923717b75656a56752ed1fc325a6ab255589fbb63ce8a3ffc0ee0d730 -odh-pipeline-runtime-minimal-cpu-py311-ubi9-n-1=quay.io/opendatahub/odh-pipeline-runtime-minimal-cpu-py311-ubi9@sha256:4461dd9388cbb9354dd716c6f7680d58c1051eb59ef16e4486bbc4d5ec7fc5ee -odh-pipeline-runtime-datascience-cpu-py311-ubi9-n-1=quay.io/opendatahub/odh-pipeline-runtime-datascience-cpu-py311-ubi9@sha256:62ceae2f54e06b7231027d1fed7392cfb7cbf5952929fc85b4342e3cb75fab75 -odh-pipeline-runtime-pytorch-cuda-py311-ubi9-n-1=quay.io/opendatahub/odh-pipeline-runtime-pytorch-cuda-py311-ubi9@sha256:56dc5275d80a7445ab05a53a4dbf347754bf08fbf892b58d2c10e41b0336ddcc -odh-pipeline-runtime-pytorch-rocm-py311-ubi9-n-1=quay.io/opendatahub/odh-pipeline-runtime-pytorch-rocm-py311-ubi9@sha256:b55bd2abd3a153ccd78069139a6b60a1fb0917d597f5b5a23b96044ed9eed134 
-odh-pipeline-runtime-tensorflow-cuda-py311-ubi9-n-1=quay.io/opendatahub/odh-pipeline-runtime-tensorflow-cuda-py311-ubi9@sha256:1582cb6d7dd2f949fa73e323b19b5ec0e10bc3e01fe0e752ceed275fe9613bc4 -odh-pipeline-runtime-tensorflow-rocm-py311-ubi9-n-1=quay.io/opendatahub/odh-pipeline-runtime-tensorflow-rocm-py311-ubi9@sha256:71cbe8249b57af33e92216c11979e669882871cac4743a1cf5bd3230e671390a -odh-workbench-rstudio-minimal-cpu-py311-c9s-n-1=quay.io/opendatahub/workbench-images@sha256:562fde56f25507435520920ac41edd9ced0e025198a56142c44692f6340e0f0f -odh-workbench-rstudio-minimal-cuda-py311-c9s-n-1=quay.io/opendatahub/workbench-images@sha256:f2988efa77ba4725a701f5717e8b70b5141ad0cb04dfc42a68293752b28324a7 +odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-1=dummy +odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-1=dummy +odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-1=dummy +odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-1=dummy +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-1=dummy +odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-1=dummy +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-1=dummy +odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-1=dummy +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-1=dummy +odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-1=dummy +odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-2=quay.io/modh/odh-minimal-notebook-container@sha256:2217d8a9cbf84c2bd3e6c6dc09089559e8a3905687ca3739e897c4b45e2b00b3 +odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-3=quay.io/modh/odh-minimal-notebook-container@sha256:e2296a1386e4d9756c386b4c7dc44bac6f61b99b3b894a10c9ff2d8d5602ca4e +odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-4=quay.io/modh/odh-minimal-notebook-container@sha256:4ba72ae7f367a36030470fa4ac22eca0aab285c7c3f1c4cdcc33dc07aa522143 +odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-5=quay.io/modh/odh-minimal-notebook-container@sha256:eec50e5518176d5a31da739596a7ddae032d73851f9107846a587442ebd10a82 
+odh-workbench-jupyter-minimal-cpu-py311-ubi9-n-6=quay.io/modh/odh-minimal-notebook-container@sha256:39068767eebdf3a127fe8857fbdaca0832cdfef69eed6ec3ff6ed1858029420f +odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-2=quay.io/modh/cuda-notebooks@sha256:55598c7de919afc6390cf59595549dc4554102481617ec42beaa3c47ef26d5e4 +odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-3=quay.io/modh/cuda-notebooks@sha256:81484fafe7012792ecdda28fef89287219c21b99c4e79a504aff0b265d94b429 +odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-4=quay.io/modh/cuda-notebooks@sha256:a484d344f6feab25e025ea75575d837f5725f819b50a6e3476cef1f9925c07a5 +odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-5=quay.io/modh/cuda-notebooks@sha256:f6cdc993b4d493ffaec876abb724ce44b3c6fc37560af974072b346e45ac1a3b +odh-workbench-jupyter-minimal-cuda-py311-ubi9-n-6=quay.io/modh/cuda-notebooks@sha256:00c53599f5085beedd0debb062652a1856b19921ccf59bd76134471d24c3fa7d +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-2=quay.io/modh/odh-pytorch-notebook@sha256:20f7ab8e7954106ea5e22f3ee0ba8bc7b03975e5735049a765e021aa7eb06861 +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-3=quay.io/modh/odh-pytorch-notebook@sha256:2403b3dccc3daf5b45a973c49331fdac4ec66e2e020597975fcd9cb4a625099b +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-4=quay.io/modh/odh-pytorch-notebook@sha256:806e6524cb46bcbd228e37a92191c936bb4c117100fc731604e19df80286b19d +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-5=quay.io/modh/odh-pytorch-notebook@sha256:97b346197e6fc568c2eb52cb82e13a206277f27c21e299d1c211997f140f638b +odh-workbench-jupyter-pytorch-cuda-py311-ubi9-n-6=quay.io/modh/odh-pytorch-notebook@sha256:b68e0192abf7d46c8c6876d0819b66c6a2d4a1e674f8893f8a71ffdcba96866c +odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-2=quay.io/modh/odh-generic-data-science-notebook@sha256:d0ba5fc23e2b3846763f60e8ade8a0f561cdcd2bf6717df6e732f6f8b68b89c4 
+odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-3=quay.io/modh/odh-generic-data-science-notebook@sha256:3e51c462fc03b5ccb080f006ced86d36480da036fa04b8685a3e4d6d51a817ba +odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-4=quay.io/modh/odh-generic-data-science-notebook@sha256:39853fd63555ebba097483c5ac6a375d6039e5522c7294684efb7966ba4bc693 +odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-5=quay.io/modh/odh-generic-data-science-notebook@sha256:e2cab24ebe935d87f7596418772f5a97ce6a2e747ba0c1fd4cec08a728e99403 +odh-workbench-jupyter-datascience-cpu-py311-ubi9-n-6=quay.io/modh/odh-generic-data-science-notebook@sha256:76e6af79c601a323f75a58e7005de0beac66b8cccc3d2b67efb6d11d85f0cfa1 +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-2=quay.io/modh/cuda-notebooks@sha256:99d3fb964e635873214de4676c259a96c2ea25f3f79cc4bead5bc9f39aba34c0 +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-3=quay.io/modh/cuda-notebooks@sha256:0e57a0b756872636489ccd713dc9f00ad69d0c481a66ee0de97860f13b4fedcd +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-4=quay.io/modh/cuda-notebooks@sha256:3da74d732d158b92eaada0a27fb7067fa18c8bde5033c672e23caed0f21d6481 +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-5=quay.io/modh/cuda-notebooks@sha256:88d80821ff8c5d53526794261d519125d0763b621d824f8c3222127dab7b6cc8 +odh-workbench-jupyter-tensorflow-cuda-py311-ubi9-n-6=quay.io/modh/cuda-notebooks@sha256:6fadedc5a10f5a914bb7b27cd41bc644392e5757ceaf07d930db884112054265 +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-2=quay.io/modh/odh-trustyai-notebook@sha256:a1b863c2787ba2bca292e381561ed1d92cf5bc25705edfb1ded5e0720a12d102 +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-3=quay.io/modh/odh-trustyai-notebook@sha256:70fe49cee6d5a231ddea7f94d7e21aefd3d8da71b69321f51c406a92173d3334 +odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-4=quay.io/modh/odh-trustyai-notebook@sha256:fe883d8513c5d133af1ee3f7bb0b7b37d3bada8ae73fc7209052591d4be681c0 
+odh-workbench-jupyter-trustyai-cpu-py311-ubi9-n-5=quay.io/modh/odh-trustyai-notebook@sha256:8c5e653f6bc6a2050565cf92f397991fbec952dc05cdfea74b65b8fd3047c9d4 +odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-2=quay.io/modh/codeserver@sha256:92f2a10dde5c96b29324426b4325401e8f4a0d257e439927172d5fe909289c44 +odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-3=quay.io/modh/codeserver@sha256:1fd51b0e8a14995f1f7273a4b0b40f6e7e27e225ab179959747846e54079d61e +odh-workbench-codeserver-datascience-cpu-py311-ubi9-n-4=quay.io/modh/codeserver@sha256:b1a048f3711149e36a89e0eda1a5601130fb536ecc0aabae42ab6e4d26977354 +odh-workbench-jupyter-minimal-rocm-py311-ubi9-n-2=quay.io/modh/rocm-notebooks@sha256:199367d2946fc8427611b4b96071cb411433ffbb5f0988279b10150020af22db +odh-workbench-jupyter-pytorch-rocm-py311-ubi9-n-2=quay.io/modh/rocm-notebooks@sha256:1f0b19b7ae587d638e78697c67f1290d044e48bfecccfb72d7a16faeba13f980 +odh-workbench-jupyter-tensorflow-rocm-py311-ubi9-n-2=quay.io/modh/rocm-notebooks@sha256:f94702219419e651327636b390d1872c58fd7b8f9f6b16a02c958ffb918eded3 diff --git a/manifests/base/rstudio-gpu-notebook-imagestream.yaml b/manifests/base/rstudio-gpu-notebook-imagestream.yaml deleted file mode 100644 index 35d4f8bdef..0000000000 --- a/manifests/base/rstudio-gpu-notebook-imagestream.yaml +++ /dev/null @@ -1,63 +0,0 @@ ---- -apiVersion: image.openshift.io/v1 -kind: ImageStream -metadata: - labels: - opendatahub.io/notebook-image: "true" - annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/tree/main/rstudio" - opendatahub.io/notebook-image-name: "RStudio | Minimal | CUDA | R 4.4" - opendatahub.io/notebook-image-desc: "RStudio Server Workbench image with an integrated development environment for R, a programming language designed for statistical computing and graphics." 
- opendatahub.io/notebook-image-order: "22" - opendatahub.io/recommended-accelerators: '["nvidia.com/gpu"]' - name: rstudio-gpu-notebook -spec: - lookupPolicy: - local: true - tags: - # N Version of the image - - annotations: - # language=json - opendatahub.io/notebook-software: | - [ - {"name": "CUDA", "version": "12.6"}, - {"name": "R", "version": "v4.4"}, - {"name": "Python", "version": "v3.11"} - ] - # language=json - opendatahub.io/notebook-python-dependencies: | - [ - {"name": "rstudio-server", "version": "2024.12"} - ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images - opendatahub.io/workbench-image-recommended: 'true' - opendatahub.io/notebook-build-commit: odh-workbench-rstudio-minimal-cuda-py311-c9s-commit-n_PLACEHOLDER - from: - kind: DockerImage - name: odh-workbench-rstudio-minimal-cuda-py311-c9s-n_PLACEHOLDER - name: "2025.1" - referencePolicy: - type: Source - # N - 1 Version of the image - - annotations: - # language=json - opendatahub.io/notebook-software: | - [ - {"name": "CUDA", "version": "12.1"}, - {"name": "R", "version": "v4.4"}, - {"name": "Python", "version": "v3.11"} - ] - # language=json - opendatahub.io/notebook-python-dependencies: | - [ - {"name": "rstudio-server", "version": "2024.04"} - ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images - opendatahub.io/workbench-image-recommended: 'false' - opendatahub.io/notebook-build-commit: odh-workbench-rstudio-minimal-cuda-py311-c9s-commit-n-1_PLACEHOLDER - from: - kind: DockerImage - name: odh-workbench-rstudio-minimal-cuda-py311-c9s-n-1_PLACEHOLDER - name: "2024.2" - referencePolicy: - type: Source diff --git a/manifests/base/rstudio-notebook-imagestream.yaml b/manifests/base/rstudio-notebook-imagestream.yaml deleted file mode 100644 index 1f219cddfe..0000000000 --- a/manifests/base/rstudio-notebook-imagestream.yaml +++ /dev/null @@ -1,60 +0,0 @@ ---- -apiVersion: image.openshift.io/v1 -kind: ImageStream -metadata: - labels: - 
opendatahub.io/notebook-image: "true" - annotations: - opendatahub.io/notebook-image-url: "https://github.com/opendatahub-io/notebooks/tree/main/rstudio" - opendatahub.io/notebook-image-name: "RStudio | Minimal | CPU | R 4.4" - opendatahub.io/notebook-image-desc: "RStudio Server Workbench image with an integrated development environment for R, a programming language designed for statistical computing and graphics." - opendatahub.io/notebook-image-order: "21" - name: rstudio-notebook -spec: - lookupPolicy: - local: true - tags: - # N Version of the image - - annotations: - # language=json - opendatahub.io/notebook-software: | - [ - {"name": "R", "version": "v4.4"}, - {"name": "Python", "version": "v3.11"} - ] - # language=json - opendatahub.io/notebook-python-dependencies: | - [ - {"name": "rstudio-server", "version": "2024.12"} - ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images - opendatahub.io/workbench-image-recommended: 'true' - opendatahub.io/notebook-build-commit: odh-workbench-rstudio-minimal-cpu-py311-c9s-commit-n_PLACEHOLDER - from: - kind: DockerImage - name: odh-workbench-rstudio-minimal-cpu-py311-c9s-n_PLACEHOLDER - name: "2025.1" - referencePolicy: - type: Source - # N - 1 Version of the image - - annotations: - # language=json - opendatahub.io/notebook-software: | - [ - {"name": "R", "version": "v4.4"}, - {"name": "Python", "version": "v3.11"} - ] - # language=json - opendatahub.io/notebook-python-dependencies: | - [ - {"name": "rstudio-server", "version": "2024.04"} - ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images - opendatahub.io/workbench-image-recommended: 'false' - opendatahub.io/notebook-build-commit: odh-workbench-rstudio-minimal-cpu-py311-c9s-commit-n-1_PLACEHOLDER - from: - kind: DockerImage - name: odh-workbench-rstudio-minimal-cpu-py311-c9s-n-1_PLACEHOLDER - name: "2024.1" - referencePolicy: - type: Source diff --git a/manifests/base/runtime-datascience-imagestream.yaml 
b/manifests/base/runtime-datascience-imagestream.yaml index b7d73d6fda..683d605c2e 100644 --- a/manifests/base/runtime-datascience-imagestream.yaml +++ b/manifests/base/runtime-datascience-imagestream.yaml @@ -29,7 +29,7 @@ spec: "schema_name": "runtime-image" } ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/runtime-images from: kind: DockerImage name: odh-pipeline-runtime-datascience-cpu-py312-ubi9-n_PLACEHOLDER diff --git a/manifests/base/runtime-minimal-imagestream.yaml b/manifests/base/runtime-minimal-imagestream.yaml index ef0acdc0ca..ba4d82b647 100644 --- a/manifests/base/runtime-minimal-imagestream.yaml +++ b/manifests/base/runtime-minimal-imagestream.yaml @@ -29,7 +29,7 @@ spec: "schema_name": "runtime-image" } ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/runtime-images from: kind: DockerImage name: odh-pipeline-runtime-minimal-cpu-py312-ubi9-n_PLACEHOLDER diff --git a/manifests/base/runtime-pytorch-imagestream.yaml b/manifests/base/runtime-pytorch-imagestream.yaml index 6a1d00b011..7c7f4f53d7 100644 --- a/manifests/base/runtime-pytorch-imagestream.yaml +++ b/manifests/base/runtime-pytorch-imagestream.yaml @@ -29,7 +29,7 @@ spec: "schema_name": "runtime-image" } ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/runtime-images from: kind: DockerImage name: odh-pipeline-runtime-pytorch-cuda-py312-ubi9-n_PLACEHOLDER diff --git a/manifests/base/runtime-rocm-pytorch-imagestream.yaml b/manifests/base/runtime-rocm-pytorch-imagestream.yaml index ec09ac21a5..c0d43a0ced 100644 --- a/manifests/base/runtime-rocm-pytorch-imagestream.yaml +++ b/manifests/base/runtime-rocm-pytorch-imagestream.yaml @@ -29,7 +29,7 @@ spec: "schema_name": "runtime-image" } ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/runtime-images 
from: kind: DockerImage name: odh-pipeline-runtime-pytorch-rocm-py312-ubi9-n_PLACEHOLDER diff --git a/manifests/base/runtime-rocm-tensorflow-imagestream.yaml b/manifests/base/runtime-rocm-tensorflow-imagestream.yaml index b28fef81d9..d30e70c453 100644 --- a/manifests/base/runtime-rocm-tensorflow-imagestream.yaml +++ b/manifests/base/runtime-rocm-tensorflow-imagestream.yaml @@ -29,7 +29,7 @@ spec: "schema_name": "runtime-image" } ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/runtime-images from: kind: DockerImage name: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-n_PLACEHOLDER diff --git a/manifests/base/runtime-tensorflow-imagestream.yaml b/manifests/base/runtime-tensorflow-imagestream.yaml index 6ee8509a06..3b41d5db28 100644 --- a/manifests/base/runtime-tensorflow-imagestream.yaml +++ b/manifests/base/runtime-tensorflow-imagestream.yaml @@ -29,7 +29,7 @@ spec: "schema_name": "runtime-image" } ] - openshift.io/imported-from: quay.io/opendatahub/workbench-images + openshift.io/imported-from: quay.io/modh/runtime-images from: kind: DockerImage name: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-n_PLACEHOLDER diff --git a/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu b/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu new file mode 100644 index 0000000000..22713dff80 --- /dev/null +++ b/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -0,0 +1,356 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +ARG TARGETARCH + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. 
+# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +ARG TARGETARCH + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN --mount=type=cache,target=/var/cache/dnf \ + echo "Building for architecture: ${TARGETARCH}" && \ + PACKAGES="perl mesa-libGL skopeo libxcrypt-compat" && \ + # Additional dev tools only for s390x + if [ "$TARGETARCH" = "s390x" ]; then \ + PACKAGES="$PACKAGES gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel openssl zlib-devel"; \ + fi && \ + if [ "$TARGETARCH" = "ppc64le" ]; then \ + PACKAGES="$PACKAGES git gcc-toolset-13 make wget unzip rust cargo unixODBC-devel cmake ninja-build"; \ + fi && \ + if [ -n "$PACKAGES" ]; then \ + echo "Installing: $PACKAGES" && \ + dnf install -y $PACKAGES && \ + dnf clean all && rm -rf /var/cache/yum; \ + fi + +RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ + echo 'export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/' >> /etc/profile.d/ppc64le.sh && \ + echo 'export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib:$LD_LIBRARY_PATH' >> /etc/profile.d/ppc64le.sh && \ + echo 
'export OPENBLAS_VERSION=0.3.30' >> /etc/profile.d/ppc64le.sh && \ + echo 'export ONNX_VERSION=1.19.0' >> /etc/profile.d/ppc64le.sh && \ + echo 'export PYARROW_VERSION=17.0.0' >> /etc/profile.d/ppc64le.sh && \ + echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> /etc/profile.d/ppc64le.sh && \ + echo 'export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1' >> /etc/profile.d/ppc64le.sh; \ + fi + +# For s390x only, set ENV vars and install Rust +RUN if [ "$TARGETARCH" = "s390x" ]; then \ + # Install Rust and set up environment + mkdir -p /opt/.cargo && \ + export HOME=/root && \ + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs -o rustup-init.sh && \ + chmod +x rustup-init.sh && \ + CARGO_HOME=/opt/.cargo HOME=/root ./rustup-init.sh -y --no-modify-path && \ + rm -f rustup-init.sh && \ + chown -R 1001:0 /opt/.cargo && \ + # Set environment variables + echo 'export PATH=/opt/.cargo/bin:$PATH' >> /etc/profile.d/cargo.sh && \ + echo 'export CARGO_HOME=/opt/.cargo' >> /etc/profile.d/cargo.sh && \ + echo 'export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1' >> /etc/profile.d/cargo.sh; \ +fi + +# Set python alternatives only for s390x (not needed for other arches) +RUN if [ "$TARGETARCH" = "s390x" ]; then \ + alternatives --install /usr/bin/python python /usr/bin/python3.12 1 && \ + alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \ + python --version && python3 --version; \ +fi + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install 
the oc client end + +############################## +# wheel-builder stage # +# NOTE: Only used in s390x +############################## +FROM cpu-base AS s390x-builder + +ARG TARGETARCH +USER 0 +WORKDIR /tmp/build-wheels + +# Set pyarrow version for s390x +RUN if [ "$TARGETARCH" = "s390x" ]; then \ + echo 'export PYARROW_VERSION=17.0.0' >> /etc/profile.d/s390x.sh; \ +fi + +# Build pyarrow optimized for s390x +RUN --mount=type=cache,target=/root/.cache/pip \ + --mount=type=cache,target=/root/.cache/dnf \ + if [ "$TARGETARCH" = "s390x" ]; then \ + # Install build dependencies + dnf install -y cmake make gcc-c++ pybind11-devel wget git \ + openssl-devel zlib-devel bzip2-devel lz4-devel \ + ninja-build && \ + dnf clean all && \ + # Source the environment variables + source /etc/profile.d/s390x.sh && \ + # Clone specific version of arrow + git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git && \ + cd arrow && \ + # Set environment variables for build + export ARROW_HOME=/usr/local && \ + export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:$LD_LIBRARY_PATH && \ + export PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig:/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH && \ + # Build C++ library first + cd cpp && \ + mkdir build && cd build && \ + cmake -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_ORC=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_DATASET=ON \ + -DARROW_WITH_LZ4=ON \ + -DARROW_WITH_ZSTD=ON \ + -DARROW_WITH_SNAPPY=OFF \ + -DARROW_WITH_BZ2=ON \ + -DARROW_WITH_ZLIB=ON \ + -DARROW_BUILD_TESTS=OFF \ + -DARROW_BUILD_BENCHMARKS=OFF \ + -DARROW_USE_CCACHE=OFF \ + -GNinja \ + .. 
&& \ + ninja install && \ + cd ../../python && \ + # Install Python build requirements + pip install --no-cache-dir -r requirements-build.txt && \ + # Build Python package + PYARROW_WITH_PARQUET=1 \ + PYARROW_WITH_DATASET=1 \ + PYARROW_WITH_FILESYSTEM=1 \ + PYARROW_WITH_JSON=1 \ + PYARROW_WITH_CSV=1 \ + PYARROW_WITH_LZ4=1 \ + PYARROW_WITH_ZSTD=1 \ + PYARROW_WITH_BZ2=1 \ + PYARROW_BUNDLE_ARROW_CPP=1 \ + PYARROW_PARALLEL=$(nproc) \ + python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel && \ + mkdir -p /tmp/wheels && \ + cp dist/pyarrow-*.whl /tmp/wheels/ && \ + # Ensure wheels directory exists and has content + ls -la /tmp/wheels/; \ + else \ + # Create empty wheels directory for non-s390x + mkdir -p /tmp/wheels; \ + fi + +################################### +# openblas builder stage for ppc64le +################################## + +FROM cpu-base AS openblas-builder +USER root +WORKDIR /root + +ARG TARGETARCH + +ENV OPENBLAS_VERSION=0.3.30 + +RUN echo "openblas-builder stage TARGETARCH: ${TARGETARCH}" + +# Download and build OpenBLAS +RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ + source /opt/rh/gcc-toolset-13/enable && \ + wget https://github.com/OpenMathLib/OpenBLAS/releases/download/v${OPENBLAS_VERSION}/OpenBLAS-${OPENBLAS_VERSION}.zip && \ + unzip OpenBLAS-${OPENBLAS_VERSION}.zip && cd OpenBLAS-${OPENBLAS_VERSION} && \ + make -j$(nproc) TARGET=POWER9 BINARY=64 USE_OPENMP=1 USE_THREAD=1 NUM_THREADS=120 DYNAMIC_ARCH=1 INTERFACE64=0; \ + else \ + echo "Not ppc64le, skipping OpenBLAS build" && mkdir -p /root/OpenBLAS-dummy; \ + fi + +################################### +# onnx builder stage for ppc64le +################################### + +FROM cpu-base AS onnx-builder +USER root +WORKDIR /root + +ARG TARGETARCH +ENV ONNX_VERSION=1.19.0 + +RUN echo "onnx-builder stage TARGETARCH: ${TARGETARCH}" + +RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ + source /opt/rh/gcc-toolset-13/enable && \ + git clone --recursive 
https://github.com/onnx/onnx.git && \ + cd onnx && git checkout v${ONNX_VERSION} && \ + git submodule update --init --recursive && \ + pip install -r requirements.txt && \ + export CMAKE_ARGS="-DPython3_EXECUTABLE=$(which python3.12)" && \ + pip wheel . -w /onnx_wheels; \ + else \ + echo "Not ppc64le, skipping ONNX build" && mkdir -p /onnx_wheels; \ + fi + +################################### +# pyarrow builder stage for ppc64le +################################## + +FROM cpu-base AS arrow-builder +USER root +WORKDIR /root + +ARG TARGETARCH +ENV PYARROW_VERSION=17.0.0 + +RUN echo "arrow-builder stage TARGETARCH: ${TARGETARCH}" + +RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ + git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git --recursive && \ + cd arrow && rm -rf .git && mkdir dist && \ + pip3 install -r python/requirements-build.txt && \ + export ARROW_HOME=$(pwd)/dist && \ + export LD_LIBRARY_PATH=$(pwd)/dist/lib:$LD_LIBRARY_PATH && \ + export CMAKE_PREFIX_PATH=$ARROW_HOME:$CMAKE_PREFIX_PATH && \ + export PARQUET_TEST_DATA="${PWD}/cpp/submodules/parquet-testing/data" && \ + export ARROW_TEST_DATA="${PWD}/testing/data" && \ + cmake -S cpp -B cpp/build \ + -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ + -DCMAKE_BUILD_TYPE=release \ + -DARROW_WITH_BZ2=ON \ + -DARROW_WITH_ZLIB=ON \ + -DARROW_WITH_ZSTD=ON \ + -DARROW_WITH_LZ4=ON \ + -DARROW_WITH_SNAPPY=ON \ + -DARROW_WITH_BROTLI=ON \ + -DARROW_DATASET=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_COMPUTE=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_BUILD_SHARED=ON \ + -DARROW_BUILD_TESTS=OFF && \ + cd cpp/build && \ + make -j20 install && \ + export PYARROW_PARALLEL=20 && \ + export PYARROW_WITH_PARQUET=1 && \ + export PYARROW_WITH_DATASET=1 && \ + export PYARROW_BUNDLE_ARROW_CPP=1 && \ + pip3 install wheel && \ + cd ../../python && \ + python setup.py build_ext \ + --build-type=release \ + --bundle-arrow-cpp \ + bdist_wheel --dist-dir /arrowwheels; 
\ + else \ + echo "Not ppc64le, skipping pyarrow build" && mkdir -p /arrowwheels; \ + fi + +####################### +# runtime-datascience # +####################### +FROM cpu-base AS runtime-datascience + +ARG TARGETARCH +ARG DATASCIENCE_SOURCE_CODE=runtimes/datascience/ubi9-python-3.12 + +LABEL name="odh-notebook-runtime-datascience-ubi9-python-3.12" \ + summary="Runtime data science notebook image for ODH notebooks" \ + description="Runtime data science notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.display-name="Runtime data science notebook image for ODH notebooks" \ + io.k8s.description="Runtime data science notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + authoritative-source-url="https://github.com/opendatahub-io/notebooks" \ + io.openshift.build.commit.ref="main" \ + io.openshift.build.source-location="https://github.com/opendatahub-io/notebooks/tree/main/runtimes/datascience/ubi9-python-3.12" \ + io.openshift.build.image="quay.io/opendatahub/workbench-images:runtime-datascience-ubi9-python-3.12" + +WORKDIR /opt/app-root/bin +USER 0 + +# Install ppc64le-built wheels if available +COPY --from=openblas-builder /root/OpenBLAS-* /openblas +COPY --from=onnx-builder /onnx_wheels /tmp/onnx_wheels +COPY --from=arrow-builder /arrowwheels /tmp/arrowwheels + +RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ + echo "Installing ppc64le ONNX, pyarrow wheels and OpenBLAS..." 
&& \ + HOME=/root pip install /tmp/onnx_wheels/*.whl /tmp/arrowwheels/*.whl && \ + if [ -d "/openblas" ] && [ "$(ls -A /openblas 2>/dev/null)" ]; then \ + PREFIX=/usr/local make -C /openblas install; \ + fi && rm -rf /openblas /tmp/onnx_wheels /tmp/arrowwheels; \ + else \ + echo "Skipping architecture-specific wheel installs for (${TARGETARCH})" && \ + rm -rf /tmp/wheels /openblas /tmp/onnx_wheels /tmp/arrowwheels; \ + fi + +USER 0 +# Copy wheels from build stage (s390x only) +COPY --from=s390x-builder /tmp/wheels /tmp/wheels +RUN if [ "$TARGETARCH" = "s390x" ]; then \ + pip install --no-cache-dir /tmp/wheels/*.whl && rm -rf /tmp/wheels; \ +else \ + echo "Skipping wheel install for $TARGETARCH"; \ +fi + + +# Install Python packages from pylock.toml +COPY ${DATASCIENCE_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped enviroment +COPY ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ + +RUN --mount=type=cache,target=/root/.cache/pip \ + echo "Installing softwares and packages" && \ + if [ "$TARGETARCH" = "ppc64le" ]; then \ + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig; \ + export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib:$LD_LIBRARY_PATH && \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ + elif [ "$TARGETARCH" = "s390x" ]; then \ + # For s390x, we need special flags and environment variables for building packages + GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ + CFLAGS="-O3" CXXFLAGS="-O3" \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ + else \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where 
building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ + fi && \ + # Fix permissions to support pip in Openshift environments + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +USER 1001 + +WORKDIR /opt/app-root/src diff --git a/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf b/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf index cc7c73581a..4583ee67cb 100644 --- a/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf +++ b/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf @@ -1 +1,3 @@ +# Base Image : UBI 9 with Python 3.12 +# Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s390x BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest diff --git a/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu b/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu new file mode 100644 index 0000000000..e80367972b --- /dev/null +++ b/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -0,0 +1,83 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN ARCH=$(uname -m) && \ + echo "Detected architecture: $ARCH" && \ + PACKAGES="perl mesa-libGL skopeo" && \ + if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ + PACKAGES="$PACKAGES gcc g++ make openssl-devel autoconf automake libtool cmake"; \ + fi && \ + dnf install -y $PACKAGES && \ + dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +#################### +# runtime-minimal # +#################### +FROM cpu-base AS runtime-minimal + +ARG MINIMAL_SOURCE_CODE=runtimes/minimal/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages from requirements.txt +COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped environment +COPY 
${MINIMAL_SOURCE_CODE}/utils ./utils/ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-pipeline-runtime-minimal-cpu-py312-rhel9" \ + com.redhat.component="odh-pipeline-runtime-minimal-cpu-py312-rhel9" \ + io.k8s.display-name="odh-pipeline-runtime-minimal-cpu-py312-rhel9" \ + summary="Runtime minimal image for ODH notebooks" \ + description="Runtime minimal image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Runtime minimal image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf b/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf index cc7c73581a..4583ee67cb 100644 --- a/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf +++ b/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf @@ -1 +1,3 @@ +# Base Image : UBI 9 with Python 3.12 +# Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s390x BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest diff --git a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..abce56ce28 --- /dev/null +++ 
b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,78 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o 
/tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################### +# cuda-runtime-pytorch # +######################### +FROM cuda-base AS cuda-runtime-pytorch + +ARG PYTORCH_SOURCE_CODE=runtimes/pytorch+llmcompressor/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages from requirements.txt +COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped enviroment +COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-rhel9" \ + com.redhat.component="odh-pipeline-runtime-pytorch-cuda-py312-rhel9" \ + io.k8s.display-name="odh-pipeline-runtime-pytorch-cuda-py312-rhel9" \ + summary="Runtime pytorch-llmcompressor notebook image for ODH notebooks" \ + description="Runtime pytorch-llmcompressor notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Runtime pytorch-llmcompressor notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git 
a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf index 7525e99151..9b62bf6687 100644 --- a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf +++ b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/build-args/cuda.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.6 +# Base Image : RHEL 9.6 with Python 3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241 diff --git a/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda b/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..67867bf887 --- /dev/null +++ b/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,78 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################### +# cuda-runtime-pytorch # +######################### +FROM cuda-base AS cuda-runtime-pytorch + +ARG PYTORCH_SOURCE_CODE=runtimes/pytorch/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages from requirements.txt +COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped enviroment +COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we 
often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-pipeline-runtime-pytorch-cuda-py312-rhel9" \ + com.redhat.component="odh-pipeline-runtime-pytorch-cuda-py312-rhel9" \ + io.k8s.display-name="odh-pipeline-runtime-pytorch-cuda-py312-rhel9" \ + summary="Runtime pytorch notebook image for ODH notebooks" \ + description="Runtime pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Runtime pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf b/runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf index 7525e99151..9b62bf6687 100644 --- a/runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf +++ b/runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.6 +# Base Image : RHEL 9.6 with Python 3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241 diff --git a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm new file mode 100644 index 0000000000..14f654f150 --- /dev/null +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -0,0 +1,83 @@ 
+######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# rocm-base # +#################### +FROM ${BASE_IMAGE} AS rocm-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f 
/tmp/openshift-client-linux.tar.gz +# Install the oc client end + +######################## +# rocm-runtime-pytorch # +######################## +FROM rocm-base AS rocm-runtime-pytorch + +ARG PYTORCH_SOURCE_CODE=runtimes/rocm-pytorch/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages from requirements.txt +COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped enviroment +COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ +# Copy utility script +COPY ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml + +USER 0 +# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments +RUN ./de-vendor-torch.sh && \ + rm ./de-vendor-torch.sh && \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ + fix-permissions /opt/app-root -P +USER 1001 + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-pipeline-runtime-pytorch-rocm-py312-rhel9" \ + com.redhat.component="odh-pipeline-runtime-pytorch-rocm-py312-rhel9" \ + io.k8s.display-name="odh-pipeline-runtime-pytorch-rocm-py312-rhel9" \ + summary="Runtime ROCm pytorch notebook image for ODH notebooks" \ + description="Runtime ROCm pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Runtime ROCm pytorch notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git 
a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm index d14a8b87cc..55af31490d 100644 --- a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm @@ -72,12 +72,14 @@ COPY ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ RUN echo "Installing softwares and packages" && \ # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # De-vendor the ROCm libs that are embedded in Pytorch \ - ./de-vendor-torch.sh && \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml + +USER 0 +# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments +RUN ./de-vendor-torch.sh && \ rm ./de-vendor-torch.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ fix-permissions /opt/app-root -P +USER 1001 WORKDIR /opt/app-root/src diff --git a/runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf b/runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf index 6682af4d77..cffbf80141 100644 --- a/runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-rocm-py312-ubi9:v6.2 +# Base Image : RHEL 9.6 with Python 3.12 +# ROCm Version : 6.3.4 +# Architectures: 
linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/rocm +BASE_IMAGE=quay.io/aipcc/base-images/rocm:3.0-1755080929 diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm new file mode 100644 index 0000000000..80c360dfc8 --- /dev/null +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -0,0 +1,80 @@ +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +#################### +# rocm-base # +#################### +FROM ${BASE_IMAGE} AS rocm-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
+COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + +########################### +# rocm-runtime-tensorflow # +########################### +FROM rocm-base AS rocm-runtime-tensorflow + +ARG TENSORFLOW_SOURCE_CODE=runtimes/rocm-tensorflow/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages from requirements.txt +COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped environment +COPY ${TENSORFLOW_SOURCE_CODE}/utils ./utils/ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from 
`build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + # Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml + +# Fix permissions to support pip in Openshift environments \ +USER 0 +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P +USER 1001 + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-pipeline-runtime-tensorflow-rocm-py312-rhel9" \ + com.redhat.component="odh-pipeline-runtime-tensorflow-rocm-py312-rhel9" \ + io.k8s.display-name="odh-pipeline-runtime-tensorflow-rocm-py312-rhel9" \ + summary="Runtime ROCm tensorflow notebook image for ODH notebooks" \ + description="Runtime ROCm tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Runtime ROCm tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm index b8a464df23..18b188887f 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm @@ -72,10 +72,13 @@ RUN echo "Installing softwares and packages" && \ # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
# Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml + +# Fix permissions to support pip in Openshift environments \ +USER 0 +RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ fix-permissions /opt/app-root -P +USER 1001 COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf b/runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf index 6682af4d77..cffbf80141 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-rocm-py312-ubi9:v6.2 +# Base Image : RHEL 9.6 with Python 3.12 +# ROCm Version : 6.3.4 +# Architectures: linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/rocm +BASE_IMAGE=quay.io/aipcc/base-images/rocm:3.0-1755080929 diff --git a/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda b/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda new file mode 100644 index 0000000000..f975a6a3aa --- /dev/null +++ b/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -0,0 +1,80 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + 
+#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +ARG TARGETARCH + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER 0 + +# Inject the official UBI 9 repository configuration into the AIPCC base image. +# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=registry.access.redhat.com/ubi9/ubi /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ + && dnf clean all -y +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +# Install the oc client begin +RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz && \ + tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ + rm -f /tmp/openshift-client-linux.tar.gz +# Install the oc client end + 
+############################ +# cuda-runtime-tensorflow # +############################ +FROM cuda-base AS cuda-runtime-tensorflow + +ARG TENSORFLOW_SOURCE_CODE=runtimes/tensorflow/ubi9-python-3.12 + +WORKDIR /opt/app-root/bin + +# Install Python packages from requirements.txt +COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ +# Copy Elyra dependencies for air-gapped enviroment +COPY ${TENSORFLOW_SOURCE_CODE}/utils ./utils/ + +RUN echo "Installing softwares and packages" && \ + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ + # Fix permissions to support pip in Openshift environments \ + chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ + fix-permissions /opt/app-root -P + +WORKDIR /opt/app-root/src + +LABEL name="rhoai/odh-pipeline-runtime-tensorflow-cuda-py312-rhel9" \ + com.redhat.component="odh-pipeline-runtime-tensorflow-cuda-py312-rhel9" \ + io.k8s.display-name="odh-pipeline-runtime-tensorflow-cuda-py312-rhel9" \ + summary="Runtime CUDA tensorflow notebook image for ODH notebooks" \ + description="Runtime CUDA tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + io.k8s.description="Runtime CUDA tensorflow notebook image with base Python 3.12 builder image based on UBI9 for ODH notebooks" \ + com.redhat.license_terms="https://www.redhat.com/licenses/Red_Hat_Standard_EULA_20191108.pdf" diff --git a/runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf b/runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf index 7525e99151..9b62bf6687 100644 --- a/runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf +++ 
b/runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf @@ -1 +1,5 @@ -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.6 +# Base Image : RHEL 9.6 with Python 3.12 +# CUDA Version : 12.8.1 +# Architectures: linux/arm64, linux/x86_64 +# Source : https://quay.io/repository/aipcc/base-images/cuda +BASE_IMAGE=quay.io/aipcc/base-images/cuda:3.0-1756380241