diff --git a/.github/workflows/approve-reference-images.yml b/.github/workflows/approve-reference-images.yml
new file mode 100644
index 000000000..fed4b7439
--- /dev/null
+++ b/.github/workflows/approve-reference-images.yml
@@ -0,0 +1,125 @@
+name: Approve Backstop Reference Images
+on:
+  pull_request:
+    types: [opened, reopened, synchronize, ready_for_review]
+jobs:
+  compare_images:
+    name: Generate Images
+    runs-on: ubuntu-latest
+    steps:
+      ## Set up variables for build info.
+      - name: Set Variables
+        id: set_vars
+        run: |
+          ## PUSH
+          if [ "${{ github.event_name }}" == "push" ]; then
+            BUILD_NAME=$(sed -E 's/refs\/(heads|tags)\///; s/\//__/g;' <<< $GITHUB_REF)
+            BRANCH_NAME=$(sed -E 's/refs\/(heads|tags)\///;' <<< $GITHUB_REF)
+            COMMIT_HASH="${GITHUB_SHA}"
+          ## PULL_REQUEST
+          elif [ "${{ github.event_name }}" == "pull_request" ]; then
+            BUILD_NAME="pr-${{ github.event.pull_request.number }}"
+            BRANCH_NAME="pr-${{ github.event.pull_request.number }}"
+            COMMIT_HASH="${{ github.event.pull_request.head.sha }}"
+          else
+            ## ERROR
+            exit 1
+          fi
+
+          ## For step checks and the artifact deployment path.
+          ## Same for push and PR.
+          export REPO_FULL=${{ github.repository }}
+          export REPO_RE='([^/]+)/(.*)'
+          [[ "$REPO_FULL" =~ $REPO_RE ]]
+          REPO_OWNER="${BASH_REMATCH[1]}"
+          REPO_NAME="${BASH_REMATCH[2]}"
+
+          ## Set step outputs for later use.
+          echo "build_name=${BUILD_NAME}" >> $GITHUB_OUTPUT
+          echo "branch_name=${BRANCH_NAME}" >> $GITHUB_OUTPUT
+          echo "commit_hash=${COMMIT_HASH}" >> $GITHUB_OUTPUT
+          echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
+          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
+      ## TODO: Look at some sort of PR state to check whether this needs to run or not.
+      ## This clones and checks out the PR's ref.
+      - name: Checkout branch
+        uses: actions/checkout@v3
+      ## Set up Node and yarn caching.
+      - name: Setup Node
+        uses: actions/setup-node@v3
+        with:
+          node-version-file: '.nvmrc'
+          cache: 'yarn'
+          registry-url: https://npm.pkg.github.com
+          scope: '@nciocpl'
+      ## Override the default Lerna version.
+      - name: Install Lerna
+        run: yarn global add lerna@6.6.2
+      ## Bootstrap Lerna.
+      - name: Bootstrap Lerna
+        run: lerna bootstrap -- --frozen-lockfile
+        env:
+          CI: true
+          NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      ## This clones and checks out the base ref (e.g., the PR's target branch).
+      - name: Checkout base branch
+        uses: actions/checkout@v3
+        with:
+          path: ./base-branch
+          ref: ${{ github.base_ref }}
+      - name: Copy images from the base branch to the BS image test folder
+        run: |
+          mkdir -p ./testing/ncids-css-testing/.backstop/test/base-ref-images
+          cp ./base-branch/testing/ncids-css-testing/.backstop/reference/* ./testing/ncids-css-testing/.backstop/test/base-ref-images
+      - name: Compare the images and generate the BS Report
+        env:
+          CI: true
+        working-directory: ./testing/ncids-css-testing
+        run: |
+          node ./util/backstop-ref-approval
+      ## Upload the report.
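+      ## The zipped report is pushed to Akamai NetStorage and the site cache is
+      ## purged so the link posted in the PR comment below resolves to this
+      ## run's report.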
+      - name: Zip Backstop Report
+        working-directory: ./testing/ncids-css-testing/.backstop
+        env:
+          BUILD_NAME: ${{ steps.set_vars.outputs.build_name }}
+        run: |
+          zip -r ${GITHUB_WORKSPACE}/${BUILD_NAME}-backstopjs.zip *
+      - name: Upload artifact to netstorage
+        uses: nciocpl/netstorage-upload-action@v1.0.0
+        with:
+          hostname: ${{ secrets.ns_hostname }}
+          cp-code: ${{ secrets.ns_cpcode }}
+          key-name: ${{ secrets.ns_keyname }}
+          key: ${{ secrets.ns_key }}
+          index-zip: true
+          local-path: ${{ format('{0}-backstopjs.zip', steps.set_vars.outputs.build_name) }}
+          ## Note this action automatically prepends the cpcode to the path.
+          destination-path: ${{ format('/{0}-backstopjs.zip', steps.set_vars.outputs.build_name) }}
+      - name: Clear Site Cache
+        uses: nciocpl/akamai-purge-action@v1.0.2
+        with:
+          hostname: ${{ secrets.eg_hostname }}
+          client-token: ${{ secrets.eg_client_token }}
+          client-secret: ${{ secrets.eg_client_secret }}
+          access-token: ${{ secrets.eg_access_token }}
+          type: "cpcodes"
+          ref: ${{ format('{0},{1}', secrets.ns_cpcode, secrets.prop_cpcode) }}
+      ## Add the PR comment pointing at the report (only when the PR is first opened).
+      - name: Add Comment on Where to See Backstop
+        uses: actions/github-script@v6
+        if: startsWith(github.repository, 'NCIOCPL') && github.event_name == 'pull_request' && github.event.action == 'opened'
+        env:
+          BUILD_NAME: ${{ steps.set_vars.outputs.build_name }}
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          ## NOTE: The script below is JavaScript.
+          script: |
+            github.rest.issues.createComment({
+              issue_number: context.issue.number,
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              body: `
+            ## Backstop Reference Image Change Report
+            * [The report](https://designsystem-dev.cancer.gov/${ process.env.BUILD_NAME }-backstopjs/html-report/index.html)
+            `
+            })
diff --git a/.gitignore b/.gitignore
index cfebeb61c..8bd860765 100644
--- a/.gitignore
+++ b/.gitignore
@@ -108,3 +108,5 @@ dist
 
 # ide
 .idea/*
+
+base-branch/*
diff --git a/testing/ncids-css-testing/util/backstop-ref-approval/index.js b/testing/ncids-css-testing/util/backstop-ref-approval/index.js
new file mode 100644
index 000000000..dc30a057b
--- /dev/null
+++ b/testing/ncids-css-testing/util/backstop-ref-approval/index.js
@@ -0,0 +1,40 @@
+const path = require('path');
+const executeCommand = require('backstopjs/core/command');
+const makeConfig = require('backstopjs/core/util/makeConfig');
+const mockCreateBitmaps = require('./mock-create-bitmaps');
+
+/**
+ * This is the script entry point. It mimics the code that would be run by
+ * backstopjs/core/runner plus the steps leading up to an execution of _report.
+ */
+const main = async () => {
+  // Load the config as if we are running `test`, which is what would normally
+  // run for a comparison.
+  const config = makeConfig('test', {
+    config: path.join(__dirname, '..', '..', 'backstop.config.js'),
+  });
+
+  // Don't open the browser window with the report when this is done.
+  config.openReport = false;
+
+  // This writes out a config file for the comparison, because Docker.
+  await mockCreateBitmaps(config);
+
+  try {
+    await executeCommand('_report', config);
+  } catch (err) {
+    if (err.message === 'Mismatch errors found.') {
+      // This is a good error. We can leave it for the workflow to carry on.
+      process.exit(0);
+    } else {
+      // This is bad and should stop the process.
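+      // Surface the original error in the job log before failing the workflow run.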
+      console.error(`NCIDS Backstop Ref Approval failed`);
+      console.error(err);
+      process.exit(1);
+    }
+  }
+};
+
+main();
diff --git a/testing/ncids-css-testing/util/backstop-ref-approval/mock-create-bitmaps.js b/testing/ncids-css-testing/util/backstop-ref-approval/mock-create-bitmaps.js
new file mode 100644
index 000000000..4839c16e5
--- /dev/null
+++ b/testing/ncids-css-testing/util/backstop-ref-approval/mock-create-bitmaps.js
@@ -0,0 +1,227 @@
+/*
+ * This file contains the bare minimum taken from
+ * backstopjs/core/util/createBitmaps, which `test` runs prior to generating
+ * reports. It writes out a config file with state information which is then
+ * used by the _report chain of commands.
+ *
+ * The key output is a file generated at config.tempCompareConfigFileName that
+ * contains all of the test pairs. Most of the logic here is breaking down the
+ * config to get filenames that convey which scenario/viewport/selectors were
+ * captured. This code could be simplified in the future.
+ *
+ * A pair basically looks like:
+ * {
+ *   "pair": {
+ *     "reference": "../reference/ncids_usa-tooltip_right_focus_0_document_0_mobile.png",
+ *     "test": "../test/base-ref-images/ncids_usa-tooltip_right_focus_0_document_0_mobile.png",
+ *     "selector": "document",
+ *     "fileName": "ncids_usa-tooltip_right_focus_0_document_0_mobile.png",
+ *     "label": "usa-tooltip right focus",
+ *     "misMatchThreshold": 0,
+ *     "url": "http://host.docker.internal:6006/iframe.html?id=uswds-components-tooltip--default&args=&viewMode=story",
+ *     "expect": 0,
+ *     "viewportLabel": "mobile",
+ *     "diff": {
+ *       "isSameDimensions": true,
+ *       "dimensionDifference": {
+ *         "width": 0,
+ *         "height": 0
+ *       },
+ *       "misMatchPercentage": "0.00"
+ *     }
+ *   },
+ *   "status": "pass"
+ * },
+ */
+
+const fs = require('fs/promises');
+const cloneDeep = require('lodash/cloneDeep');
+// Yes, the original code imports lodash and cloneDeep separately.
+const _ = require('lodash');
+const ensureDirectoryPath = require('backstopjs/core/util/ensureDirectoryPath');
+const logger = require('backstopjs/core/util/logger')('create-bitmap-function');
+const runEngineMock = require('./run-engine-mock');
+
+function ensureViewportLabel (config) {
+  if (typeof config.viewports === 'object') {
+    config.viewports.forEach(function (viewport) {
+      if (!viewport.label) {
+        viewport.label = viewport.name;
+      }
+    });
+  }
+}
+
+// NCI: Minimal regex match helper for the `--filter` handling below; it mirrors
+// the helper used by backstopjs createBitmaps.
+function regexTest (string, search) {
+  const re = new RegExp(search);
+  return re.test(string);
+}
+
+function decorateConfigForCapture (config) {
+  const isReference = false;
+  let configJSON;
+
+  if (typeof config.args.config === 'object') {
+    configJSON = config.args.config;
+  } else {
+    configJSON = Object.assign({}, require(config.backstopConfigFileName));
+  }
+  configJSON.scenarios = configJSON.scenarios || [];
+  ensureViewportLabel(configJSON);
+
+  const totalScenarioCount = configJSON.scenarios.length;
+
+  function pad (number) {
+    let r = String(number);
+    if (r.length === 1) {
+      r = '0' + r;
+    }
+    return r;
+  }
+
+  // NCI: We are cheating here and hopefully making it so we have a standard
+  // folder to copy base-branch images to.
+  let screenshotDateTime = 'base-ref-images';
+  screenshotDateTime = configJSON.dynamicTestId ? configJSON.dynamicTestId : screenshotDateTime;
+  configJSON.screenshotDateTime = screenshotDateTime;
+  config.screenshotDateTime = screenshotDateTime;
+
+  if (configJSON.dynamicTestId) {
+    console.log(`dynamicTestId '${configJSON.dynamicTestId}' found. BackstopJS will run in dynamic-test mode.`);
+  }
+
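+  // Mirror run state from the CLI config onto the scenario config so the
+  // _report command can resolve paths and compare thresholds.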
+  configJSON.env = cloneDeep(config);
+  configJSON.isReference = isReference;
+  configJSON.paths.tempCompareConfigFileName = config.tempCompareConfigFileName;
+  configJSON.defaultMisMatchThreshold = config.defaultMisMatchThreshold;
+  configJSON.backstopConfigFileName = config.backstopConfigFileName;
+  configJSON.defaultRequireSameDimensions = config.defaultRequireSameDimensions;
+
+  if (config.args.filter) {
+    const scenarios = [];
+    config.args.filter.split(',').forEach(function (filteredTest) {
+      configJSON.scenarios.forEach(function (scenario) {
+        if (regexTest(scenario.label, filteredTest)) {
+          scenarios.push(scenario);
+        }
+      });
+    });
+    configJSON.scenarios = scenarios;
+  }
+
+  logger.log('Selected ' + configJSON.scenarios.length + ' of ' + totalScenarioCount + ' scenarios.');
+  return configJSON;
+}
+
+function saveViewportIndexes (viewport, index) {
+  return Object.assign({}, viewport, { vIndex: index });
+}
+
+function delegateScenarios (config) {
+  const scenarios = [];
+  const scenarioViews = [];
+
+  config.viewports = config.viewports.map(saveViewportIndexes);
+
+  // casper.each(scenarios, function (casper, scenario, i) {
+  config.scenarios.forEach(function (scenario, i) {
+    // var scenarioLabelSafe = makeSafe(scenario.label);
+    scenario.sIndex = i;
+    scenario.selectors = scenario.selectors || [];
+    if (scenario.viewports) {
+      scenario.viewports = scenario.viewports.map(saveViewportIndexes);
+    }
+    scenarios.push(scenario);
+
+    if (!config.isReference && _.has(scenario, 'variants')) {
+      scenario.variants.forEach(function (variant) {
+        // var variantLabelSafe = makeSafe(variant.label);
+        variant._parent = scenario;
+        scenarios.push(scenario);
+      });
+    }
+  });
+
+  let scenarioViewId = 0;
+  scenarios.forEach(function (scenario) {
+    let desiredViewportsForScenario = config.viewports;
+
+    if (scenario.viewports && scenario.viewports.length > 0) {
+      desiredViewportsForScenario = scenario.viewports;
+    }
+
+    desiredViewportsForScenario.forEach(function (viewport) {
+      scenarioViews.push({
+        scenario,
+        viewport,
+        config,
+        id: scenarioViewId++
+      });
+    });
+  });
+
+  // NOTE: The real createBitmaps computes an asyncCaptureLimit here, but this
+  // mock never captures screenshots, so no concurrency limit is needed.
+
+  // Pretend we execute the page load + screenshot for each scenario + view.
+  const results = scenarioViews.map(runEngineMock);
+
+  return results;
+}
+
+/**
+ * This outputs the test pairs config for the compare process. The compare
+ * config does not actually look like the backstop config, even though it seems
+ * that way at first. See flatMapTestPairs below for the actual structure.
+ * @param {*} comparePairsFileName
+ * @param {*} compareConfig
+ * @returns
+ */
+async function writeCompareConfigFile (comparePairsFileName, compareConfig) {
+  const compareConfigJSON = JSON.stringify(compareConfig, null, 2);
+  ensureDirectoryPath(comparePairsFileName);
+  return fs.writeFile(comparePairsFileName, compareConfigJSON);
+}
+
+/**
+ * A single scenario + viewport can capture multiple selectors, so when backstop
+ * maps the scenario view objects, each one yields an object with an array of
+ * test pairs. Mapping ALL scenario + viewport combinations therefore produces
+ * an array of these objects, which must be flattened down to a single
+ * collection of test pairs.
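+ * For example, [{ testPairs: [p1, p2] }, { testPairs: [p3] }] flattens to
+ * [p1, p2, p3].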
+ * @param {*} rawTestPairs
+ * @returns
+ */
+function flatMapTestPairs (rawTestPairs) {
+  return rawTestPairs.reduce((acc, result) => {
+    let testPairs = result.testPairs;
+    if (!testPairs) {
+      // No pairs were produced for this scenario view, so record an error pair.
+      testPairs = {
+        diff: {
+          isSameDimensions: '',
+          dimensionDifference: {
+            width: '',
+            height: ''
+          },
+          misMatchPercentage: ''
+        },
+        reference: '',
+        test: '',
+        selector: '',
+        fileName: '',
+        label: '',
+        scenario: result.scenario,
+        viewport: result.viewport,
+        msg: result.msg,
+        error: result.originalError && result.originalError.name
+      };
+    }
+    return acc.concat(testPairs);
+  }, []);
+}
+
+module.exports = async function (config) {
+  const isReference = false;
+  const modifiedConfig = decorateConfigForCapture(config, isReference);
+  const rawTestPairs = await delegateScenarios(modifiedConfig);
+  const result = {
+    compareConfig: {
+      testPairs: flatMapTestPairs(rawTestPairs)
+    }
+  };
+  return await writeCompareConfigFile(config.tempCompareConfigFileName, result);
+};
diff --git a/testing/ncids-css-testing/util/backstop-ref-approval/run-engine-mock.js b/testing/ncids-css-testing/util/backstop-ref-approval/run-engine-mock.js
new file mode 100644
index 000000000..dd219c9a6
--- /dev/null
+++ b/testing/ncids-css-testing/util/backstop-ref-approval/run-engine-mock.js
@@ -0,0 +1,59 @@
+/*
+ * This file was created based on runPuppet and simulates the object that would
+ * be returned by runPuppet.
+ */
+const _ = require('lodash');
+const engineTools = require('backstopjs/core/util/engineTools');
+
+const DEFAULT_FILENAME_TEMPLATE = '{configId}_{scenarioLabel}_{selectorIndex}_{selectorLabel}_{viewportIndex}_{viewportLabel}';
+const DEFAULT_BITMAPS_TEST_DIR = 'bitmaps_test';
+const DEFAULT_BITMAPS_REFERENCE_DIR = 'bitmaps_reference';
+
+/**
+ * This mocks the result of mapping scenarioViews to either runPlaywright or
+ * runPuppet.
+ * @param {Object} args The scenario view
+ * @return {Object} A compareConfig object containing a single test pair
+ */
+function runEngineMock(args) {
+  const scenario = args.scenario;
+  const viewport = args.viewport;
+  const config = args.config;
+  const scenarioLabelSafe = engineTools.makeSafe(scenario.label);
+  const variantOrScenarioLabelSafe = scenario._parent ? engineTools.makeSafe(scenario._parent.label) : scenarioLabelSafe;
+
+  // Ensure paths exists before reading the bitmap directories from it.
+  if (!config.paths) {
+    config.paths = {};
+  }
+
+  config._bitmapsTestPath = config.paths.bitmaps_test || DEFAULT_BITMAPS_TEST_DIR;
+  config._bitmapsReferencePath = config.paths.bitmaps_reference || DEFAULT_BITMAPS_REFERENCE_DIR;
+  config._fileNameTemplate = config.fileNameTemplate || DEFAULT_FILENAME_TEMPLATE;
+  config._outputFileFormatSuffix = '.' + ((config.outputFormat && config.outputFormat.match(/jpg|jpeg/)) || 'png');
+  config._configId = config.id || engineTools.genHash(config.backstopConfigFileName);
+
+  // Our scenarios should not have selectors, and thus default to document.
+  // If this changes we should hard error out.
+  if (_.has(scenario, 'selectors') && scenario.selectors.length > 0) {
+    throw new Error(`${scenarioLabelSafe}:${variantOrScenarioLabelSafe} defines selectors (${scenario.selectors}) and is not supported!`);
+  }
+
+  if (typeof viewport.label !== 'string') {
+    viewport.label = viewport.name || '';
+  }
+
+  const compareConfig = {
+    testPairs: [
+      engineTools.generateTestPair(
+        config,
+        scenario,
+        viewport,
+        variantOrScenarioLabelSafe,
+        scenarioLabelSafe,
+        0,
+        'document')
+    ]
+  };
+
+  return compareConfig;
+}
+
+module.exports = runEngineMock;