diff --git a/.github/workflows/run-screendiff.yml b/.github/workflows/run-screendiff.yml
new file mode 100644
index 00000000..f338640f
--- /dev/null
+++ b/.github/workflows/run-screendiff.yml
@@ -0,0 +1,48 @@
+name: Run ScreenDiff Manually
+
+on:
+  workflow_dispatch:
+    inputs:
+      category:
+        description: 'Test category to run (leave empty to run all, or specify one: milo,feds,caas,uar,uar-ai)'
+        required: false
+        type: string
+
+jobs:
+  test-matrix:
+    name: Running ${{ inputs.category }} tests
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - platform: macos-latest
+            categories: ["milo","feds","caas","uar","uar-ai"]
+    runs-on: ${{ matrix.platform }}
+
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v3
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v3
+        with:
+          node-version: lts/*
+
+      - name: Run Nala
+        run: |
+          if [ -n "${{ inputs.category }}" ]; then
+            bash runScreenDiff.sh ${{ inputs.category }}
+          else
+            for category in ${{ join(matrix.categories, ' ') }}; do
+              bash runScreenDiff.sh $category
+            done
+          fi
+        shell: bash
+        env:
+          IMS_EMAIL: ${{ secrets.IMS_EMAIL }}
+          IMS_PASS: ${{ secrets.IMS_PASS }}
+          HLX_TKN: ${{ secrets.HLX_TKN }}
+          SLACK_WH: ${{ secrets.SLACK_WH }}
+          AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
+          AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
+          AWS_ROLE_ARN: ${{secrets.AWS_ROLE_ARN}}
diff --git a/libs/screenshot/cleans3Public.js b/libs/screenshot/cleans3Public.js
new file mode 100644
index 00000000..75e18b53
--- /dev/null
+++ b/libs/screenshot/cleans3Public.js
@@ -0,0 +1,123 @@
+// Interactively deletes every object under an S3 prefix using temporary STS credentials.
+/* eslint-disable no-restricted-syntax */
+const readline = require('readline');
+// eslint-disable-next-line import/no-extraneous-dependencies
+const { S3Client, ListObjectsV2Command, DeleteObjectsCommand } = require('@aws-sdk/client-s3');
+const { STSClient, AssumeRoleCommand } = require('@aws-sdk/client-sts');
+
+const S3REGION = 'us-west-2';
+const ROLE_ARN = process.env.AWS_ROLE_ARN;
+const ROLE_SESSION_NAME = 'S3CleanupSession';
+
+// Async generator paging through ListObjectsV2, yielding each page's Contents.
+async function* listObjects(s3, params) {
+  let isTruncated = false;
+  let token;
+  do {
+    const command = new ListObjectsV2Command({ ...params, ContinuationToken: token });
+    // eslint-disable-next-line no-await-in-loop
+    const response = await s3.send(command);
+    yield response.Contents;
+    ({ IsTruncated: isTruncated, NextContinuationToken: token } = response);
+  } while (isTruncated);
+}
+
+// Prompts on stdin and resolves with the user's answer.
+function askQuestion(query) {
+  const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+  });
+
+  return new Promise((resolve) => {
+    rl.question(query, (ans) => {
+      rl.close();
+      resolve(ans);
+    });
+  });
+}
+
+// Assumes AWS_ROLE_ARN via STS and returns short-lived (1 hour) credentials.
+async function getTemporaryCredentials() {
+  const stsClient = new STSClient({ region: S3REGION });
+
+  const params = {
+    RoleArn: ROLE_ARN,
+    RoleSessionName: ROLE_SESSION_NAME,
+    DurationSeconds: 3600,
+  };
+
+  try {
+    const command = new AssumeRoleCommand(params);
+    const response = await stsClient.send(command);
+    return response.Credentials;
+  } catch (err) {
+    console.error('Error assuming role:', err);
+    throw err;
+  }
+}
+
+// Entry point: lists objects under <bucket>/<s3Path>, asks for confirmation, then bulk-deletes.
+async function main() {
+  const bucket = process.argv[2];
+  const s3Path = process.argv[3];
+
+  if (!bucket || !s3Path) {
+    console.log('Usage: node cleans3Public.js <bucket> <s3Path>');
+    process.exit(1);
+  }
+
+  const tempCredentials = await getTemporaryCredentials();
+
+  const s3 = new S3Client({
+    region: S3REGION,
+    credentials: {
+      accessKeyId: tempCredentials.AccessKeyId,
+      secretAccessKey: tempCredentials.SecretAccessKey,
+      sessionToken: tempCredentials.SessionToken,
+    },
+    forcePathStyle: true,
+  });
+
+  const params = { Bucket: bucket, Prefix: s3Path, MaxKeys: 1000 };
+
+  let totalSize = 0;
+  const toBeDeleted = {
+    Bucket: bucket,
+    Delete: {
+      Objects: [],
+      Quiet: false,
+    },
+  };
+
+  for await (const contents of listObjects(s3, params)) {
+    if (contents === undefined || contents.length === 0) {
+      console.log('No objects to delete.');
+      continue; // Skip to next iteration if current page is empty
+    }
+
+    for (const obj of contents) {
+      totalSize += obj.Size;
+      console.log(`${obj.Key}, ${obj.LastModified}, ${obj.Size}`);
+      toBeDeleted.Delete.Objects.push({ Key: obj.Key });
+    }
+  }
+
+  if (toBeDeleted.Delete.Objects.length > 0) {
+    const answer = await askQuestion('Are you sure you want to delete these files? (yes/no): ');
+    if (answer.toLowerCase() === 'yes') {
+      const deleteCommand = new DeleteObjectsCommand(toBeDeleted);
+      await s3.send(deleteCommand);
+      toBeDeleted.Delete.Objects = [];
+      console.log('Files deleted successfully.');
+    } else {
+      console.log('Deletion canceled.');
+    }
+  } else {
+    console.log('No files to delete.');
+  }
+
+  console.log(`Total Size: ${totalSize}`);
+}
+
+main();
diff --git a/libs/screenshot/uploads3Public.js b/libs/screenshot/uploads3Public.js
new file mode 100644
index 00000000..abc377ef
--- /dev/null
+++ b/libs/screenshot/uploads3Public.js
@@ -0,0 +1,145 @@
+// Uploads screenshot artifacts listed in results.json to the public S3 bucket.
+// eslint-disable-next-line import/no-extraneous-dependencies
+const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
+const { STSClient, AssumeRoleCommand } = require('@aws-sdk/client-sts');
+const fs = require('fs');
+const path = require('path');
+const { validatePath } = require('./utils.js');
+
+const S3REGION = 'us-west-2';
+const S3BUCKET = 'nala-test-automation-screenshots';
+const ROLE_ARN = process.env.AWS_ROLE_ARN;
+const ROLE_SESSION_NAME = 'ScreenshotUploadSession';
+
+// Sends one PutObject request; logs and rethrows on failure.
+const s3Upload = async (s3, params) => {
+  try {
+    const command = new PutObjectCommand(params);
+    const response = await s3.send(command);
+    return response;
+  } catch (err) {
+    console.error('Upload error:', err);
+    throw err;
+  }
+};
+
+// Assumes AWS_ROLE_ARN via STS and returns short-lived (1 hour) credentials.
+async function getTemporaryCredentials() {
+  const stsClient = new STSClient({ region: S3REGION });
+
+  const params = {
+    RoleArn: ROLE_ARN,
+    RoleSessionName: ROLE_SESSION_NAME,
+    DurationSeconds: 3600,
+  };
+
+  try {
+    const command = new AssumeRoleCommand(params);
+    const response = await stsClient.send(command);
+    return response.Credentials;
+  } catch (err) {
+    console.error('Error assuming role:', err);
+    throw err;
+  }
+}
+
+// Uploads one file; the object key is s3Path/s3Key (falls back to the file's basename).
+async function uploadFile(fileName, s3Bucket, s3Path, s3Key, mimeType) {
+  const tempCredentials = await getTemporaryCredentials();
+
+  const s3 = new S3Client({
+    region: S3REGION,
+    credentials: {
+      accessKeyId: tempCredentials.AccessKeyId,
+      secretAccessKey: tempCredentials.SecretAccessKey,
+      sessionToken: tempCredentials.SessionToken,
+    },
+    forcePathStyle: true,
+  });
+
+  const baseName = path.basename(fileName);
+  const key = path.join(s3Path, s3Key || baseName).replace(/\\/g, '/');
+
+  const fileContent = fs.readFileSync(validatePath(fileName));
+
+  const params = {
+    Bucket: s3Bucket,
+    Key: key,
+    Body: fileContent,
+    ContentType: mimeType,
+  };
+
+  await s3Upload(s3, params);
+}
+
+// Entry point: reads <dir>/results.json and uploads every referenced screenshot.
+async function main() {
+  const dir = process.argv[2] || 'screenshots/milo';
+  const bucket = S3BUCKET;
+  const s3Path = '.';
+
+  if (!bucket || !s3Path) {
+    console.log('Usage: node uploads3Public.js <dir>');
+    process.exit(1);
+  }
+
+  const resultsPath = path.join(dir, 'results.json');
+  const entries = JSON.parse(fs.readFileSync(validatePath(resultsPath)));
+
+  console.log(entries);
+
+  const mimeType = 'image/png';
+
+  // Upload sequentially — an async callback inside forEach would be fire-and-forget.
+  for (const key of Object.keys(entries)) {
+    const entry = entries[key];
+    if (Array.isArray(entry)) {
+      for (const item of entry) {
+        if (item.a) {
+          console.log(item.a);
+          await uploadFile(item.a, bucket, s3Path, item.a, mimeType);
+        }
+
+        if (item.b) {
+          console.log(item.b);
+          await uploadFile(item.b, bucket, s3Path, item.b, mimeType);
+        }
+
+        if (item.diff) {
+          console.log(item.diff);
+          await uploadFile(item.diff, bucket, s3Path, item.diff, mimeType);
+        }
+      }
+    } else {
+      if (entry.a) {
+        console.log(entry.a);
+        await uploadFile(entry.a, bucket, s3Path, entry.a, mimeType);
+      }
+
+      if (entry.b) {
+        console.log(entry.b);
+        await uploadFile(entry.b, bucket, s3Path, entry.b, mimeType);
+      }
+
+      if (entry.diff) {
+        console.log(entry.diff);
+        await uploadFile(entry.diff, bucket, s3Path, entry.diff, mimeType);
+      }
+    }
+  }
+
+  console.log('Upload results.json');
+  await uploadFile(resultsPath, bucket, s3Path, resultsPath, 'application/json');
+
+  const timestampPath = path.join(dir, 'timestamp.json');
+
+  fs.writeFileSync(
+    validatePath(timestampPath, { forWriting: true }),
+    JSON.stringify([(new Date()).toLocaleString()], null, 2),
+  );
+
+  console.log('Upload timestamp.json');
+  await uploadFile(timestampPath, bucket, s3Path, timestampPath, 'application/json');
+}
+
+main();
diff --git a/package-lock.json b/package-lock.json
index 761ac5e7..4098214c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11,6 +11,7 @@
       "license": "Apache-2.0",
       "dependencies": {
         "@aws-sdk/client-s3": "^3.572.0",
+        "@aws-sdk/client-sts": "^3.572.0",
         "@axe-core/playwright": "^4.7.0",
         "@playwright/test": "^1.42",
         "axe-html-reporter": "^2.2.3",
diff --git a/package.json b/package.json
index f407ac80..bc76798d 100644
--- a/package.json
+++ b/package.json
@@ -30,6 +30,7 @@
   "homepage": "https://github.com/adobecom/nala#readme",
   "dependencies": {
     "@aws-sdk/client-s3": "^3.572.0",
+    "@aws-sdk/client-sts": "^3.572.0",
    "@axe-core/playwright": "^4.7.0",
    "@playwright/test": "^1.42",
    "axe-html-reporter": "^2.2.3",
diff --git a/runScreenDiff.sh b/runScreenDiff.sh
index 44f80615..a8d6fbe0 100644
--- a/runScreenDiff.sh
+++ b/runScreenDiff.sh
@@ -12,10 +12,20 @@ fi
 
 category="$1"
 
+# Only install dependencies if running in GitHub Actions
+if [ -n "$GITHUB_ACTIONS" ]; then
+  echo "*** Installing playwright dependencies ***"
+  cd "${GITHUB_ACTION_PATH:-.}" || exit
+  npm ci
+  npx playwright install --with-deps
+else
+  echo "Skipping dependency installation - not running in GitHub Actions"
+fi
+
 # Run each command one by one
 node run.js -c ${Config} -p ${Project} -g @${category}-screenshots
 node libs/screenshot/merge.js screenshots/${category}
 node libs/screenshot/compare.mjs screenshots/${category}
-node libs/screenshot/uploads3.js screenshots/${category}
+node libs/screenshot/uploads3Public.js screenshots/${category}
 echo "All commands executed successfully for category: ${category}!"
 