diff --git a/.github/workflows/accessibility-audit.yml b/.github/workflows/accessibility-audit.yml
index 921c3d69c004c7d..0dd2285e1ffa98a 100644
--- a/.github/workflows/accessibility-audit.yml
+++ b/.github/workflows/accessibility-audit.yml
@@ -20,7 +20,7 @@ jobs:
           node-version: 18.1
 
       - name: Install dependencies
-        run: npm install @actions/core pa11y axios xml2js
+        run: npm install @actions/core pa11y puppeteer
 
       - name: Run accessibility test
         id: test-accessibility
diff --git a/.github/workflows/accessilibity-audit-2.yml b/.github/workflows/accessilibity-audit-2.yml
deleted file mode 100644
index 301b1578776eee4..000000000000000
--- a/.github/workflows/accessilibity-audit-2.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Accessibility 2
-
-# **What it does**: Regularly audits API links in our documentation.
-# **Why we have it**: It's too burdensome to check on every commit like we do for internal links.
-# **Who does it impact**: PCX team
-
-on:
-  pull_request:
-    branches:
-      - production
-
-jobs:
-  test2:
-    name: Test 2
-    runs-on: ubuntu-latest
-    steps:
-      - uses: a11ywatch/github-actions@v2.0.2
-        with:
-          WEBSITE_URL: https://developers.cloudflare.com
-          SUBDOMAINS: false
-          TLD: false
-          SITEMAP: true
-          FAIL_ERRORS_COUNT: 15
-          LIST: true
-          FIX: false
-          UPGRADE: false
diff --git a/bin/accessibility-check.js b/bin/accessibility-check.js
index aaacf17f5a529e7..4b4f76584af63a5 100644
--- a/bin/accessibility-check.js
+++ b/bin/accessibility-check.js
@@ -1,41 +1,42 @@
 import pa11y from "pa11y";
-import axios from "axios";
-import { parseString } from 'xml2js';
+import puppeteer from "puppeteer";
+import core from "@actions/core";
 
-process.on('warning', e => console.warn(e.stack));
+const navigationTimeout = 120000; // Set the navigation timeout to 120 seconds (120,000 milliseconds)
 
-const sitemapUrl = 'https://developers.cloudflare.com/sitemap.xml';
-const urlsToProcess = []; // Array to store URLs for processing
+async function checkLinks() {
+  const browser = await puppeteer.launch({
+    headless: "new",
+  });
+  const page = await browser.newPage();
 
-async function processUrl(url) {
-  try {
-    const results = await pa11y(url);
-    console.log(results);
-  } catch (error) {
-    console.log("error");
+  const sitemapUrl = "https://developers.cloudflare.com/sitemap.xml";
+  await page.goto(sitemapUrl, { timeout: navigationTimeout });
+
+  const sitemapLinks = await page.$$eval("url loc", (elements) =>
+    elements.map((el) => el.textContent)
+  );
+
+  const visitedLinks = [];
+  const brokenLinks = [];
+
+  for (const link of sitemapLinks) {
+    if (!link) {
+      continue; // Skip if the link is empty
   }
-}
-
-axios.get(sitemapUrl)
-  .then(response => {
-    if (response.status === 200) {
-      parseString(response.data, (err, result) => {
-        if (err) {
-          console.error('Error parsing XML:', err);
-          return;
-        }
-
-        const urls = result.urlset.url.map(urlObj => urlObj.loc[0]);
-        urlsToProcess.push(...urls); // Add individual URLs to the array
-      });
-    } else {
-      console.error('Failed to fetch sitemap. Status code:', response.status);
-    }
-  })
-  .catch(error => {
-    console.error('An error occurred:', error);
+
+    const page2 = await browser.newPage();
+    const result = await pa11y(link, {
+      browser,
+      page: page2
   })
-  .finally(() => {
-    // Process the URLs after the response has been handled
-    urlsToProcess.forEach(processUrl);
-  });
+
+    console.log(result);
+    await page2.close();
+  }
+  await page.close();
+  await browser.close();
+}
+
+
+checkLinks();