import robotsParser from 'robots-parser'

import { get } from '../helpers/e2etest.js'
import { jest } from '@jest/globals'

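// Tests for the robots.txt responses the docs site serves. Presumably run
// through the repo's Jest setup (e.g. `npx jest` pointed at this file).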
describe('robots.txt', () => {
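  // Five-minute timeout, presumably to accommodate slow responses in CI.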
  jest.setTimeout(5 * 60 * 1000)

  let res, robots

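  // Fetch robots.txt once with the production Host header and parse it;
  // every test below asserts against this shared response.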
  beforeAll(async () => {
    res = await get('/robots.txt', {
      headers: {
        Host: 'docs.github.com',
      },
    })
    robots = robotsParser('https://docs.github.com/robots.txt', res.text)
  })

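  // The homepage and public English content must stay crawlable.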
  it('allows indexing of the homepage and English content', async () => {
    expect(robots.isAllowed('https://docs.github.com/')).toBe(true)
    expect(robots.isAllowed('https://docs.github.com/en')).toBe(true)
    expect(
      robots.isAllowed('https://docs.github.com/en/articles/verifying-your-email-address')
    ).toBe(true)
  })

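  // Internal preview deployments on azurecontainer.io hosts should never be
  // indexed, so the response is a blanket Disallow for every user agent.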
  it('disallows indexing of azurecontainer.io domains', async () => {
    const res = await get('/robots.txt', {
      headers: {
        host: 'docs-internal-preview-12345-asdfz.azurecontainer.io',
      },
    })
    expect(res.text).toEqual('User-agent: *\nDisallow: /')
  })

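  // Every non-blank line in robots.txt should appear exactly once.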
  it('does not have duplicate lines', () => {
    const lines = new Set()
    for (const line of res.text.split('\n')) {
      if (/^\s*$/.test(line)) continue
      expect(lines.has(line)).toBe(false)
      lines.add(line)
    }
  })
})