
Update tests to use fs.promises when not top-level (#16803)

* Update tests to use fs.promises when not top-level

* Move two to asyncFilter

* Update site-data-references.js

* Update site-data-references.js

* Clear out await fs.exists

* Lint

* A few more fixes

* Can't use async when defining tests

This commit is contained in:
Kevin Heis
2020-12-09 07:42:02 -08:00
committed by GitHub
parent aae3c4e6de
commit 98e4b78112
8 changed files with 73 additions and 61 deletions
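
Nearly every hunk below applies the same conversion, sketched here with an illustrative file path and test name: require the promise-based API, mark the test callback async, and await fs.readFile instead of calling fs.readFileSync.

const fs = require('fs').promises

test('reads a fixture without blocking', async () => {
  // await the promise-based read inside the async test body
  const contents = await fs.readFile('tests/fixtures/example.md', 'utf8')
  expect(contents.length).toBeGreaterThan(0)
})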

View File

@@ -42,7 +42,7 @@ describe('category pages', () => {
// Get links included in product index page.
// Each link corresponds to a product subdirectory (category).
// Example: "getting-started-with-github"
const contents = fs.readFileSync(productIndex, 'utf8')
const contents = fs.readFileSync(productIndex, 'utf8') // TODO move to async
const { content } = matter(contents)
const productDir = path.dirname(productIndex)
@@ -50,6 +50,7 @@ describe('category pages', () => {
const categoryLinks = getLinks(content)
// Only include category directories, not standalone category files like content/actions/quickstart.md
.filter(link => fs.existsSync(getPath(productDir, link, 'index')))
// TODO this should move to async, but you can't asynchronously define tests with Jest...
// Map those to the Markdown file paths that represent that category page index
const categoryPaths = categoryLinks.map(link => getPath(productDir, link, 'index'))
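
The TODO above stays in place because Jest collects its test definitions synchronously: the code that decides which test() calls exist (here, reading the product index and filtering the category links) runs at collection time and cannot await anything. A hypothetical async version, to show what the comment is guarding against:

// Not supported: Jest does not wait for an async describe body, so these
// tests would never be registered (getCategoryLinks is a hypothetical helper).
describe('category pages', async () => {
  const categoryLinks = await getCategoryLinks(productIndex)
  categoryLinks.forEach(link => test(`${link} renders`, () => { /* ... */ }))
})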

View File

@@ -1,4 +1,4 @@
const fs = require('fs')
const fs = require('fs').promises
const path = require('path')
const cheerio = require('cheerio')
const matter = require('gray-matter')
@@ -35,8 +35,8 @@ function processFrontmatter (contents, file) {
}
describe('removing liquid statements only', () => {
test('removes liquid statements that specify "greater than version to deprecate"', () => {
let contents = fs.readFileSync(greaterThan, 'utf8')
test('removes liquid statements that specify "greater than version to deprecate"', async () => {
let contents = await fs.readFile(greaterThan, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text().trim()).toBe('Alpha')
@@ -57,8 +57,8 @@ Alpha\n\n{% else %}\n\nBravo\n\n{% if currentVersion ver_gt "enterprise-server@2
expect($('.example10').text().trim()).toBe('Alpha')
})
test('removes liquid statements that specify "and greater than version to deprecate"', () => {
let contents = fs.readFileSync(andGreaterThan1, 'utf8')
test('removes liquid statements that specify "and greater than version to deprecate"', async () => {
let contents = await fs.readFile(andGreaterThan1, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text().trim()).toBe('{% if currentVersion != "free-pro-team@latest" %}\n\nAlpha\n\n{% endif %}')
@@ -71,8 +71,8 @@ Alpha\n\n{% if currentVersion != "free-pro-team@latest" %}\n\nBravo\n\n{% endif
Alpha\n\n{% if currentVersion ver_gt "enterprise-server@2.16" %}\n\nBravo\n\n{% endif %}\n\n{% endif %}`)
})
test('removes liquid statements that specify "and greater than version to deprecate" (alternate format)', () => {
let contents = fs.readFileSync(andGreaterThan2, 'utf8')
test('removes liquid statements that specify "and greater than version to deprecate" (alternate format)', async () => {
let contents = await fs.readFile(andGreaterThan2, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text().trim()).toBe('{% if currentVersion ver_lt "enterprise-server@2.16" %}\n\nAlpha\n\n{% endif %}')
@@ -85,8 +85,8 @@ Alpha\n\n{% if currentVersion ver_lt "enterprise-server@2.16" %}\n\nBravo\n\n{%
Alpha\n\n{% if currentVersion != "free-pro-team@latest" %}\n\nBravo\n\n{% endif %}\n\n{% endif %}`)
})
test('removes liquid statements that specify "not equals version to deprecate"', () => {
let contents = fs.readFileSync(notEquals, 'utf8')
test('removes liquid statements that specify "not equals version to deprecate"', async () => {
let contents = await fs.readFile(notEquals, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text().trim()).toBe('Alpha')
@@ -103,8 +103,8 @@ Alpha\n\n{% endif %}`)
})
describe('removing liquid statements and content', () => {
test('removes interior content and liquid statements that specify "equals version to deprecate"', () => {
let contents = fs.readFileSync(equals, 'utf8')
test('removes interior content and liquid statements that specify "equals version to deprecate"', async () => {
let contents = await fs.readFile(equals, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text().trim()).toBe('')
@@ -117,8 +117,8 @@ Alpha\n\n{% else %}\n\nCharlie\n\n{% endif %}`)
expect($('.example6').text().trim()).toBe('Charlie\n\nBravo')
})
test('removes interior content and liquid statements that specify "less than next oldest than version to deprecate"', () => {
let contents = fs.readFileSync(lessThanNextOldest, 'utf8')
test('removes interior content and liquid statements that specify "less than next oldest than version to deprecate"', async () => {
let contents = await fs.readFile(lessThanNextOldest, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text().trim()).toBe('Alpha')
@@ -137,8 +137,8 @@ Charlie\n\n{% else %}\n\nDelta\n\n{% endif %}\n\nEcho`)
})
describe('updating frontmatter', () => {
test('updates frontmatter versions Enterprise if set to greater-than-or-equal-to version to deprecate', () => {
let contents = fs.readFileSync(frontmatter1, 'utf8')
test('updates frontmatter versions Enterprise if set to greater-than-or-equal-to version to deprecate', async () => {
let contents = await fs.readFile(frontmatter1, 'utf8')
contents = processFrontmatter(contents, frontmatter1)
const $ = cheerio.load(contents)
// console.log('foo')
@@ -147,8 +147,8 @@ describe('updating frontmatter', () => {
expect($.text().includes('enterprise-server: \'>=2.13\'')).toBe(false)
})
test('updates frontmatter versions Enterprise if set to greater-than-or-equal-to next oldest version', () => {
let contents = fs.readFileSync(frontmatter2, 'utf8')
test('updates frontmatter versions Enterprise if set to greater-than-or-equal-to next oldest version', async () => {
let contents = await fs.readFile(frontmatter2, 'utf8')
contents = processFrontmatter(contents, frontmatter2)
const $ = cheerio.load(contents)
expect($.text().includes('enterprise-server: \'*\'')).toBe(true)
@@ -157,8 +157,8 @@ describe('updating frontmatter', () => {
})
describe('whitespace', () => {
test('does not add newlines when whitespace control is used', () => {
let contents = fs.readFileSync(whitespace, 'utf8')
test('does not add newlines when whitespace control is used', async () => {
let contents = await fs.readFile(whitespace, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example1').text()).toBe('\n Alpha\n')
@@ -167,8 +167,8 @@ describe('whitespace', () => {
expect($('.example4').text()).toBe('\n Alpha\n')
})
test('does not add newlines when no newlines are present', () => {
let contents = fs.readFileSync(whitespace, 'utf8')
test('does not add newlines when no newlines are present', async () => {
let contents = await fs.readFile(whitespace, 'utf8')
contents = removeLiquidStatements(contents, versionToDeprecate, nextOldestVersion)
const $ = cheerio.load(contents)
expect($('.example5').text()).toBe('\n Alpha\n')

View File

@@ -3,7 +3,7 @@ const loadSiteData = require('../../lib/site-data')
const { loadPages } = require('../../lib/pages')
const getDataReferences = require('../../lib/get-liquid-data-references')
const frontmatter = require('@github-docs/frontmatter')
const fs = require('fs')
const fs = require('fs').promises
const path = require('path')
describe('data references', () => {
@@ -33,34 +33,34 @@ describe('data references', () => {
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
})
test('every data reference found in metadata of English content files is defined and has a value', () => {
test('every data reference found in metadata of English content files is defined and has a value', async () => {
let errors = []
expect(pages.length).toBeGreaterThan(0)
pages.forEach(page => {
await Promise.all(pages.map(async page => {
const metadataFile = path.join('content', page.relativePath)
const fileContents = fs.readFileSync(path.join(__dirname, '../..', metadataFile))
const fileContents = await fs.readFile(path.join(__dirname, '../..', metadataFile))
const { data: metadata } = frontmatter(fileContents, { filepath: page.fullPath })
const metadataRefs = getDataReferences(JSON.stringify(metadata))
metadataRefs.forEach(key => {
const value = get(data.en, key)
if (typeof value !== 'string') errors.push({ key, value, metadataFile })
})
})
}))
errors = uniqWith(errors, isEqual) // remove duplicates
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
})
test('every data reference found in English reusable files is defined and has a value', () => {
test('every data reference found in English reusable files is defined and has a value', async () => {
let errors = []
const allReusables = data.en.site.data.reusables
const reusables = Object.values(allReusables)
expect(reusables.length).toBeGreaterThan(0)
reusables.forEach(reusablesPerFile => {
await Promise.all(reusables.map(async reusablesPerFile => {
let reusableFile = path.join(__dirname, '../../data/reusables/', getFilenameByValue(allReusables, reusablesPerFile))
reusableFile = getFilepath(reusableFile)
reusableFile = await getFilepath(reusableFile)
const reusableRefs = getDataReferences(JSON.stringify(reusablesPerFile))
@@ -68,21 +68,21 @@ describe('data references', () => {
const value = get(data.en, key)
if (typeof value !== 'string') errors.push({ key, value, reusableFile })
})
})
}))
errors = uniqWith(errors, isEqual) // remove duplicates
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
})
test('every data reference found in English variable files is defined and has a value', () => {
test('every data reference found in English variable files is defined and has a value', async () => {
let errors = []
const allVariables = data.en.site.data.variables
const variables = Object.values(allVariables)
expect(variables.length).toBeGreaterThan(0)
variables.forEach(variablesPerFile => {
await Promise.all(variables.map(async variablesPerFile => {
let variableFile = path.join(__dirname, '../../data/variables/', getFilenameByValue(allVariables, variablesPerFile))
variableFile = getFilepath(variableFile)
variableFile = await getFilepath(variableFile)
const variableRefs = getDataReferences(JSON.stringify(variablesPerFile))
@@ -90,7 +90,7 @@ describe('data references', () => {
const value = get(data.en, key)
if (typeof value !== 'string') errors.push({ key, value, variableFile })
})
})
}))
errors = uniqWith(errors, isEqual) // remove duplicates
expect(errors.length, JSON.stringify(errors, null, 2)).toBe(0)
@@ -102,10 +102,13 @@ function getFilenameByValue (object, value) {
}
// if path exists, assume it's a directory; otherwise, assume a YML extension
function getFilepath (filepath) {
filepath = fs.existsSync(filepath)
? filepath + '/'
: filepath + '.yml'
async function getFilepath (filepath) {
try {
await fs.stat(filepath)
filepath = filepath + '/'
} catch (_) {
filepath = filepath + '.yml'
}
// we only need the relative path
return filepath.replace(path.join(__dirname, '../../'), '')
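
fs.promises has no existsSync equivalent (and fs.promises.exists does not exist, hence the "Clear out await fs.exists" step in the commit message), so the path check above becomes fs.stat wrapped in try/catch. A generic form of the same idea, assuming only the promise-based API:

// A failed stat() is treated as "path not present"; fs.access would work as well.
async function pathExists (filepath) {
  try {
    await fs.stat(filepath)
    return true
  } catch (_) {
    return false
  }
}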

View File

@@ -45,11 +45,11 @@ describe('siteData module (English)', () => {
// TODO: re-enable once Janky flakyness is resolved
test.skip('backfills missing translated site data with English values', async () => {
const newFile = path.join(__dirname, '../../data/newfile.yml')
fs.writeFileSync(newFile, 'newvalue: bar')
await fs.writeFile(newFile, 'newvalue: bar')
const data = await loadSiteData()
expect(get(data, 'en.site.data.newfile.newvalue')).toEqual('bar')
expect(get(data, 'ja.site.data.newfile.newvalue')).toEqual('bar')
fs.unlinkSync(newFile)
await fs.unlink(newFile)
})
test('all Liquid templating is valid', async () => {

View File

@@ -1,6 +1,6 @@
const yaml = require('js-yaml')
const { createChangelogEntry, cleanPreviewTitle, previewAnchor, prependDatedEntry } = require('../../script/graphql/build-changelog')
const fs = require('fs')
const fs = require('fs').promises
const MockDate = require('mockdate')
const expectedChangelogEntry = require('../fixtures/changelog-entry')
const expectedUpdatedChangelogFile = require('../fixtures/updated-changelog-file')
@@ -111,18 +111,18 @@ describe('updating the changelog file', () => {
MockDate.reset()
})
it('modifies the entry object and the file on disk', () => {
it('modifies the entry object and the file on disk', async () => {
const testTargetPath = 'tests/graphql/example_changelog.json'
const previousContents = fs.readFileSync(testTargetPath)
const previousContents = await fs.readFile(testTargetPath)
const exampleEntry = { someStuff: true }
const expectedDate = '2020-11-20'
MockDate.set(expectedDate)
prependDatedEntry(exampleEntry, testTargetPath)
const newContents = fs.readFileSync(testTargetPath, 'utf8')
const newContents = await fs.readFile(testTargetPath, 'utf8')
// reset the file:
fs.writeFileSync(testTargetPath, previousContents)
await fs.writeFile(testTargetPath, previousContents)
expect(exampleEntry).toEqual({ someStuff: true, date: expectedDate })
expect(JSON.parse(newContents)).toEqual(expectedUpdatedChangelogFile)
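
The test above snapshots the changelog fixture before prependDatedEntry rewrites it, then restores it afterwards; note that readFile without an encoding resolves to a Buffer, which writeFile accepts as-is when restoring. The same snapshot-and-restore idiom in isolation (here wrapped in try/finally so the restore still runs if an assertion throws), with a hypothetical path and helper:

it('leaves the fixture untouched', async () => {
  const before = await fs.readFile('tests/fixtures/some-file.json') // Buffer: no encoding given
  try {
    await rewriteFileOnDisk('tests/fixtures/some-file.json') // hypothetical function under test
    // ...assertions on the rewritten contents would go here...
  } finally {
    await fs.writeFile('tests/fixtures/some-file.json', before) // restore the fixture
  }
})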

View File

@@ -1,20 +1,28 @@
const fs = require('fs')
const fs = require('fs').promises
const path = require('path')
const { filter: asyncFilter } = require('async')
describe('check for orphan tests', () => {
test('all tests are in sub-directories', () => {
test('all tests are in sub-directories', async () => {
// A known list of exceptions that can live outside of directories
const EXCEPTIONS = ['README.md']
const pathToTests = path.join(process.cwd(), 'tests')
// Get a list of files/directories in `/tests`
const testDirectory = fs.readdirSync(pathToTests)
const testDirectory = await fs.readdir(pathToTests)
const filteredList = testDirectory
let filteredList = testDirectory
// Filter out our exceptions
.filter(item => !EXCEPTIONS.includes(item))
// Don't include directories
.filter(item => !fs.statSync(path.join(pathToTests, item)).isDirectory())
// Don't include directories
filteredList = await asyncFilter(
filteredList,
async item => !(
await fs.stat(
path.join(pathToTests, item)
)
).isDirectory()
)
expect(filteredList).toHaveLength(0)
})
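
Array.prototype.filter cannot take an async predicate — the returned Promise is always truthy, so nothing would ever be filtered out — which is why the directory check moves to filter from the async package (imported above as asyncFilter). It awaits the predicate for each item and, when no callback is given, returns a promise. A minimal sketch with an illustrative directory listing:

const { filter: asyncFilter } = require('async')
const fs = require('fs').promises
const path = require('path')

// Keep only plain files; asyncFilter awaits the async predicate, unlike Array#filter.
async function onlyFiles (dir) {
  const names = await fs.readdir(dir)
  return asyncFilter(names, async name =>
    !(await fs.stat(path.join(dir, name))).isDirectory()
  )
}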

View File

@@ -1,4 +1,4 @@
const fs = require('fs')
const fs = require('fs').promises
const path = require('path')
const { difference, isPlainObject } = require('lodash')
const { getJSON } = require('../helpers/supertest')
@@ -17,7 +17,7 @@ describe('REST references docs', () => {
test('markdown file exists for every operationId prefix in the api.github.com schema', async () => {
const { categories } = require('../../lib/rest')
const referenceDir = path.join(__dirname, '../../content/rest/reference')
const filenames = fs.readdirSync(referenceDir)
const filenames = (await fs.readdir(referenceDir))
.filter(filename => !excludeFromResourceNameCheck.find(excludedFile => filename.endsWith(excludedFile)))
.map(filename => filename.replace('.md', ''))
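
One small readability point in the hunk above: the parentheses around await fs.readdir(referenceDir) are required so that .filter and .map chain off the resolved array rather than off the pending promise.

// Without the extra parentheses this would throw, because a Promise has no .filter:
// const filenames = await fs.readdir(referenceDir).filter(...)   // TypeError
const filenames = (await fs.readdir(referenceDir))
  .filter(filename => filename.endsWith('.md')) // simplified filter, for illustration only
  .map(filename => filename.replace('.md', ''))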

View File

@@ -1,4 +1,4 @@
const fs = require('fs')
const fs = require('fs').promises
const path = require('path')
const { GITHUB_ACTIONS, GITHUB_REPOSITORY } = process.env
@@ -7,17 +7,17 @@ const testViaActionsOnly = runningActionsOnInternalRepo ? test : test.skip
describe('cloning early-access', () => {
testViaActionsOnly('the content directory exists', async () => {
const eaContentDir = path.join(process.cwd(), 'content/early-access')
expect(fs.existsSync(eaContentDir)).toBe(true)
const eaDir = path.join(process.cwd(), 'content/early-access')
expect(await fs.stat(eaDir)).toBeTruthy()
})
testViaActionsOnly('the data directory exists', async () => {
const eaContentDir = path.join(process.cwd(), 'data/early-access')
expect(fs.existsSync(eaContentDir)).toBe(true)
const eaDir = path.join(process.cwd(), 'data/early-access')
expect(await fs.stat(eaDir)).toBeTruthy()
})
testViaActionsOnly('the assets/images directory exists', async () => {
const eaContentDir = path.join(process.cwd(), 'assets/images/early-access')
expect(fs.existsSync(eaContentDir)).toBe(true)
const eaDir = path.join(process.cwd(), 'assets/images/early-access')
expect(await fs.stat(eaDir)).toBeTruthy()
})
})
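
With existsSync unavailable on fs.promises, these assertions lean on the fact that fs.stat rejects when the path is missing — which fails the async test — and otherwise resolves to a Stats object, which is truthy. The same assertion in isolation, with an illustrative directory:

testViaActionsOnly('an early-access directory exists', async () => {
  const dir = path.join(process.cwd(), 'content/early-access/some-subdir') // illustrative path
  expect(await fs.stat(dir)).toBeTruthy() // stat rejects (and the test fails) if the path is missing
})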