Migrate CommonJS to ESM (#20301)
* First run of script
* Get the app running --- ish
* Get NextJS working
* Remove `node:`
* Get more tests passing in unit directory
* Update FailBot test to use nock
* Update test.yml
* Update Dockerfile
* tests/content fixes
* Update page.js
* Update build-changelog.js
* updating tests/routing
* Update orphan-tests.js
* updating tests/rendering
* Update .eslintrc.js
* Update .eslintrc.js
* Install jest/globals
* "linting" tests
* staging update to server.mjs
* Change '.github/allowed-actions.js' to a ESM export
* Lint
* Fixes for the main package.json
* Move Jest to be last in the npm test command so we can pass args
* Just use 'npm run lint' in the npm test command
* update algolia label script
* update openapi script
* update require on openapi
* Update enterprise-algolia-label.js
* forgot JSON.parse
* Update lunr-search-index.js
* Always explicitly include process.cwd() for JSON file reads pathed from project root
* update graphql/update-files.js script
* Update other npm scripts using jest to pass ESM NODE_OPTIONS
* Update check-for-enterprise-issues-by-label.js for ESM
* Update create-enterprise-issue.js for ESM
* Import jest global for browser tests
* Convert 'script/deploy' to ESM

Co-authored-by: Grace Park <gracepark@github.com>
Co-authored-by: James M. Greene <jamesmgreene@github.com>
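The hunks below repeat the same mechanical conversion in every test file: destructured `require()` calls become `import` statements, local paths gain an explicit `.js` extension (Node's ESM loader does not resolve extensionless relative paths the way CommonJS does), `lodash` is swapped for its ES-module build `lodash-es`, and the `jest` object is imported from `@jest/globals`. A condensed before/after sketch of that pattern, using paths from the first hunk (the commented lines are the old CommonJS form):

// Before (CommonJS): destructuring straight out of require(), no file extension
// const { dates, supported } = require('../../lib/enterprise-server-releases')
// const { zip, difference } = require('lodash')

// After (ESM): named imports, explicit .js extensions for local modules,
// and lodash replaced by its ES-module build, lodash-es
import { dates, supported } from '../../lib/enterprise-server-releases.js'
import { zip, difference } from 'lodash-es'

The runner side of the change is not part of these hunks; the commit messages about "ESM NODE_OPTIONS" presumably refer to running Jest with NODE_OPTIONS=--experimental-vm-modules, which is how Jest's native ESM support is enabled.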
@@ -1,7 +1,8 @@
-const { dates, supported } = require('../../lib/enterprise-server-releases')
-const languageCodes = Object.keys(require('../../lib/languages'))
-const { namePrefix } = require('../../lib/search/config')
-const remoteIndexNames = require('../../lib/search/cached-index-names.json')
+import { dates, supported } from '../../lib/enterprise-server-releases.js'
+import xLanguages from '../../lib/languages.js'
+import { namePrefix } from '../../lib/search/config.js'
+import remoteIndexNames from '../../lib/search/cached-index-names.json'
+const languageCodes = Object.keys(xLanguages)

 describe('algolia', () => {
   test('has remote indexNames in every language for every supported GHE version', () => {
@@ -1,14 +1,16 @@
-const path = require('path')
-const fs = require('fs')
-const walk = require('walk-sync')
-const matter = require('../../lib/read-frontmatter')
-const { zip, difference } = require('lodash')
-const GithubSlugger = require('github-slugger')
-const { XmlEntities } = require('html-entities')
-const readFileAsync = require('../../lib/readfile-async')
-const loadSiteData = require('../../lib/site-data')
-const renderContent = require('../../lib/render-content')
-const getApplicableVersions = require('../../lib/get-applicable-versions')
+import { fileURLToPath } from 'url'
+import path from 'path'
+import fs from 'fs'
+import walk from 'walk-sync'
+import matter from '../../lib/read-frontmatter.js'
+import { zip, difference } from 'lodash-es'
+import GithubSlugger from 'github-slugger'
+import { XmlEntities } from 'html-entities'
+import readFileAsync from '../../lib/readfile-async.js'
+import loadSiteData from '../../lib/site-data.js'
+import renderContent from '../../lib/render-content/index.js'
+import getApplicableVersions from '../../lib/get-applicable-versions.js'
+const __dirname = path.dirname(fileURLToPath(import.meta.url))

 const slugger = new GithubSlugger()
 const entities = new XmlEntities()
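Besides the import rewrites, this hunk introduces the other idiom that recurs through the rest of the diff: CommonJS provides `__dirname` automatically, ES modules do not, so it is rebuilt from `import.meta.url`. A standalone sketch of the pattern (the fixtures path is a made-up example, not a file from this PR):

import { fileURLToPath } from 'url'
import path from 'path'

// ES modules have no __dirname/__filename globals; derive them from the
// module's own URL instead.
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)

// Existing path math relative to the module keeps working unchanged.
const fixturesDir = path.join(__dirname, '../fixtures') // hypothetical path, for illustration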
@@ -1,5 +1,8 @@
-const config = require('../helpers/crowdin-config').read()
-const { loadPages } = require('../../lib/page-data')
+import xCrowdinConfig from '../helpers/crowdin-config.js'
+import { loadPages } from '../../lib/page-data.js'
+import { jest } from '@jest/globals'

+const config = xCrowdinConfig.read()
 const ignoredPagePaths = config.files[0].ignore
 const ignoredDataPaths = config.files[2].ignore

@@ -7,9 +10,8 @@ describe('crowdin.yml config file', () => {
   jest.setTimeout(60 * 1000)

   let pages
-  beforeAll(async (done) => {
+  beforeAll(async () => {
     pages = await loadPages()
-    done()
   })

   test('has expected file structure', async () => {
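Two test-harness changes appear here and in most of the remaining hunks. First, when Jest runs test files as ES modules the `jest` object is no longer an implicit global, so files that call `jest.setTimeout()` import it from `@jest/globals`. Second, the `done` callback is dropped from `async` hooks: an `async` function already signals completion through its returned promise, and newer versions of Jest reject a hook or test that both takes `done` and returns a promise. A condensed sketch of the resulting shape (the assertion is invented for illustration):

import { jest } from '@jest/globals'
import { loadPages } from '../../lib/page-data.js'

describe('crowdin.yml config file', () => {
  jest.setTimeout(60 * 1000) // jest must now be imported explicitly to call this

  let pages
  // No done(): the promise returned by the async function tells Jest when the hook finished
  beforeAll(async () => {
    pages = await loadPages()
  })

  test('loaded some pages', () => {
    expect(pages.length).toBeGreaterThan(0) // hypothetical assertion, not from this PR
  })
})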
@@ -1,7 +1,8 @@
-require('../../lib/feature-flags')
-const { getDOM, getJSON } = require('../helpers/supertest')
-const enterpriseServerReleases = require('../../lib/enterprise-server-releases')
-const japaneseCharacters = require('japanese-characters')
+import '../../lib/feature-flags.js'
+import { jest } from '@jest/globals'
+import { getDOM, getJSON } from '../helpers/supertest.js'
+import enterpriseServerReleases from '../../lib/enterprise-server-releases.js'
+import japaneseCharacters from 'japanese-characters'

 describe('featuredLinks', () => {
   jest.setTimeout(3 * 60 * 1000)
@@ -1,5 +1,5 @@
-const fs = require('fs')
-const path = require('path')
+import fs from 'fs'
+import path from 'path'
 const gitignorePath = path.join(process.cwd(), '.gitignore')
 const gitignore = fs.readFileSync(gitignorePath, 'utf8')
 const entries = gitignore.split(/\r?\n/)
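This file changes only its import lines, but the context it keeps illustrates a convention one of the commit messages calls out ("Always explicitly include process.cwd() for JSON file reads pathed from project root"): files that live relative to the repository root are resolved from `process.cwd()` rather than from the importing module. A minimal sketch of that convention, assuming the process is started from the repo root (reading `package.json` here is only an illustration):

import fs from 'fs'
import path from 'path'

// Anchor project-root-relative reads on the working directory, not on this
// module's own location, so the path is stable wherever the module lives.
const pkgPath = path.join(process.cwd(), 'package.json')
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'))
console.log(pkg.name)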
@@ -1,10 +1,9 @@
-const loadSiteData = require('../../lib/site-data')
+import loadSiteData from '../../lib/site-data.js'

 describe('glossaries', () => {
   let glossaries
-  beforeAll(async (done) => {
+  beforeAll(async () => {
     glossaries = (await loadSiteData()).en.site.data.glossaries
-    done()
   })

   test('are broken into external, internal, and candidates', async () => {
@@ -1,12 +1,15 @@
-const fs = require('fs')
-const path = require('path')
-const readJsonFile = require('../../lib/read-json-file')
+import fs from 'fs'
+import path from 'path'
+import readJsonFile from '../../lib/read-json-file.js'
+import { schemaValidator, previewsValidator, upcomingChangesValidator } from '../../lib/graphql/validator.js'
+import revalidator from 'revalidator'
+import xAllVersions from '../../lib/all-versions.js'
+import { jest } from '@jest/globals'

 const previewsJson = readJsonFile('./lib/graphql/static/previews.json')
 const upcomingChangesJson = readJsonFile('./lib/graphql/static/upcoming-changes.json')
 const prerenderedObjectsJson = readJsonFile('./lib/graphql/static/prerendered-objects.json')
-const { schemaValidator, previewsValidator, upcomingChangesValidator } = require('../../lib/graphql/validator')
-const revalidator = require('revalidator')
-const allVersions = Object.values(require('../../lib/all-versions'))
+const allVersions = Object.values(xAllVersions)
 const graphqlVersions = allVersions.map(v => v.miscVersionName)
 const graphqlTypes = readJsonFile('./lib/graphql/types.json').map(t => t.kind)
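One more recurring wrinkle is visible above: CommonJS allows `require()` anywhere inside an expression, as in `Object.values(require('../../lib/all-versions'))`, but ESM `import` declarations are static and must stand alone at the top level. The conversion therefore binds the module to an intermediate name (the `x`-prefixed identifiers such as `xAllVersions`) and derives the old constant in a separate statement:

// Before (CommonJS): require() nested inside an expression
// const allVersions = Object.values(require('../../lib/all-versions'))

// After (ESM): a static default import first, then derive the value from it
import xAllVersions from '../../lib/all-versions.js'

const allVersions = Object.values(xAllVersions)
const graphqlVersions = allVersions.map(v => v.miscVersionName)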
@@ -1,10 +1,12 @@
-const path = require('path')
-const walk = require('walk-sync')
-const matter = require('../../lib/read-frontmatter')
-const { zip } = require('lodash')
-const yaml = require('js-yaml')
-const readFileAsync = require('../../lib/readfile-async')
+import { fileURLToPath } from 'url'
+import path from 'path'
+import walk from 'walk-sync'
+import matter from '../../lib/read-frontmatter.js'
+import { zip } from 'lodash-es'
+import yaml from 'js-yaml'
+import readFileAsync from '../../lib/readfile-async.js'

+const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const rootDir = path.join(__dirname, '../..')
 const contentDir = path.join(rootDir, 'content')
 const reusablesDir = path.join(rootDir, 'data/reusables')
@@ -1,9 +1,11 @@
-const path = require('path')
-const cheerio = require('cheerio')
-const matter = require('gray-matter')
-const readFileAsync = require('../../lib/readfile-async')
-const removeLiquidStatements = require('../../lib/remove-liquid-statements')
-const removeDeprecatedFrontmatter = require('../../lib/remove-deprecated-frontmatter')
+import { fileURLToPath } from 'url'
+import path from 'path'
+import cheerio from 'cheerio'
+import matter from 'gray-matter'
+import readFileAsync from '../../lib/readfile-async.js'
+import removeLiquidStatements from '../../lib/remove-liquid-statements.js'
+import removeDeprecatedFrontmatter from '../../lib/remove-deprecated-frontmatter.js'
+const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const removeLiquidStatementsFixtures = path.join(__dirname, '../fixtures/remove-liquid-statements')

 // Hardcode values so tests don't go out of date
@@ -1,22 +1,26 @@
-const { isEqual, get, uniqWith } = require('lodash')
-const loadSiteData = require('../../lib/site-data')
-const { loadPages } = require('../../lib/page-data')
-const getDataReferences = require('../../lib/get-liquid-data-references')
-const frontmatter = require('../../lib/read-frontmatter')
-const fs = require('fs').promises
-const path = require('path')
-const readFileAsync = require('../../lib/readfile-async')
+import { fileURLToPath } from 'url'
+import path from 'path'
+import { isEqual, get, uniqWith } from 'lodash-es'
+import loadSiteData from '../../lib/site-data.js'
+import { loadPages } from '../../lib/page-data.js'
+import getDataReferences from '../../lib/get-liquid-data-references.js'
+import frontmatter from '../../lib/read-frontmatter.js'
+import xFs from 'fs'
+import readFileAsync from '../../lib/readfile-async.js'
+import { jest } from '@jest/globals'

+const __dirname = path.dirname(fileURLToPath(import.meta.url))
+const fs = xFs.promises
+
 describe('data references', () => {
   jest.setTimeout(60 * 1000)

   let data, pages

-  beforeAll(async (done) => {
+  beforeAll(async () => {
     data = await loadSiteData()
     pages = await loadPages()
     pages = pages.filter(page => page.languageCode === 'en')
-    done()
   })

   test('every data reference found in English content files is defined and has a value', () => {
@@ -1,17 +1,19 @@
-const fs = require('fs')
-const path = require('path')
-const { get, isPlainObject, has } = require('lodash')
-const flat = require('flat')
-const loadSiteData = require('../../lib/site-data')
-const patterns = require('../../lib/patterns')
-const { liquid } = require('../../lib/render-content')
-const walkSync = require('walk-sync')
+import { fileURLToPath } from 'url'
+import path from 'path'
+import fs from 'fs'
+import { get, isPlainObject, has } from 'lodash-es'
+import flat from 'flat'
+import loadSiteData from '../../lib/site-data.js'
+import patterns from '../../lib/patterns.js'
+import { liquid } from '../../lib/render-content/index.js'
+import walkSync from 'walk-sync'
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url))

 describe('siteData module (English)', () => {
   let data
-  beforeAll(async (done) => {
+  beforeAll(async () => {
     data = await loadSiteData()
-    done()
   })

   test('makes an object', async () => {
@@ -1,17 +1,19 @@
-const revalidator = require('revalidator')
-const schema = require('../helpers/schemas/site-tree-schema')
-const latestEnterpriseRelease = require('../../lib/enterprise-server-releases').latest
-const { loadSiteTree } = require('../../lib/page-data')
-const japaneseCharacters = require('japanese-characters')
-const nonEnterpriseDefaultVersion = require('../../lib/non-enterprise-default-version')
+import revalidator from 'revalidator'
+import schema from '../helpers/schemas/site-tree-schema.js'
+import xEnterpriseServerReleases from '../../lib/enterprise-server-releases.js'
+import { loadSiteTree } from '../../lib/page-data.js'
+import japaneseCharacters from 'japanese-characters'
+import nonEnterpriseDefaultVersion from '../../lib/non-enterprise-default-version.js'
+import { jest } from '@jest/globals'

+const latestEnterpriseRelease = xEnterpriseServerReleases.latest
+
 describe('siteTree', () => {
   jest.setTimeout(3 * 60 * 1000)

   let siteTree
-  beforeAll(async (done) => {
+  beforeAll(async () => {
     siteTree = await loadSiteTree()
-    done()
   })

   test('has language codes as top-level keys', () => {
@@ -1,9 +1,12 @@
-const { difference } = require('lodash')
-const { getJSON } = require('../helpers/supertest')
-const { latest } = require('../../lib/enterprise-server-releases')
-const allVersions = Object.values(require('../../lib/all-versions'))
+import { difference } from 'lodash-es'
+import { getJSON } from '../helpers/supertest.js'
+import { latest } from '../../lib/enterprise-server-releases.js'
+import xAllVersions from '../../lib/all-versions.js'
+import webhookPayloads from '../../lib/webhooks'
+import { jest } from '@jest/globals'
+
+const allVersions = Object.values(xAllVersions)
 const payloadVersions = allVersions.map(v => v.miscVersionName)
-const webhookPayloads = require('../../lib/webhooks')

 // grab some values for testing
 const nonEnterpriseDefaultPayloadVersion = allVersions