* First run of script * Get the app running --- ish * Get NextJS working * Remove `node:` * Get more tests passing in unit directory * Update FailBot test to use nock * Update test.yml * Update Dockerfile * tests/content fixes * Update page.js * Update build-changelog.js * updating tests/routing * Update orphan-tests.js * updating tests/rendering * Update .eslintrc.js * Update .eslintrc.js * Install jest/globals * "linting" tests * staging update to server.mjs * Change '.github/allowed-actions.js' to a ESM export * Lint * Fixes for the main package.json * Move Jest to be last in the npm test command so we can pass args * Just use 'npm run lint' in the npm test command * update algolia label script * update openapi script * update require on openapi * Update enterprise-algolia-label.js * forgot JSON.parse * Update lunr-search-index.js * Always explicitly include process.cwd() for JSON file reads pathed from project root * update graphql/update-files.js script * Update other npm scripts using jest to pass ESM NODE_OPTIONS * Update check-for-enterprise-issues-by-label.js for ESM * Update create-enterprise-issue.js for ESM * Import jest global for browser tests * Convert 'script/deploy' to ESM Co-authored-by: Grace Park <gracepark@github.com> Co-authored-by: James M. Greene <jamesmgreene@github.com>
55 lines
1.6 KiB
JavaScript
Executable File
55 lines
1.6 KiB
JavaScript
Executable File
import path from 'path'
|
|
import flat from 'flat'
|
|
import { get, set } from 'lodash-es'
|
|
import languages from './languages.js'
|
|
import dataDirectory from './data-directory.js'
|
|
import encodeBracketedParentheses from './encode-bracketed-parentheses.js'
|
|
|
|
// Build the site-data object for one language directory.
// Reads every data file under `<dir>/data` (skipping READMEs), trimming
// trailing whitespace and encoding bracketed parentheses in each raw file
// before it is parsed.
const loadSiteDataFromDir = (dir) => {
  const data = dataDirectory(path.join(dir, 'data'), {
    // Normalize each raw data file before it is parsed.
    preprocess: (dataString) => encodeBracketedParentheses(dataString.trimEnd()),
    // README files are documentation, not site data.
    ignorePatterns: [/README\.md$/]
  })
  return { site: { data } }
}
|
|
|
|
// Load site data for every configured language, using the English data as
// both the canonical key set and the fallback for missing translations.
// Returns an object keyed by language code, e.g. siteData.en.site.data…
export default function loadSiteData () {
  // English is the source of truth, so load it first.
  const siteData = {
    en: loadSiteDataFromDir(languages.en.dir)
  }

  // Flatten the English tree to enumerate every leaf key. Every other
  // language gets exactly these keys, falling back to the English value
  // when the translated leaf is missing (or otherwise falsy).
  const englishKeys = Object.keys(flat(siteData.en))
  for (const language of Object.values(languages)) {
    if (language.code === 'en') continue
    const translated = loadSiteDataFromDir(language.dir)
    for (const key of englishKeys) {
      const value = get(translated, key) || get(siteData.en, key)
      set(siteData, `${language.code}.${key}`, value)
    }
  }

  for (const language of Object.values(languages)) {
    const glossary = siteData[language.code].site.data.glossaries.external

    // Give every glossary entry the English term as its `slug`, so anchor
    // links stay consistent across languages.
    glossary.forEach((item, i) => {
      item.slug = siteData.en.site.data.glossaries.external[i].term
    })

    // Sort translated glossaries with a locale-aware comparison; the
    // English glossary keeps its original order.
    if (language.code !== 'en') {
      glossary.sort((a, b) => a.term.localeCompare(b.term, language.code))
    }
  }

  return siteData
}
|