
Merge branch 'main' into enterprise-owners-do-not-consume-licenses

Authored by Janice on 2021-02-01 10:59:30 -07:00, committed by GitHub.
8 changed files with 235 additions and 58 deletions

View File

@@ -0,0 +1,61 @@
name: Check for External Repo Sync PR
on:
pull_request:
types:
- opened
- reopened
branches:
- main
jobs:
invalid-repo-sync-check:
name: Close external Repo Sync PRs
if: ${{ github.repository == 'github/docs' && github.ref == 'refs/heads/repo-sync' }}
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
script: |
const prCreator = context.payload.sender.login
// If the PR creator is the expected account, stop now
if (prCreator === 'Octomerger') {
return
}
try {
await github.teams.getMembershipForUserInOrg({
org: 'github',
team_slug: 'employees',
username: prCreator
})
// If the PR creator is a GitHub employee, stop now
return
} catch (err) {
// An error will be thrown if the user is not a GitHub employee.
// That said, we still want to proceed anyway!
}
const pr = context.payload.pull_request
const { owner, repo } = context.repo
// Close the PR and add the invalid label
await github.issues.update({
owner: owner,
repo: repo,
issue_number: pr.number,
labels: ['invalid'],
state: 'closed'
})
// Comment on the PR
await github.issues.createComment({
owner: owner,
repo: repo,
issue_number: pr.number,
body: "Please leave this `repo-sync` branch to the robots!\n\nI'm going to close this pull request now, but feel free to open a new issue or ask any questions in [discussions](https://github.com/github/docs/discussions)!"
})

View File

@@ -17,6 +17,9 @@ jobs:
with:
ref: translations # check out the 'translations' branch
- name: Check out tip of main
run: git fetch --depth=1 origin main
- name: Setup node
uses: actions/setup-node@c46424eee26de4078d34105d3de3cc4992202b1e
with:
@@ -41,6 +44,9 @@ jobs:
- name: Run linter
run: npx eslint .
- name: Lint translated content
run: npm run lint-translation
- name: Check dependencies
run: npm run check-deps
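The new "Lint translated content" step depends on the "Check out tip of main" step added just above it: the translation lint (shown later in this commit) diffs the checkout against origin/main to find which translated files changed. A minimal sketch of that detection, assuming a shallow git fetch --depth=1 origin main has already run:

const { execSync } = require('child_process')

// Assumes origin/main exists locally (the "Check out tip of main" step);
// without that fetch there is nothing to diff against.
const changedTranslationFiles = execSync('git diff --name-only origin/main')
  .toString()
  .split('\n')
  .filter(p => /^translations\/.*\.(yml|md)$/.test(p))

console.log(`Found ${changedTranslationFiles.length} translated files.`)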

View File

@@ -78,7 +78,7 @@ jobs:
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
constFilesArr = [
const badFilesArr = [
'translations/**',
'lib/rest/static/**',
'.github/workflows/**',

View File

@@ -82,12 +82,12 @@ After enabling required status checks, all required status checks must pass befo
{% endnote %}
You can set up required status checks to either be "loose" or "strict." The type of required status check you choose determines whether your branch is required to be up-to-date with the base branch before merging.
You can set up required status checks to either be "loose" or "strict." The type of required status check you choose determines whether your branch is required to be up to date with the base branch before merging.
| Type of required status check | Setting | Merge requirements | Considerations |
| --- | --- | --- | --- |
| **Strict** | The **Require branches to be up-to-date before merging** checkbox is checked. | The branch **must** be up to date with the base branch before merging. | This is the default behavior for required status checks. More builds may be required, as you'll need to bring the head branch up to date after other collaborators merge pull requests to the protected base branch.|
| **Loose** | The **Require branches to be up-to-date before merging** checkbox is **not** checked. | The branch **does not** have to be up to date with the base branch before merging. | You'll have fewer required builds, as you won't need to bring the head branch up to date after other collaborators merge pull requests. Status checks may fail after you merge your branch if there are incompatible changes with the base branch. |
| **Strict** | The **Require branches to be up to date before merging** checkbox is checked. | The branch **must** be up to date with the base branch before merging. | This is the default behavior for required status checks. More builds may be required, as you'll need to bring the head branch up to date after other collaborators merge pull requests to the protected base branch.|
| **Loose** | The **Require branches to be up to date before merging** checkbox is **not** checked. | The branch **does not** have to be up to date with the base branch before merging. | You'll have fewer required builds, as you won't need to bring the head branch up to date after other collaborators merge pull requests. Status checks may fail after you merge your branch if there are incompatible changes with the base branch. |
| **Disabled** | The **Require status checks to pass before merging** checkbox is **not** checked. | The branch has no merge restrictions. | If required status checks aren't enabled, collaborators can merge the branch at any time, regardless of whether it is up to date with the base branch. This increases the possibility of incompatible changes.
For troubleshooting information, see "[Troubleshooting required status checks](/github/administering-a-repository/troubleshooting-required-status-checks)."
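As an illustration of the strict setting described in the table above, here is a hedged sketch of enabling it through the branch protection REST API (PUT /repos/{owner}/{repo}/branches/{branch}/protection) with Octokit; the owner, repo, and status check names are placeholders, not values from this commit:

const { Octokit } = require('@octokit/rest')

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN })

async function requireUpToDateBranches () {
  // Newer Octokit versions expose this as octokit.rest.repos.updateBranchProtection.
  await octokit.repos.updateBranchProtection({
    owner: 'my-org',   // placeholder
    repo: 'my-repo',   // placeholder
    branch: 'main',
    required_status_checks: {
      strict: true,    // the "Require branches to be up to date before merging" checkbox
      contexts: ['test'] // placeholder status check names
    },
    enforce_admins: false,
    required_pull_request_reviews: null,
    restrictions: null
  })
}

requireUpToDateBranches().catch(console.error)

Setting strict: false (or leaving the checkbox unchecked) gives the "loose" behavior from the table, and omitting required_status_checks disables the requirement entirely.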

View File

@@ -2,6 +2,16 @@
const isBrowser = process.env.BROWSER
const isActions = Boolean(process.env.GITHUB_ACTIONS)
const testTranslation = Boolean(process.env.TEST_TRANSLATION)
let reporters = ['default']
if (testTranslation) {
// only use custom reporter if we are linting translations
reporters = ['<rootDir>/tests/helpers/lint-translation-reporter.js']
} else if (isActions) {
reporters.push('jest-github-actions-reporter')
}
module.exports = {
coverageThreshold: {
@@ -15,9 +25,7 @@ module.exports = {
preset: isBrowser
? 'jest-puppeteer'
: undefined,
reporters: isActions
? ['default', 'jest-github-actions-reporter']
: ['default'],
reporters,
modulePathIgnorePatterns: [
'assets/'
],

View File

@@ -170,6 +170,7 @@
"build": "cross-env NODE_ENV=production npx webpack --mode production",
"start-all-languages": "cross-env NODE_ENV=development nodemon server.js",
"lint": "eslint --fix . && prettier -w \"**/*.{yml,yaml}\"",
"lint-translation": "TEST_TRANSLATION=true jest content/lint-files",
"test": "jest && eslint . && prettier -c \"**/*.{yml,yaml}\" && npm run check-deps",
"prebrowser-test": "npm run build",
"browser-test": "start-server-and-test browser-test-server 4001 browser-test-tests",

View File

@@ -2,7 +2,7 @@ const path = require('path')
const slash = require('slash')
const fs = require('fs')
const walk = require('walk-sync')
const { zip } = require('lodash')
const { zip, groupBy } = require('lodash')
const yaml = require('js-yaml')
const revalidator = require('revalidator')
const generateMarkdownAST = require('mdast-util-from-markdown')
@@ -12,12 +12,14 @@ const languages = require('../../lib/languages')
const { tags } = require('../../lib/liquid-tags/extended-markdown')
const ghesReleaseNotesSchema = require('../../lib/release-notes-schema')
const renderContent = require('../../lib/render-content')
const { execSync } = require('child_process')
const rootDir = path.join(__dirname, '../..')
const contentDir = path.join(rootDir, 'content')
const reusablesDir = path.join(rootDir, 'data/reusables')
const variablesDir = path.join(rootDir, 'data/variables')
const glossariesDir = path.join(rootDir, 'data/glossaries')
const ghesReleaseNotesDir = path.join(rootDir, 'data/release-notes')
const languageCodes = Object.keys(languages)
@@ -149,7 +151,6 @@ const oldVariableErrorText = 'Found article uses old {{ site.data... }} syntax.
const oldOcticonErrorText = 'Found octicon variables with the old {{ octicon-name }} syntax. Use {% octicon "name" %} instead!'
const oldExtendedMarkdownErrorText = 'Found extended markdown tags with the old {{#note}} syntax. Use {% note %}/{% endnote %} instead!'
describe('lint-files', () => {
const mdWalkOptions = {
globs: ['**/*.md'],
ignore: ['**/README.md'],
@@ -157,6 +158,20 @@ describe('lint-files', () => {
includeBasePath: true
}
// Also test the "data/variables/" YAML files
const yamlWalkOptions = {
globs: ['**/*.yml'],
directories: false,
includeBasePath: true
}
// different lint rules apply to different content types
let mdToLint, ymlToLint, releaseNotesToLint
if (!process.env.TEST_TRANSLATION) {
// compile lists of all the files we want to lint
const contentMarkdownAbsPaths = walk(contentDir, mdWalkOptions).sort()
const contentMarkdownRelPaths = contentMarkdownAbsPaths.map(p => slash(path.relative(rootDir, p)))
const contentMarkdownTuples = zip(contentMarkdownRelPaths, contentMarkdownAbsPaths)
@@ -165,16 +180,81 @@ describe('lint-files', () => {
const reusableMarkdownRelPaths = reusableMarkdownAbsPaths.map(p => slash(path.relative(rootDir, p)))
const reusableMarkdownTuples = zip(reusableMarkdownRelPaths, reusableMarkdownAbsPaths)
describe.each([...contentMarkdownTuples, ...reusableMarkdownTuples])(
'in "%s"',
mdToLint = [...contentMarkdownTuples, ...reusableMarkdownTuples]
// data/variables
const variableYamlAbsPaths = walk(variablesDir, yamlWalkOptions).sort()
const variableYamlRelPaths = variableYamlAbsPaths.map(p => slash(path.relative(rootDir, p)))
const variableYamlTuples = zip(variableYamlRelPaths, variableYamlAbsPaths)
// data/glossaries
const glossariesYamlAbsPaths = walk(glossariesDir, yamlWalkOptions).sort()
const glossariesYamlRelPaths = glossariesYamlAbsPaths.map(p => slash(path.relative(rootDir, p)))
const glossariesYamlTuples = zip(glossariesYamlRelPaths, glossariesYamlAbsPaths)
ymlToLint = [...variableYamlTuples, ...glossariesYamlTuples]
// GHES release notes
const ghesReleaseNotesYamlAbsPaths = walk(ghesReleaseNotesDir, yamlWalkOptions).sort()
const ghesReleaseNotesYamlRelPaths = ghesReleaseNotesYamlAbsPaths.map(p => path.relative(rootDir, p))
releaseNotesToLint = zip(ghesReleaseNotesYamlRelPaths, ghesReleaseNotesYamlAbsPaths)
} else {
console.log('testing translations.')
// get all translated markdown or yaml files by comparing files changed to main branch
const changedFilesRelPaths = execSync('git diff --name-only origin/main | egrep "^translations/.*/.+.(yml|md)$"').toString().split('\n')
console.log(`Found ${changedFilesRelPaths.length} translated files.`)
const { mdRelPaths, ymlRelPaths, releaseNotesRelPaths } = groupBy(changedFilesRelPaths, (path) => {
// separate the changed files into different groups
if (path.endsWith('README.md')) {
return 'throwAway'
} else if (path.endsWith('.md')) {
return 'mdRelPaths'
} else if (path.match(/\/data\/(variables|glossaries)\//i)) {
return 'ymlRelPaths'
} else if (path.match(/\/data\/release-notes\//i)) {
return 'releaseNotesRelPaths'
} else {
// we aren't linting the rest
return 'throwAway'
}
})
const [mdTuples, ymlTuples, releaseNotesTuples] = [mdRelPaths, ymlRelPaths, releaseNotesRelPaths].map(relPaths => {
const absPaths = relPaths.map(p => path.join(rootDir, p))
return zip(relPaths, absPaths)
})
mdToLint = mdTuples
ymlToLint = ymlTuples
releaseNotesToLint = releaseNotesTuples
}
function formatLinkError (message, links) {
return `${message}\n - ${links.join('\n - ')}`
}
// Returns `content` if it's a string, or `content.description` if that's a string.
// Used for getting the nested `description` key in glossary files.
function getContent (content) {
if (typeof content === 'string') return content
if (typeof content.description === 'string') return content.description
return null
}
describe('lint markdown content', () => {
describe.each(mdToLint)(
'%s',
(markdownRelPath, markdownAbsPath) => {
let content, ast, links, isHidden, isEarlyAccess, isSitePolicy
let content, ast, links, isHidden, isEarlyAccess, isSitePolicy, frontmatterErrors
beforeAll(async () => {
const fileContents = await fs.promises.readFile(markdownAbsPath, 'utf8')
const { data, content: bodyContent } = frontmatter(fileContents)
const { data, content: bodyContent, errors } = frontmatter(fileContents)
content = bodyContent
frontmatterErrors = errors
ast = generateMarkdownAST(content)
isHidden = data.hidden === true
isEarlyAccess = markdownRelPath.split('/').includes('early-access')
@@ -307,34 +387,20 @@ describe('lint-files', () => {
.resolves
.toBeTruthy()
})
if (!markdownRelPath.includes('data/reusables')) {
test('contains valid frontmatter', () => {
const errorMessage = frontmatterErrors.map(error => `- [${error.property}]: ${error.actual}, ${error.message}`).join('\n')
expect(frontmatterErrors.length, errorMessage).toBe(0)
})
}
}
)
})
// Also test the "data/variables/" YAML files
const yamlWalkOptions = {
globs: ['**/*.yml'],
directories: false,
includeBasePath: true
}
const variableYamlAbsPaths = walk(variablesDir, yamlWalkOptions).sort()
const variableYamlRelPaths = variableYamlAbsPaths.map(p => slash(path.relative(rootDir, p)))
const variableYamlTuples = zip(variableYamlRelPaths, variableYamlAbsPaths)
const glossariesYamlAbsPaths = walk(glossariesDir, yamlWalkOptions).sort()
const glossariesYamlRelPaths = glossariesYamlAbsPaths.map(p => slash(path.relative(rootDir, p)))
const glossariesYamlTuples = zip(glossariesYamlRelPaths, glossariesYamlAbsPaths)
// Returns `content` if it's a string, or `content.description` if that's a string.
// Used for getting the nested `description` key in glossary files.
function getContent (content) {
if (typeof content === 'string') return content
if (typeof content.description === 'string') return content.description
return null
}
describe.each([...variableYamlTuples, ...glossariesYamlTuples])(
'in "%s"',
describe('lint yaml content', () => {
describe.each(ymlToLint)(
'%s',
(yamlRelPath, yamlAbsPath) => {
let dictionary, isEarlyAccess
@@ -518,16 +584,12 @@ describe('lint-files', () => {
})
}
)
})
// GHES release notes
const ghesReleaseNotesDir = path.join(__dirname, '../../data/release-notes')
const ghesReleaseNotesYamlAbsPaths = walk(ghesReleaseNotesDir, yamlWalkOptions).sort()
const ghesReleaseNotesYamlRelPaths = ghesReleaseNotesYamlAbsPaths.map(p => path.relative(rootDir, p))
const ghesReleaseNotesYamlTuples = zip(ghesReleaseNotesYamlRelPaths, ghesReleaseNotesYamlAbsPaths)
if (ghesReleaseNotesYamlTuples.length > 0) {
describe.each(ghesReleaseNotesYamlTuples)(
'in "%s"',
describe('lint release notes', () => {
if (releaseNotesToLint.length > 0) {
describe.each(releaseNotesToLint)(
'%s',
(yamlRelPath, yamlAbsPath) => {
let dictionary
@@ -538,14 +600,10 @@ describe('lint-files', () => {
it('matches the schema', () => {
const { errors } = revalidator.validate(dictionary, ghesReleaseNotesSchema)
const errorMessage = errors.map(error => `- [${error.property}]: ${error.attribute}, ${error.message}`).join('\n')
const errorMessage = errors.map(error => `- [${error.property}]: ${error.actual}, ${error.message}`).join('\n')
expect(errors.length, errorMessage).toBe(0)
})
}
)
}
})
function formatLinkError (message, links) {
return `${message}\n - ${links.join('\n - ')}`
}
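One detail in the translation branch above that is easy to miss: lodash's groupBy returns an object keyed by whatever the classifier returns, and the code destructures it by group name, so a group with no matching files simply comes back as undefined. A standalone sketch of the pattern, with made-up file names:

const { groupBy } = require('lodash')

const changed = [
  'translations/ja-JP/content/github/index.md',
  'translations/ja-JP/data/variables/product.yml',
  'translations/ja-JP/data/release-notes/2-22.yml',
  'translations/ja-JP/content/README.md'
]

// Default each group to [] because groupBy omits keys that matched nothing.
const { mdRelPaths = [], ymlRelPaths = [], releaseNotesRelPaths = [] } = groupBy(changed, (path) => {
  if (path.endsWith('README.md')) return 'throwAway'
  if (path.endsWith('.md')) return 'mdRelPaths'
  if (path.match(/\/data\/(variables|glossaries)\//i)) return 'ymlRelPaths'
  if (path.match(/\/data\/release-notes\//i)) return 'releaseNotesRelPaths'
  return 'throwAway'
})

console.log(mdRelPaths.length, ymlRelPaths.length, releaseNotesRelPaths.length) // 1 1 1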

View File

@@ -0,0 +1,43 @@
const chalk = require('chalk')
const { groupBy } = require('lodash')
// we don't want to print all the stack traces
const stackTraceRegExp = /^\s+at\s.+/i
class TranslationReporter {
constructor (globalConfig, options) {
this._globalConfig = globalConfig
this._options = options
}
onRunComplete (contexts, results) {
const failures = results.testResults.reduce((fails, { testResults: assertionResults }) => {
const formattedFails = assertionResults
.filter(result => result.status === 'failed')
.map(({ ancestorTitles, failureMessages, title }) => {
return {
fileName: ancestorTitles[1],
failedTests: title,
failureMessage: failureMessages.map((message) => message.split('\n').filter(line => !stackTraceRegExp.test(line)).join('\n'))
}
})
return [...fails, ...formattedFails]
}, [])
const failuresByFile = groupBy(failures, 'fileName')
for (const fileName in failuresByFile) {
console.group(chalk.red.bold(`\n${fileName}`))
failuresByFile[fileName].forEach(({ failureMessage }, index) => {
console.log(chalk.bold(`\n(${index + 1})`))
failureMessage.forEach(msg => console.log(msg))
})
console.groupEnd()
}
console.log(chalk.bold('\nthese files should not be included: '))
console.dir(Object.keys(failuresByFile), { maxArrayLength: null })
}
}
module.exports = TranslationReporter
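To see what the reporter produces, here is a minimal sketch (not part of the commit) that feeds it a hand-built results object matching Jest's reporter API, where results.testResults holds one entry per test file and each nested testResults entry is an assertion result. The require path assumes the helper location referenced in the Jest config above and that the script runs from the repo root, with chalk and lodash already installed as project dependencies:

const TranslationReporter = require('./tests/helpers/lint-translation-reporter')

const reporter = new TranslationReporter({}, {})
reporter.onRunComplete(new Set(), {
  testResults: [{
    testResults: [{
      status: 'failed',
      title: 'contains valid frontmatter',
      // ancestorTitles[1] is the file name the reporter groups on
      ancestorTitles: ['lint markdown content', 'translations/ja-JP/content/example.md'],
      failureMessages: ['expected 0 errors\n    at Object.<anonymous> (lint-files.js:1:1)']
    }]
  }]
})
// Prints the failing file name in red, each failure message with the
// "    at ..." stack frames filtered out, and finally the list of files
// under "these files should not be included:".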