1
0
mirror of synced 2025-12-22 11:26:57 -05:00

Merge branch 'main' into patch-3

This commit is contained in:
Laura Coursen
2021-07-16 07:06:10 -05:00
committed by GitHub
415 changed files with 29193 additions and 24585 deletions

View File

@@ -3,49 +3,40 @@ module.exports = {
browser: true, browser: true,
commonjs: true, commonjs: true,
es2020: true, es2020: true,
node: true node: true,
}, },
parser: '@babel/eslint-parser', parser: '@babel/eslint-parser',
extends: [ extends: ['eslint:recommended', 'standard', 'prettier'],
'eslint:recommended',
'standard',
'prettier'
],
parserOptions: { parserOptions: {
ecmaVersion: 11 ecmaVersion: 11,
requireConfigFile: 'false',
babelOptions: { configFile: './.babelrc' },
}, },
rules: { rules: {
'import/no-extraneous-dependencies': ['error'], 'import/no-extraneous-dependencies': ['error', { packageDir: '.' }],
'node/global-require': ['error'], 'node/global-require': ['error'],
'import/no-dynamic-require': ['error'] 'import/no-dynamic-require': ['error'],
}, },
overrides: [ overrides: [
{ {
files: [ files: ['**/tests/**/*.js'],
'**/tests/**/*.js'
],
env: { env: {
jest: true jest: true,
} },
}, },
{ {
files: [ files: ['**/*.tsx', '**/*.ts'],
'**/*.tsx', '**/*.ts' plugins: ['@typescript-eslint', 'jsx-a11y'],
],
plugins: [
'@typescript-eslint',
'jsx-a11y'
],
extends: ['plugin:jsx-a11y/recommended'], extends: ['plugin:jsx-a11y/recommended'],
parser: '@typescript-eslint/parser', parser: '@typescript-eslint/parser',
rules: { rules: {
'camelcase': 'off', camelcase: 'off',
'no-unused-vars': 'off', 'no-unused-vars': 'off',
'no-undef': 'off', 'no-undef': 'off',
'no-use-before-define': 'off', 'no-use-before-define': 'off',
'@typescript-eslint/no-unused-vars': ['error'], '@typescript-eslint/no-unused-vars': ['error'],
'jsx-a11y/no-onchange': 'off', 'jsx-a11y/no-onchange': 'off',
} },
}, },
] ],
} }

View File

@@ -1,27 +1,32 @@
#!/usr/bin/env node #!/usr/bin/env node
const github = require('@actions/github') import { getOctokit } from '@actions/github'
const core = require('@actions/core') import { setOutput } from '@actions/core'
async function run () { async function run() {
const token = process.env.GITHUB_TOKEN const token = process.env.GITHUB_TOKEN
const octokit = github.getOctokit(token) const octokit = getOctokit(token)
const query = encodeURIComponent('is:open repo:github/docs-internal is:issue') const query = encodeURIComponent('is:open repo:github/docs-internal is:issue')
const deprecationIssues = await octokit.request(`GET /search/issues?q=${query}+label:"enterprise%20deprecation"`) const deprecationIssues = await octokit.request(
const releaseIssues = await octokit.request(`GET /search/issues?q=${query}+label:"enterprise%20release"`) `GET /search/issues?q=${query}+label:"enterprise%20deprecation"`
)
const releaseIssues = await octokit.request(
`GET /search/issues?q=${query}+label:"enterprise%20release"`
)
const isDeprecationIssue = deprecationIssues.data.items.length === 0 ? 'false' : 'true' const isDeprecationIssue = deprecationIssues.data.items.length === 0 ? 'false' : 'true'
const isReleaseIssue = releaseIssues.data.items.length === 0 ? 'false' : 'true' const isReleaseIssue = releaseIssues.data.items.length === 0 ? 'false' : 'true'
core.setOutput('deprecationIssue', isDeprecationIssue) setOutput('deprecationIssue', isDeprecationIssue)
core.setOutput('releaseIssue', isReleaseIssue) setOutput('releaseIssue', isReleaseIssue)
return `Set outputs deprecationIssue: ${isDeprecationIssue}, releaseIssue: ${isReleaseIssue}` return `Set outputs deprecationIssue: ${isDeprecationIssue}, releaseIssue: ${isReleaseIssue}`
} }
run() run().then(
.then( (response) => {
(response) => { console.log(`Finished running: ${response}`) }, console.log(`Finished running: ${response}`)
(error) => { },
console.log(`#ERROR# ${error}`) (error) => {
process.exit(1) console.log(`#ERROR# ${error}`)
} process.exit(1)
) }
)

View File

@@ -1,10 +1,11 @@
#!/usr/bin/env node #!/usr/bin/env node
const fs = require('fs') import fs from 'fs'
const path = require('path') import path from 'path'
const github = require('@actions/github') import { getOctokit } from '@actions/github'
const enterpriseDates = require('../../lib/enterprise-dates') import enterpriseDates from '../../lib/enterprise-dates.js'
const { latest, oldestSupported } = require('../../lib/enterprise-server-releases') import { latest, oldestSupported } from '../../lib/enterprise-server-releases.js'
const acceptedMilestones = ['release', 'deprecation'] const acceptedMilestones = ['release', 'deprecation']
const teamsToCC = '/cc @github/docs-content @github/docs-engineering' const teamsToCC = '/cc @github/docs-content @github/docs-engineering'
@@ -19,45 +20,46 @@ const numberOfdaysBeforeDeprecationToOpenIssue = 15
// number of days. // number of days.
// //
// When a milestone is within the specified number of days, a new issue is // When a milestone is within the specified number of days, a new issue is
// created using the templates in // created using the templates in
// .github/actions-scripts/enterprise-server-issue-templates. // .github/actions-scripts/enterprise-server-issue-templates.
// //
// Release issues are then added to the docs content squad board for triage. // Release issues are then added to the docs content squad board for triage.
// Deprecations issues are owned by docs engineering and are added to the // Deprecations issues are owned by docs engineering and are added to the
// docs engineering squad board automatically when the engineering label is added. // docs engineering squad board automatically when the engineering label is added.
// //
// [end-readme] // [end-readme]
run() run()
async function run () { async function run() {
const milestone = process.argv[2] const milestone = process.argv[2]
if (!acceptedMilestones.includes(milestone)) { if (!acceptedMilestones.includes(milestone)) {
console.log('Please specify either \'release\' or \'deprecation\'\n') console.log("Please specify either 'release' or 'deprecation'\n")
console.log('Example: script/open-enterprise-issue.js release') console.log('Example: script/open-enterprise-issue.js release')
process.exit(1) process.exit(1)
} }
// Milestone-dependent values. // Milestone-dependent values.
const numberOfdaysBeforeMilestoneToOpenIssue = milestone === 'release' const numberOfdaysBeforeMilestoneToOpenIssue =
? numberOfdaysBeforeReleaseToOpenIssue milestone === 'release'
: numberOfdaysBeforeDeprecationToOpenIssue ? numberOfdaysBeforeReleaseToOpenIssue
: numberOfdaysBeforeDeprecationToOpenIssue
const versionNumber = milestone === 'release' const versionNumber = milestone === 'release' ? getNextVersionNumber() : oldestSupported
? getNextVersionNumber()
: oldestSupported
if (!versionNumber) { if (!versionNumber) {
console.log(`Could not find the next version number after ${latest} in enterprise-dates.json. Try running script/udpate-enterprise-dates.js, then rerun this script.`) console.log(
`Could not find the next version number after ${latest} in enterprise-dates.json. Try running script/udpate-enterprise-dates.js, then rerun this script.`
)
process.exit(0) process.exit(0)
} }
const datesForVersion = enterpriseDates[versionNumber] const datesForVersion = enterpriseDates[versionNumber]
if (!datesForVersion) { if (!datesForVersion) {
console.log(`Could not find ${versionNumber} in enterprise-dates.json. Try running script/udpate-enterprise-dates.js, then rerun this script.`) console.log(
`Could not find ${versionNumber} in enterprise-dates.json. Try running script/udpate-enterprise-dates.js, then rerun this script.`
)
process.exit(0) process.exit(0)
} }
@@ -66,11 +68,19 @@ async function run () {
// If the milestone is more than the specific days away, exit now. // If the milestone is more than the specific days away, exit now.
if (daysUntilMilestone > numberOfdaysBeforeMilestoneToOpenIssue) { if (daysUntilMilestone > numberOfdaysBeforeMilestoneToOpenIssue) {
console.log(`The ${versionNumber} ${milestone} is not until ${nextMilestoneDate}! An issue will be opened when it is ${numberOfdaysBeforeMilestoneToOpenIssue} days away.`) console.log(
`The ${versionNumber} ${milestone} is not until ${nextMilestoneDate}! An issue will be opened when it is ${numberOfdaysBeforeMilestoneToOpenIssue} days away.`
)
process.exit(0) process.exit(0)
} }
const milestoneSteps = fs.readFileSync(path.join(process.cwd(), `.github/actions-scripts/enterprise-server-issue-templates/${milestone}-issue.md`), 'utf8') const milestoneSteps = fs.readFileSync(
path.join(
process.cwd(),
`.github/actions-scripts/enterprise-server-issue-templates/${milestone}-issue.md`
),
'utf8'
)
const issueLabels = [`enterprise ${milestone}`, `engineering`] const issueLabels = [`enterprise ${milestone}`, `engineering`]
const issueTitle = `[${nextMilestoneDate}] Enterprise Server ${versionNumber} ${milestone} (technical steps)` const issueTitle = `[${nextMilestoneDate}] Enterprise Server ${versionNumber} ${milestone} (technical steps)`
@@ -81,18 +91,20 @@ async function run () {
const token = process.env.GITHUB_TOKEN const token = process.env.GITHUB_TOKEN
// Create the milestone issue // Create the milestone issue
const octokit = github.getOctokit(token) const octokit = getOctokit(token)
try { try {
issue = await octokit.request('POST /repos/{owner}/{repo}/issues', { issue = await octokit.request('POST /repos/{owner}/{repo}/issues', {
owner: 'github', owner: 'github',
repo: 'docs-internal', repo: 'docs-internal',
title: issueTitle, title: issueTitle,
body: issueBody, body: issueBody,
labels: issueLabels labels: issueLabels,
}) })
if (issue.status === 201) { if (issue.status === 201) {
// Write the values to disk for use in the workflow. // Write the values to disk for use in the workflow.
console.log(`Issue #${issue.data.number} for the ${versionNumber} ${milestone} was opened: ${issue.data.html_url}`) console.log(
`Issue #${issue.data.number} for the ${versionNumber} ${milestone} was opened: ${issue.data.html_url}`
)
} }
} catch (error) { } catch (error) {
console.error(`#ERROR# ${error}`) console.error(`#ERROR# ${error}`)
@@ -100,7 +112,7 @@ async function run () {
process.exit(1) process.exit(1)
} }
// Add the release issue to the 'Needs triage' column on the // Add the release issue to the 'Needs triage' column on the
// docs content squad project board: // docs content squad project board:
// https://github.com/orgs/github/projects/1773#column-12198119 // https://github.com/orgs/github/projects/1773#column-12198119
// Deprecation issues are owned by docs engineering only and will // Deprecation issues are owned by docs engineering only and will
@@ -108,21 +120,21 @@ async function run () {
if (milestone === 'release') { if (milestone === 'release') {
try { try {
const addCard = await octokit.request('POST /projects/columns/{column_id}/cards', { const addCard = await octokit.request('POST /projects/columns/{column_id}/cards', {
column_id: 12198119, column_id: 12198119,
content_id: issue.data.id, content_id: issue.data.id,
content_type: 'Issue', content_type: 'Issue',
mediaType: { mediaType: {
previews: [ previews: ['inertia'],
'inertia' },
]
}
}) })
if (addCard.status === 201) { if (addCard.status === 201) {
// Write the values to disk for use in the workflow. // Write the values to disk for use in the workflow.
console.log(`The issue #${issue.data.number} was added to https://github.com/orgs/github/projects/1773#column-12198119.`) console.log(
} `The issue #${issue.data.number} was added to https://github.com/orgs/github/projects/1773#column-12198119.`
} catch(error) { )
}
} catch (error) {
console.error(`#ERROR# ${error}`) console.error(`#ERROR# ${error}`)
console.log(`🛑 There was an error adding the issue to the project board.`) console.log(`🛑 There was an error adding the issue to the project board.`)
process.exit(1) process.exit(1)
@@ -130,19 +142,19 @@ async function run () {
} }
} }
function getNextVersionNumber () { function getNextVersionNumber() {
const indexOfLatest = Object.keys(enterpriseDates).indexOf(latest) const indexOfLatest = Object.keys(enterpriseDates).indexOf(latest)
const indexOfNext = indexOfLatest + 1 const indexOfNext = indexOfLatest + 1
return Object.keys(enterpriseDates)[indexOfNext] return Object.keys(enterpriseDates)[indexOfNext]
} }
function calculateDaysUntilMilestone (nextMilestoneDate) { function calculateDaysUntilMilestone(nextMilestoneDate) {
const today = new Date().toISOString().slice(0, 10) const today = new Date().toISOString().slice(0, 10)
const differenceInMilliseconds = getTime(nextMilestoneDate) - getTime(today) const differenceInMilliseconds = getTime(nextMilestoneDate) - getTime(today)
// Return the difference in days // Return the difference in days
return Math.floor((differenceInMilliseconds) / (1000 * 60 * 60 * 24)) return Math.floor(differenceInMilliseconds / (1000 * 60 * 60 * 24))
} }
function getTime (date) { function getTime(date) {
return new Date(date).getTime() return new Date(date).getTime()
} }

View File

@@ -1,7 +1,8 @@
#!/usr/bin/env node #!/usr/bin/env node
const fs = require('fs') import fs from 'fs'
const core = require('@actions/core') import { setOutput } from '@actions/core'
const eventPayload = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, 'utf8')) const eventPayload = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, 'utf8'))
// This workflow-run script does the following: // This workflow-run script does the following:
@@ -19,8 +20,8 @@ if (!(labelsArray && labelsArray.length)) {
// Find the relevant label // Find the relevant label
const algoliaLabel = labelsArray const algoliaLabel = labelsArray
.map(label => label.name) .map((label) => label.name)
.find(label => label.startsWith(labelText)) .find((label) => label.startsWith(labelText))
// Exit early if no relevant label is found // Exit early if no relevant label is found
if (!algoliaLabel) { if (!algoliaLabel) {
@@ -32,5 +33,5 @@ if (!algoliaLabel) {
const versionToSync = algoliaLabel.split(labelText)[1] const versionToSync = algoliaLabel.split(labelText)[1]
// Store the version so we can access it later in the workflow // Store the version so we can access it later in the workflow
core.setOutput('versionToSync', versionToSync) setOutput('versionToSync', versionToSync)
process.exit(0) process.exit(0)

View File

@@ -1,28 +1,31 @@
#!/usr/bin/env node #!/usr/bin/env node
const fs = require('fs') import fs from 'fs'
const path = require('path') import path from 'path'
const { execSync } = require('child_process') import { execSync } from 'child_process'
const semver = require('semver') import semver from 'semver'
/* /*
* This script performs two checks to prevent shipping development mode OpenAPI schemas: * This script performs two checks to prevent shipping development mode OpenAPI schemas:
* - Ensures the `info.version` property is a semantic version. * - Ensures the `info.version` property is a semantic version.
* In development mode, the `info.version` property is a string * In development mode, the `info.version` property is a string
* containing the `github/github` branch name. * containing the `github/github` branch name.
* - Ensures the decorated schema matches the dereferenced schema. * - Ensures the decorated schema matches the dereferenced schema.
* The workflow that calls this script runs `script/rest/update-files.js` * The workflow that calls this script runs `script/rest/update-files.js`
* with the `--decorate-only` switch then checks to see if files changed. * with the `--decorate-only` switch then checks to see if files changed.
* *
*/ */
// Check that the `info.version` property is a semantic version // Check that the `info.version` property is a semantic version
const dereferencedDir = path.join(process.cwd(), 'lib/rest/static/dereferenced') const dereferencedDir = path.join(process.cwd(), 'lib/rest/static/dereferenced')
const schemas = fs.readdirSync(dereferencedDir) const schemas = fs.readdirSync(dereferencedDir)
schemas.forEach(filename => {
const schema = require(path.join(dereferencedDir, filename)) schemas.forEach((filename) => {
const schema = JSON.parse(fs.readFileSync(path.join(dereferencedDir, filename)))
if (!semver.valid(schema.info.version)) { if (!semver.valid(schema.info.version)) {
console.log(`🚧⚠️ Your branch contains a development mode OpenAPI schema: ${schema.info.version}. This check is a reminder to not 🚢 OpenAPI files in development mode. 🛑`) console.log(
`🚧⚠️ Your branch contains a development mode OpenAPI schema: ${schema.info.version}. This check is a reminder to not 🚢 OpenAPI files in development mode. 🛑`
)
process.exit(1) process.exit(1)
} }
}) })
@@ -30,10 +33,12 @@ schemas.forEach(filename => {
// Check that the decorated schema matches the dereferenced schema // Check that the decorated schema matches the dereferenced schema
const changedFiles = execSync('git diff --name-only HEAD').toString() const changedFiles = execSync('git diff --name-only HEAD').toString()
if(changedFiles !== '') { if (changedFiles !== '') {
console.log(`These files were changed:\n${changedFiles}`) console.log(`These files were changed:\n${changedFiles}`)
console.log(`🚧⚠️ Your decorated and dereferenced schema files don't match. Ensure you're using decorated and dereferenced schemas from the automatically created pull requests by the 'github-openapi-bot' user. For more information, see 'script/rest/README.md'. 🛑`) console.log(
process.exit(1) `🚧⚠️ Your decorated and dereferenced schema files don't match. Ensure you're using decorated and dereferenced schemas from the automatically created pull requests by the 'github-openapi-bot' user. For more information, see 'script/rest/README.md'. 🛑`
)
process.exit(1)
} }
// All checks pass, ready to ship // All checks pass, ready to ship

View File

@@ -3,37 +3,37 @@
// CI will fail and the action will need to be audited by the docs engineering team before it // CI will fail and the action will need to be audited by the docs engineering team before it
// can be added it this list. // can be added it this list.
module.exports = [ export default [
"actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f", // v2.3.4 'actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f', // v2.3.4
"actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d", // v4.0.2 'actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d', // v4.0.2
"actions/labeler@5f867a63be70efff62b767459b009290364495eb", // v2.2.0 'actions/labeler@5f867a63be70efff62b767459b009290364495eb', // v2.2.0
"actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f", // v2.2.0 'actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f', // v2.2.0
"actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6", // v2.2.2 'actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6', // v2.2.2
"actions/stale@9d6f46564a515a9ea11e7762ab3957ee58ca50da", // v3.0.16 'actions/stale@9d6f46564a515a9ea11e7762ab3957ee58ca50da', // v3.0.16
"alex-page/github-project-automation-plus@fdb7991b72040d611e1123d2b75ff10eda9372c9", 'alex-page/github-project-automation-plus@fdb7991b72040d611e1123d2b75ff10eda9372c9',
"andymckay/labeler@22d5392de2b725cea4b284df5824125054049d84", 'andymckay/labeler@22d5392de2b725cea4b284df5824125054049d84',
"crowdin/github-action@fd9429dd63d6c0f8a8cb4b93ad8076990bd6e688", 'crowdin/github-action@fd9429dd63d6c0f8a8cb4b93ad8076990bd6e688',
"crykn/copy_folder_to_another_repo_action@0282e8b9fef06de92ddcae9fe6cb44df6226646c", 'crykn/copy_folder_to_another_repo_action@0282e8b9fef06de92ddcae9fe6cb44df6226646c',
"cschleiden/actions-linter@0ff16d6ac5103cca6c92e6cbc922b646baaea5be", 'cschleiden/actions-linter@0ff16d6ac5103cca6c92e6cbc922b646baaea5be',
"dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911", 'dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911',
"docker://chinthakagodawita/autoupdate-action:v1", 'docker://chinthakagodawita/autoupdate-action:v1',
"github/codeql-action/analyze@v1", 'dorny/paths-filter@eb75a1edc117d3756a18ef89958ee59f9500ba58',
"github/codeql-action/init@v1", 'github/codeql-action/analyze@v1',
"juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8", 'github/codeql-action/init@v1',
"juliangruber/find-pull-request-action@db875662766249c049b2dcd85293892d61cb0b51", // v1.5.0 'juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8',
"juliangruber/read-file-action@e0a316da496006ffd19142f0fd594a1783f3b512", 'juliangruber/find-pull-request-action@db875662766249c049b2dcd85293892d61cb0b51', // v1.5.0
"lee-dohm/close-matching-issues@22002609b2555fe18f52b8e2e7c07cbf5529e8a8", 'juliangruber/read-file-action@e0a316da496006ffd19142f0fd594a1783f3b512',
"lee-dohm/no-response@9bb0a4b5e6a45046f00353d5de7d90fb8bd773bb", 'lee-dohm/close-matching-issues@22002609b2555fe18f52b8e2e7c07cbf5529e8a8',
"pascalgn/automerge-action@c9bd1823770819dc8fb8a5db2d11a3a95fbe9b07", // v0.12.0 'lee-dohm/no-response@9bb0a4b5e6a45046f00353d5de7d90fb8bd773bb',
"peter-evans/create-issue-from-file@b4f9ee0a9d4abbfc6986601d9b1a4f8f8e74c77e", 'pascalgn/automerge-action@c9bd1823770819dc8fb8a5db2d11a3a95fbe9b07', // v0.12.0
"peter-evans/create-or-update-comment@5221bf4aa615e5c6e95bb142f9673a9c791be2cd", 'peter-evans/create-issue-from-file@b4f9ee0a9d4abbfc6986601d9b1a4f8f8e74c77e',
"peter-evans/create-pull-request@8c603dbb04b917a9fc2dd991dc54fef54b640b43", 'peter-evans/create-or-update-comment@5221bf4aa615e5c6e95bb142f9673a9c791be2cd',
"rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9", 'peter-evans/create-pull-request@8c603dbb04b917a9fc2dd991dc54fef54b640b43',
"rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e", 'rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9',
"repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88", 'rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e',
"repo-sync/pull-request@33777245b1aace1a58c87a29c90321aa7a74bd7d", 'repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88',
"someimportantcompany/github-actions-slack-message@0b470c14b39da4260ed9e3f9a4f1298a74ccdefd", 'repo-sync/pull-request@33777245b1aace1a58c87a29c90321aa7a74bd7d',
"tjenkinson/gh-action-auto-merge-dependency-updates@4d7756c04d9d999c5968697a621b81c47f533d61", 'someimportantcompany/github-actions-slack-message@0b470c14b39da4260ed9e3f9a4f1298a74ccdefd',
"EndBug/add-and-commit@b3c7c1e078a023d75fb0bd326e02962575ce0519", 'tjenkinson/gh-action-auto-merge-dependency-updates@4d7756c04d9d999c5968697a621b81c47f533d61',
"dorny/paths-filter@eb75a1edc117d3756a18ef89958ee59f9500ba58", 'EndBug/add-and-commit@b3c7c1e078a023d75fb0bd326e02962575ce0519',
]; ]

1
.github/package.json vendored Normal file
View File

@@ -0,0 +1 @@
{"type":"module"}

View File

@@ -1,19 +0,0 @@
name: Block JavaScript Change
# **What it does**: Blocks JavaScript file changes.
# **Why we have it**: We need to freeze JavaScript file changes in order to upgrade from CommonJS to ESM.
# **Who does it impact**: Engineers.
on:
pull_request_target:
paths:
- '**.js'
jobs:
block:
runs-on: ubuntu-latest
steps:
- name: Fail
run: |
echo 'JavaScript files are frozen currently. Ask in Slack for help.'
exit 1

View File

@@ -59,4 +59,4 @@ jobs:
- name: Run tests - name: Run tests
run: npx jest tests/${{ matrix.test-group }}/ run: npx jest tests/${{ matrix.test-group }}/
env: env:
NODE_OPTIONS: '--max_old_space_size=8192' NODE_OPTIONS: '--max_old_space_size=8192 --experimental-vm-modules'

35
.github/workflows/triage-app-json.yml vendored Normal file
View File

@@ -0,0 +1,35 @@
name: Check for app.json changes
# **What it does**: If someone changes app.json, we fail the check.
# **Why we have it**: app.json should rarely be edited, so we'll require an admin merge if the file really needs to be changed.
# **Who does it impact**: Docs engineering and content writers.
on:
pull_request:
jobs:
check-app-json-changes:
if: github.repository == 'github/docs-internal' && github.event.pull_request.user.login != 'Octomerger'
runs-on: ubuntu-latest
steps:
- name: Get files changed
uses: dorny/paths-filter@eb75a1edc117d3756a18ef89958ee59f9500ba58
id: filter
with:
# Base branch used to get changed files
base: ${{ github.event.pull_request.base.ref }}
# Enables setting an output in the format in `${FILTER_NAME}_files
# with the names of the matching files formatted as JSON array
list-files: json
# Returns list of changed files matching each filter
filters: |
notAllowed:
- 'app.json'
- name: Fail if app.json was changed
if: ${{ steps.filter.outputs.notAllowed == 'true' }}
run: |
echo "Please admin merge if you really need to update app.json!"
exit 1

1
.gitignore vendored
View File

@@ -11,6 +11,7 @@ coverage/
/data/early-access /data/early-access
dist dist
.next .next
.eslintcache
# blc: broken link checker # blc: broken link checker
blc_output.log blc_output.log

View File

@@ -2,3 +2,4 @@
. "$(dirname "$0")/_/husky.sh" . "$(dirname "$0")/_/husky.sh"
node script/prevent-translation-commits.js node script/prevent-translation-commits.js
npx lint-staged

View File

@@ -7,7 +7,7 @@
} }
}, },
{ {
"files": ["**/*.{ts,tsx}"], "files": ["**/*.{ts,tsx,js,mjs}"],
"options": { "options": {
"semi": false, "semi": false,
"singleQuote": true, "singleQuote": true,

View File

@@ -93,11 +93,11 @@ COPY --chown=node:node layouts ./layouts
COPY --chown=node:node lib ./lib COPY --chown=node:node lib ./lib
COPY --chown=node:node middleware ./middleware COPY --chown=node:node middleware ./middleware
COPY --chown=node:node translations ./translations COPY --chown=node:node translations ./translations
COPY --chown=node:node server.js ./server.js COPY --chown=node:node server.mjs ./server.mjs
COPY --chown=node:node package*.json ./ COPY --chown=node:node package*.json ./
COPY --chown=node:node feature-flags.json ./ COPY --chown=node:node feature-flags.json ./
EXPOSE 80 EXPOSE 80
EXPOSE 443 EXPOSE 443
EXPOSE 4000 EXPOSE 4000
CMD ["node", "server.js"] CMD ["node", "server.mjs"]

View File

@@ -1,3 +1,3 @@
web: NODE_ENV=production node server.js web: NODE_ENV=production node server.mjs
release: NODE_ENV=production script/release-heroku release: NODE_ENV=production script/release-heroku

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 40 KiB

After

Width:  |  Height:  |  Size: 110 KiB

View File

@@ -8,8 +8,16 @@ import { ArticleList } from 'components/landing/ArticleList'
import { useTranslation } from 'components/hooks/useTranslation' import { useTranslation } from 'components/hooks/useTranslation'
export const TocLanding = () => { export const TocLanding = () => {
const { title, introPlainText, tocItems, productCallout, variant, featuredLinks, isEarlyAccess, renderedEarlyAccessPage } = const {
useTocLandingContext() title,
introPlainText,
tocItems,
productCallout,
variant,
featuredLinks,
isEarlyAccess,
renderedEarlyAccessPage,
} = useTocLandingContext()
const { t } = useTranslation('toc') const { t } = useTranslation('toc')
return ( return (
@@ -68,14 +76,15 @@ export const TocLanding = () => {
</div> </div>
</div> </div>
)} )}
{isEarlyAccess && {isEarlyAccess && (
<div className="markdown-body"> <div className="markdown-body">
<div <div
id="article-contents" id="article-contents"
className="article-grid-body" className="article-grid-body"
dangerouslySetInnerHTML={{ __html: renderedEarlyAccessPage }} dangerouslySetInnerHTML={{ __html: renderedEarlyAccessPage }}
/> />
</div>} </div>
)}
<TableOfContents items={tocItems} variant={variant} /> <TableOfContents items={tocItems} variant={variant} />
</div> </div>
</div> </div>

View File

@@ -15,8 +15,9 @@ export const SidebarProduct = () => {
// Setting to the top doesn't give enough context of surrounding categories // Setting to the top doesn't give enough context of surrounding categories
activeArticle?.scrollIntoView({ block: 'center' }) activeArticle?.scrollIntoView({ block: 'center' })
// scrollIntoView affects some articles that are very low in the sidebar // scrollIntoView affects some articles that are very low in the sidebar
// The content scrolls down a bit. This sets the article content back up top // The content scrolls down a bit. This sets the article content back up
window?.scrollTo(0, 0) // top unless the route contains a link heading.
if (!router.asPath.includes('#')) window?.scrollTo(0, 0)
}, []) }, [])
if (!currentProductTree) { if (!currentProductTree) {

View File

@@ -77,6 +77,8 @@ You can create an example workflow in your repository that automatically trigger
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/setup-node@v2 - uses: actions/setup-node@v2
with:
node-version: '14'
- run: npm install -g bats - run: npm install -g bats
- run: bats -v - run: bats -v
``` ```
@@ -171,10 +173,12 @@ To help you understand how YAML syntax is used to create a workflow file, this s
```yaml ```yaml
- uses: actions/setup-node@v2 - uses: actions/setup-node@v2
with:
node-version: '14'
``` ```
</td> </td>
<td> <td>
This action installs the <code>node</code> software package on the runner, giving you access to the <code>npm</code> command. This step uses the <code>actions/setup-node@v2</code> action to install the specified version of the <code>node</code> software package on the runner, which gives you access to the <code>npm</code> command.
</td> </td>
</tr> </tr>
<tr> <tr>
@@ -238,6 +242,8 @@ Once your job has started running, you can {% ifversion fpt or ghes > 3.0 or gha
To continue learning about {% data variables.product.prodname_actions %}, see "[Finding and customizing actions](/actions/learn-github-actions/finding-and-customizing-actions)." To continue learning about {% data variables.product.prodname_actions %}, see "[Finding and customizing actions](/actions/learn-github-actions/finding-and-customizing-actions)."
To understand how billing works for {% data variables.product.prodname_actions %}, see "[About billing for {% data variables.product.prodname_actions %}](/actions/reference/usage-limits-billing-and-administration#about-billing-for-github-actions)".
## Contacting support ## Contacting support
{% data reusables.github-actions.contacting-support %} {% data reusables.github-actions.contacting-support %}

View File

@@ -64,7 +64,7 @@ We strongly recommend that actions use environment variables to access the files
| `GITHUB_REPOSITORY` | The owner and repository name. For example, `octocat/Hello-World`. | | `GITHUB_REPOSITORY` | The owner and repository name. For example, `octocat/Hello-World`. |
| `GITHUB_EVENT_NAME` | The name of the webhook event that triggered the workflow. | | `GITHUB_EVENT_NAME` | The name of the webhook event that triggered the workflow. |
| `GITHUB_EVENT_PATH` | The path of the file with the complete webhook event payload. For example, `/github/workflow/event.json`. | | `GITHUB_EVENT_PATH` | The path of the file with the complete webhook event payload. For example, `/github/workflow/event.json`. |
| `GITHUB_WORKSPACE` | The {% data variables.product.prodname_dotcom %} workspace directory path. The workspace directory is a copy of your repository if your workflow uses the [actions/checkout](https://github.com/actions/checkout) action. If you don't use the `actions/checkout` action, the directory will be empty. For example, `/home/runner/work/my-repo-name/my-repo-name`. | | `GITHUB_WORKSPACE` | The {% data variables.product.prodname_dotcom %} workspace directory path, initially empty. For example, `/home/runner/work/my-repo-name/my-repo-name`. The [actions/checkout](https://github.com/actions/checkout) action will check out files, by default a copy of your repository, within this directory. |
| `GITHUB_SHA` | The commit SHA that triggered the workflow. For example, `ffac537e6cbbf934b08745a378932722df287a53`. | | `GITHUB_SHA` | The commit SHA that triggered the workflow. For example, `ffac537e6cbbf934b08745a378932722df287a53`. |
| `GITHUB_REF` | The branch or tag ref that triggered the workflow. For example, `refs/heads/feature-branch-1`. If neither a branch or tag is available for the event type, the variable will not exist. | | `GITHUB_REF` | The branch or tag ref that triggered the workflow. For example, `refs/heads/feature-branch-1`. If neither a branch or tag is available for the event type, the variable will not exist. |
| `GITHUB_HEAD_REF` | Only set for pull request events. The name of the head branch. | `GITHUB_HEAD_REF` | Only set for pull request events. The name of the head branch.

View File

@@ -575,7 +575,7 @@ Runs your workflow anytime the `pull_request` event occurs. {% data reusables.de
| Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` | | Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` |
| --------------------- | -------------- | ------------ | -------------| | --------------------- | -------------- | ------------ | -------------|
| [`pull_request`](/webhooks/event-payloads/#pull_request) | - `assigned`<br/>- `unassigned`<br/>- `labeled`<br/>- `unlabeled`<br/>- `opened`<br/>- `edited`<br/>- `closed`<br/>- `reopened`<br/>- `synchronize`<br/>- `ready_for_review`<br/>- `locked`<br/>- `unlocked` <br/>- `review_requested` <br/>- `review_request_removed` | Last merge commit on the `GITHUB_REF` branch | PR merge branch `refs/pull/:prNumber/merge` | | [`pull_request`](/webhooks/event-payloads/#pull_request) | - `assigned`<br/>- `unassigned`<br/>- `labeled`<br/>- `unlabeled`<br/>- `opened`<br/>- `edited`<br/>- `closed`<br/>- `reopened`<br/>- `synchronize`<br/>- `converted_to_draft`<br/>- `ready_for_review`<br/>- `locked`<br/>- `unlocked` <br/>- `review_requested` <br/>- `review_request_removed`{% ifversion fpt or ghes > 3.0 or ghae %} <br/>- `auto_merge_enabled` <br/>- `auto_merge_disabled`{% endif %} | Last merge commit on the `GITHUB_REF` branch | PR merge branch `refs/pull/:prNumber/merge` |
You extend or limit the default activity types using the `types` keyword. For more information, see "[Workflow syntax for {% data variables.product.prodname_actions %}](/articles/workflow-syntax-for-github-actions#onevent_nametypes)." You extend or limit the default activity types using the `types` keyword. For more information, see "[Workflow syntax for {% data variables.product.prodname_actions %}](/articles/workflow-syntax-for-github-actions#onevent_nametypes)."
@@ -643,7 +643,7 @@ This event runs in the context of the base of the pull request, rather than in t
| Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` | | Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` |
| --------------------- | -------------- | ------------ | -------------| | --------------------- | -------------- | ------------ | -------------|
| [`pull_request_target`](/webhooks/event-payloads/#pull_request) | - `assigned`<br/>- `unassigned`<br/>- `labeled`<br/>- `unlabeled`<br/>- `opened`<br/>- `edited`<br/>- `closed`<br/>- `reopened`<br/>- `synchronize`<br/>- `ready_for_review`<br/>- `locked`<br/>- `unlocked` <br/>- `review_requested` <br/>- `review_request_removed` | Last commit on the PR base branch | PR base branch | | [`pull_request_target`](/webhooks/event-payloads/#pull_request) | - `assigned`<br/>- `unassigned`<br/>- `labeled`<br/>- `unlabeled`<br/>- `opened`<br/>- `edited`<br/>- `closed`<br/>- `reopened`<br/>- `synchronize`<br/>- `converted_to_draft`<br/>- `ready_for_review`<br/>- `locked`<br/>- `unlocked` <br/>- `review_requested` <br/>- `review_request_removed`{% ifversion fpt or ghes > 3.0 or ghae %} <br/>- `auto_merge_enabled` <br/>- `auto_merge_disabled`{% endif %} | Last commit on the PR base branch | PR base branch |
By default, a workflow only runs when a `pull_request_target`'s activity type is `opened`, `synchronize`, or `reopened`. To trigger workflows for more activity types, use the `types` keyword. For more information, see "[Workflow syntax for {% data variables.product.prodname_actions %}](/articles/workflow-syntax-for-github-actions#onevent_nametypes)." By default, a workflow only runs when a `pull_request_target`'s activity type is `opened`, `synchronize`, or `reopened`. To trigger workflows for more activity types, use the `types` keyword. For more information, see "[Workflow syntax for {% data variables.product.prodname_actions %}](/articles/workflow-syntax-for-github-actions#onevent_nametypes)."

View File

@@ -51,7 +51,7 @@ on:
When using the `push` and `pull_request` events, you can configure a workflow to run on specific branches or tags. For a `pull_request` event, only branches and tags on the base are evaluated. If you define only `tags` or only `branches`, the workflow won't run for events affecting the undefined Git ref. When using the `push` and `pull_request` events, you can configure a workflow to run on specific branches or tags. For a `pull_request` event, only branches and tags on the base are evaluated. If you define only `tags` or only `branches`, the workflow won't run for events affecting the undefined Git ref.
The `branches`, `branches-ignore`, `tags`, and `tags-ignore` keywords accept glob patterns that use the `*` and `**` wildcard characters to match more than one branch or tag name. For more information, see the "[Filter pattern cheat sheet](#filter-pattern-cheat-sheet)." The `branches`, `branches-ignore`, `tags`, and `tags-ignore` keywords accept glob patterns that use characters like `*`, `**`, `+`, `?`, `!` and others to match more than one branch or tag name. If a name contains any of these characters and you want a literal match, you need to *escape* each of these special characters with `\`. For more information about glob patterns, see the "[Filter pattern cheat sheet](#filter-pattern-cheat-sheet)."
### Example: Including branches and tags ### Example: Including branches and tags
@@ -181,6 +181,25 @@ The filter determines if a workflow should run by evaluating the changed files a
For more information, see "[About comparing branches in pull requests](/articles/about-comparing-branches-in-pull-requests)." For more information, see "[About comparing branches in pull requests](/articles/about-comparing-branches-in-pull-requests)."
## `on.workflow_dispatch.inputs`
When using the `workflow_dispatch` event, you can optionally specify inputs that are passed to the workflow. Workflow dispatch inputs are specified with the same format as action inputs. For more information about the format, see "[Metadata syntax for GitHub Actions](/actions/creating-actions/metadata-syntax-for-github-actions#inputs)."
```yaml
on:
workflow_dispatch:
inputs:
logLevel:
description: 'Log level'
required: true
default: 'warning'
tags:
description: 'Test scenario tags'
required: false
```
The triggered workflow receives the inputs in the `github.event.inputs` context. For more information, see "[Context and expression syntax for GitHub Actions](/actions/reference/context-and-expression-syntax-for-github-actions#github-context)."
## `on.schedule` ## `on.schedule`
{% data reusables.repositories.actions-scheduled-workflow-example %} {% data reusables.repositories.actions-scheduled-workflow-example %}
@@ -621,15 +640,15 @@ Actions are either JavaScript files or Docker containers. If the action you're u
### Example: Using versioned actions ### Example: Using versioned actions
```yaml ```yaml
steps: steps:
# Reference a specific commit # Reference a specific commit
- uses: actions/setup-node@c46424eee26de4078d34105d3de3cc4992202b1e - uses: actions/checkout@a81bbbf8298c0fa03ea29cdc473d45769f953675
# Reference the major version of a release # Reference the major version of a release
- uses: actions/setup-node@v2 - uses: actions/checkout@v2
# Reference a specific version # Reference a specific version
- uses: actions/setup-node@v2.2.0 - uses: actions/checkout@v2.2.0
# Reference a branch # Reference a branch
- uses: actions/setup-node@main - uses: actions/checkout@main
``` ```
### Example: Using a public action ### Example: Using a public action
@@ -780,7 +799,7 @@ Using the `working-directory` keyword, you can specify the working directory of
### Using a specific shell ### Using a specific shell
You can override the default shell settings in the runner's operating system using the `shell` keyword. You can use built-in `shell` keywords, or you can define a custom set of shell options. You can override the default shell settings in the runner's operating system using the `shell` keyword. You can use built-in `shell` keywords, or you can define a custom set of shell options. The shell command that is run internally executes a temporary file that contains the commands specified in the `run` keyword.
| Supported platform | `shell` parameter | Description | Command run internally | | Supported platform | `shell` parameter | Description | Command run internally |
|--------------------|-------------------|-------------|------------------------| |--------------------|-------------------|-------------|------------------------|

View File

@@ -84,7 +84,7 @@ You must have administrative access on your IdP to configure the application for
| Value | Other names | Description | Example | | Value | Other names | Description | Example |
| :- | :- | :- | :- | | :- | :- | :- | :- |
| URL | Tenant URL | URL to the SCIM provisioning API for your enterprise on {% data variables.product.prodname_ghe_managed %} | <nobr><code>{% data variables.product.api_url_pre %}</nobr></code> | | URL | Tenant URL | URL to the SCIM provisioning API for your enterprise on {% data variables.product.prodname_ghe_managed %} | <nobr><code>{% data variables.product.api_url_pre %}/scim/v2</nobr></code> |
| Shared secret | Personal access token, secret token | Token for application on your IdP to perform provisioning tasks on behalf of an enterprise owner | Personal access token you created in step 1 | | Shared secret | Personal access token, secret token | Token for application on your IdP to perform provisioning tasks on behalf of an enterprise owner | Personal access token you created in step 1 |
{% endif %} {% endif %}

View File

@@ -70,7 +70,7 @@ The UFW firewall also opens several other ports that are required for {% data va
``` ```
3. To back up your custom firewall rules, use the `cp` command to move the rules to a new file. 3. To back up your custom firewall rules, use the `cp` command to move the rules to a new file.
```shell ```shell
$ sudo cp -r /lib/ufw ~/ufw.backup $ sudo cp -r /etc/ufw ~/ufw.backup
``` ```
After you upgrade {% data variables.product.product_location %}, you must reapply your custom firewall rules. We recommend that you create a script to reapply your firewall custom rules. After you upgrade {% data variables.product.product_location %}, you must reapply your custom firewall rules. We recommend that you create a script to reapply your firewall custom rules.
@@ -88,7 +88,7 @@ If something goes wrong after you change the firewall rules, you can reset the r
{% data reusables.enterprise_installation.ssh-into-instance %} {% data reusables.enterprise_installation.ssh-into-instance %}
2. To restore the previous backup rules, copy them back to the firewall with the `cp` command. 2. To restore the previous backup rules, copy them back to the firewall with the `cp` command.
```shell ```shell
$ sudo cp -f ~/ufw.backup/*rules /lib/ufw $ sudo cp -f ~/ufw.backup/*rules /etc/ufw
``` ```
3. Restart the firewall with the `systemctl` command. 3. Restart the firewall with the `systemctl` command.
```shell ```shell

View File

@@ -1,27 +1,36 @@
--- ---
title: Enabling alerts for vulnerable dependencies on GitHub Enterprise Server title: Enabling alerts for vulnerable dependencies on GitHub Enterprise Server
intro: 'You can connect {% data variables.product.product_location %} to {% data variables.product.prodname_ghe_cloud %} and enable {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies in repositories in your instance.' intro: 'You can connect {% data variables.product.product_location %} to {% data variables.product.prodname_ghe_cloud %} and enable the dependency graph and {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts in repositories in your instance.'
shortTitle: Enable alerts for dependencies
redirect_from: redirect_from:
- /enterprise/admin/installation/enabling-security-alerts-for-vulnerable-dependencies-on-github-enterprise-server - /enterprise/admin/installation/enabling-security-alerts-for-vulnerable-dependencies-on-github-enterprise-server
- /enterprise/admin/configuration/enabling-security-alerts-for-vulnerable-dependencies-on-github-enterprise-server - /enterprise/admin/configuration/enabling-security-alerts-for-vulnerable-dependencies-on-github-enterprise-server
- /enterprise/admin/configuration/enabling-alerts-for-vulnerable-dependencies-on-github-enterprise-server - /enterprise/admin/configuration/enabling-alerts-for-vulnerable-dependencies-on-github-enterprise-server
- /admin/configuration/enabling-alerts-for-vulnerable-dependencies-on-github-enterprise-server - /admin/configuration/enabling-alerts-for-vulnerable-dependencies-on-github-enterprise-server
permissions: 'Site administrators for {% data variables.product.prodname_ghe_server %} who are also owners of the connected {% data variables.product.prodname_ghe_cloud %} organization or enterprise account can enable {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies on {% data variables.product.prodname_ghe_server %}.' permissions: 'Site administrators for {% data variables.product.prodname_ghe_server %} who are also owners of the connected {% data variables.product.prodname_ghe_cloud %} organization or enterprise account can enable the dependency graph and {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts on {% data variables.product.prodname_ghe_server %}.'
versions: versions:
ghes: '*' ghes: '*'
type: how_to type: how_to
topics: topics:
- Enterprise - Enterprise
- Security - Security
shortTitle: Enable alerts for dependencies - Dependency graph
- Dependabot
--- ---
## About alerts for vulnerable dependencies on {% data variables.product.prodname_ghe_server %} ## About alerts for vulnerable dependencies on {% data variables.product.prodname_ghe_server %}
{% data reusables.repositories.tracks-vulnerabilities %} For more information, see "[About alerts for vulnerable dependencies](/github/managing-security-vulnerabilities/about-alerts-for-vulnerable-dependencies)." To identify vulnerable dependencies in your repository and receive vulnerability alerts, you can use two security features:
- The dependency graph
- {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}Security{% endif %} alerts
For more information, see "[About the dependency graph](/code-security/supply-chain-security/understanding-your-software-supply-chain/about-the-dependency-graph)" and "[About alerts for vulnerable dependencies](/github/managing-security-vulnerabilities/about-alerts-for-vulnerable-dependencies)."
{% data reusables.repositories.tracks-vulnerabilities %}
You can connect {% data variables.product.product_location %} to {% data variables.product.prodname_dotcom_the_website %}, then sync vulnerability data to your instance and generate {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts in repositories with a vulnerable dependency. You can connect {% data variables.product.product_location %} to {% data variables.product.prodname_dotcom_the_website %}, then sync vulnerability data to your instance and generate {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts in repositories with a vulnerable dependency.
After connecting {% data variables.product.product_location %} to {% data variables.product.prodname_dotcom_the_website %} and enabling {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies, vulnerability data is synced from {% data variables.product.prodname_dotcom_the_website %} to your instance once every hour. You can also choose to manually sync vulnerability data at any time. No code or information about code from {% data variables.product.product_location %} is uploaded to {% data variables.product.prodname_dotcom_the_website %}. After connecting {% data variables.product.product_location %} to {% data variables.product.prodname_dotcom_the_website %} and enabling the dependency graph and {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies, vulnerability data is synced from {% data variables.product.prodname_dotcom_the_website %} to your instance once every hour. You can also choose to manually sync vulnerability data at any time. No code or information about code from {% data variables.product.product_location %} is uploaded to {% data variables.product.prodname_dotcom_the_website %}.
{% ifversion ghes > 2.21 %}When {% data variables.product.product_location %} receives information about a vulnerability, it will identify repositories in your instance that use the affected version of the dependency and generate {% data variables.product.prodname_dependabot_alerts %}. You can customize how you receive {% data variables.product.prodname_dependabot_alerts %}. For more information, see "[Configuring notifications for vulnerable dependencies](/github/managing-security-vulnerabilities/configuring-notifications-for-vulnerable-dependencies/#configuring-notifications-for-dependabot-alerts)." {% ifversion ghes > 2.21 %}When {% data variables.product.product_location %} receives information about a vulnerability, it will identify repositories in your instance that use the affected version of the dependency and generate {% data variables.product.prodname_dependabot_alerts %}. You can customize how you receive {% data variables.product.prodname_dependabot_alerts %}. For more information, see "[Configuring notifications for vulnerable dependencies](/github/managing-security-vulnerabilities/configuring-notifications-for-vulnerable-dependencies/#configuring-notifications-for-dependabot-alerts)."
{% endif %} {% endif %}
@@ -30,12 +39,12 @@ After connecting {% data variables.product.product_location %} to {% data variab
{% endif %} {% endif %}
{% ifversion ghes > 2.21 %} {% ifversion ghes > 2.21 %}
## Enabling {% data variables.product.prodname_dependabot_alerts %} for vulnerable dependencies on {% data variables.product.prodname_ghe_server %} ## Enabling the dependency graph and {% data variables.product.prodname_dependabot_alerts %} for vulnerable dependencies on {% data variables.product.prodname_ghe_server %}
{% else %} {% else %}
## Enabling security alerts for vulnerable dependencies on {% data variables.product.prodname_ghe_server %} ## Enabling the dependency graph and security alerts for vulnerable dependencies on {% data variables.product.prodname_ghe_server %}
{% endif %} {% endif %}
Before enabling {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies on {% data variables.product.product_location %}, you must connect {% data variables.product.product_location %} to {% data variables.product.prodname_dotcom_the_website %}. For more information, see "[Connecting {% data variables.product.prodname_ghe_server %} to {% data variables.product.prodname_ghe_cloud %}](/enterprise/{{ currentVersion }}/admin/guides/installation/connecting-github-enterprise-server-to-github-enterprise-cloud)." Before enabling the dependency graph and {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies on {% data variables.product.product_location %}, you must connect {% data variables.product.product_location %} to {% data variables.product.prodname_dotcom_the_website %}. For more information, see "[Connecting {% data variables.product.prodname_ghe_server %} to {% data variables.product.prodname_ghe_cloud %}](/enterprise/{{ currentVersion }}/admin/guides/installation/connecting-github-enterprise-server-to-github-enterprise-cloud)."
{% ifversion ghes %} {% ifversion ghes %}
@@ -47,7 +56,7 @@ Before enabling {% ifversion ghes > 2.21 %}{% data variables.product.prodname_de
{% data reusables.enterprise_site_admin_settings.sign-in %} {% data reusables.enterprise_site_admin_settings.sign-in %}
1. In the administrative shell, enable the {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies on {% data variables.product.product_location %}: 1. In the administrative shell, enable the dependency graph and {% ifversion ghes > 2.21 %}{% data variables.product.prodname_dependabot %}{% else %}security{% endif %} alerts for vulnerable dependencies on {% data variables.product.product_location %}:
``` shell ``` shell
$ ghe-dep-graph-enable $ ghe-dep-graph-enable

View File

@@ -5,7 +5,7 @@ redirect_from:
- /enterprise/admin/categories/admin-bootcamp/ - /enterprise/admin/categories/admin-bootcamp/
- /enterprise/admin/user-management/organizations-and-teams - /enterprise/admin/user-management/organizations-and-teams
- /enterprise/admin/user-management/managing-organizations-in-your-enterprise - /enterprise/admin/user-management/managing-organizations-in-your-enterprise
intro: 'Organizations are great for creating distinct groups of users within your company, such as divisions or groups working on similar projects. {% ifversion ghae %}Internal{% else %}Public and internal{% endif %} repositories that belong to an organization are accessible to users in other organizations, while private repositories are inaccessible to anyone but members of the organization that are granted access.' intro: 'Organizations are great for creating distinct groups of users within your company, such as divisions or groups working on similar projects. {% ifversion ghae %}Internal{% else %}Public and internal{% endif %} repositories that belong to an organization are accessible to members of other organizations in the enterprise, while private repositories are inaccessible to anyone but members of the organization that are granted access.'
versions: versions:
ghes: '*' ghes: '*'
ghae: '*' ghae: '*'
@@ -22,4 +22,3 @@ children:
- /continuous-integration-using-jenkins - /continuous-integration-using-jenkins
shortTitle: Manage organizations shortTitle: Manage organizations
--- ---

View File

@@ -67,8 +67,6 @@ At the end of the month, {% data variables.product.prodname_dotcom %} calculates
- 3,000 Linux minutes at $0.008 per minute = $24. - 3,000 Linux minutes at $0.008 per minute = $24.
- 2,000 Windows minutes at $0.016 per minute = $32. - 2,000 Windows minutes at $0.016 per minute = $32.
At the end of the month, {% data variables.product.prodname_dotcom %} rounds your data transfer to the nearest GB.
{% data variables.product.prodname_dotcom %} calculates your storage usage for each month based on hourly usage during that month. For example, if you use 3 GB of storage for 10 days of March and 12 GB for 21 days of March, your storage usage would be: {% data variables.product.prodname_dotcom %} calculates your storage usage for each month based on hourly usage during that month. For example, if you use 3 GB of storage for 10 days of March and 12 GB for 21 days of March, your storage usage would be:
- 3 GB x 10 days x (24 hours per day) = 720 GB-Hours - 3 GB x 10 days x (24 hours per day) = 720 GB-Hours

View File

@@ -254,7 +254,7 @@ on: pull_request_target
permissions: permissions:
pull-requests: write pull-requests: write
content: write contents: write
jobs: jobs:
dependabot: dependabot:
@@ -271,6 +271,7 @@ jobs:
run: gh pr merge --auto --merge "$PR_URL" run: gh pr merge --auto --merge "$PR_URL"
env: env:
PR_URL: ${{github.event.pull_request.html_url}} PR_URL: ${{github.event.pull_request.html_url}}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
``` ```
{% endraw %} {% endraw %}

View File

@@ -15,6 +15,7 @@ versions:
ghae: '*' ghae: '*'
shortTitle: Handle line endings shortTitle: Handle line endings
--- ---
## About line endings
Every time you press <kbd>return</kbd> on your keyboard you insert an invisible character called a line ending. Different operating systems handle line endings differently. Every time you press <kbd>return</kbd> on your keyboard you insert an invisible character called a line ending. Different operating systems handle line endings differently.
When you're collaborating on projects with Git and {% data variables.product.product_name %}, Git might produce unexpected results if, for example, you're working on a Windows machine, and your collaborator has made a change in macOS. When you're collaborating on projects with Git and {% data variables.product.product_name %}, Git might produce unexpected results if, for example, you're working on a Windows machine, and your collaborator has made a change in macOS.

View File

@@ -12,6 +12,7 @@ versions:
ghae: '*' ghae: '*'
shortTitle: Set your username shortTitle: Set your username
--- ---
## About Git usernames
You can change the name that is associated with your Git commits using the `git config` command. The new name you set will be visible in any future commits you push to {% data variables.product.product_name %} from the command line. If you'd like to keep your real name private, you can use any text as your Git username. You can change the name that is associated with your Git commits using the `git config` command. The new name you set will be visible in any future commits you push to {% data variables.product.product_name %} from the command line. If you'd like to keep your real name private, you can use any text as your Git username.
Changing the name associated with your Git commits using `git config` will only affect future commits and will not change the name used for past commits. Changing the name associated with your Git commits using `git config` will only affect future commits and will not change the name used for past commits.

View File

@@ -13,6 +13,7 @@ versions:
ghae: '*' ghae: '*'
shortTitle: Push commits to a remote shortTitle: Push commits to a remote
--- ---
## About `git push`
The `git push` command takes two arguments: The `git push` command takes two arguments:
* A remote name, for example, `origin` * A remote name, for example, `origin`

View File

@@ -1,6 +1,6 @@
--- ---
title: Connecting to GitHub with SSH title: Connecting to GitHub with SSH
intro: 'You can connect to {% data variables.product.product_name %} using SSH.' intro: 'You can connect to {% data variables.product.product_name %} using the Secure Shell Protocol (SSH), which provides a secure channel over an unsecured network.'
redirect_from: redirect_from:
- /key-setup-redirect/ - /key-setup-redirect/
- /linux-key-setup/ - /linux-key-setup/

View File

@@ -13,7 +13,7 @@ topics:
Millions of developers host millions of projects on GitHub — both open and closed source — and we're honored to play a part in enabling collaboration across the developer community every day. Together, we all have an exciting opportunity and responsibility to make this a community we can be proud of. Millions of developers host millions of projects on GitHub — both open and closed source — and we're honored to play a part in enabling collaboration across the developer community every day. Together, we all have an exciting opportunity and responsibility to make this a community we can be proud of.
The GitHub Community Forum is intended to be a place for further collaboration, support, and brainstorming. This is a civilized place for connecting with other users, learning new skills, sharing feedback and ideas, and finding all the support you need for your GitHub projects. By participating in this Community, you are agreeing to the same [Terms of Service](/articles/github-terms-of-service/) that apply to GitHub.com, as well as this Community-specific Code of Conduct. The GitHub Community Forum is intended to be a place for further collaboration, support, and brainstorming. This is a civilized place for connecting with other users, learning new skills, sharing feedback and ideas, and finding all the support you need for your GitHub projects. By participating in this Community, you are agreeing to the same [Terms of Service](/github/site-policy/github-terms-of-service/) and [GitHub Acceptable Use Policies](/github/site-policy/github-acceptable-use-policies) that apply to GitHub.com, as well as this Community-specific Code of Conduct.
With this Code of Conduct, we hope to help you understand how best to collaborate in the GitHub Community Forum, what you can expect from moderators, and what type of actions or content may result in temporary or permanent suspension from Community participation. We will investigate any abuse reports and may moderate public content within the Community that we determine to be in violation of either the GitHub Terms of Service or this Code of Conduct. With this Code of Conduct, we hope to help you understand how best to collaborate in the GitHub Community Forum, what you can expect from moderators, and what type of actions or content may result in temporary or permanent suspension from Community participation. We will investigate any abuse reports and may moderate public content within the Community that we determine to be in violation of either the GitHub Terms of Service or this Code of Conduct.
@@ -21,7 +21,7 @@ GitHub users worldwide bring wildly different perspectives, ideas, and experienc
## Pledge ## Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in the GitHub Community Forum a harassment-free experience for everyone, regardless of age, body size, ability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in the GitHub Community Forum a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Standards ## Standards
@@ -63,28 +63,36 @@ The following are not hard and fast rules, merely aids to the human judgment of
### *What is not Allowed* ### *What is not Allowed*
- **Threats of violence.** You may not threaten violence towards others or use the site to organize, promote, or incite acts of real-world violence or terrorism. Think carefully about the words you use, the images you post, and even the software you write, and how they may be interpreted by others. Even if you mean something as a joke, it might not be received that way. If you think that someone else might interpret the content you post as a threat, or as promoting violence or terrorism, stop. Don't post it. In extraordinary cases, we may report threats of violence to law enforcement if we think there may be a genuine risk of physical harm or a threat to public safety. - **Threats of violence.** You may not threaten violence towards others or use the site to organize, promote, or incite acts of real-world violence or terrorism. Think carefully about the words you use, the images you post, and even the software you write, and how they may be interpreted by others. Even if you mean something as a joke, it might not be received that way. If you think that someone else might interpret the content you post as a threat, or as promoting violence or terrorism, stop. Don't post it on GitHub. In extraordinary cases, we may report threats of violence to law enforcement if we think there may be a genuine risk of physical harm or a threat to public safety.
- **Hate speech and discrimination.** While it is not forbidden to broach topics such as age, body size, ability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation, we do not tolerate speech that attacks a person or group of people on the basis of who they are. Just realize that talking about these or other sensitive topics can make others feel unwelcome, or perhaps even unsafe, if approached in an aggressive or insulting manner. We expect our Community members to be respectful when discussing sensitive topics. - **Hate speech and discrimination.** While it is not forbidden to broach topics such as age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation, we do not tolerate speech that attacks a person or group of people on the basis of who they are. Just realize that when approached in an aggressive or insulting manner, these (and other) sensitive topics can make others feel unwelcome, or perhaps even unsafe. While there's always the potential for misunderstandings, we expect our community members to remain respectful and civil when discussing sensitive topics.
- **Bullying and harassment.** We do not tolerate bullying or harassment. This means any habitual badgering or intimidation targeted at a specific person or group of people. In general, if your actions are unwanted and you continue to engage in them, there's a good chance you are headed into bullying or harassment territory. - **Bullying and harassment.** We do not tolerate bullying or harassment. This means any habitual badgering or intimidation targeted at a specific person or group of people. In general, if your actions are unwanted and you continue to engage in them, there's a good chance you are headed into bullying or harassment territory.
- **Impersonation.** You may not impersonate another person by copying their avatar, posting content under their email address, intentionally using a deceptively similar username or otherwise posing as someone else. Impersonation is a form of harassment. **Disrupting the experience of other users.** Being part of a community includes recognizing how your behavior affects others and engaging in meaningful and productive interactions with people and the platform they rely on. Behaviors such as repeatedly posting off-topic comments, opening empty or meaningless topics or using any other platform feature in a way that continually disrupts the experience of other users are not allowed.
- **Doxxing and invasion of privacy.** Don't post other people's personal information, such as phone numbers, private email addresses, physical addresses, credit card numbers, Social Security/National Identity numbers, or passwords. Depending on the context, such as in the case of intimidation or harassment, we may consider other information, such as photos or videos that were taken or distributed without the subject's consent, to be an invasion of privacy, especially when such material presents a safety risk to the subject. - **Impersonation.** You may not impersonate another person by copying their avatar, posting content under their email address, using a similar username or otherwise posing as someone else. Impersonation is a form of harassment.
- **Prurient/Sexually explicit content.** Basically, don't post pornography. This does not mean that all nudity or sexual content is prohibited. We recognize that sexuality is a part of life and non-pornographic sexual content may be a part of your project, or may be presented for educational or artistic purposes. If you have any questions or concerns about something you want to post, [feel free to reach out and ask](https://support.github.com/contact) beforehand. - **Doxxing and invasion of privacy.** Don't post other people's personal information, such as personal, private email addresses, phone numbers, physical addresses, credit card numbers, Social Security/National Identity numbers, or passwords. Depending on the context, such as in the case of intimidation or harassment, we may consider other information, such as photos or videos that were taken or distributed without the subject's consent, to be an invasion of privacy, especially when such material presents a safety risk to the subject.
- **Spam.** Respect the GitHub Community Forum. Dont post advertisements, link to spammy websites, or otherwise vandalize the Community. This Community is meant for GitHub users to discuss projects, learn, and share ideas with one another - not for advertising or other spam-like content. Content that we deem spammy will be removed. - **Sexually obscene content.** Dont post content that is pornographic. This does not mean that all nudity, or all code and content related to sexuality, is prohibited. We recognize that sexuality is a part of life and non-pornographic sexual content may be a part of your project, or may be presented for educational or artistic purposes. We do not allow obscene sexual content or content that may involve the exploitation or sexualization of minors. If you have any questions or concerns about something you want to post, [feel free to reach out and ask](https://github.com/contact) beforehand.
- **Spam.** Respect the GitHub Community Forum. Dont post advertisements, link to spammy websites, ping or mention long lists of usernames inappropriately or otherwise vandalize the Community. This Community is meant for GitHub users to discuss projects, learn, and share ideas with one another - not for advertising or other spam-like content. Content that we deem spammy will be removed. Please see our [GitHub Acceptable Use Policies](/github/site-policy/github-acceptable-use-policies#4-spam-and-inauthentic-activity-on-github) for additional detail on spam and inauthentic activity.
- **Copyrighted or illegal content.** Only post your own stuff. You are responsible for what you post. If you post something you didnt create yourself, you must have the right to post it. You may not post illegal content, including content illegal under copyright and trademark laws, links to illegal content, or methods for circumventing the law. - **Copyrighted or illegal content.** Only post your own stuff. You are responsible for what you post. If you post something you didnt create yourself, you must have the right to post it. You may not post illegal content, including content illegal under copyright and trademark laws, links to illegal content, or methods for circumventing the law.
- **Active malware or exploits.** Being part of this Community includes not taking advantage of other members of the Community. We do not allow anyone to use our platform for exploit delivery (e.g. Using the Community as a means to deliver malicious executables) or as attack infrastructure (e.g. Organizing denial of service attacks or managing command and control servers). Note, however, that we do not prohibit the posting of source code which could be used to develop malware or exploits, as the publication and distribution of such source code has educational value and provides a net benefit to the security community. - **Gratuitously violent content.** Dont post violent images, text, or other content without reasonable context or warnings. While it's often okay to include violent content in video games, news reports, and descriptions of historical events, we do not allow violent content that is posted indiscriminately, or that is posted in a way that makes it difficult for other users to avoid (such as a profile avatar, profile description, topic or reply). A clear warning or disclaimer in other contexts helps users make an educated decision as to whether or not they want to engage with such content.
- **Anyone under the age of 13.** If you're a child under the age of 13, you may not have an account on the GitHub Community Forum (or GitHub itself). GitHub does not knowingly collect information from or direct any of our content specifically to children under 13. If we learn or have reason to suspect that you are a user who is under the age of 13, we will unfortunately have to close both your GitHub Community Forum and GitHub.com accounts. We don't want to discourage you from learning to code, but those are the rules. Please see our [Terms of Service](/articles/github-terms-of-service/) for information about account termination. - **Active malware or exploits.** Being part of a community includes not taking advantage of other members of the community. We do not allow anyone to use our platform in direct support of unlawful attacks that cause technical harms, such as using GitHub as a means to deliver malicious executables or as attack infrastructure, for example by organizing denial of service attacks or managing command and control servers. Technical harms means overconsumption of resources, physical damage, downtime, denial of service, or data loss, with no implicit or explicit dual-use purpose prior to the abuse occurring. Note that GitHub allows dual-use content and supports the posting of content that is used for research into vulnerabilities, malware, or exploits, as the publication and distribution of such content has educational value and provides a net benefit to the security community. We assume positive intention and use of these projects to promote and drive improvements across the ecosystem. Please see our [GitHub Community Guidelines](/github/site-policy/github-community-guidelines#active-malware-or-exploits) for complete detail on this section.
- **Misinformation and disinformation.** You may not post content that presents a distorted view of reality, whether it is inaccurate or false (misinformation) or is intentionally deceptive (disinformation) where such content is likely to result in harm to the public or to interfere with fair and equal opportunities for all to participate in public life. For example, we do not allow content that may put the well-being of groups of people at risk or limit their ability to take part in a free and open society. We encourage active participation in the expression of ideas, perspectives, and experiences and may not be in a position to dispute personal accounts or observations. We generally allow parody and satire that is in line with our Acceptable Use Polices, and we consider context to be important in how information is received and understood; therefore, it may be appropriate to clarify your intentions via disclaimers or other means, as well as the source(s) of your information.
- **Anyone under the age of 13.** If you're a child under the age of 13, you may not have an account on the GitHub Community Forum (or GitHub itself). GitHub does not knowingly collect information from or direct any of our content specifically to children under 13. If we learn or have reason to suspect that you are a user who is under the age of 13, we will unfortunately have to close both your GitHub Community Forum and GitHub.com accounts. We don't want to discourage you from learning to code, but those are the rules. Please see our [Terms of Service](/github/site-policy/github-terms-of-service/) for information about account termination.
- **Other conduct which could reasonably be considered inappropriate in a professional setting.** The GitHub Community Forum is a professional space and should be treated as such. - **Other conduct which could reasonably be considered inappropriate in a professional setting.** The GitHub Community Forum is a professional space and should be treated as such.
- **Violation of Terms of Service.** If your GitHub.com account is identified in violation of [Terms of Service](/github/site-policy/github-terms-of-service/), we will have to close your GitHub Community Forum account as well. [Terms of Service](/github/site-policy/github-terms-of-service/) also indicates that "One person or legal entity may maintain no more than one free Account"; therefore, additional free accounts created to inquire about flagged or suspended accounts in the community forum will also be removed.
## Enforcement ## Enforcement
### *What GitHub Community Forum Members Can Do* ### *What GitHub Community Forum Members Can Do*
@@ -109,12 +117,16 @@ Community Forum moderators who do not follow or enforce the Code of Conduct in g
## Contacting GitHub Staff ## Contacting GitHub Staff
If, for any reason, you want to contact GitHub Staff, the Community Managers, Administrators, or Moderators of this forum privately, you can use our [Support contact form](https://support.github.com/contact?tags=community-support). Contacting any member of GitHub Staff via channels other than the forum itself or the Support contact form is strongly discouraged and may be considered a violation of our prohibition against harassment. If, for any reason, you want to contact GitHub Staff, the Community Managers, Administrators, or Moderators of this forum privately, you can use our [Support contact form](https://support.github.com/contact?tags=community-support). Contacting any member of GitHub Staff via unsolicited mentions or pings, or via channels other than the forum itself, or the Support contact form is strongly discouraged and may be considered a violation of our prohibition against harassment.
Let's work together to keep the forum a place where people feel safe to participate by being respectful of them and their time. Let's work together to keep the forum a place where people feel safe to participate by being respectful of them and their time.
## Legal Notices ## Legal Notices
Yes, legalese is boring, but we must protect ourselves and by extension, you and your data against unfriendly folks. We have a [Terms of Service](/articles/github-terms-of-service/) and [Privacy Statement](/articles/github-privacy-statement/) describing your (and our) behavior and rights related to content, privacy, and laws. To use this service, you must agree to abide by our [Terms of Service](/articles/github-terms-of-service/) and the [Privacy Statement](/articles/github-privacy-statement/). Yes, legalese is boring, but we must protect ourselves and by extension, you and your data against unfriendly folks. We have a [Terms of Service](/github/site-policy/github-terms-of-service/), which includes our [Acceptable Use Policies](/github/site-policy/github-acceptable-use-policies), and our [Privacy Statement](/github/site-policy/github-privacy-statement/) describing your (and our) behavior and rights related to content, privacy, and laws. To use this service, you must agree to abide by our [Terms of Service](/github/site-policy/github-terms-of-service/), [GitHub Acceptable Use Policies](/github/site-policy/github-acceptable-use-policies) and the [Privacy Statement](/github/site-policy/github-privacy-statement/).
This Code of Conduct does not modify our [Terms of Service](/articles/github-terms-of-service/) and is not intended to be a complete list. GitHub retains full discretion under the [Terms of Service](/articles/github-terms-of-service/) to remove any content or terminate any accounts for activity that is "unlawful, offensive, threatening, libelous, defamatory, pornographic, obscene or otherwise objectionable or violates any party's intellectual property or these Terms of Service." This Code of Conduct describes when we will exercise that discretion. This Code of Conduct does not modify our [Terms of Service](/github/site-policy/github-terms-of-service/)—which includes our [Acceptable Use Policies](/github/site-policy/github-acceptable-use-policies)—and is not intended to be a complete list. GitHub retains full discretion under the [Terms of Service](/github/site-policy/github-terms-of-service/) to remove or restrict any content or accounts for activity that violates those policies, including because it is unlawful, offensive, threatening, libelous, defamatory, pornographic, obscene or otherwise objectionable, or violates any party's intellectual property or our Terms of Service. This Code of Conduct describes when we will exercise that discretion.
### Data Retention and Deletion of Data
You may contact [GitHub Support](https://support.github.com/contact) to request the deletion of your community forum profile. We will retain and use your information as necessary to comply with our legal obligations, resolve disputes, and enforce our agreements, but barring legal requirements, will delete your full profile (within reason) within 90 days of your request. For more information please see the [GitHub Privacy Statement](/github/site-policy/github-privacy-statement).

View File

@@ -64,7 +64,7 @@ We are committed to maintaining a community where users are free to express them
Being part of a community includes recognizing how your behavior affects others and engaging in meaningful and productive interactions with people and the platform they rely on. Behaviors such as repeatedly posting off-topic comments, opening empty or meaningless issues or pull requests, or using any other platform feature in a way that continually disrupts the experience of other users are not allowed. While we encourage maintainers to moderate their own projects on an individual basis, GitHub staff may take further restrictive action against accounts that are engaging in these types of behaviors. Being part of a community includes recognizing how your behavior affects others and engaging in meaningful and productive interactions with people and the platform they rely on. Behaviors such as repeatedly posting off-topic comments, opening empty or meaningless issues or pull requests, or using any other platform feature in a way that continually disrupts the experience of other users are not allowed. While we encourage maintainers to moderate their own projects on an individual basis, GitHub staff may take further restrictive action against accounts that are engaging in these types of behaviors.
- #### Impersonation - #### Impersonation
You may not seek to mislead others as to your identity by copying another person's avatar, posting content under their email address, using a similar username or otherwise posing as someone else. Impersonation is a form of harassment. You may not impersonate another person by copying their avatar, posting content under their email address, using a similar username or otherwise posing as someone else. Impersonation is a form of harassment.
- #### Doxxing and invasion of privacy - #### Doxxing and invasion of privacy
Don't post other people's personal information, such as personal, private email addresses, phone numbers, physical addresses, credit card numbers, Social Security/National Identity numbers, or passwords. Depending on the context, such as in the case of intimidation or harassment, we may consider other information, such as photos or videos that were taken or distributed without the subject's consent, to be an invasion of privacy, especially when such material presents a safety risk to the subject. Don't post other people's personal information, such as personal, private email addresses, phone numbers, physical addresses, credit card numbers, Social Security/National Identity numbers, or passwords. Depending on the context, such as in the case of intimidation or harassment, we may consider other information, such as photos or videos that were taken or distributed without the subject's consent, to be an invasion of privacy, especially when such material presents a safety risk to the subject.

View File

@@ -27,7 +27,7 @@ To see if there are currently any incidents affecting services on {% data variab
You can use the {% data variables.contact.community_support_forum %} to browse topics, ask questions, share solutions, and interact directly with {% data variables.contact.community_support %}. You can use the {% data variables.contact.community_support_forum %} to browse topics, ask questions, share solutions, and interact directly with {% data variables.contact.community_support %}.
To report account, security, and abuse issues, or to receive assisted support for a paid account, visit the {% data variables.contact.contact_support_portal %}. If you are an administrator for {% data variables.product.prodname_ghe_server %} without a user account on {% data variables.product.prodname_dotcom_the_website %}, visit the {% data variables.contact.contact_enterprise_portal %}. To report account, security, and abuse issues, or to receive assisted support for a paid account, visit the {% data variables.contact.contact_support_portal %}. If you are an administrator for {% data variables.product.prodname_ghe_server %} without a user account on {% data variables.product.prodname_dotcom_the_website %}, visit the {% data variables.contact.contact_enterprise_portal %}. Email communication from GitHub Support will always be sent from either a `github.com` or `githubsupport.com` address.
## Granting {% data variables.contact.github_support %} temporary access to a private repository ## Granting {% data variables.contact.github_support %} temporary access to a private repository

View File

@@ -50,7 +50,10 @@ $ curl https://api.github.com/users/defunkt
> { > {
> "login": "defunkt", > "login": "defunkt",
> "id": 2, > "id": 2,
> "url": "{% data variables.product.api_url_pre %}/users/defunkt", > "node_id": "MDQ6VXNlcjI=",
> "avatar_url": "https://avatars.githubusercontent.com/u/2?v=4",
> "gravatar_id": "",
> "url": "https://api.github.com/users/defunkt",
> "html_url": "https://github.com/defunkt", > "html_url": "https://github.com/defunkt",
> ... > ...
> } > }
@@ -61,26 +64,41 @@ Mmmmm, tastes like [JSON][json]. Let's add the `-i` flag to include headers:
```shell ```shell
$ curl -i https://api.github.com/users/defunkt $ curl -i https://api.github.com/users/defunkt
> HTTP/2 200 > HTTP/2 200
> Server: GitHub.com > server: GitHub.com
> Date: Sun, 11 Nov 2012 18:43:28 GMT > date: Thu, 08 Jul 2021 07:04:08 GMT
> Content-Type: application/json; charset=utf-8 > content-type: application/json; charset=utf-8
> ETag: "bfd85cbf23ac0b0c8a29bee02e7117c6" > cache-control: public, max-age=60, s-maxage=60
> X-RateLimit-Limit: 60 > vary: Accept, Accept-Encoding, Accept, X-Requested-With
> X-RateLimit-Remaining: 57 > etag: W/"61e964bf6efa3bc3f9e8549e56d4db6e0911d8fa20fcd8ab9d88f13d513f26f0"
> X-RateLimit-Reset: 1352660008 > last-modified: Fri, 01 Nov 2019 21:56:00 GMT
> X-GitHub-Media-Type: github.v3 > x-github-media-type: github.v3; format=json
> Vary: Accept > access-control-expose-headers: ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, Deprecation, Sunset
> Cache-Control: public, max-age=60, s-maxage=60 > access-control-allow-origin: *
> X-Content-Type-Options: nosniff > strict-transport-security: max-age=31536000; includeSubdomains; preload
> Content-Length: 692 > x-frame-options: deny
> Last-Modified: Tue, 30 Oct 2012 18:58:42 GMT > x-content-type-options: nosniff
> x-xss-protection: 0
> referrer-policy: origin-when-cross-origin, strict-origin-when-cross-origin
> content-security-policy: default-src 'none'
> x-ratelimit-limit: 60
> x-ratelimit-remaining: 53
> x-ratelimit-reset: 1625731053
> x-ratelimit-resource: core
> x-ratelimit-used: 7
> accept-ranges: bytes
> content-length: 1305
> x-github-request-id: 9F60:7019:ACC5CD5:B03C931:60E6A368
>
> { > {
> "login": "defunkt", > "login": "defunkt",
> "id": 2, > "id": 2,
> "url": "{% data variables.product.api_url_pre %}/users/defunkt", > "node_id": "MDQ6VXNlcjI=",
> "html_url": "https://github.com/defunkt", > "avatar_url": "https://avatars.githubusercontent.com/u/2?v=4",
> "gravatar_id": "",
> "url": "https://api.github.com/users/defunkt",
> "html_url": "https://github.com/defunkt",
>
> ... > ...
> } > }
``` ```
@@ -400,7 +418,7 @@ first call we made to get defunkt's profile:
$ curl -i {% data variables.product.api_url_pre %}/users/defunkt $ curl -i {% data variables.product.api_url_pre %}/users/defunkt
> HTTP/2 200 > HTTP/2 200
> ETag: "bfd85cbf23ac0b0c8a29bee02e7117c6" > etag: W/"61e964bf6efa3bc3f9e8549e56d4db6e0911d8fa20fcd8ab9d88f13d513f26f0"
``` ```
In addition to the JSON body, take note of the HTTP status code of `200` and In addition to the JSON body, take note of the HTTP status code of `200` and
@@ -409,7 +427,7 @@ The [ETag][etag] is a fingerprint of the response. If we pass that on subsequent
we can tell the API to give us the resource again, only if it has changed: we can tell the API to give us the resource again, only if it has changed:
```shell ```shell
$ curl -i -H 'If-None-Match: "bfd85cbf23ac0b0c8a29bee02e7117c6"' \ $ curl -i -H 'If-None-Match: "61e964bf6efa3bc3f9e8549e56d4db6e0911d8fa20fcd8ab9d88f13d513f26f0"' \
$ {% data variables.product.api_url_pre %}/users/defunkt $ {% data variables.product.api_url_pre %}/users/defunkt
> HTTP/2 304 > HTTP/2 304

View File

@@ -1,5 +1,6 @@
--- ---
title: Markdown title: Markdown
intro: 'The Markdown API enables you to render a markdown document as an HTML page or as raw text.'
redirect_from: redirect_from:
- /v3/markdown - /v3/markdown
versions: versions:

View File

@@ -1,5 +1,6 @@
--- ---
title: Meta title: Meta
intro: 'The Meta API returns meta information about {% data variables.product.product_name %} including the IP addresses of {% data variables.product.product_name %} services.'
redirect_from: redirect_from:
- /v3/meta - /v3/meta
versions: versions:

View File

@@ -1,5 +1,6 @@
--- ---
title: Migrations title: Migrations
intro: 'The Migration API lets you migrate the repositories and users of your organization from {% data variables.product.prodname_dotcom_the_website %} to {% data variables.product.prodname_ghe_server %}.'
redirect_from: redirect_from:
- /v3/migrations - /v3/migrations
- /v3/migration - /v3/migration

View File

@@ -1,5 +1,6 @@
--- ---
title: Organizations title: Organizations
intro: 'The Organizations API gives you access to control and manage all your {% data variables.product.product_name %} organizations.'
allowTitleToDifferFromFilename: true allowTitleToDifferFromFilename: true
redirect_from: redirect_from:
- /v3/orgs - /v3/orgs

View File

@@ -1,5 +1,6 @@
--- ---
title: Packages title: Packages
intro: 'With the {% data variables.product.prodname_registry %} API, you can manage packages for your {% data variables.product.prodname_dotcom %} repositories and organizations.'
product: '{% data reusables.gated-features.packages %}' product: '{% data reusables.gated-features.packages %}'
versions: versions:
fpt: '*' fpt: '*'

View File

@@ -10,7 +10,7 @@
// content model, add the entry to this list, and ensure you loop in the // content model, add the entry to this list, and ensure you loop in the
// content and/or content strategy team for review. // content and/or content strategy team for review.
module.exports = [ export default [
'2FA', '2FA',
'Access management', 'Access management',
'Accounts', 'Accounts',
@@ -155,5 +155,5 @@ module.exports = [
'Xamarin.Android', 'Xamarin.Android',
'Xamarin.iOS', 'Xamarin.iOS',
'Xamarin', 'Xamarin',
'Xcode' 'Xcode',
] ]

View File

@@ -33773,6 +33773,11 @@ enum SecurityAdvisoryEcosystem {
""" """
NUGET NUGET
"""
Applications, runtimes, operating systems and other kinds of software
"""
OTHER
""" """
Python packages hosted at PyPI.org Python packages hosted at PyPI.org
""" """

View File

@@ -22641,6 +22641,41 @@ type Organization implements Actor & MemberStatusable & Node & PackageOwner & Pr
""" """
samlIdentityProvider: OrganizationIdentityProvider samlIdentityProvider: OrganizationIdentityProvider
"""
Events involving this sponsorable, such as new sponsorships.
"""
sponsorsActivities(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
"""
Ordering options for activity returned from the connection.
"""
orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
"""
Filter activities returned to only those that occurred in a given time range.
"""
period: SponsorsActivityPeriod = MONTH
): SponsorsActivityConnection!
""" """
The GitHub Sponsors listing for this user or organization. The GitHub Sponsors listing for this user or organization.
""" """
@@ -36340,6 +36375,11 @@ enum SecurityAdvisoryEcosystem {
""" """
NUGET NUGET
"""
Applications, runtimes, operating systems and other kinds of software
"""
OTHER
""" """
Python packages hosted at PyPI.org Python packages hosted at PyPI.org
""" """
@@ -36852,6 +36892,41 @@ interface Sponsorable {
""" """
isSponsoringViewer: Boolean! isSponsoringViewer: Boolean!
"""
Events involving this sponsorable, such as new sponsorships.
"""
sponsorsActivities(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
"""
Ordering options for activity returned from the connection.
"""
orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
"""
Filter activities returned to only those that occurred in a given time range.
"""
period: SponsorsActivityPeriod = MONTH
): SponsorsActivityConnection!
""" """
The GitHub Sponsors listing for this user or organization. The GitHub Sponsors listing for this user or organization.
""" """
@@ -37010,6 +37085,167 @@ enum SponsorableOrderField {
LOGIN LOGIN
} }
"""
An event related to sponsorship activity.
"""
type SponsorsActivity implements Node {
"""
What action this activity indicates took place.
"""
action: SponsorsActivityAction!
id: ID!
"""
The tier that the sponsorship used to use, for tier change events.
"""
previousSponsorsTier: SponsorsTier
"""
The user or organization who triggered this activity and was/is sponsoring the sponsorable.
"""
sponsor: Sponsor
"""
The user or organization that is being sponsored, the maintainer.
"""
sponsorable: Sponsorable!
"""
The associated sponsorship tier.
"""
sponsorsTier: SponsorsTier
"""
The timestamp of this event.
"""
timestamp: DateTime
}
"""
The possible actions that GitHub Sponsors activities can represent.
"""
enum SponsorsActivityAction {
"""
The activity was cancelling a sponsorship.
"""
CANCELLED_SPONSORSHIP
"""
The activity was starting a sponsorship.
"""
NEW_SPONSORSHIP
"""
The activity was scheduling a downgrade or cancellation.
"""
PENDING_CHANGE
"""
The activity was funds being refunded to the sponsor or GitHub.
"""
REFUND
"""
The activity was disabling matching for a previously matched sponsorship.
"""
SPONSOR_MATCH_DISABLED
"""
The activity was changing the sponsorship tier, either directly by the sponsor or by a scheduled/pending change.
"""
TIER_CHANGE
}
"""
The connection type for SponsorsActivity.
"""
type SponsorsActivityConnection {
"""
A list of edges.
"""
edges: [SponsorsActivityEdge]
"""
A list of nodes.
"""
nodes: [SponsorsActivity]
"""
Information to aid in pagination.
"""
pageInfo: PageInfo!
"""
Identifies the total count of items in the connection.
"""
totalCount: Int!
}
"""
An edge in a connection.
"""
type SponsorsActivityEdge {
"""
A cursor for use in pagination.
"""
cursor: String!
"""
The item at the end of the edge.
"""
node: SponsorsActivity
}
"""
Ordering options for GitHub Sponsors activity connections.
"""
input SponsorsActivityOrder {
"""
The ordering direction.
"""
direction: OrderDirection!
"""
The field to order activity by.
"""
field: SponsorsActivityOrderField!
}
"""
Properties by which GitHub Sponsors activity connections can be ordered.
"""
enum SponsorsActivityOrderField {
"""
Order activities by when they happened.
"""
TIMESTAMP
}
"""
The possible time periods for which Sponsors activities can be requested.
"""
enum SponsorsActivityPeriod {
"""
Don't restrict the activity to any date range, include all activity.
"""
ALL
"""
The previous calendar day.
"""
DAY
"""
The previous thirty days.
"""
MONTH
"""
The previous seven days.
"""
WEEK
}
""" """
A goal associated with a GitHub Sponsors listing, representing a target the sponsored maintainer would like to attain. A goal associated with a GitHub Sponsors listing, representing a target the sponsored maintainer would like to attain.
""" """
@@ -37138,8 +37374,8 @@ type SponsorsTier implements Node {
""" """
Get a different tier for this tier's maintainer that is at the same frequency Get a different tier for this tier's maintainer that is at the same frequency
as this tier but with a lesser cost. Returns the published tier with the as this tier but with an equal or lesser cost. Returns the published tier with
monthly price closest to this tier's without going over. the monthly price closest to this tier's without going over.
""" """
closestLesserValueTier: SponsorsTier closestLesserValueTier: SponsorsTier
@@ -43841,6 +44077,41 @@ type User implements Actor & Node & PackageOwner & ProfileOwner & ProjectOwner &
orderBy: SavedReplyOrder = {field: UPDATED_AT, direction: DESC} orderBy: SavedReplyOrder = {field: UPDATED_AT, direction: DESC}
): SavedReplyConnection ): SavedReplyConnection
"""
Events involving this sponsorable, such as new sponsorships.
"""
sponsorsActivities(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
"""
Ordering options for activity returned from the connection.
"""
orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
"""
Filter activities returned to only those that occurred in a given time range.
"""
period: SponsorsActivityPeriod = MONTH
): SponsorsActivityConnection!
""" """
The GitHub Sponsors listing for this user or organization. The GitHub Sponsors listing for this user or organization.
""" """

1
data/package.json Normal file
View File

@@ -0,0 +1 @@
{"type":"module"}

View File

@@ -0,0 +1,20 @@
date: '2021-07-14'
sections:
security_fixes:
- '**HIGH:** A path traversal vulnerability was identified in GitHub Enterprise Server that could be exploited when building a GitHub Pages site. User-controlled configuration options used by GitHub Pages were not sufficiently restricted and made it possible to read files on the GitHub Enterprise Server instance. To exploit this vulnerability, an attacker would need permission to create and build a GitHub Pages site on the GitHub Enterprise Server instance. This vulnerability affected all versions of GitHub Enterprise Server prior to 3.1.3 and has been assigned CVE-2021-22867. This vulnerability was reported via the GitHub Bug Bounty program.'
- Packages have been updated to the latest security versions.
bugs:
- '`ghe-cluster-config-node-init` would fail during cluster setup if HTTP proxy is enabled.'
- Collectd would not resolve the forwarding destination hostname after the initial startup.
- The job that purged stale archived repositories could fail to make progress if some of those repositories were protected from deletion by legal holds.
- Git pushes could result in a 500 Internal Server Error during the user reconciliation process on instances using LDAP authentication mode.
- A significant number of 503 errors were logged every time a user visited a repository's `/settings` page if the dependency graph was not enabled.
changes:
- Improved the efficiency of config apply by skipping IP allow firewall rules that had not changed, which saved significant time on large clusters.
known_issues:
- On a freshly set up GitHub Enterprise Server without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.
- When a replica node is offline in a high availability configuration, {% data variables.product.product_name %} may still route {% data variables.product.prodname_pages %} requests to the offline node, reducing the availability of {% data variables.product.prodname_pages %} for users.

View File

@@ -0,0 +1,25 @@
date: '2021-07-14'
sections:
security_fixes:
- '**HIGH:** A path traversal vulnerability was identified in GitHub Enterprise Server that could be exploited when building a GitHub Pages site. User-controlled configuration options used by GitHub Pages were not sufficiently restricted and made it possible to read files on the GitHub Enterprise Server instance. To exploit this vulnerability, an attacker would need permission to create and build a GitHub Pages site on the GitHub Enterprise Server instance. This vulnerability affected all versions of GitHub Enterprise Server prior to 3.1.3 and has been assigned CVE-2021-22867. This vulnerability was reported via the GitHub Bug Bounty program.'
- Packages have been updated to the latest security versions.
bugs:
- SAML expiration date variable was not configurable.
- Application services would fail their health checks during config apply before they could enter a healthy state.
- '`ghe-cluster-config-node-init` would fail during cluster setup if HTTP proxy is enabled.'
- Pre-receive hooks could encounter an error `Failed to resolve full path of the current executable` due to `/proc` not being mounted on the container.
- Collectd would not resolve the forwarding destination hostname after the initial startup.
- The job that purged stale archived repositories could fail to make progress if some of those repositories were protected from deletion by legal holds.
- Running `git nw-gc --pristine` would result in an error.
- Background jobs were being queued to the `spam` queue which were not being processed.
- The preferred merge method would be reset when retrying after a failed PR merge.
- Git pushes could result in a 500 Internal Server Error during the user reconciliation process on instances using LDAP authentication mode.
changes:
- Improved the efficiency of config apply by skipping IP allow firewall rules that had not changed, which saved significant time on large clusters.
known_issues:
- On a freshly set up GitHub Enterprise Server without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.
- When a replica node is offline in a high availability configuration, {% data variables.product.product_name %} may still route {% data variables.product.prodname_pages %} requests to the offline node, reducing the availability of {% data variables.product.prodname_pages %} for users.

View File

@@ -0,0 +1,25 @@
date: '2021-07-14'
sections:
security_fixes:
- '**HIGH:** A path traversal vulnerability was identified in GitHub Enterprise Server that could be exploited when building a GitHub Pages site. User-controlled configuration options used by GitHub Pages were not sufficiently restricted and made it possible to read files on the GitHub Enterprise Server instance. To exploit this vulnerability, an attacker would need permission to create and build a GitHub Pages site on the GitHub Enterprise Server instance. This vulnerability affected all versions of GitHub Enterprise Server prior to 3.1.3 and has been assigned CVE-2021-22867. This vulnerability was reported via the GitHub Bug Bounty program.'
- Packages have been updated to the latest security versions.
bugs:
- SAML expiration date variable was not configurable.
- Application services would fail their health checks during config apply before they could enter a healthy state.
- '`ghe-cluster-config-node-init` would fail during cluster setup if HTTP proxy is enabled.'
- Pre-receive hooks could encounter an error `Failed to resolve full path of the current executable` due to `/proc` not being mounted on the container.
- Collectd would not resolve the forwarding destination hostname after the initial startup.
- The job that purged stale archived repositories could fail to make progress if some of those repositories were protected from deletion by legal holds.
- Background jobs were being queued to the `spam` queue which were not being processed.
- The preferred merge method would be reset when retrying after a failed PR merge.
- Git pushes could result in a 500 Internal Server Error during the user reconciliation process on instances using LDAP authentication mode.
- 'After upgrading from 3.0.x to 3.1.x, in some cases GitHub Actions would fail with an error: `An unexpected error occurred when executing this workflow.`'
changes:
- Improved the efficiency of config apply by skipping IP allow firewall rules that had not changed, which saved significant time on large clusters.
known_issues:
- The {% data variables.product.prodname_registry %} npm registry no longer returns a time value in metadata responses. This was done to allow for substantial performance improvements. We continue to have all the data necessary to return a time value as part of the metadata response and will resume returning this value in the future once we have solved the existing performance issues.
- On a freshly set up {% data variables.product.prodname_ghe_server %} without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.

View File

@@ -1,2 +1,3 @@
1. Type a name for your repository and an optional description. If you're creating a user or organization site, your repository must be named `<user>.github.io` or `<organization>.github.io`. For more information, see "[About {% data variables.product.prodname_pages %}](/articles/about-github-pages#types-of-github-pages-sites)." 1. Type a name for your repository and an optional description. If you're creating a user or organization site, your repository must be named `<user>.github.io` or `<organization>.github.io`. If your user or organization name contains uppercase letters, you must lowercase the letters.
For more information, see "[About {% data variables.product.prodname_pages %}](/articles/about-github-pages#types-of-github-pages-sites)."
![Create repository field](/assets/images/help/pages/create-repository-name-pages.png) ![Create repository field](/assets/images/help/pages/create-repository-name-pages.png)

View File

@@ -36,6 +36,10 @@ Azure | Azure Service Management Certificate | azure_management_certificate{% en
Azure | Azure SQL Connection String | azure_sql_connection_string{% endif %} Azure | Azure SQL Connection String | azure_sql_connection_string{% endif %}
{%- ifversion fpt or ghes > 2.22 or ghae-next %} {%- ifversion fpt or ghes > 2.22 or ghae-next %}
Azure | Azure Storage Account Key | azure_storage_account_key{% endif %} Azure | Azure Storage Account Key | azure_storage_account_key{% endif %}
{%- ifversion fpt or ghes > 3.1 or ghae-next %}
Checkout.com | Checkout.com Production Secret Key | checkout_production_secret_key{% endif %}
{%- ifversion fpt or ghes > 3.1 or ghae-next %}
Checkout.com | Checkout.com Test Secret Key | checkout_test_secret_key{% endif %}
{%- ifversion fpt or ghes > 2.22 or ghae-next %} {%- ifversion fpt or ghes > 2.22 or ghae-next %}
Clojars | Clojars Deploy Token | clojars_deploy_token{% endif %} Clojars | Clojars Deploy Token | clojars_deploy_token{% endif %}
{%- ifversion fpt or ghes > 3.1 or ghae-next %} {%- ifversion fpt or ghes > 3.1 or ghae-next %}
@@ -70,6 +74,10 @@ Facebook | Facebook Access Token | facebook_access_token{% endif %}
Fastly | Fastly API Token | fastly_api_token{% endif %} Fastly | Fastly API Token | fastly_api_token{% endif %}
{%- ifversion fpt or ghes > 2.22 or ghae-next %} {%- ifversion fpt or ghes > 2.22 or ghae-next %}
Finicity | Finicity App Key | finicity_app_key{% endif %} Finicity | Finicity App Key | finicity_app_key{% endif %}
{%- ifversion fpt or ghes > 3.1 or ghae-next %}
Flutterwave | Flutterwave Live API Secret Key | flutterwave_live_api_secret_key{% endif %}
{%- ifversion fpt or ghes > 3.1 or ghae-next %}
Flutterwave | Flutterwave Test API Secret Key | flutterwave_test_api_secret_key{% endif %}
{%- ifversion fpt or ghes > 2.22 or ghae-next %} {%- ifversion fpt or ghes > 2.22 or ghae-next %}
Frame.io | Frame.io JSON Web Token | frameio_jwt{% endif %} Frame.io | Frame.io JSON Web Token | frameio_jwt{% endif %}
{%- ifversion fpt or ghes > 2.22 or ghae-next %} {%- ifversion fpt or ghes > 2.22 or ghae-next %}

View File

@@ -1,7 +1,7 @@
user_migrations_intro: >- user_migrations_intro: >-
You can use this API to review, backup, or migrate your user data stored on {% data variables.product.product_name %}.com. You can use this API to review, backup, or migrate your user data stored on {% data variables.product.product_name %}.com.
organization_migrations_intro: >- organization_migrations_intro: >-
The organization migrations API lets you move a repository from GitHub to {% data variables.product.prodname_ghe_server %}. The organization migrations API lets you move a repository from {% data variables.product.prodname_dotcom_the_website %} to {% data variables.product.prodname_ghe_server %}. For more information, see "[Exporting migration data from GitHub.com](/admin/user-management/migrating-data-to-and-from-your-enterprise/exporting-migration-data-from-githubcom)."
source_imports_intro: >- source_imports_intro: >-
The Source Import API lets you start an import from a Git, Subversion, The Source Import API lets you start an import from a Git, Subversion,
Mercurial, or Team Foundation Version Control source repository. This is the same Mercurial, or Team Foundation Version Control source repository. This is the same

View File

@@ -2,7 +2,7 @@
The code in this directory is destined to run in a web browser! The code in this directory is destined to run in a web browser!
The [index.js](index.js) file in this directory is bundled by [Webpack](https://github.com/webpack/webpack). In development, we use [webpack-dev-middleware](https://github.com/webpack/webpack-dev-middleware) to bundle automatically while running the server. In production, we run `webpack --mode production` via `npm run build`. The [index.ts](index.ts) file in this directory is bundled by [Webpack](https://github.com/webpack/webpack). In development, we use [webpack-dev-middleware](https://github.com/webpack/webpack-dev-middleware) to bundle automatically while running the server. In production, we run `webpack --mode production` via `npm run build`.
This file is then referenced in the footer: This file is then referenced in the footer:
@@ -11,7 +11,7 @@ This file is then referenced in the footer:
``` ```
Every other file in this directory is written as a module to Every other file in this directory is written as a module to
be required in `index.js`. They all have this format: be required in `index.ts`. They all have this format:
```js ```js
module.exports = () => { module.exports = () => {
@@ -20,7 +20,7 @@ module.exports = () => {
} }
``` ```
In development, the bundle will recompile every time a file used by `javascripts/index.js` is changed. This In development, the bundle will recompile every time a file used by `javascripts/index.ts` is changed. This
ensures that you're always getting an up-to-date version of the script. ensures that you're always getting an up-to-date version of the script.
In production, the bundle is compiled during build-time. In production, the bundle is compiled during build-time.

View File

@@ -1,5 +1,3 @@
module.exports = { module.exports = {
launch: process.env.GITHUB_ACTIONS launch: process.env.GITHUB_ACTIONS ? { executablePath: 'google-chrome-stable' } : {},
? { executablePath: 'google-chrome-stable' }
: {}
} }

View File

@@ -19,28 +19,22 @@ module.exports = {
branches: 95, branches: 95,
functions: 95, functions: 95,
lines: 95, lines: 95,
statements: -5 statements: -5,
} },
}, },
preset: isBrowser preset: isBrowser ? 'jest-puppeteer' : undefined,
? 'jest-puppeteer'
: undefined,
reporters, reporters,
modulePathIgnorePatterns: [ modulePathIgnorePatterns: ['assets/'],
'assets/'
],
setupFilesAfterEnv: ['jest-expect-message'], setupFilesAfterEnv: ['jest-expect-message'],
...isBrowser ? {} : { testEnvironment: 'node' }, ...(isBrowser ? {} : { testEnvironment: 'node' }),
testPathIgnorePatterns: [ testPathIgnorePatterns: [
'node_modules/', 'node_modules/',
'vendor/', 'vendor/',
'tests/fixtures/', 'tests/fixtures/',
'tests/helpers/', 'tests/helpers/',
'tests/javascripts/', 'tests/javascripts/',
...isBrowser ? [] : ['tests/browser/browser.js'] ...(isBrowser ? [] : ['tests/browser/browser.js']),
], ],
testMatch: [ testMatch: ['**/tests/**/*.js'],
'**/tests/**/*.js' testLocationInResults: isActions,
],
testLocationInResults: isActions
} }

View File

@@ -1,18 +1,18 @@
const fs = require('fs') import fs from 'fs'
const path = require('path') import path from 'path'
const frontmatter = require('./read-frontmatter') import frontmatter from './read-frontmatter.js'
const getApplicableVersions = require('./get-applicable-versions') import getApplicableVersions from './get-applicable-versions.js'
const removeFPTFromPath = require('./remove-fpt-from-path') import removeFPTFromPath from './remove-fpt-from-path.js'
// Both internal and external products are specified in content/index.md // Both internal and external products are specified in content/index.md
const homepage = path.posix.join(process.cwd(), 'content/index.md') const homepage = path.posix.join(process.cwd(), 'content/index.md')
const { data } = frontmatter(fs.readFileSync(homepage, 'utf8')) const { data } = frontmatter(fs.readFileSync(homepage, 'utf8'))
const productIds = data.children export const productIds = data.children
const externalProducts = data.externalProducts const externalProducts = data.externalProducts
const internalProducts = {} const internalProducts = {}
productIds.forEach(productId => { productIds.forEach((productId) => {
const relPath = productId const relPath = productId
const dir = path.posix.join('content', relPath) const dir = path.posix.join('content', relPath)
@@ -31,15 +31,15 @@ productIds.forEach(productId => {
dir, dir,
toc, toc,
wip: data.wip || false, wip: data.wip || false,
hidden: data.hidden || false hidden: data.hidden || false,
} }
internalProducts[productId].versions = applicableVersions internalProducts[productId].versions = applicableVersions
}) })
const productMap = Object.assign({}, internalProducts, externalProducts) export const productMap = Object.assign({}, internalProducts, externalProducts)
module.exports = { export default {
productIds, productIds,
productMap productMap,
} }

View File

@@ -1,4 +1,4 @@
const enterpriseServerReleases = require('./enterprise-server-releases') import enterpriseServerReleases from './enterprise-server-releases.js'
// version = "plan"@"release" // version = "plan"@"release"
// example: enterprise-server@2.21 // example: enterprise-server@2.21
@@ -7,7 +7,8 @@ const versionDelimiter = '@'
const latestNonNumberedRelease = 'latest' const latestNonNumberedRelease = 'latest'
const plans = [ const plans = [
{ // free-pro-team is **not** a user-facing version and is stripped from URLs. {
// free-pro-team is **not** a user-facing version and is stripped from URLs.
// See lib/remove-fpt-from-path.js for details. // See lib/remove-fpt-from-path.js for details.
plan: 'free-pro-team', plan: 'free-pro-team',
planTitle: 'GitHub.com', planTitle: 'GitHub.com',
@@ -16,7 +17,7 @@ const plans = [
latestRelease: latestNonNumberedRelease, latestRelease: latestNonNumberedRelease,
nonEnterpriseDefault: true, // permanent way to refer to this plan if the name changes nonEnterpriseDefault: true, // permanent way to refer to this plan if the name changes
openApiBaseName: 'api.github.com', // used for REST openApiBaseName: 'api.github.com', // used for REST
miscBaseName: 'dotcom' // used for GraphQL and webhooks miscBaseName: 'dotcom', // used for GraphQL and webhooks
}, },
{ {
plan: 'enterprise-server', plan: 'enterprise-server',
@@ -26,7 +27,7 @@ const plans = [
latestRelease: enterpriseServerReleases.latest, latestRelease: enterpriseServerReleases.latest,
hasNumberedReleases: true, hasNumberedReleases: true,
openApiBaseName: 'ghes-', openApiBaseName: 'ghes-',
miscBaseName: 'ghes-' miscBaseName: 'ghes-',
}, },
{ {
plan: 'github-ae', plan: 'github-ae',
@@ -35,29 +36,35 @@ const plans = [
releases: [latestNonNumberedRelease], releases: [latestNonNumberedRelease],
latestRelease: latestNonNumberedRelease, latestRelease: latestNonNumberedRelease,
openApiBaseName: 'github.ae', openApiBaseName: 'github.ae',
miscBaseName: 'ghae' miscBaseName: 'ghae',
} },
] ]
const allVersions = {} const allVersions = {}
// combine the plans and releases to get allVersions object // combine the plans and releases to get allVersions object
// e.g. free-pro-team@latest, enterprise-server@2.21, enterprise-server@2.20, etc. // e.g. free-pro-team@latest, enterprise-server@2.21, enterprise-server@2.20, etc.
plans.forEach(planObj => { plans.forEach((planObj) => {
planObj.releases.forEach(release => { planObj.releases.forEach((release) => {
const version = `${planObj.plan}${versionDelimiter}${release}` const version = `${planObj.plan}${versionDelimiter}${release}`
const versionObj = { const versionObj = {
version, version,
versionTitle: planObj.hasNumberedReleases ? `${planObj.planTitle} ${release}` : planObj.planTitle, versionTitle: planObj.hasNumberedReleases
? `${planObj.planTitle} ${release}`
: planObj.planTitle,
latestVersion: `${planObj.plan}${versionDelimiter}${planObj.latestRelease}`, latestVersion: `${planObj.plan}${versionDelimiter}${planObj.latestRelease}`,
currentRelease: release, currentRelease: release,
openApiVersionName: planObj.hasNumberedReleases ? `${planObj.openApiBaseName}${release}` : planObj.openApiBaseName, openApiVersionName: planObj.hasNumberedReleases
miscVersionName: planObj.hasNumberedReleases ? `${planObj.miscBaseName}${release}` : planObj.miscBaseName ? `${planObj.openApiBaseName}${release}`
: planObj.openApiBaseName,
miscVersionName: planObj.hasNumberedReleases
? `${planObj.miscBaseName}${release}`
: planObj.miscBaseName,
} }
allVersions[version] = Object.assign(versionObj, planObj) allVersions[version] = Object.assign(versionObj, planObj)
}) })
}) })
module.exports = allVersions export default allVersions

View File

@@ -1,10 +1,10 @@
const express = require('express') import express from 'express'
const middleware = require('../middleware') import middleware from '../middleware/index.js'
function createApp () { function createApp() {
const app = express() const app = express()
middleware(app) middleware(app)
return app return app
} }
module.exports = createApp export default createApp

View File

@@ -1,23 +1,23 @@
const fs = require('fs') import fs from 'fs'
const path = require('path') import path from 'path'
const crypto = require('crypto') import crypto from 'crypto'
// Get an MD4 Digest Hex content hash, loosely based on Webpack `[contenthash]` // Get an MD4 Digest Hex content hash, loosely based on Webpack `[contenthash]`
function getContentHash (absFilePath) { function getContentHash(absFilePath) {
const buffer = fs.readFileSync(absFilePath) const buffer = fs.readFileSync(absFilePath)
const hash = crypto.createHash('md4') const hash = crypto.createHash('md4')
hash.update(buffer) hash.update(buffer)
return hash.digest('hex') return hash.digest('hex')
} }
function getUrl (relFilePath) { function getUrl(relFilePath) {
const absFilePath = path.join(process.cwd(), relFilePath) const absFilePath = path.join(process.cwd(), relFilePath)
return `/${relFilePath}?hash=${getContentHash(absFilePath)}` return `/${relFilePath}?hash=${getContentHash(absFilePath)}`
} }
module.exports = { export default {
main: { main: {
js: getUrl('dist/index.js'), js: getUrl('dist/index.js'),
css: getUrl('dist/index.css') css: getUrl('dist/index.css'),
} },
} }

View File

@@ -1,6 +1,6 @@
const Parser = require('rss-parser') import Parser from 'rss-parser'
async function getRssFeed (url) { export async function getRssFeed(url) {
const parser = new Parser({ timeout: 5000 }) const parser = new Parser({ timeout: 5000 })
const feedUrl = `${url}/feed` const feedUrl = `${url}/feed`
let feed let feed
@@ -15,7 +15,7 @@ async function getRssFeed (url) {
return feed return feed
} }
async function getChangelogItems (prefix, feed) { export async function getChangelogItems(prefix, feed) {
if (!feed || !feed.items) { if (!feed || !feed.items) {
console.log(feed) console.log(feed)
console.error('feed is not valid or has no items') console.error('feed is not valid or has no items')
@@ -23,20 +23,18 @@ async function getChangelogItems (prefix, feed) {
} }
// only show the first 3 posts // only show the first 3 posts
const changelog = feed.items const changelog = feed.items.slice(0, 3).map((item) => {
.slice(0, 3) // remove the prefix if it exists (Ex: 'GitHub Actions: '), where the colon and expected whitespace should be hardcoded.
.map(item => { const title = prefix ? item.title.replace(new RegExp(`^${prefix}`), '') : item.title
// remove the prefix if it exists (Ex: 'GitHub Actions: '), where the colon and expected whitespace should be hardcoded. return {
const title = prefix ? item.title.replace(new RegExp(`^${prefix}`), '') : item.title // capitalize the first letter of the title
return { title: title.trim().charAt(0).toUpperCase() + title.slice(1),
// capitalize the first letter of the title date: item.isoDate,
title: title.trim().charAt(0).toUpperCase() + title.slice(1), href: item.link,
date: item.isoDate, }
href: item.link })
}
})
return changelog return changelog
} }
module.exports = { getRssFeed, getChangelogItems } export default { getRssFeed, getChangelogItems }

View File

@@ -1,13 +1,14 @@
const { next, latest } = require('./enterprise-server-releases') import { next, latest } from './enterprise-server-releases.js'
const versionSatisfiesRange = require('./version-satisfies-range') import versionSatisfiesRange from './version-satisfies-range.js'
// Special handling for frontmatter that evalues to the next GHES release number or a hardcoded `next`: // Special handling for frontmatter that evalues to the next GHES release number or a hardcoded `next`:
// we don't want to return it as an applicable version or it will become a permalink, // we don't want to return it as an applicable version or it will become a permalink,
// but we also don't want to throw an error if no other versions are found. // but we also don't want to throw an error if no other versions are found.
module.exports = function checkIfNextVersionOnly (value) { export default function checkIfNextVersionOnly(value) {
if (value === '*') return false if (value === '*') return false
const ghesNextVersionOnly = versionSatisfiesRange(next, value) && !versionSatisfiesRange(latest, value) const ghesNextVersionOnly =
versionSatisfiesRange(next, value) && !versionSatisfiesRange(latest, value)
return (ghesNextVersionOnly || value === 'next') return ghesNextVersionOnly || value === 'next'
} }

View File

@@ -1,5 +1,9 @@
const semver = require('semver') import semver from 'semver'
const { engines } = require('../package.json') import fs from 'fs'
import path from 'path'
const packageFile = JSON.parse(fs.readFileSync(path.join(process.cwd(), './package.json')))
const { engines } = packageFile
/* istanbul ignore next */ /* istanbul ignore next */
if (!semver.satisfies(process.version, engines.node)) { if (!semver.satisfies(process.version, engines.node)) {

View File

@@ -1,10 +1,10 @@
module.exports = { export default {
httpOnly: true, // can't access these cookies through browser JavaScript httpOnly: true, // can't access these cookies through browser JavaScript
secure: !['test', 'development'].includes(process.env.NODE_ENV), secure: !['test', 'development'].includes(process.env.NODE_ENV),
// requires https protocol // requires https protocol
// `secure` doesn't work with supertest at all // `secure` doesn't work with supertest at all
// http://localhost fails on chrome with secure // http://localhost fails on chrome with secure
sameSite: 'lax' sameSite: 'lax',
// most browsers are "lax" these days, // most browsers are "lax" these days,
// but older browsers used to default to "none" // but older browsers used to default to "none"
} }

View File

@@ -1,8 +1,11 @@
const fs = require('fs').promises import { fileURLToPath } from 'url'
const path = require('path') import path from 'path'
const Page = require('./page') import xFs from 'fs'
import Page from './page.js'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
const fs = xFs.promises
module.exports = async function createTree (originalPath, langObj) { export default async function createTree(originalPath, langObj) {
// This basePath definition is needed both here and in lib/page-data.js because this // This basePath definition is needed both here and in lib/page-data.js because this
// function runs recursively, and the value for originalPath changes on recursive runs. // function runs recursively, and the value for originalPath changes on recursive runs.
const basePath = path.posix.join(__dirname, '..', langObj.dir, 'content') const basePath = path.posix.join(__dirname, '..', langObj.dir, 'content')
@@ -24,7 +27,7 @@ module.exports = async function createTree (originalPath, langObj) {
const page = await Page.init({ const page = await Page.init({
basePath, basePath,
relativePath, relativePath,
languageCode: langObj.code languageCode: langObj.code,
}) })
if (!page) { if (!page) {
@@ -38,14 +41,18 @@ module.exports = async function createTree (originalPath, langObj) {
// Create the root tree object on the first run, and create children recursively. // Create the root tree object on the first run, and create children recursively.
const item = { const item = {
page page,
} }
// Process frontmatter children recursively. // Process frontmatter children recursively.
if (item.page.children) { if (item.page.children) {
item.childPages = (await Promise.all(item.page.children item.childPages = (
.map(async (child) => await createTree(path.posix.join(originalPath, child), langObj)))) await Promise.all(
.filter(Boolean) item.page.children.map(
async (child) => await createTree(path.posix.join(originalPath, child), langObj)
)
)
).filter(Boolean)
} }
return item return item

View File

@@ -1,22 +1,18 @@
const assert = require('assert') import assert from 'assert'
const fs = require('fs') import fs from 'fs'
const path = require('path') import path from 'path'
const walk = require('walk-sync') import walk from 'walk-sync'
const yaml = require('js-yaml') import yaml from 'js-yaml'
const { isRegExp, set } = require('lodash') import { isRegExp, set } from 'lodash-es'
const filenameToKey = require('./filename-to-key') import filenameToKey from './filename-to-key.js'
module.exports = function dataDirectory (dir, opts = {}) { export default function dataDirectory(dir, opts = {}) {
const defaultOpts = { const defaultOpts = {
preprocess: (content) => { return content }, preprocess: (content) => {
return content
},
ignorePatterns: [/README\.md$/i], ignorePatterns: [/README\.md$/i],
extensions: [ extensions: ['.json', '.md', '.markdown', '.yaml', '.yml'],
'.json',
'.md',
'.markdown',
'.yaml',
'.yml'
]
} }
opts = Object.assign({}, defaultOpts, opts) opts = Object.assign({}, defaultOpts, opts)
@@ -31,18 +27,15 @@ module.exports = function dataDirectory (dir, opts = {}) {
const data = {} const data = {}
// find YAML and Markdown files in the given directory, recursively // find YAML and Markdown files in the given directory, recursively
const filenames = walk(dir, { includeBasePath: true }) const filenames = walk(dir, { includeBasePath: true }).filter((filename) => {
.filter(filename => { // ignore files that match any of ignorePatterns regexes
// ignore files that match any of ignorePatterns regexes if (opts.ignorePatterns.some((pattern) => pattern.test(filename))) return false
if (opts.ignorePatterns.some(pattern => pattern.test(filename))) return false
// ignore files that don't have a whitelisted file extension // ignore files that don't have a whitelisted file extension
return opts.extensions.includes(path.extname(filename).toLowerCase()) return opts.extensions.includes(path.extname(filename).toLowerCase())
}) })
const files = filenames.map( const files = filenames.map((filename) => [filename, fs.readFileSync(filename, 'utf8')])
filename => [filename, fs.readFileSync(filename, 'utf8')]
)
files.forEach(([filename, fileContent]) => { files.forEach(([filename, fileContent]) => {
// derive `foo.bar.baz` object key from `foo/bar/baz.yml` filename // derive `foo.bar.baz` object key from `foo/bar/baz.yml` filename
const key = filenameToKey(path.relative(dir, filename)) const key = filenameToKey(path.relative(dir, filename))
@@ -64,8 +57,7 @@ module.exports = function dataDirectory (dir, opts = {}) {
set(data, key, fileContent) set(data, key, fileContent)
break break
} }
} })
)
return data return data
} }

View File

@@ -1,6 +1,6 @@
// prevent `[foo] (bar)` strings with a space between from being interpreted as markdown links // prevent `[foo] (bar)` strings with a space between from being interpreted as markdown links
// by encoding the space character // by encoding the space character
module.exports = function encodeBracketedParentheses (input) { export default function encodeBracketedParentheses(input) {
return input.replace(/] \(/g, ']&nbsp;(') return input.replace(/] \(/g, ']&nbsp;(')
} }

View File

@@ -1,19 +1,19 @@
const versionSatisfiesRange = require('./version-satisfies-range') import versionSatisfiesRange from './version-satisfies-range.js'
import fs from 'fs'
import path from 'path'
export const dates = JSON.parse(
fs.readFileSync(path.join(process.cwd(), './lib/enterprise-dates.json'))
)
// GHES Release Lifecycle Dates: // GHES Release Lifecycle Dates:
// enterprise-releases/docs/supported-versions.md#release-lifecycle-dates // enterprise-releases/docs/supported-versions.md#release-lifecycle-dates
const dates = require('../lib/enterprise-dates.json')
// Some frontmatter may contain the upcoming GHES release number // Some frontmatter may contain the upcoming GHES release number
const next = '3.2' export const next = '3.2'
const supported = [ export const supported = ['3.1', '3.0', '2.22', '2.21']
'3.1', export const deprecated = [
'3.0',
'2.22',
'2.21'
]
const deprecated = [
'2.20', '2.20',
'2.19', '2.19',
'2.18', '2.18',
@@ -35,37 +35,47 @@ const deprecated = [
'2.2', '2.2',
'2.1', '2.1',
'2.0', '2.0',
'11.10.340' '11.10.340',
]
const legacyAssetVersions = [
'3.0',
'2.22',
'2.21'
] ]
export const legacyAssetVersions = ['3.0', '2.22', '2.21']
const all = supported.concat(deprecated) export const all = supported.concat(deprecated)
const latest = supported[0] export const latest = supported[0]
const oldestSupported = supported[supported.length - 1] export const oldestSupported = supported[supported.length - 1]
const nextDeprecationDate = dates[oldestSupported].deprecationDate export const nextDeprecationDate = dates[oldestSupported].deprecationDate
const isOldestReleaseDeprecated = new Date() > new Date(nextDeprecationDate) export const isOldestReleaseDeprecated = new Date() > new Date(nextDeprecationDate)
const deprecatedOnNewSite = deprecated.filter(version => versionSatisfiesRange(version, '>=2.13')) export const deprecatedOnNewSite = deprecated.filter((version) =>
const firstVersionDeprecatedOnNewSite = '2.13' versionSatisfiesRange(version, '>=2.13')
)
export const firstVersionDeprecatedOnNewSite = '2.13'
// starting from 2.18, we updated the archival script to create a redirects.json top-level file in the archived repo // starting from 2.18, we updated the archival script to create a redirects.json top-level file in the archived repo
const lastVersionWithoutArchivedRedirectsFile = '2.17' export const lastVersionWithoutArchivedRedirectsFile = '2.17'
// last version using paths like /enterprise/<release>/<user>/<product>/<category>/<article> // last version using paths like /enterprise/<release>/<user>/<product>/<category>/<article>
// instead of /enterprise-server@<release>/<product>/<category>/<article> // instead of /enterprise-server@<release>/<product>/<category>/<article>
const lastReleaseWithLegacyFormat = '2.18' export const lastReleaseWithLegacyFormat = '2.18'
const deprecatedReleasesWithLegacyFormat = deprecated.filter(version => versionSatisfiesRange(version, '<=2.18')) export const deprecatedReleasesWithLegacyFormat = deprecated.filter((version) =>
const deprecatedReleasesWithNewFormat = deprecated.filter(version => versionSatisfiesRange(version, '>2.18')) versionSatisfiesRange(version, '<=2.18')
const deprecatedReleasesOnDeveloperSite = deprecated.filter(version => versionSatisfiesRange(version, '<=2.16')) )
const firstReleaseNote = '2.20' export const deprecatedReleasesWithNewFormat = deprecated.filter((version) =>
const firstRestoredAdminGuides = '2.21' versionSatisfiesRange(version, '>2.18')
)
export const deprecatedReleasesOnDeveloperSite = deprecated.filter((version) =>
versionSatisfiesRange(version, '<=2.16')
)
export const firstReleaseNote = '2.20'
export const firstRestoredAdminGuides = '2.21'
const findReleaseNumberIndex = (releaseNum) => { return all.findIndex(i => i === releaseNum) } export const findReleaseNumberIndex = (releaseNum) => {
const getNextReleaseNumber = (releaseNum) => { return all[findReleaseNumberIndex(releaseNum) - 1] } return all.findIndex((i) => i === releaseNum)
const getPreviousReleaseNumber = (releaseNum) => { return all[findReleaseNumberIndex(releaseNum) + 1] } }
export const getNextReleaseNumber = (releaseNum) => {
return all[findReleaseNumberIndex(releaseNum) - 1]
}
export const getPreviousReleaseNumber = (releaseNum) => {
return all[findReleaseNumberIndex(releaseNum) + 1]
}
module.exports = { export default {
next, next,
supported, supported,
deprecated, deprecated,
@@ -86,5 +96,5 @@ module.exports = {
firstReleaseNote, firstReleaseNote,
firstRestoredAdminGuides, firstRestoredAdminGuides,
getNextReleaseNumber, getNextReleaseNumber,
getPreviousReleaseNumber getPreviousReleaseNumber,
} }

View File

@@ -1,5 +1,5 @@
// Linkinator treats the following as regex. // Linkinator treats the following as regex.
module.exports = [ export default [
// Skip GitHub search links. // Skip GitHub search links.
'https://github.com/search\\?', 'https://github.com/search\\?',
'https://github.com/github/gitignore/search\\?', 'https://github.com/github/gitignore/search\\?',
@@ -18,5 +18,5 @@ module.exports = [
'https://ko-fi.com/', 'https://ko-fi.com/',
'https://en.liberapay.com/', 'https://en.liberapay.com/',
'https://nbviewer.jupyter.org/github/bokeh/bokeh-notebooks/blob/main/tutorial/06%20-%20Linking%20and%20Interactions.ipynb', 'https://nbviewer.jupyter.org/github/bokeh/bokeh-notebooks/blob/main/tutorial/06%20-%20Linking%20and%20Interactions.ipynb',
'https://www.vmware.com/products/esxi-and-esx.html' 'https://www.vmware.com/products/esxi-and-esx.html',
] ]

View File

@@ -1,7 +1,7 @@
const fetch = require('node-fetch') import fetch from 'node-fetch'
module.exports = class FailBot { export default class FailBot {
constructor ({ app, haystackURL, headers }) { constructor({ app, haystackURL, headers }) {
this.app = app this.app = app
this.haystackURL = haystackURL this.haystackURL = haystackURL
this.headers = headers this.headers = headers
@@ -13,14 +13,14 @@ module.exports = class FailBot {
* @param {any} metadata * @param {any} metadata
* @param {any} [headers] * @param {any} [headers]
*/ */
static async report (error, metadata, headers = {}) { static async report(error, metadata, headers = {}) {
// If there's no HAYSTACK_URL set, bail early // If there's no HAYSTACK_URL set, bail early
if (!process.env.HAYSTACK_URL) return if (!process.env.HAYSTACK_URL) return
const failbot = new FailBot({ const failbot = new FailBot({
app: 'docs', app: 'docs',
haystackURL: process.env.HAYSTACK_URL, haystackURL: process.env.HAYSTACK_URL,
headers headers,
}) })
return failbot.sendException(error, metadata) return failbot.sendException(error, metadata)
@@ -30,7 +30,7 @@ module.exports = class FailBot {
* Create a rollup of this error by generating a base64 representation * Create a rollup of this error by generating a base64 representation
* @param {Error} error * @param {Error} error
*/ */
createRollup (error) { createRollup(error) {
const stackLine = error.stack && error.stack.split('\n')[1] const stackLine = error.stack && error.stack.split('\n')[1]
const str = `${error.name}:${stackLine}`.replace(/=/g, '') const str = `${error.name}:${stackLine}`.replace(/=/g, '')
return Buffer.from(str).toString('base64') return Buffer.from(str).toString('base64')
@@ -41,7 +41,7 @@ module.exports = class FailBot {
* @param {Error} error * @param {Error} error
* @param {any} metadata * @param {any} metadata
*/ */
formatJSON (error, metadata) { formatJSON(error, metadata) {
return Object.assign({}, metadata, { return Object.assign({}, metadata, {
/* eslint-disable camelcase */ /* eslint-disable camelcase */
created_at: new Date().toISOString(), created_at: new Date().toISOString(),
@@ -49,7 +49,7 @@ module.exports = class FailBot {
class: error.name, class: error.name,
message: error.message, message: error.message,
backtrace: error.stack || '', backtrace: error.stack || '',
js_environment: `Node.js ${process.version}` js_environment: `Node.js ${process.version}`,
/* eslint-enable camelcase */ /* eslint-enable camelcase */
}) })
} }
@@ -58,7 +58,7 @@ module.exports = class FailBot {
* Populate default context from settings. Since settings commonly comes from * Populate default context from settings. Since settings commonly comes from
* ENV, this allows setting defaults for the context via the environment. * ENV, this allows setting defaults for the context via the environment.
*/ */
getFailbotContext () { getFailbotContext() {
const failbotKeys = {} const failbotKeys = {}
for (const key in process.env) { for (const key in process.env) {
@@ -76,7 +76,7 @@ module.exports = class FailBot {
* @param {Error} error * @param {Error} error
* @param {any} metadata * @param {any} metadata
*/ */
async sendException (error, metadata = {}) { async sendException(error, metadata = {}) {
const data = Object.assign({ app: this.app }, this.getFailbotContext(), metadata) const data = Object.assign({ app: this.app }, this.getFailbotContext(), metadata)
const body = this.formatJSON(error, Object.assign({ app: this.app }, data)) const body = this.formatJSON(error, Object.assign({ app: this.app }, data))
@@ -85,8 +85,8 @@ module.exports = class FailBot {
body: JSON.stringify(body), body: JSON.stringify(body),
headers: { headers: {
...this.headers, ...this.headers,
'Content-Type': 'application/json' 'Content-Type': 'application/json',
} },
}) })
} }
} }

View File

@@ -1,4 +1,4 @@
const readJsonFile = require('./read-json-file') import readJsonFile from './read-json-file.js'
const featureFlags = readJsonFile('./feature-flags.json') const featureFlags = readJsonFile('./feature-flags.json')
// add feature flags as environment variables // add feature flags as environment variables

View File

@@ -1,6 +1,6 @@
import path from 'path'
import { escapeRegExp } from 'lodash-es'
/* eslint-disable prefer-regex-literals */ /* eslint-disable prefer-regex-literals */
const path = require('path')
const { escapeRegExp } = require('lodash')
// slash at the beginning of a filename // slash at the beginning of a filename
const leadingPathSeparator = new RegExp(`^${escapeRegExp(path.sep)}`) const leadingPathSeparator = new RegExp(`^${escapeRegExp(path.sep)}`)
@@ -14,7 +14,7 @@ const windowsPathSeparator = new RegExp('/', 'g')
const windowsDoubleSlashSeparator = new RegExp('\\\\', 'g') const windowsDoubleSlashSeparator = new RegExp('\\\\', 'g')
// derive `foo.bar.baz` object key from `foo/bar/baz.yml` filename // derive `foo.bar.baz` object key from `foo/bar/baz.yml` filename
module.exports = function filenameToKey (filename) { export default function filenameToKey(filename) {
const extension = new RegExp(`${path.extname(filename)}$`) const extension = new RegExp(`${path.extname(filename)}$`)
const key = filename const key = filename
.replace(extension, '') .replace(extension, '')

View File

@@ -1,8 +1,8 @@
const { getLanguageCode } = require('./patterns') import { getLanguageCode } from './patterns.js'
// This module recursively searches a given part of the site tree by iterating through child // This module recursively searches a given part of the site tree by iterating through child
// pages and finding a path that matches the original path provided. // pages and finding a path that matches the original path provided.
module.exports = function findPageInSiteTree (treePage, englishTree, originalPath, modifiedPath) { export default function findPageInSiteTree(treePage, englishTree, originalPath, modifiedPath) {
if (Array.isArray(treePage)) throw new Error('received array instead of object') if (Array.isArray(treePage)) throw new Error('received array instead of object')
// If the tree page already matches the path, or if it has no child pages, return the page itself. // If the tree page already matches the path, or if it has no child pages, return the page itself.
@@ -24,10 +24,10 @@ module.exports = function findPageInSiteTree (treePage, englishTree, originalPat
// If we found a page... // If we found a page...
if (foundPage) { if (foundPage) {
return modifiedPath === originalPath return modifiedPath === originalPath
// Check if it matches the _original_ path, and return it if so. ? // Check if it matches the _original_ path, and return it if so.
? foundPage foundPage
// If we found a page with the modified path, keep going down the tree until we find the original path. : // If we found a page with the modified path, keep going down the tree until we find the original path.
: findPageInSiteTree(foundPage, englishTree, originalPath) findPageInSiteTree(foundPage, englishTree, originalPath)
} }
// If no page was found at the path we tried, try again by removing the last segment of the path. // If no page was found at the path we tried, try again by removing the last segment of the path.

View File

@@ -1,6 +1,6 @@
const { getLanguageCode } = require('./patterns') import { getLanguageCode } from './patterns.js'
module.exports = function findPage (href, pageMap, redirects) { export default function findPage(href, pageMap, redirects) {
// remove any fragments // remove any fragments
href = href.replace(/#.*$/, '') href = href.replace(/#.*$/, '')

View File

@@ -1,166 +1,168 @@
const fs = require('fs') import fs from 'fs'
const path = require('path') import path from 'path'
const parse = require('./read-frontmatter') import parse from './read-frontmatter.js'
const semver = require('semver') import semver from 'semver'
const layouts = require('./layouts') import layouts from './layouts.js'
import xAllVersions from './all-versions.js'
const semverValidRange = semver.validRange const semverValidRange = semver.validRange
const layoutNames = Object.keys(layouts).concat([false]) const layoutNames = Object.keys(layouts).concat([false])
const semverRange = { const semverRange = {
type: 'string', type: 'string',
conform: semverValidRange, conform: semverValidRange,
message: 'Must be a valid SemVer range' message: 'Must be a valid SemVer range',
} }
const versionObjs = Object.values(require('./all-versions')) const versionObjs = Object.values(xAllVersions)
const guideTypes = ['overview', 'quick_start', 'tutorial', 'how_to', 'reference'] const guideTypes = ['overview', 'quick_start', 'tutorial', 'how_to', 'reference']
const featureVersions = fs.readdirSync(path.posix.join(process.cwd(), 'data/features')) const featureVersions = fs
.map(file => path.basename(file, '.yml')) .readdirSync(path.posix.join(process.cwd(), 'data/features'))
.map((file) => path.basename(file, '.yml'))
const schema = { export const schema = {
properties: { properties: {
title: { title: {
type: 'string', type: 'string',
required: true, required: true,
translatable: true translatable: true,
}, },
shortTitle: { shortTitle: {
type: 'string', type: 'string',
translatable: true translatable: true,
}, },
intro: { intro: {
type: 'string', type: 'string',
translatable: true translatable: true,
}, },
product: { product: {
type: 'string', type: 'string',
translatable: true translatable: true,
}, },
permissions: { permissions: {
type: 'string' type: 'string',
}, },
// true by default on articles, false on all other content // true by default on articles, false on all other content
showMiniToc: { showMiniToc: {
type: 'boolean' type: 'boolean',
}, },
miniTocMaxHeadingLevel: { miniTocMaxHeadingLevel: {
type: 'number', type: 'number',
default: 2, default: 2,
minimum: 2, minimum: 2,
maximum: 4 maximum: 4,
}, },
mapTopic: { mapTopic: {
type: 'boolean' type: 'boolean',
}, },
// allow hidden articles under `early-access` // allow hidden articles under `early-access`
hidden: { hidden: {
type: 'boolean' type: 'boolean',
}, },
layout: { layout: {
type: ['string', 'boolean'], type: ['string', 'boolean'],
enum: layoutNames, enum: layoutNames,
message: 'must be the filename of an existing layout file, or `false` for no layout' message: 'must be the filename of an existing layout file, or `false` for no layout',
}, },
redirect_from: { redirect_from: {
type: ['array', 'string'] type: ['array', 'string'],
}, },
allowTitleToDifferFromFilename: { allowTitleToDifferFromFilename: {
type: 'boolean' type: 'boolean',
}, },
introLinks: { introLinks: {
type: 'object', type: 'object',
properties: { properties: {
quickstart: { type: 'string' }, quickstart: { type: 'string' },
reference: { type: 'string' }, reference: { type: 'string' },
overview: { type: 'string' } overview: { type: 'string' },
} },
}, },
authors: { authors: {
type: 'array', type: 'array',
items: { items: {
type: 'string' type: 'string',
} },
}, },
examples_source: { examples_source: {
type: 'string' type: 'string',
}, },
featuredLinks: { featuredLinks: {
type: 'object', type: 'object',
properties: { properties: {
gettingStarted: { gettingStarted: {
type: 'array', type: 'array',
items: { type: 'string' } items: { type: 'string' },
}, },
guides: { guides: {
type: 'array', type: 'array',
items: { type: 'string' } items: { type: 'string' },
}, },
guideCards: { guideCards: {
type: 'array', type: 'array',
items: { type: 'string' } items: { type: 'string' },
}, },
popular: { popular: {
type: 'array', type: 'array',
items: { type: 'string' } items: { type: 'string' },
}, },
// allows you to use an alternate heading for the popular column // allows you to use an alternate heading for the popular column
popularHeading: { popularHeading: {
type: 'string' type: 'string',
} },
} },
}, },
// Shown in `product-landing.html` "What's new" section // Shown in `product-landing.html` "What's new" section
changelog: { changelog: {
type: 'object', type: 'object',
properties: { properties: {
label: { type: 'string' }, label: { type: 'string' },
prefix: { type: 'string' } prefix: { type: 'string' },
} },
}, },
type: { type: {
type: 'string', type: 'string',
enum: guideTypes enum: guideTypes,
}, },
topics: { topics: {
type: 'array' type: 'array',
}, },
includeGuides: { includeGuides: {
type: 'array' type: 'array',
}, },
learningTracks: { learningTracks: {
type: 'array' type: 'array',
}, },
// Used in `product-landing.html` // Used in `product-landing.html`
beta_product: { beta_product: {
type: 'boolean' type: 'boolean',
}, },
// Show in `product-landing.html` // Show in `product-landing.html`
product_video: { product_video: {
type: 'string' type: 'string',
}, },
interactive: { interactive: {
type: 'boolean' type: 'boolean',
}, },
// Platform-specific content preference // Platform-specific content preference
defaultPlatform: { defaultPlatform: {
type: 'string', type: 'string',
enum: ['mac', 'windows', 'linux'] enum: ['mac', 'windows', 'linux'],
}, },
// Tool-specific content preference // Tool-specific content preference
defaultTool: { defaultTool: {
type: 'string', type: 'string',
enum: ['webui', 'cli', 'desktop', 'curl'] enum: ['webui', 'cli', 'desktop', 'curl'],
}, },
// Documentation contributed by a third party, such as a GitHub Partner // Documentation contributed by a third party, such as a GitHub Partner
contributor: { contributor: {
type: 'object', type: 'object',
properties: { properties: {
name: { type: 'string' }, name: { type: 'string' },
URL: { type: 'string' } URL: { type: 'string' },
} },
}, },
// Child links specified on any TOC page // Child links specified on any TOC page
children: { children: {
type: 'array' type: 'array',
}, },
// External products specified on the homepage // External products specified on the homepage
externalProducts: { externalProducts: {
@@ -173,8 +175,8 @@ const schema = {
id: { type: 'string', required: true }, id: { type: 'string', required: true },
name: { type: 'string', required: true }, name: { type: 'string', required: true },
href: { type: 'string', format: 'url', required: true }, href: { type: 'string', format: 'url', required: true },
external: { type: 'boolean', required: true } external: { type: 'boolean', required: true },
} },
}, },
atom: { atom: {
type: 'object', type: 'object',
@@ -183,8 +185,8 @@ const schema = {
id: { type: 'string', required: true }, id: { type: 'string', required: true },
name: { type: 'string', required: true }, name: { type: 'string', required: true },
href: { type: 'string', format: 'url', required: true }, href: { type: 'string', format: 'url', required: true },
external: { type: 'boolean', required: true } external: { type: 'boolean', required: true },
} },
}, },
electron: { electron: {
type: 'object', type: 'object',
@@ -193,12 +195,12 @@ const schema = {
id: { type: 'string', required: true }, id: { type: 'string', required: true },
name: { type: 'string', required: true }, name: { type: 'string', required: true },
href: { type: 'string', format: 'url', required: true }, href: { type: 'string', format: 'url', required: true },
external: { type: 'boolean', required: true } external: { type: 'boolean', required: true },
} },
} },
} },
} },
} },
} }
const featureVersionsProp = { const featureVersionsProp = {
@@ -206,10 +208,11 @@ const featureVersionsProp = {
type: ['string', 'array'], type: ['string', 'array'],
enum: featureVersions, enum: featureVersions,
items: { items: {
type: 'string' type: 'string',
}, },
message: 'must be the name (or names) of a feature that matches "filename" in data/features/_filename_.yml' message:
} 'must be the name (or names) of a feature that matches "filename" in data/features/_filename_.yml',
},
} }
schema.properties.versions = { schema.properties.versions = {
@@ -219,18 +222,18 @@ schema.properties.versions = {
acc[versionObj.plan] = semverRange acc[versionObj.plan] = semverRange
acc[versionObj.shortName] = semverRange acc[versionObj.shortName] = semverRange
return acc return acc
}, featureVersionsProp) }, featureVersionsProp),
} }
// Support 'github-ae': next // Support 'github-ae': next
schema.properties.versions.properties['github-ae'] = 'next' schema.properties.versions.properties['github-ae'] = 'next'
schema.properties.versions.properties.ghae = 'next' schema.properties.versions.properties.ghae = 'next'
function frontmatter (markdown, opts = {}) { function frontmatter(markdown, opts = {}) {
const defaults = { const defaults = {
schema, schema,
validateKeyNames: true, validateKeyNames: true,
validateKeyOrder: false // TODO: enable this once we've sorted all the keys. See issue 9658 validateKeyOrder: false, // TODO: enable this once we've sorted all the keys. See issue 9658
} }
return parse(markdown, Object.assign({}, defaults, opts)) return parse(markdown, Object.assign({}, defaults, opts))
@@ -239,4 +242,4 @@ function frontmatter (markdown, opts = {}) {
// attach the schema object so it can be `require`d elsewhere. // attach the schema object so it can be `require`d elsewhere.
frontmatter.schema = schema frontmatter.schema = schema
module.exports = frontmatter export default frontmatter

View File

@@ -1,20 +1,21 @@
const path = require('path') import { fileURLToPath } from 'url'
const { reduce, sortBy } = require('lodash') import path from 'path'
const allVersions = require('./all-versions') import { reduce, sortBy } from 'lodash-es'
const versionSatisfiesRange = require('./version-satisfies-range') import allVersions from './all-versions.js'
const checkIfNextVersionOnly = require('./check-if-next-version-only') import versionSatisfiesRange from './version-satisfies-range.js'
const dataDirectory = require('./data-directory') import checkIfNextVersionOnly from './check-if-next-version-only.js'
const encodeBracketedParentheses = require('./encode-bracketed-parentheses') import dataDirectory from './data-directory.js'
import encodeBracketedParentheses from './encode-bracketed-parentheses.js'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
const featuresDir = path.posix.join(__dirname, '../data/features') const featuresDir = path.posix.join(__dirname, '../data/features')
const featureData = dataDirectory(featuresDir, { const featureData = dataDirectory(featuresDir, {
preprocess: dataString => preprocess: (dataString) => encodeBracketedParentheses(dataString.trimEnd()),
encodeBracketedParentheses(dataString.trimEnd()), ignorePatterns: [/README\.md$/],
ignorePatterns: [/README\.md$/]
}) })
// return an array of versions that an article's product versions encompasses // return an array of versions that an article's product versions encompasses
function getApplicableVersions (frontmatterVersions, filepath) { function getApplicableVersions(frontmatterVersions, filepath) {
if (typeof frontmatterVersions === 'undefined') { if (typeof frontmatterVersions === 'undefined') {
throw new Error(`No \`versions\` frontmatter found in ${filepath}`) throw new Error(`No \`versions\` frontmatter found in ${filepath}`)
} }
@@ -35,19 +36,23 @@ function getApplicableVersions (frontmatterVersions, filepath) {
// ghes: '>=2.23' // ghes: '>=2.23'
// ghae: '*' // ghae: '*'
// where the feature is bringing the ghes and ghae versions into the mix. // where the feature is bringing the ghes and ghae versions into the mix.
const featureVersions = reduce(frontmatterVersions, (result, value, key) => { const featureVersions = reduce(
if (key === 'feature') { frontmatterVersions,
if (typeof value === 'string') { (result, value, key) => {
Object.assign(result, { ...featureData[value].versions }) if (key === 'feature') {
} else if (Array.isArray(value)) { if (typeof value === 'string') {
value.forEach(str => { Object.assign(result, { ...featureData[value].versions })
Object.assign(result, { ...featureData[str].versions }) } else if (Array.isArray(value)) {
}) value.forEach((str) => {
Object.assign(result, { ...featureData[str].versions })
})
}
delete result[key]
} }
delete result[key] return result
} },
return result {}
}, {}) )
// We will be evaluating feature versions separately, so we can remove this. // We will be evaluating feature versions separately, so we can remove this.
delete frontmatterVersions.feature delete frontmatterVersions.feature
@@ -57,19 +62,29 @@ function getApplicableVersions (frontmatterVersions, filepath) {
const foundFrontmatterVersions = evaluateVersions(frontmatterVersions) const foundFrontmatterVersions = evaluateVersions(frontmatterVersions)
// Combine them! // Combine them!
const applicableVersions = [...new Set(foundFrontmatterVersions.versions.concat(foundFeatureVersions.versions))] const applicableVersions = [
...new Set(foundFrontmatterVersions.versions.concat(foundFeatureVersions.versions)),
]
if (!applicableVersions.length && !foundFrontmatterVersions.isNextVersionOnly && !foundFeatureVersions.isNextVersionOnly) { if (
throw new Error(`No applicable versions found for ${filepath}. Please double-check the page's \`versions\` frontmatter.`) !applicableVersions.length &&
!foundFrontmatterVersions.isNextVersionOnly &&
!foundFeatureVersions.isNextVersionOnly
) {
throw new Error(
`No applicable versions found for ${filepath}. Please double-check the page's \`versions\` frontmatter.`
)
} }
// Sort them by the order in lib/all-versions. // Sort them by the order in lib/all-versions.
const sortedVersions = sortBy(applicableVersions, (v) => { return Object.keys(allVersions).indexOf(v) }) const sortedVersions = sortBy(applicableVersions, (v) => {
return Object.keys(allVersions).indexOf(v)
})
return sortedVersions return sortedVersions
} }
function evaluateVersions (versionsObj) { function evaluateVersions(versionsObj) {
let isNextVersionOnly = false let isNextVersionOnly = false
// get an array like: [ 'free-pro-team@latest', 'enterprise-server@2.21', 'enterprise-cloud@latest' ] // get an array like: [ 'free-pro-team@latest', 'enterprise-server@2.21', 'enterprise-cloud@latest' ]
@@ -80,26 +95,29 @@ function evaluateVersions (versionsObj) {
// ghes: '>=2.19' // ghes: '>=2.19'
// ghae: '*' // ghae: '*'
// ^ where each key corresponds to a plan's short name (defined in lib/all-versions.js) // ^ where each key corresponds to a plan's short name (defined in lib/all-versions.js)
Object.entries(versionsObj) Object.entries(versionsObj).forEach(([plan, planValue]) => {
.forEach(([plan, planValue]) => { // Special handling for frontmatter that evalues to the next GHES release number or a hardcoded `next`.
// Special handling for frontmatter that evalues to the next GHES release number or a hardcoded `next`. isNextVersionOnly = checkIfNextVersionOnly(planValue)
isNextVersionOnly = checkIfNextVersionOnly(planValue)
// For each plan (e.g., enterprise-server), get matching versions from allVersions object // For each plan (e.g., enterprise-server), get matching versions from allVersions object
Object.values(allVersions) Object.values(allVersions)
.filter(relevantVersion => relevantVersion.plan === plan || relevantVersion.shortName === plan) .filter(
.forEach(relevantVersion => { (relevantVersion) => relevantVersion.plan === plan || relevantVersion.shortName === plan
// Use a dummy value of '1.0' for non-numbered versions like free-pro-team and github-ae )
// This will evaluate to true against '*' but false against 'next', which is what we want. .forEach((relevantVersion) => {
const versionToCompare = relevantVersion.hasNumberedReleases ? relevantVersion.currentRelease : '1.0' // Use a dummy value of '1.0' for non-numbered versions like free-pro-team and github-ae
// This will evaluate to true against '*' but false against 'next', which is what we want.
const versionToCompare = relevantVersion.hasNumberedReleases
? relevantVersion.currentRelease
: '1.0'
if (versionSatisfiesRange(versionToCompare, planValue)) { if (versionSatisfiesRange(versionToCompare, planValue)) {
versions.push(relevantVersion.version) versions.push(relevantVersion.version)
} }
}) })
}) })
return { versions, isNextVersionOnly } return { versions, isNextVersionOnly }
} }
module.exports = getApplicableVersions export default getApplicableVersions

View File

@@ -1,7 +1,7 @@
// This function derives the document type from the *relative path* segment length, // This function derives the document type from the *relative path* segment length,
// where a relative path refers to the content path starting with the product dir. // where a relative path refers to the content path starting with the product dir.
// For example: actions/index.md or github/getting-started-with-github/quickstart.md. // For example: actions/index.md or github/getting-started-with-github/quickstart.md.
module.exports = function getDocumentType (relativePath) { export default function getDocumentType(relativePath) {
// A non-index file is ALWAYS considered an article in this approach, // A non-index file is ALWAYS considered an article in this approach,
// even if it's at the category level (like actions/quickstart.md) // even if it's at the category level (like actions/quickstart.md)
if (!relativePath.endsWith('index.md')) { if (!relativePath.endsWith('index.md')) {
@@ -17,7 +17,7 @@ module.exports = function getDocumentType (relativePath) {
1: 'homepage', 1: 'homepage',
2: 'product', 2: 'product',
3: 'category', 3: 'category',
4: 'mapTopic' 4: 'mapTopic',
} }
const earlyAccessDocs = { const earlyAccessDocs = {
@@ -25,10 +25,8 @@ module.exports = function getDocumentType (relativePath) {
2: 'early-access', 2: 'early-access',
3: 'product', 3: 'product',
4: 'category', 4: 'category',
5: 'mapTopic' 5: 'mapTopic',
} }
return isEarlyAccess return isEarlyAccess ? earlyAccessDocs[segmentLength] : publicDocs[segmentLength]
? earlyAccessDocs[segmentLength]
: publicDocs[segmentLength]
} }

View File

@@ -1,12 +1,12 @@
const astFromMarkdown = require('mdast-util-from-markdown') import astFromMarkdown from 'mdast-util-from-markdown'
const toString = require('mdast-util-to-string') import toString from 'mdast-util-to-string'
const visit = require('unist-util-visit') import visit from 'unist-util-visit'
const findPage = require('./find-page') import findPage from './find-page.js'
// for any translated page, first get corresponding English markdown // for any translated page, first get corresponding English markdown
// then get the headings on both the translated and English pageMap // then get the headings on both the translated and English pageMap
// finally, create a map of translation:English for all headings on the page // finally, create a map of translation:English for all headings on the page
module.exports = function getEnglishHeadings (page, context) { export default function getEnglishHeadings(page, context) {
// Special handling for glossaries, because their headings are // Special handling for glossaries, because their headings are
// generated programatically. // generated programatically.
if (page.relativePath.endsWith('/github-glossary.md')) { if (page.relativePath.endsWith('/github-glossary.md')) {
@@ -21,7 +21,11 @@ module.exports = function getEnglishHeadings (page, context) {
const translatedHeadings = getHeadings(page.markdown) const translatedHeadings = getHeadings(page.markdown)
if (!translatedHeadings.length) return if (!translatedHeadings.length) return
const englishPage = findPage(`/en/${page.relativePath.replace(/.md$/, '')}`, context.pages, context.redirects) const englishPage = findPage(
`/en/${page.relativePath.replace(/.md$/, '')}`,
context.pages,
context.redirects
)
if (!englishPage) return if (!englishPage) return
// FIX there may be bugs if English headings are updated before Crowdin syncs up :/ // FIX there may be bugs if English headings are updated before Crowdin syncs up :/
@@ -29,16 +33,18 @@ module.exports = function getEnglishHeadings (page, context) {
if (!englishHeadings.length) return if (!englishHeadings.length) return
// return a map from translation:English // return a map from translation:English
return Object.assign(...translatedHeadings.map((k, i) => ({ return Object.assign(
[k]: englishHeadings[i] ...translatedHeadings.map((k, i) => ({
}))) [k]: englishHeadings[i],
}))
)
} }
function getHeadings (markdown) { function getHeadings(markdown) {
const ast = astFromMarkdown(markdown) const ast = astFromMarkdown(markdown)
const headings = [] const headings = []
visit(ast, node => { visit(ast, (node) => {
if (node.type !== 'heading') return if (node.type !== 'heading') return
if (![2, 3, 4].includes(node.depth)) return if (![2, 3, 4].includes(node.depth)) return
headings.push(toString(node)) headings.push(toString(node))

View File

@@ -1,12 +1,12 @@
const path = require('path') import path from 'path'
const findPage = require('./find-page') import findPage from './find-page.js'
const nonEnterpriseDefaultVersion = require('./non-enterprise-default-version') import nonEnterpriseDefaultVersion from './non-enterprise-default-version.js'
const removeFPTFromPath = require('./remove-fpt-from-path') import removeFPTFromPath from './remove-fpt-from-path.js'
const renderContent = require('./render-content') import renderContent from './render-content/index.js'
// rawLinks is an array of paths: [ '/foo' ] // rawLinks is an array of paths: [ '/foo' ]
// we need to convert it to an array of localized objects: [ { href: '/en/foo', title: 'Foo', intro: 'Description here' } ] // we need to convert it to an array of localized objects: [ { href: '/en/foo', title: 'Foo', intro: 'Description here' } ]
module.exports = async (rawLinks, context, option = { title: true, intro: true }) => { export default async (rawLinks, context, option = { title: true, intro: true }) => {
if (!rawLinks) return if (!rawLinks) return
if (typeof rawLinks === 'string') { if (typeof rawLinks === 'string') {
@@ -30,10 +30,11 @@ module.exports = async (rawLinks, context, option = { title: true, intro: true }
const processLink = async (link, context, option) => { const processLink = async (link, context, option) => {
const opts = { textOnly: true, encodeEntities: true } const opts = { textOnly: true, encodeEntities: true }
// Parse the link in case it includes Liquid conditionals // Parse the link in case it includes Liquid conditionals
const linkPath = await renderContent((link.href || link), context, opts) const linkPath = await renderContent(link.href || link, context, opts)
if (!linkPath) return null if (!linkPath) return null
const version = context.currentVersion === 'homepage' ? nonEnterpriseDefaultVersion : context.currentVersion const version =
context.currentVersion === 'homepage' ? nonEnterpriseDefaultVersion : context.currentVersion
const href = removeFPTFromPath(path.join('/', context.currentLanguage, version, linkPath)) const href = removeFPTFromPath(path.join('/', context.currentLanguage, version, linkPath))
const linkedPage = findPage(href, context.pages, context.redirects) const linkedPage = findPage(href, context.pages, context.redirects)

View File

@@ -1,15 +1,15 @@
import patterns from './patterns.js'
// This module searches a string for references to data objects // This module searches a string for references to data objects
// It finds all references matching {{site.data.*}} and return an array of them // It finds all references matching {{site.data.*}} and return an array of them
const patterns = require('./patterns')
module.exports = function getLiquidDataReferences (text) { export default function getLiquidDataReferences(text) {
return (text.match(patterns.dataReference) || []) return (text.match(patterns.dataReference) || []).map((ref) => {
.map(ref => { const cleaned = ref
const cleaned = ref.replace(/\.\.\//g, '') .replace(/\.\.\//g, '')
.replace('{% data', '') .replace('{% data', '')
.replace('%}', '') .replace('%}', '')
.trim() .trim()
return `site.data.${cleaned}` return `site.data.${cleaned}`
}) })
} }

View File

@@ -1,11 +1,13 @@
const cheerio = require('cheerio') import cheerio from 'cheerio'
const { range } = require('lodash') import { range } from 'lodash-es'
module.exports = function getMiniTocItems (html, maxHeadingLevel = 2, headingScope = '') { export default function getMiniTocItems(html, maxHeadingLevel = 2, headingScope = '') {
const $ = cheerio.load(html, { xmlMode: true }) const $ = cheerio.load(html, { xmlMode: true })
// eg `h2, h3` or `h2, h3, h4` depending on maxHeadingLevel // eg `h2, h3` or `h2, h3, h4` depending on maxHeadingLevel
const selector = range(2, maxHeadingLevel + 1).map(num => `${headingScope} h${num}`).join(', ') const selector = range(2, maxHeadingLevel + 1)
.map((num) => `${headingScope} h${num}`)
.join(', ')
const headings = $(selector) const headings = $(selector)
// return an array of objects containing each heading's contents, level, and optional platform. // return an array of objects containing each heading's contents, level, and optional platform.
@@ -15,13 +17,13 @@ module.exports = function getMiniTocItems (html, maxHeadingLevel = 2, headingSco
// - `platform` to show or hide platform-specific headings via client JS // - `platform` to show or hide platform-specific headings via client JS
const items = headings const items = headings
.get() .get()
.filter(item => { .filter((item) => {
if (!item.parent || !item.parent.attribs) return true if (!item.parent || !item.parent.attribs) return true
// Hide any items that belong to a hidden div // Hide any items that belong to a hidden div
const { attribs } = item.parent const { attribs } = item.parent
return !('hidden' in attribs) return !('hidden' in attribs)
}) })
.map(item => { .map((item) => {
// remove any <span> tags including their content // remove any <span> tags including their content
$('span').remove() $('span').remove()
@@ -36,8 +38,8 @@ module.exports = function getMiniTocItems (html, maxHeadingLevel = 2, headingSco
// determine indentation level for each item based on the largest // determine indentation level for each item based on the largest
// heading level in the current article // heading level in the current article
const largestHeadingLevel = items.map(item => item.headingLevel).sort()[0] const largestHeadingLevel = items.map((item) => item.headingLevel).sort()[0]
items.forEach(item => { items.forEach((item) => {
item.indentationLevel = item.headingLevel - largestHeadingLevel item.indentationLevel = item.headingLevel - largestHeadingLevel
}) })

View File

@@ -1,13 +1,13 @@
const { productMap } = require('./all-products') import { productMap } from './all-products.js'
const productTOCs = Object.values(productMap) const productTOCs = Object.values(productMap)
.filter(product => !product.external) .filter((product) => !product.external)
.map(product => product.toc.replace('content/', '')) .map((product) => product.toc.replace('content/', ''))
const linkString = /{% [^}]*?link.*? \/(.*?) ?%}/m const linkString = /{% [^}]*?link.*? \/(.*?) ?%}/m
const linksArray = new RegExp(linkString.source, 'gm') const linksArray = new RegExp(linkString.source, 'gm')
// return an array of objects like { type: 'category|maptopic|article', href: 'path' } // return an array of objects like { type: 'category|maptopic|article', href: 'path' }
module.exports = function getTocItems (page) { export default function getTocItems(page) {
// only process product and category tocs // only process product and category tocs
if (!page.relativePath.endsWith('index.md')) return if (!page.relativePath.endsWith('index.md')) return
if (page.relativePath === 'index.md') return if (page.relativePath === 'index.md') return
@@ -23,14 +23,16 @@ module.exports = function getTocItems (page) {
return [] return []
} }
return rawItems.map(item => { return rawItems.map((item) => {
const tocItem = {} const tocItem = {}
// a product's toc items are always categories // a product's toc items are always categories
// whereas a category's toc items can be either maptopics or articles // whereas a category's toc items can be either maptopics or articles
tocItem.type = productTOCs.includes(page.relativePath) tocItem.type = productTOCs.includes(page.relativePath)
? 'category' ? 'category'
: item.includes('topic_') ? 'maptopic' : 'article' : item.includes('topic_')
? 'maptopic'
: 'article'
tocItem.href = item.match(linkString)[1] tocItem.href = item.match(linkString)[1]

View File

@@ -1,4 +1,27 @@
[ [
{
"schemaChanges": [
{
"title": "The GraphQL schema includes these changes:",
"changes": [
"Type `SponsorsActivity` was added",
"Type `SponsorsActivityAction` was added",
"Type `SponsorsActivityConnection` was added",
"Type `SponsorsActivityEdge` was added",
"Type `SponsorsActivityOrder` was added",
"Type `SponsorsActivityOrderField` was added",
"Type `SponsorsActivityPeriod` was added",
"Field `sponsorsActivities` was added to object type `Organization`",
"Enum value `OTHER` was added to enum `SecurityAdvisoryEcosystem`",
"Field `sponsorsActivities` was added to interface `Sponsorable`",
"Field `sponsorsActivities` was added to object type `User`"
]
}
],
"previewChanges": [],
"upcomingChanges": [],
"date": "2021-07-13"
},
{ {
"schemaChanges": [ "schemaChanges": [
{ {

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -33655,6 +33655,77 @@
"kind": "objects", "kind": "objects",
"href": "/graphql/reference/objects#organizationidentityprovider" "href": "/graphql/reference/objects#organizationidentityprovider"
}, },
{
"name": "sponsorsActivities",
"description": "<p>Events involving this sponsorable, such as new sponsorships.</p>",
"type": "SponsorsActivityConnection!",
"id": "sponsorsactivityconnection",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorsactivityconnection",
"arguments": [
{
"name": "after",
"description": "<p>Returns the elements in the list that come after the specified cursor.</p>",
"type": {
"name": "String",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
}
},
{
"name": "before",
"description": "<p>Returns the elements in the list that come before the specified cursor.</p>",
"type": {
"name": "String",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
}
},
{
"name": "first",
"description": "<p>Returns the first <em>n</em> elements from the list.</p>",
"type": {
"name": "Int",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
},
{
"name": "last",
"description": "<p>Returns the last <em>n</em> elements from the list.</p>",
"type": {
"name": "Int",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
},
{
"name": "orderBy",
"description": "<p>Ordering options for activity returned from the connection.</p>",
"type": {
"name": "SponsorsActivityOrder",
"id": "sponsorsactivityorder",
"kind": "input-objects",
"href": "/graphql/reference/input-objects#sponsorsactivityorder"
}
},
{
"name": "period",
"defaultValue": "MONTH",
"description": "<p>Filter activities returned to only those that occurred in a given time range.</p>",
"type": {
"name": "SponsorsActivityPeriod",
"id": "sponsorsactivityperiod",
"kind": "enums",
"href": "/graphql/reference/enums#sponsorsactivityperiod"
}
}
]
},
{ {
"name": "sponsorsListing", "name": "sponsorsListing",
"description": "<p>The GitHub Sponsors listing for this user or organization.</p>", "description": "<p>The GitHub Sponsors listing for this user or organization.</p>",
@@ -51915,6 +51986,136 @@
} }
] ]
}, },
{
"name": "SponsorsActivity",
"kind": "objects",
"id": "sponsorsactivity",
"href": "/graphql/reference/objects#sponsorsactivity",
"description": "<p>An event related to sponsorship activity.</p>",
"implements": [
{
"name": "Node",
"id": "node",
"href": "/graphql/reference/interfaces#node"
}
],
"fields": [
{
"name": "action",
"description": "<p>What action this activity indicates took place.</p>",
"type": "SponsorsActivityAction!",
"id": "sponsorsactivityaction",
"kind": "enums",
"href": "/graphql/reference/enums#sponsorsactivityaction"
},
{
"name": "previousSponsorsTier",
"description": "<p>The tier that the sponsorship used to use, for tier change events.</p>",
"type": "SponsorsTier",
"id": "sponsorstier",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorstier"
},
{
"name": "sponsor",
"description": "<p>The user or organization who triggered this activity and was/is sponsoring the sponsorable.</p>",
"type": "Sponsor",
"id": "sponsor",
"kind": "unions",
"href": "/graphql/reference/unions#sponsor"
},
{
"name": "sponsorable",
"description": "<p>The user or organization that is being sponsored, the maintainer.</p>",
"type": "Sponsorable!",
"id": "sponsorable",
"kind": "interfaces",
"href": "/graphql/reference/interfaces#sponsorable"
},
{
"name": "sponsorsTier",
"description": "<p>The associated sponsorship tier.</p>",
"type": "SponsorsTier",
"id": "sponsorstier",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorstier"
},
{
"name": "timestamp",
"description": "<p>The timestamp of this event.</p>",
"type": "DateTime",
"id": "datetime",
"kind": "scalars",
"href": "/graphql/reference/scalars#datetime"
}
]
},
{
"name": "SponsorsActivityConnection",
"kind": "objects",
"id": "sponsorsactivityconnection",
"href": "/graphql/reference/objects#sponsorsactivityconnection",
"description": "<p>The connection type for SponsorsActivity.</p>",
"fields": [
{
"name": "edges",
"description": "<p>A list of edges.</p>",
"type": "[SponsorsActivityEdge]",
"id": "sponsorsactivityedge",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorsactivityedge"
},
{
"name": "nodes",
"description": "<p>A list of nodes.</p>",
"type": "[SponsorsActivity]",
"id": "sponsorsactivity",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorsactivity"
},
{
"name": "pageInfo",
"description": "<p>Information to aid in pagination.</p>",
"type": "PageInfo!",
"id": "pageinfo",
"kind": "objects",
"href": "/graphql/reference/objects#pageinfo"
},
{
"name": "totalCount",
"description": "<p>Identifies the total count of items in the connection.</p>",
"type": "Int!",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
]
},
{
"name": "SponsorsActivityEdge",
"kind": "objects",
"id": "sponsorsactivityedge",
"href": "/graphql/reference/objects#sponsorsactivityedge",
"description": "<p>An edge in a connection.</p>",
"fields": [
{
"name": "cursor",
"description": "<p>A cursor for use in pagination.</p>",
"type": "String!",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
},
{
"name": "node",
"description": "<p>The item at the end of the edge.</p>",
"type": "SponsorsActivity",
"id": "sponsorsactivity",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorsactivity"
}
]
},
{ {
"name": "SponsorsGoal", "name": "SponsorsGoal",
"kind": "objects", "kind": "objects",
@@ -52120,7 +52321,7 @@
}, },
{ {
"name": "closestLesserValueTier", "name": "closestLesserValueTier",
"description": "<p>Get a different tier for this tier's maintainer that is at the same frequency\nas this tier but with a lesser cost. Returns the published tier with the\nmonthly price closest to this tier's without going over.</p>", "description": "<p>Get a different tier for this tier's maintainer that is at the same frequency\nas this tier but with an equal or lesser cost. Returns the published tier with\nthe monthly price closest to this tier's without going over.</p>",
"type": "SponsorsTier", "type": "SponsorsTier",
"id": "sponsorstier", "id": "sponsorstier",
"kind": "objects", "kind": "objects",
@@ -59162,6 +59363,77 @@
} }
] ]
}, },
{
"name": "sponsorsActivities",
"description": "<p>Events involving this sponsorable, such as new sponsorships.</p>",
"type": "SponsorsActivityConnection!",
"id": "sponsorsactivityconnection",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorsactivityconnection",
"arguments": [
{
"name": "after",
"description": "<p>Returns the elements in the list that come after the specified cursor.</p>",
"type": {
"name": "String",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
}
},
{
"name": "before",
"description": "<p>Returns the elements in the list that come before the specified cursor.</p>",
"type": {
"name": "String",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
}
},
{
"name": "first",
"description": "<p>Returns the first <em>n</em> elements from the list.</p>",
"type": {
"name": "Int",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
},
{
"name": "last",
"description": "<p>Returns the last <em>n</em> elements from the list.</p>",
"type": {
"name": "Int",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
},
{
"name": "orderBy",
"description": "<p>Ordering options for activity returned from the connection.</p>",
"type": {
"name": "SponsorsActivityOrder",
"id": "sponsorsactivityorder",
"kind": "input-objects",
"href": "/graphql/reference/input-objects#sponsorsactivityorder"
}
},
{
"name": "period",
"defaultValue": "MONTH",
"description": "<p>Filter activities returned to only those that occurred in a given time range.</p>",
"type": {
"name": "SponsorsActivityPeriod",
"id": "sponsorsactivityperiod",
"kind": "enums",
"href": "/graphql/reference/enums#sponsorsactivityperiod"
}
}
]
},
{ {
"name": "sponsorsListing", "name": "sponsorsListing",
"description": "<p>The GitHub Sponsors listing for this user or organization.</p>", "description": "<p>The GitHub Sponsors listing for this user or organization.</p>",
@@ -62766,6 +63038,77 @@
"kind": "scalars", "kind": "scalars",
"href": "/graphql/reference/scalars#boolean" "href": "/graphql/reference/scalars#boolean"
}, },
{
"name": "sponsorsActivities",
"description": "<p>Events involving this sponsorable, such as new sponsorships.</p>",
"type": "SponsorsActivityConnection!",
"id": "sponsorsactivityconnection",
"kind": "objects",
"href": "/graphql/reference/objects#sponsorsactivityconnection",
"arguments": [
{
"name": "after",
"description": "<p>Returns the elements in the list that come after the specified cursor.</p>",
"type": {
"name": "String",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
}
},
{
"name": "before",
"description": "<p>Returns the elements in the list that come before the specified cursor.</p>",
"type": {
"name": "String",
"id": "string",
"kind": "scalars",
"href": "/graphql/reference/scalars#string"
}
},
{
"name": "first",
"description": "<p>Returns the first <em>n</em> elements from the list.</p>",
"type": {
"name": "Int",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
},
{
"name": "last",
"description": "<p>Returns the last <em>n</em> elements from the list.</p>",
"type": {
"name": "Int",
"id": "int",
"kind": "scalars",
"href": "/graphql/reference/scalars#int"
}
},
{
"name": "orderBy",
"description": "<p>Ordering options for activity returned from the connection.</p>",
"type": {
"name": "SponsorsActivityOrder",
"id": "sponsorsactivityorder",
"kind": "input-objects",
"href": "/graphql/reference/input-objects#sponsorsactivityorder"
}
},
{
"name": "period",
"defaultValue": "MONTH",
"description": "<p>Filter activities returned to only those that occurred in a given time range.</p>",
"type": {
"name": "SponsorsActivityPeriod",
"id": "sponsorsactivityperiod",
"kind": "enums",
"href": "/graphql/reference/enums#sponsorsactivityperiod"
}
}
]
},
{ {
"name": "sponsorsListing", "name": "sponsorsListing",
"description": "<p>The GitHub Sponsors listing for this user or organization.</p>", "description": "<p>The GitHub Sponsors listing for this user or organization.</p>",
@@ -66358,6 +66701,10 @@
"name": "NUGET", "name": "NUGET",
"description": "<p>.NET packages hosted at the NuGet Gallery.</p>" "description": "<p>.NET packages hosted at the NuGet Gallery.</p>"
}, },
{
"name": "OTHER",
"description": "<p>Applications, runtimes, operating systems and other kinds of software.</p>"
},
{ {
"name": "PIP", "name": "PIP",
"description": "<p>Python packages hosted at PyPI.org.</p>" "description": "<p>Python packages hosted at PyPI.org.</p>"
@@ -66453,6 +66800,77 @@
} }
] ]
}, },
{
"name": "SponsorsActivityAction",
"kind": "enums",
"id": "sponsorsactivityaction",
"href": "/graphql/reference/enums#sponsorsactivityaction",
"description": "<p>The possible actions that GitHub Sponsors activities can represent.</p>",
"values": [
{
"name": "CANCELLED_SPONSORSHIP",
"description": "<p>The activity was cancelling a sponsorship.</p>"
},
{
"name": "NEW_SPONSORSHIP",
"description": "<p>The activity was starting a sponsorship.</p>"
},
{
"name": "PENDING_CHANGE",
"description": "<p>The activity was scheduling a downgrade or cancellation.</p>"
},
{
"name": "REFUND",
"description": "<p>The activity was funds being refunded to the sponsor or GitHub.</p>"
},
{
"name": "SPONSOR_MATCH_DISABLED",
"description": "<p>The activity was disabling matching for a previously matched sponsorship.</p>"
},
{
"name": "TIER_CHANGE",
"description": "<p>The activity was changing the sponsorship tier, either directly by the sponsor or by a scheduled/pending change.</p>"
}
]
},
{
"name": "SponsorsActivityOrderField",
"kind": "enums",
"id": "sponsorsactivityorderfield",
"href": "/graphql/reference/enums#sponsorsactivityorderfield",
"description": "<p>Properties by which GitHub Sponsors activity connections can be ordered.</p>",
"values": [
{
"name": "TIMESTAMP",
"description": "<p>Order activities by when they happened.</p>"
}
]
},
{
"name": "SponsorsActivityPeriod",
"kind": "enums",
"id": "sponsorsactivityperiod",
"href": "/graphql/reference/enums#sponsorsactivityperiod",
"description": "<p>The possible time periods for which Sponsors activities can be requested.</p>",
"values": [
{
"name": "ALL",
"description": "<p>Don't restrict the activity to any date range, include all activity.</p>"
},
{
"name": "DAY",
"description": "<p>The previous calendar day.</p>"
},
{
"name": "MONTH",
"description": "<p>The previous thirty days.</p>"
},
{
"name": "WEEK",
"description": "<p>The previous seven days.</p>"
}
]
},
{ {
"name": "SponsorsGoalKind", "name": "SponsorsGoalKind",
"kind": "enums", "kind": "enums",
@@ -74266,6 +74684,31 @@
} }
] ]
}, },
{
"name": "SponsorsActivityOrder",
"kind": "inputObjects",
"id": "sponsorsactivityorder",
"href": "/graphql/reference/input-objects#sponsorsactivityorder",
"description": "<p>Ordering options for GitHub Sponsors activity connections.</p>",
"inputFields": [
{
"name": "direction",
"description": "<p>The ordering direction.</p>",
"type": "OrderDirection!",
"id": "orderdirection",
"kind": "enums",
"href": "/graphql/reference/enums#orderdirection"
},
{
"name": "field",
"description": "<p>The field to order activity by.</p>",
"type": "SponsorsActivityOrderField!",
"id": "sponsorsactivityorderfield",
"kind": "enums",
"href": "/graphql/reference/enums#sponsorsactivityorderfield"
}
]
},
{ {
"name": "SponsorsTierOrder", "name": "SponsorsTierOrder",
"kind": "inputObjects", "kind": "inputObjects",

View File

@@ -61530,6 +61530,10 @@
"name": "NUGET", "name": "NUGET",
"description": "<p>.NET packages hosted at the NuGet Gallery.</p>" "description": "<p>.NET packages hosted at the NuGet Gallery.</p>"
}, },
{
"name": "OTHER",
"description": "<p>Applications, runtimes, operating systems and other kinds of software.</p>"
},
{ {
"name": "PIP", "name": "PIP",
"description": "<p>Python packages hosted at PyPI.org.</p>" "description": "<p>Python packages hosted at PyPI.org.</p>"

View File

@@ -2,70 +2,70 @@
// of the data in lib/graphql/static/*.json // of the data in lib/graphql/static/*.json
// PREVIEWS // PREVIEWS
const previewsValidator = { export const previewsValidator = {
properties: { properties: {
title: { title: {
type: 'string', type: 'string',
required: true required: true,
}, },
description: { description: {
type: 'string', type: 'string',
required: true required: true,
}, },
toggled_by: { toggled_by: {
type: 'string', type: 'string',
required: true required: true,
}, },
toggled_on: { toggled_on: {
type: 'array', type: 'array',
required: true required: true,
}, },
owning_teams: { owning_teams: {
type: 'array', type: 'array',
required: true required: true,
}, },
accept_header: { accept_header: {
type: 'string', type: 'string',
required: true required: true,
}, },
href: { href: {
type: 'string', type: 'string',
required: true required: true,
} },
} },
} }
// UPCOMING CHANGES // UPCOMING CHANGES
const upcomingChangesValidator = { export const upcomingChangesValidator = {
properties: { properties: {
location: { location: {
type: 'string', type: 'string',
required: true required: true,
}, },
description: { description: {
type: 'string', type: 'string',
required: true required: true,
}, },
reason: { reason: {
type: 'string', type: 'string',
required: true required: true,
}, },
date: { date: {
type: 'string', type: 'string',
required: true, required: true,
pattern: /^\d{4}-\d{2}-\d{2}$/ pattern: /^\d{4}-\d{2}-\d{2}$/,
}, },
criticality: { criticality: {
type: 'string', type: 'string',
required: true, required: true,
pattern: '(breaking|dangerous)' pattern: '(breaking|dangerous)',
}, },
owner: { owner: {
type: 'string', type: 'string',
required: true, required: true,
pattern: /^[\S]*$/ pattern: /^[\S]*$/,
} },
} },
} }
// SCHEMAS // SCHEMAS
@@ -74,38 +74,38 @@ const coreProps = {
properties: { properties: {
name: { name: {
type: 'string', type: 'string',
required: true required: true,
}, },
type: { type: {
type: 'string', type: 'string',
required: true required: true,
}, },
kind: { kind: {
type: 'string', type: 'string',
required: true required: true,
}, },
id: { id: {
type: 'string', type: 'string',
required: true required: true,
}, },
href: { href: {
type: 'string', type: 'string',
required: true required: true,
}, },
description: { description: {
type: 'string', type: 'string',
required: true required: true,
}, },
isDeprecated: { isDeprecated: {
type: 'boolean', type: 'boolean',
required: false required: false,
}, },
preview: { preview: {
type: 'object', type: 'object',
required: false, required: false,
properties: previewsValidator.properties properties: previewsValidator.properties,
} },
} },
} }
// some GraphQL schema members have the core properties plus an 'args' object // some GraphQL schema members have the core properties plus an 'args' object
@@ -114,13 +114,13 @@ const corePropsPlusArgs = dup(coreProps)
corePropsPlusArgs.properties.args = { corePropsPlusArgs.properties.args = {
type: 'array', type: 'array',
required: false, required: false,
properties: coreProps.properties properties: coreProps.properties,
} }
// the args object can have defaultValue prop // the args object can have defaultValue prop
corePropsPlusArgs.properties.args.properties.defaultValue = { corePropsPlusArgs.properties.args.properties.defaultValue = {
type: 'boolean', type: 'boolean',
required: false required: false,
} }
const corePropsNoType = dup(coreProps) const corePropsNoType = dup(coreProps)
@@ -139,13 +139,13 @@ const mutations = dup(corePropsNoType)
mutations.properties.inputFields = { mutations.properties.inputFields = {
type: 'array', type: 'array',
required: true, required: true,
properties: corePropsNoDescription.properties properties: corePropsNoDescription.properties,
} }
mutations.properties.returnFields = { mutations.properties.returnFields = {
type: 'array', type: 'array',
required: true, required: true,
properties: coreProps.properties properties: coreProps.properties,
} }
// OBJECTS // OBJECTS
@@ -154,7 +154,7 @@ const objects = dup(corePropsNoType)
objects.properties.fields = { objects.properties.fields = {
type: 'array', type: 'array',
required: true, required: true,
properties: corePropsPlusArgs.properties properties: corePropsPlusArgs.properties,
} }
objects.properties.implements = { objects.properties.implements = {
@@ -163,17 +163,17 @@ objects.properties.implements = {
properties: { properties: {
name: { name: {
type: 'string', type: 'string',
required: true required: true,
}, },
id: { id: {
type: 'string', type: 'string',
required: true required: true,
}, },
href: { href: {
type: 'string', type: 'string',
required: true required: true,
} },
} },
} }
// INTERFACES // INTERFACES
@@ -182,7 +182,7 @@ const interfaces = dup(corePropsNoType)
interfaces.properties.fields = { interfaces.properties.fields = {
type: 'array', type: 'array',
required: true, required: true,
properties: corePropsPlusArgs.properties properties: corePropsPlusArgs.properties,
} }
// ENUMS // ENUMS
@@ -194,13 +194,13 @@ enums.properties.values = {
properties: { properties: {
name: { name: {
type: 'string', type: 'string',
required: true required: true,
}, },
description: { description: {
type: 'string', type: 'string',
required: true required: true,
} },
} },
} }
// UNIONS // UNIONS
@@ -212,17 +212,17 @@ unions.properties.possibleTypes = {
properties: { properties: {
name: { name: {
type: 'string', type: 'string',
required: true required: true,
}, },
id: { id: {
type: 'string', type: 'string',
required: true required: true,
}, },
href: { href: {
type: 'string', type: 'string',
required: true required: true,
} },
} },
} }
// INPUT OBJECTS // INPUT OBJECTS
@@ -231,29 +231,25 @@ const inputObjects = dup(corePropsNoType)
inputObjects.properties.inputFields = { inputObjects.properties.inputFields = {
type: 'array', type: 'array',
required: true, required: true,
properties: coreProps.properties properties: coreProps.properties,
} }
// SCALARS // SCALARS
const scalars = dup(corePropsNoType) const scalars = dup(corePropsNoType)
scalars.properties.kind.required = false scalars.properties.kind.required = false
function dup (obj) { function dup(obj) {
return JSON.parse(JSON.stringify(obj)) return JSON.parse(JSON.stringify(obj))
} }
module.exports = { export const schemaValidator = {
schemaValidator: { queryConnections,
queryConnections, queryFields,
queryFields, mutations,
mutations, objects,
objects, interfaces,
interfaces, enums,
enums, unions,
unions, inputObjects,
inputObjects, scalars,
scalars
},
previewsValidator,
upcomingChangesValidator
} }

View File

@@ -1,16 +1,18 @@
const FailBot = require('./failbot') import FailBot from './failbot.js'
process.on('uncaughtException', async err => { process.on('uncaughtException', async (err) => {
if (err.code === 'MODULE_NOT_FOUND') { if (err.code === 'MODULE_NOT_FOUND') {
console.error('\n\n🔥 Uh oh! It looks you are missing a required npm module.') console.error('\n\n🔥 Uh oh! It looks you are missing a required npm module.')
console.error('Please run `npm install` to make sure you have all the required dependencies.\n\n') console.error(
'Please run `npm install` to make sure you have all the required dependencies.\n\n'
)
} }
console.error(err) console.error(err)
await FailBot.report(err) await FailBot.report(err)
}) })
process.on('unhandledRejection', async err => { process.on('unhandledRejection', async (err) => {
console.error(err) console.error(err)
await FailBot.report(err) await FailBot.report(err)
}) })

View File

@@ -1,7 +1,7 @@
const crypto = require('crypto') import crypto from 'crypto'
const fetch = require('node-fetch') import fetch from 'node-fetch'
const statsd = require('../lib/statsd') import statsd from '../lib/statsd.js'
const FailBot = require('../lib/failbot') import FailBot from '../lib/failbot.js'
const SCHEMAS = { const SCHEMAS = {
page: 'docs.v0.PageEvent', page: 'docs.v0.PageEvent',
@@ -14,11 +14,11 @@ const SCHEMAS = {
redirect: 'docs.v0.RedirectEvent', redirect: 'docs.v0.RedirectEvent',
clipboard: 'docs.v0.ClipboardEvent', clipboard: 'docs.v0.ClipboardEvent',
print: 'docs.v0.PrintEvent', print: 'docs.v0.PrintEvent',
preference: 'docs.v0.PreferenceEvent' preference: 'docs.v0.PreferenceEvent',
} }
module.exports = class Hydro { export default class Hydro {
constructor ({ secret, endpoint } = {}) { constructor({ secret, endpoint } = {}) {
this.secret = secret || process.env.HYDRO_SECRET this.secret = secret || process.env.HYDRO_SECRET
this.endpoint = endpoint || process.env.HYDRO_ENDPOINT this.endpoint = endpoint || process.env.HYDRO_ENDPOINT
this.schemas = SCHEMAS this.schemas = SCHEMAS
@@ -27,7 +27,7 @@ module.exports = class Hydro {
/** /**
* Can check if it can actually send to Hydro * Can check if it can actually send to Hydro
*/ */
maySend () { maySend() {
return Boolean(this.secret && this.endpoint) return Boolean(this.secret && this.endpoint)
} }
@@ -36,10 +36,8 @@ module.exports = class Hydro {
* to authenticate with Hydro * to authenticate with Hydro
* @param {string} body * @param {string} body
*/ */
generatePayloadHmac (body) { generatePayloadHmac(body) {
return crypto.createHmac('sha256', this.secret) return crypto.createHmac('sha256', this.secret).update(body).digest('hex')
.update(body)
.digest('hex')
} }
/** /**
@@ -47,7 +45,7 @@ module.exports = class Hydro {
* @param {string} schema * @param {string} schema
* @param {any} value * @param {any} value
*/ */
async publish (schema, value) { async publish(schema, value) {
return this.publishMany([{ schema, value }]) return this.publishMany([{ schema, value }])
} }
@@ -55,25 +53,26 @@ module.exports = class Hydro {
* Publish multiple events to Hydro * Publish multiple events to Hydro
* @param {[{ schema: string, value: any }]} events * @param {[{ schema: string, value: any }]} events
*/ */
async publishMany (events) { async publishMany(events) {
const body = JSON.stringify({ const body = JSON.stringify({
events: events.map(({ schema, value }) => ({ events: events.map(({ schema, value }) => ({
schema, schema,
value: JSON.stringify(value), // We must double-encode the value property value: JSON.stringify(value), // We must double-encode the value property
cluster: 'potomac' // We only have ability to publish externally to potomac cluster cluster: 'potomac', // We only have ability to publish externally to potomac cluster
})) })),
}) })
const token = this.generatePayloadHmac(body) const token = this.generatePayloadHmac(body)
const doFetch = () => fetch(this.endpoint, { const doFetch = () =>
method: 'POST', fetch(this.endpoint, {
body, method: 'POST',
headers: { body,
Authorization: `Hydro ${token}`, headers: {
'Content-Type': 'application/json', Authorization: `Hydro ${token}`,
'X-Hydro-App': 'docs-production' 'Content-Type': 'application/json',
} 'X-Hydro-App': 'docs-production',
}) },
})
const res = await statsd.asyncTimer(doFetch, 'hydro.response_time')() const res = await statsd.asyncTimer(doFetch, 'hydro.response_time')()
@@ -88,13 +87,17 @@ module.exports = class Hydro {
FailBot.report(err, { FailBot.report(err, {
hydroStatus: res.status, hydroStatus: res.status,
hydroText: res.statusText hydroText: res.statusText,
}) })
// If the Hydro request failed as an "Unprocessable Entity", log it for diagnostics // If the Hydro request failed as an "Unprocessable Entity", log it for diagnostics
if (res.status === 422) { if (res.status === 422) {
const failures = await res.json() const failures = await res.json()
console.error(`Hydro schema validation failed:\n - Request: ${body}\n - Failures: ${JSON.stringify(failures)}`) console.error(
`Hydro schema validation failed:\n - Request: ${body}\n - Failures: ${JSON.stringify(
failures
)}`
)
} }
throw err throw err

View File

@@ -1,7 +1,7 @@
const path = require('path') import path from 'path'
const statsd = require('./statsd') import statsd from './statsd.js'
module.exports = function instrumentMiddleware (middleware, relativePath) { export default function instrumentMiddleware(middleware, relativePath) {
// Requires the file as if it were being required from '../middleware/index.js'. // Requires the file as if it were being required from '../middleware/index.js'.
// This is a little wonky, but let's us write `app.use(instrument(path))` and // This is a little wonky, but let's us write `app.use(instrument(path))` and
// maintain the name of the file, instead of hard-coding it for each middleware. // maintain the name of the file, instead of hard-coding it for each middleware.

View File

@@ -1,15 +1,18 @@
const patterns = require('../lib/patterns') import patterns from '../lib/patterns.js'
const { deprecated } = require('../lib/enterprise-server-releases') import { deprecated } from '../lib/enterprise-server-releases.js'
module.exports = function isArchivedVersion (req) { export default function isArchivedVersion(req) {
// if this is an assets path, use the referrer // if this is an assets path, use the referrer
// if this is a docs path, use the req.path // if this is a docs path, use the req.path
const pathToCheck = patterns.assetPaths.test(req.path) const pathToCheck = patterns.assetPaths.test(req.path) ? req.get('referrer') : req.path
? req.get('referrer')
: req.path
// ignore paths that don't have an enterprise version number // ignore paths that don't have an enterprise version number
if (!(patterns.getEnterpriseVersionNumber.test(pathToCheck) || patterns.getEnterpriseServerNumber.test(pathToCheck))) { if (
!(
patterns.getEnterpriseVersionNumber.test(pathToCheck) ||
patterns.getEnterpriseServerNumber.test(pathToCheck)
)
) {
return {} return {}
} }

View File

@@ -6,7 +6,7 @@ const languages = {
code: 'en', code: 'en',
hreflang: 'en', hreflang: 'en',
dir: '', dir: '',
wip: false wip: false,
}, },
cn: { cn: {
name: 'Simplified Chinese', name: 'Simplified Chinese',
@@ -15,7 +15,7 @@ const languages = {
hreflang: 'zh-Hans', hreflang: 'zh-Hans',
redirectPatterns: [/^\/zh-\w{2}/, /^\/zh/], redirectPatterns: [/^\/zh-\w{2}/, /^\/zh/],
dir: 'translations/zh-CN', dir: 'translations/zh-CN',
wip: false wip: false,
}, },
ja: { ja: {
name: 'Japanese', name: 'Japanese',
@@ -24,7 +24,7 @@ const languages = {
hreflang: 'ja', hreflang: 'ja',
redirectPatterns: [/^\/jp/], redirectPatterns: [/^\/jp/],
dir: 'translations/ja-JP', dir: 'translations/ja-JP',
wip: false wip: false,
}, },
es: { es: {
name: 'Spanish', name: 'Spanish',
@@ -32,7 +32,7 @@ const languages = {
code: 'es', code: 'es',
hreflang: 'es', hreflang: 'es',
dir: 'translations/es-XL', dir: 'translations/es-XL',
wip: false wip: false,
}, },
pt: { pt: {
name: 'Portuguese', name: 'Portuguese',
@@ -40,7 +40,7 @@ const languages = {
code: 'pt', code: 'pt',
hreflang: 'pt', hreflang: 'pt',
dir: 'translations/pt-BR', dir: 'translations/pt-BR',
wip: false wip: false,
}, },
de: { de: {
name: 'German', name: 'German',
@@ -48,15 +48,15 @@ const languages = {
code: 'de', code: 'de',
hreflang: 'de', hreflang: 'de',
dir: 'translations/de-DE', dir: 'translations/de-DE',
wip: true wip: true,
} },
} }
if (process.env.ENABLED_LANGUAGES) { if (process.env.ENABLED_LANGUAGES) {
Object.keys(languages).forEach(code => { Object.keys(languages).forEach((code) => {
if (!process.env.ENABLED_LANGUAGES.includes(code)) delete languages[code] if (!process.env.ENABLED_LANGUAGES.includes(code)) delete languages[code]
}) })
console.log(`ENABLED_LANGUAGES: ${process.env.ENABLED_LANGUAGES}`) console.log(`ENABLED_LANGUAGES: ${process.env.ENABLED_LANGUAGES}`)
} }
module.exports = languages export default languages

View File

@@ -1,18 +1,21 @@
const fs = require('fs') import { fileURLToPath } from 'url'
const path = require('path') import path from 'path'
const walk = require('walk-sync').entries import fs from 'fs'
import xWalkSync from 'walk-sync'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
const walk = xWalkSync.entries
const validLayoutExtensions = ['.md', '.html'] const validLayoutExtensions = ['.md', '.html']
const layoutsDirectory = path.join(__dirname, '../layouts') const layoutsDirectory = path.join(__dirname, '../layouts')
const layouts = {} const layouts = {}
walk(layoutsDirectory, { directories: false }) walk(layoutsDirectory, { directories: false })
.filter(entry => validLayoutExtensions.includes(path.extname(entry.relativePath))) .filter((entry) => validLayoutExtensions.includes(path.extname(entry.relativePath)))
.filter(entry => !entry.relativePath.includes('README')) .filter((entry) => !entry.relativePath.includes('README'))
.forEach(entry => { .forEach((entry) => {
const key = path.basename(entry.relativePath).split('.').slice(0, -1).join('.') const key = path.basename(entry.relativePath).split('.').slice(0, -1).join('.')
const fullPath = path.join(entry.basePath, entry.relativePath) const fullPath = path.join(entry.basePath, entry.relativePath)
const content = fs.readFileSync(fullPath, 'utf8') const content = fs.readFileSync(fullPath, 'utf8')
layouts[key] = content layouts[key] = content
}) })
module.exports = layouts export default layouts

View File

@@ -74,7 +74,7 @@ Each custom tag has the following:
The class and the template should have corresponding names, like `lib/liquid-tags/my-tag.js` and `includes/liquid-tags/my-tag.html` The class and the template should have corresponding names, like `lib/liquid-tags/my-tag.js` and `includes/liquid-tags/my-tag.html`
You must also register the new tag in `lib/render-content.js` with a line like this: You must also register the new tag in `lib/render-content/index.js` with a line like this:
``` ```
renderContent.liquid.registerTag('my_tag', require('./liquid-tags/my-tag')) renderContent.liquid.registerTag('my_tag', require('./liquid-tags/my-tag'))

View File

@@ -1,10 +1,10 @@
const { TokenizationError } = require('liquidjs') import { TokenizationError } from 'liquidjs'
const Syntax = /([a-z0-9/\\_.\-[\]]+)/i const Syntax = /([a-z0-9/\\_.\-[\]]+)/i
const SyntaxHelp = "Syntax Error in 'data' - Valid syntax: data [path]" const SyntaxHelp = "Syntax Error in 'data' - Valid syntax: data [path]"
module.exports = { export default {
parse (tagToken) { parse(tagToken) {
if (!tagToken || !Syntax.test(tagToken.args)) { if (!tagToken || !Syntax.test(tagToken.args)) {
throw new TokenizationError(SyntaxHelp, tagToken) throw new TokenizationError(SyntaxHelp, tagToken)
} }
@@ -12,9 +12,9 @@ module.exports = {
this.path = tagToken.args this.path = tagToken.args
}, },
async render (scope) { async render(scope) {
const value = await this.liquid.evalValue(`site.data.${this.path}`, scope) const value = await this.liquid.evalValue(`site.data.${this.path}`, scope)
if (typeof value !== 'string') return value if (typeof value !== 'string') return value
return this.liquid.parseAndRender(value, scope.environments) return this.liquid.parseAndRender(value, scope.environments)
} },
} }

View File

@@ -1,4 +1,4 @@
const tags = { export const tags = {
mac: '', mac: '',
windows: '', windows: '',
linux: '', linux: '',
@@ -10,39 +10,40 @@ const tags = {
tip: 'border rounded-1 mb-4 p-3 color-border-info color-bg-info f5', tip: 'border rounded-1 mb-4 p-3 color-border-info color-bg-info f5',
note: 'border rounded-1 mb-4 p-3 color-border-info color-bg-info f5', note: 'border rounded-1 mb-4 p-3 color-border-info color-bg-info f5',
warning: 'border rounded-1 mb-4 p-3 color-border-danger color-bg-danger f5', warning: 'border rounded-1 mb-4 p-3 color-border-danger color-bg-danger f5',
danger: 'border rounded-1 mb-4 p-3 color-border-danger color-bg-danger f5' danger: 'border rounded-1 mb-4 p-3 color-border-danger color-bg-danger f5',
} }
const template = '<div class="extended-markdown {{ tagName }} {{ classes }}">{{ output }}</div>' export const template =
'<div class="extended-markdown {{ tagName }} {{ classes }}">{{ output }}</div>'
const ExtendedMarkdown = { export const ExtendedMarkdown = {
type: 'block', type: 'block',
parse (tagToken, remainTokens) { parse(tagToken, remainTokens) {
this.tagName = tagToken.name this.tagName = tagToken.name
this.templates = [] this.templates = []
const stream = this.liquid.parser.parseStream(remainTokens) const stream = this.liquid.parser.parseStream(remainTokens)
stream stream
.on(`tag:end${this.tagName}`, () => stream.stop()) .on(`tag:end${this.tagName}`, () => stream.stop())
.on('template', tpl => this.templates.push(tpl)) .on('template', (tpl) => this.templates.push(tpl))
.on('end', () => { .on('end', () => {
throw new Error(`tag ${tagToken.getText()} not closed`) throw new Error(`tag ${tagToken.getText()} not closed`)
}) })
stream.start() stream.start()
}, },
render: function * (scope) { render: function* (scope) {
const output = yield this.liquid.renderer.renderTemplates(this.templates, scope) const output = yield this.liquid.renderer.renderTemplates(this.templates, scope)
return yield this.liquid.parseAndRender(template, { return yield this.liquid.parseAndRender(template, {
tagName: this.tagName, tagName: this.tagName,
classes: tags[this.tagName], classes: tags[this.tagName],
output output,
}) })
} },
} }
module.exports = { export default {
tags, tags,
ExtendedMarkdown ExtendedMarkdown,
} }

View File

@@ -1,3 +1,3 @@
import link from './link.js'
// For details, see class method in lib/liquid-tags/link.js // For details, see class method in lib/liquid-tags/link.js
const link = require('./link') export default link('homepage-link-with-intro')
module.exports = link('homepage-link-with-intro')

View File

@@ -1,6 +1 @@
module.exports = [ export default ['=', '<', '>', '!=']
'=',
'<',
'>',
'!='
]

View File

@@ -1,8 +1,9 @@
const { isTruthy, Expression, TokenizationError } = require('liquidjs') import { isTruthy, Expression, TokenizationError } from 'liquidjs'
const versionSatisfiesRange = require('../version-satisfies-range') import versionSatisfiesRange from '../version-satisfies-range.js'
const supportedOperators = require('./ifversion-supported-operators') import supportedOperators from './ifversion-supported-operators.js'
const SyntaxHelp = "Syntax Error in 'ifversion' with range - Valid syntax: ifversion [operator] [releaseNumber]" const SyntaxHelp =
"Syntax Error in 'ifversion' with range - Valid syntax: ifversion [operator] [releaseNumber]"
const supportedOperatorsRegex = new RegExp(`[${supportedOperators.join('')}]`) const supportedOperatorsRegex = new RegExp(`[${supportedOperators.join('')}]`)
const releaseRegex = /\d\d?\.\d\d?/ const releaseRegex = /\d\d?\.\d\d?/
@@ -12,23 +13,26 @@ const notRegex = /(?:^|\s)not\s/
// native Liquid `if` block tag. It has special handling for statements like {% ifversion ghes < 3.0 %}, // native Liquid `if` block tag. It has special handling for statements like {% ifversion ghes < 3.0 %},
// using semver to evaluate release numbers instead of doing standard number comparisons, which // using semver to evaluate release numbers instead of doing standard number comparisons, which
// don't work the way we want because they evaluate 3.2 > 3.10 = true. // don't work the way we want because they evaluate 3.2 > 3.10 = true.
module.exports = { export default {
// The following is verbatim from https://github.com/harttle/liquidjs/blob/v9.22.1/src/builtin/tags/if.ts // The following is verbatim from https://github.com/harttle/liquidjs/blob/v9.22.1/src/builtin/tags/if.ts
parse (tagToken, remainTokens) { parse(tagToken, remainTokens) {
this.tagToken = tagToken this.tagToken = tagToken
this.branches = [] this.branches = []
this.elseTemplates = [] this.elseTemplates = []
let p let p
const stream = this.liquid.parser.parseStream(remainTokens) const stream = this.liquid.parser
.on('start', () => this.branches.push({ .parseStream(remainTokens)
cond: tagToken.args, .on('start', () =>
templates: (p = []) this.branches.push({
})) cond: tagToken.args,
templates: (p = []),
})
)
.on('tag:elsif', (token) => { .on('tag:elsif', (token) => {
this.branches.push({ this.branches.push({
cond: token.args, cond: token.args,
templates: p = [] templates: (p = []),
}) })
}) })
.on('tag:else', () => (p = this.elseTemplates)) .on('tag:else', () => (p = this.elseTemplates))
@@ -43,7 +47,7 @@ module.exports = {
// The following is _mostly_ verbatim from https://github.com/harttle/liquidjs/blob/v9.22.1/src/builtin/tags/if.ts // The following is _mostly_ verbatim from https://github.com/harttle/liquidjs/blob/v9.22.1/src/builtin/tags/if.ts
// The additions here are the handleNots() and handleOperators() calls. // The additions here are the handleNots() and handleOperators() calls.
render: function * (ctx, emitter) { render: function* (ctx, emitter) {
const r = this.liquid.renderer const r = this.liquid.renderer
const { operators, operatorsTrie } = this.liquid.options const { operators, operatorsTrie } = this.liquid.options
@@ -61,7 +65,12 @@ module.exports = {
resolvedBranchCond = this.handleOperators(resolvedBranchCond) resolvedBranchCond = this.handleOperators(resolvedBranchCond)
// Use Liquid's native function for the final evaluation. // Use Liquid's native function for the final evaluation.
const cond = yield new Expression(resolvedBranchCond, operators, operatorsTrie, ctx.opts.lenientIf).value(ctx) const cond = yield new Expression(
resolvedBranchCond,
operators,
operatorsTrie,
ctx.opts.lenientIf
).value(ctx)
if (isTruthy(cond, ctx)) { if (isTruthy(cond, ctx)) {
yield r.renderTemplates(branch.templates, ctx, emitter) yield r.renderTemplates(branch.templates, ctx, emitter)
@@ -71,13 +80,13 @@ module.exports = {
yield r.renderTemplates(this.elseTemplates, ctx, emitter) yield r.renderTemplates(this.elseTemplates, ctx, emitter)
}, },
handleNots (resolvedBranchCond) { handleNots(resolvedBranchCond) {
if (!notRegex.test(resolvedBranchCond)) return resolvedBranchCond if (!notRegex.test(resolvedBranchCond)) return resolvedBranchCond
const condArray = resolvedBranchCond.split(' ') const condArray = resolvedBranchCond.split(' ')
// Find the first index in the array that contains "not". // Find the first index in the array that contains "not".
const notIndex = condArray.findIndex(el => el === 'not') const notIndex = condArray.findIndex((el) => el === 'not')
// E.g., ['not', 'fpt'] // E.g., ['not', 'fpt']
const condParts = condArray.slice(notIndex, notIndex + 2) const condParts = condArray.slice(notIndex, notIndex + 2)
@@ -86,7 +95,7 @@ module.exports = {
const versionToEvaluate = condParts[1] const versionToEvaluate = condParts[1]
// If the current version is the version being evaluated in the conditional, // If the current version is the version being evaluated in the conditional,
// that is negated and resolved to false. If it's NOT the version being // that is negated and resolved to false. If it's NOT the version being
// evaluated, that resolves to true. // evaluated, that resolves to true.
const resolvedBoolean = !(versionToEvaluate === this.currentVersionShortName) const resolvedBoolean = !(versionToEvaluate === this.currentVersionShortName)
@@ -101,14 +110,14 @@ module.exports = {
return resolvedBranchCond return resolvedBranchCond
}, },
handleOperators (resolvedBranchCond) { handleOperators(resolvedBranchCond) {
if (!supportedOperatorsRegex.test(resolvedBranchCond)) return resolvedBranchCond if (!supportedOperatorsRegex.test(resolvedBranchCond)) return resolvedBranchCond
// If this conditional contains multiple parts using `or` or `and`, get only the conditional with operators. // If this conditional contains multiple parts using `or` or `and`, get only the conditional with operators.
const condArray = resolvedBranchCond.split(' ') const condArray = resolvedBranchCond.split(' ')
// Find the first index in the array that contains an operator. // Find the first index in the array that contains an operator.
const operatorIndex = condArray.findIndex(el => supportedOperators.find(op => el === op)) const operatorIndex = condArray.findIndex((el) => supportedOperators.find((op) => el === op))
// E.g., ['ghae', '<', '3.1'] // E.g., ['ghae', '<', '3.1']
const condParts = condArray.slice(operatorIndex - 1, operatorIndex + 2) const condParts = condArray.slice(operatorIndex - 1, operatorIndex + 2)
@@ -127,15 +136,17 @@ module.exports = {
if (operator === '!=') { if (operator === '!=') {
// If this is the current version, compare the release numbers. (Our semver package doesn't handle !=.) // If this is the current version, compare the release numbers. (Our semver package doesn't handle !=.)
// If it's not the current version, it's always true. // If it's not the current version, it's always true.
resolvedBoolean = versionShortName === this.currentVersionShortName resolvedBoolean =
? releaseToEvaluate !== this.currentRelease versionShortName === this.currentVersionShortName
: true ? releaseToEvaluate !== this.currentRelease
: true
} else { } else {
// If this is the current version, evaluate the operator using semver. // If this is the current version, evaluate the operator using semver.
// If it's not the current version, it's always false. // If it's not the current version, it's always false.
resolvedBoolean = versionShortName === this.currentVersionShortName resolvedBoolean =
? versionSatisfiesRange(this.currentRelease, `${operator}${releaseToEvaluate}`) versionShortName === this.currentVersionShortName
: false ? versionSatisfiesRange(this.currentRelease, `${operator}${releaseToEvaluate}`)
: false
} }
// Replace syntax like `fpt or ghes < 3.0` with `fpt or true` or `fpt or false`. // Replace syntax like `fpt or ghes < 3.0` with `fpt or true` or `fpt or false`.
@@ -147,5 +158,5 @@ module.exports = {
} }
return resolvedBranchCond return resolvedBranchCond
} },
} }

View File

@@ -1,4 +1,4 @@
const assert = require('assert') import assert from 'assert'
// This class supports a tag that expects two parameters, a data reference and `spaces=NUMBER`: // This class supports a tag that expects two parameters, a data reference and `spaces=NUMBER`:
// //
@@ -10,12 +10,12 @@ const assert = require('assert')
// reference is used inside a block element (like a list or nested list) without // reference is used inside a block element (like a list or nested list) without
// affecting the formatting when the reference is used elsewhere via {{ site.data.foo.bar }}. // affecting the formatting when the reference is used elsewhere via {{ site.data.foo.bar }}.
module.exports = { export default {
parse (tagToken) { parse(tagToken) {
this.markup = tagToken.args.trim() this.markup = tagToken.args.trim()
}, },
async render (scope) { async render(scope) {
// obfuscate first legit space, remove all other spaces, then restore legit space // obfuscate first legit space, remove all other spaces, then restore legit space
// this way we can support spaces=NUMBER as well as spaces = NUMBER // this way we can support spaces=NUMBER as well as spaces = NUMBER
const input = this.markup const input = this.markup
@@ -38,8 +38,8 @@ module.exports = {
if (!value) return if (!value) return
// add spaces to each line // add spaces to each line
const renderedReferenceWithIndent = value.replace(/^/mg, ' '.repeat(numSpaces)) const renderedReferenceWithIndent = value.replace(/^/gm, ' '.repeat(numSpaces))
return this.liquid.parseAndRender(renderedReferenceWithIndent, scope.environments) return this.liquid.parseAndRender(renderedReferenceWithIndent, scope.environments)
} },
} }

Some files were not shown because too many files have changed in this diff Show More