
Merge branch 'main' into move-bookmarklets

Sarah Schneider committed on 2022-04-20 14:43:20 -04:00 (via GitHub)
1398 changed files with 21183 additions and 7351 deletions


@@ -6,22 +6,60 @@
//
// [end-readme]
import fs from 'fs/promises'
import got, { RequestError } from 'got'
import { getContents, getPathsWithMatchingStrings } from './helpers/git-utils.js'
if (!process.env.GITHUB_TOKEN) {
throw new Error('Error! You must have a GITHUB_TOKEN set in an .env file to run this script.')
}
const FORCE_DOWNLOAD = Boolean(JSON.parse(process.env.FORCE_DOWNLOAD || 'false'))
const BATCH_SIZE = JSON.parse(process.env.BATCH_SIZE || '10')
const BASE_URL = process.env.BASE_URL || 'http://localhost:4000'
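// Example invocation (hypothetical; the script path is an assumption,
// it isn't shown in this diff):
//
//   GITHUB_TOKEN=... BASE_URL=http://localhost:4000 BATCH_SIZE=20 \
//     FORCE_DOWNLOAD=true node script/check-github-github-links.js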
main()
// The way `got` does retries:
//
// sleep = 1000 * Math.pow(2, retry - 1) + Math.random() * 100
//
// So, it means:
//
// 1. ~1000ms
// 2. ~2000ms
// 3. ~4000ms
//
// ...if the limit we set is 3.
// Our own timeout, in ./middleware/timeout.js, defaults to 10 seconds.
// So there's no point in allowing more than 3 attempts; a 4th would just
// run into the 10s timeout (i.e. 1000 + 2000 + 4000 + 8000 > 10,000).
const retryConfiguration = {
limit: 3,
}
// According to our Datadog metrics, the *average* time for the
// 'archive_enterprise_proxy' metric is ~70ms (excluding spikes),
// which is much less than 500ms.
const timeoutConfiguration = 1000
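// A quick sanity check of that retry budget (illustrative only, not used
// by the script):
//
//   const delays = [1, 2, 3].map((retry) => 1000 * 2 ** (retry - 1))
//   // -> [1000, 2000, 4000]: ~7s of backoff in total, which fits inside
//   // the 10s middleware timeout; a 4th attempt would add ~8s and overshoot.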
async function main() {
const searchStrings = ['https://docs.github.com', 'GitHub help_url', 'GitHub developer_help_url']
const foundFiles = []
try {
foundFiles.push(...JSON.parse(await fs.readFile('/tmp/foundFiles.json', 'utf-8')))
} catch (error) {
if (!(error.code && error.code === 'ENOENT')) {
throw error
}
}
if (!foundFiles.length || FORCE_DOWNLOAD) {
foundFiles.push(...(await getPathsWithMatchingStrings(searchStrings, 'github', 'github')))
await fs.writeFile('/tmp/foundFiles.json', JSON.stringify(foundFiles, undefined, 2), 'utf-8')
}
const searchFiles = [...new Set(foundFiles)] // filters out dupes
.filter((file) => endsWithAny(['.rb', '.yml', '.yaml', '.txt', '.pdf', '.erb', '.js'], file))
.filter(
(file) =>
@@ -35,79 +73,106 @@ async function main() {
const urlRegEx =
/https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*)/g
try {
docsLinksFiles.push(...JSON.parse(await fs.readFile('/tmp/docsLinksFiles.json', 'utf-8')))
} catch (error) {
if (!(error.code && error.code === 'ENOENT')) {
throw error
}
}
if (!docsLinksFiles.length || FORCE_DOWNLOAD) {
for (const file of searchFiles) {
const contents = await getContents('github', 'github', 'master', file)
if (
contents.includes('https://docs.github.com') ||
contents.includes('GitHub.help_url') ||
contents.includes('GitHub.developer_help_url')
) {
const docsIndices = getIndicesOf('https://docs.github.com', contents)
const helpIndices = getIndicesOf('GitHub.help_url', contents)
helpIndices.push(...getIndicesOf('GitHub.developer_help_url', contents))
if (docsIndices.length > 0) {
docsIndices.forEach((numIndex) => {
// Assuming we don't have links close to 500 characters long
const docsLink = contents.substring(numIndex, numIndex + 500).match(urlRegEx)
const linkURL = new URL(docsLink[0].toString().replace(/[^a-zA-Z0-9]*$|\\n$/g, ''))
const linkPath = linkURL.pathname + linkURL.hash
docsLinksFiles.push({ linkPath, file })
})
}
if (helpIndices.length > 0) {
helpIndices.forEach((numIndex) => {
// There are certain links like #{GitHub.help_url}#{learn_more_path} and #{GitHub.developer_help_url}#{learn_more_path} that we should skip
if (
(contents.substring(numIndex, numIndex + 11) === 'GitHub.help' &&
contents.charAt(numIndex + 16) === '#') ||
(contents.substring(numIndex, numIndex + 16) === 'GitHub.developer' &&
contents.charAt(numIndex + 26) === '#')
) {
return
}
const startSearchIndex = contents.indexOf('/', numIndex)
// Looking for the closest '/' after GitHub.developer_help_url or GitHub.help_url
// There are certain links that don't start with `/` so we want to skip those.
// If there's no `/` within 30 characters of GitHub.help_url/GitHub.developer_help_url, skip
if (startSearchIndex - numIndex < 30) {
const linkPath = contents
.substring(
startSearchIndex,
regexIndexOf(
contents,
/\n|"\)|{@email_tracking_params}|\^http|Ahttps|example|This|TODO"|[{}|"%><.,')* ]/,
startSearchIndex + 1
)
)
.trim()
// Certain specific links can be ignored as well
if (['/deprecation-1'].includes(linkPath)) {
return
}
docsLinksFiles.push({ linkPath, file })
}
})
}
}
}
await fs.writeFile(
'/tmp/docsLinksFiles.json',
JSON.stringify(docsLinksFiles, undefined, 2),
'utf-8'
)
}
const brokenLinks = []
// Break up the long list of URLs to test into batches.
// Math.ceil (not floor) so a trailing partial batch still gets tested.
for (const batch of [...Array(Math.ceil(docsLinksFiles.length / BATCH_SIZE)).keys()]) {
const slice = docsLinksFiles.slice(batch * BATCH_SIZE, batch * BATCH_SIZE + BATCH_SIZE)
await Promise.all(
slice.map(async ({ linkPath, file }) => {
// Constructing the URL isn't strictly necessary, but if it can't be
// constructed this fails in a clear way instead of as a confusing `got` error.
const url = new URL(BASE_URL + linkPath)
try {
await got(url.href, {
retry: retryConfiguration,
timeout: timeoutConfiguration,
})
} catch (error) {
if (error instanceof RequestError) {
brokenLinks.push({ linkPath, file })
} else {
console.warn(`URL when it threw: ${url}`)
throw error
}
}
})
)
}
if (!brokenLinks.length) {
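// For reference: main() also calls three small string helpers (endsWithAny,
// getIndicesOf, regexIndexOf) that live elsewhere in this script, outside
// the diff hunks. Minimal sketches of plausible implementations, inferred
// from the call sites above:
function endsWithAny(suffixes, str) {
  return suffixes.some((suffix) => str.endsWith(suffix))
}
// Every index at which searchStr occurs in str.
function getIndicesOf(searchStr, str) {
  const indices = []
  let index = str.indexOf(searchStr)
  while (index !== -1) {
    indices.push(index)
    index = str.indexOf(searchStr, index + searchStr.length)
  }
  return indices
}
// Like String.prototype.indexOf, but for a RegExp, starting at startPos;
// falling back to str.length lets the substring() call above degrade gracefully.
function regexIndexOf(str, regex, startPos = 0) {
  const offset = str.slice(startPos).search(regex)
  return offset === -1 ? str.length : startPos + offset
}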


@@ -0,0 +1,248 @@
#!/usr/bin/env node
// [start-readme]
//
// Run this script to check if OpenAPI operations match the versions in the content/rest directory
//
// [end-readme]
import fs from 'fs'
import path from 'path'
import { readFile, readdir } from 'fs/promises'
import readFileAsync from '../../lib/readfile-async.js'
import getOperations from './utils/get-operations.js'
import frontmatter from '../../lib/read-frontmatter.js'
import _ from 'lodash'
let LOWEST_SUPPORTED_GHES_VERSION = Number.MAX_VALUE
let HIGHEST_SUPPORTED_GHES_VERSION = Number.MIN_VALUE
const dereferencedPath = path.join(process.cwd(), 'lib/rest/static/dereferenced')
const contentPath = path.join(process.cwd(), 'content/rest')
const schemas = await readdir(dereferencedPath)
const contentFiles = []
const contentCheck = {}
const openAPISchemaCheck = {}
const dereferencedSchemas = {}
export async function getDiffOpenAPIContentRest() {
// Recursively go through the content/rest directory and add all categories/subcategories to contentFiles
throughDirectory(contentPath)
// Add version keys to contentCheck and dereferencedSchema objects
await addVersionKeys()
// Creating the categories/subcategories based on the current content directory
await createCheckContentDirectory()
// Create categories/subcategories from OpenAPI Schemas
await createOpenAPISchemasCheck()
// Get Differences between categories/subcategories from dereferenced schemas and the content/rest directory frontmatter versions
const differences = getDifferences(openAPISchemaCheck, contentCheck)
const errorMessages = {}
if (Object.keys(differences).length > 0) {
for (const schemaName in differences) {
errorMessages[schemaName] = {}
for (const category in differences[schemaName]) {
errorMessages[schemaName][category] = {
contentDir: contentCheck[schemaName][category],
openAPI: openAPISchemaCheck[schemaName][category],
}
}
}
}
return errorMessages
}
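// Hypothetical caller (not part of this file): the returned object maps
// schema name -> category -> { contentDir, openAPI }, so a CI step could do:
//
//   const diffs = await getDiffOpenAPIContentRest()
//   if (Object.keys(diffs).length > 0) {
//     console.error('OpenAPI and content/rest are out of sync:')
//     console.error(JSON.stringify(diffs, null, 2))
//     process.exit(1)
//   }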
async function addVersionKeys() {
for (const filename of schemas) {
const schema = JSON.parse(await readFile(path.join(dereferencedPath, filename)))
const key = filename.replace('.deref.json', '')
contentCheck[key] = {}
if (key.startsWith('ghes')) {
const version = parseFloat(key.split('-')[1]).toFixed(1)
LOWEST_SUPPORTED_GHES_VERSION = Math.min(LOWEST_SUPPORTED_GHES_VERSION, version)
HIGHEST_SUPPORTED_GHES_VERSION = Math.max(HIGHEST_SUPPORTED_GHES_VERSION, version)
}
dereferencedSchemas[key] = schema
}
// GitHub Enterprise Cloud is just github.com because it is not in the OpenAPI schema yet. Once it is, this should be updated
contentCheck['ghec.github.com'] = {}
dereferencedSchemas['ghec.github.com'] = dereferencedSchemas['api.github.com']
}
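// For example, assuming the dereferenced directory holds files like
// api.github.com.deref.json and ghes-3.1.deref.json through ghes-3.4.deref.json:
//
//   'ghes-3.1.deref.json'       -> key 'ghes-3.1', GHES version 3.1
//   'api.github.com.deref.json' -> key 'api.github.com' (no GHES bounds)
//
// which would leave LOWEST/HIGHEST_SUPPORTED_GHES_VERSION at 3.1 and 3.4.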
async function createOpenAPISchemasCheck() {
for (const [schemaName, schema] of Object.entries(dereferencedSchemas)) {
try {
const operationsByCategory = {}
// munge the OpenAPI definitions object into an array of operations objects
const operations = await getOperations(schema)
// process each operation, asynchronously rendering markdown and stuff
await Promise.all(operations.map((operation) => operation.process()))
// Remove any keys not needed in the decorated files
const decoratedOperations = operations.map(
({
tags,
description,
serverUrl,
operationId,
categoryLabel,
subcategoryLabel,
contentType,
externalDocs,
...props
}) => props
)
const categories = [
...new Set(decoratedOperations.map((operation) => operation.category)),
].sort()
categories.forEach((category) => {
operationsByCategory[category] = {}
const categoryOperations = decoratedOperations.filter(
(operation) => operation.category === category
)
categoryOperations
.filter((operation) => !operation.subcategory)
.map((operation) => (operation.subcategory = operation.category))
const subcategories = [
...new Set(categoryOperations.map((operation) => operation.subcategory)),
].sort()
// the first item should be the item that has no subcategory
// e.g., when the subcategory = category
const firstItemIndex = subcategories.indexOf(category)
if (firstItemIndex > -1) {
const firstItem = subcategories.splice(firstItemIndex, 1)[0]
subcategories.unshift(firstItem)
}
operationsByCategory[category] = subcategories.sort()
})
openAPISchemaCheck[schemaName] = operationsByCategory
// One-off edge case where secret-scanning should be removed from FPT. Docs Content #6637
delete openAPISchemaCheck['api.github.com']['secret-scanning']
} catch (error) {
console.error(error)
console.log('🐛 Whoops! Could not get operations by category!')
process.exit(1)
}
}
}
async function createCheckContentDirectory() {
for (const filename of contentFiles) {
const { data } = frontmatter(await readFileAsync(filename, 'utf8'))
const splitPath = filename.split('/')
const subCategory = splitPath[splitPath.length - 1].replace('.md', '')
const category =
splitPath[splitPath.length - 2] === 'rest' ? subCategory : splitPath[splitPath.length - 2]
const versions = data.versions
for (const version in versions) {
const schemaNames = getSchemaName(version, versions[version])
for (const name of schemaNames) {
if (!contentCheck[name][category]) {
contentCheck[name][category] = [subCategory]
} else {
contentCheck[name][category].push(subCategory)
}
contentCheck[name][category].sort()
}
}
}
}
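// For example (hypothetical paths):
//
//   content/rest/actions/artifacts.md -> category 'actions', subcategory 'artifacts'
//   content/rest/emojis.md            -> category 'emojis',  subcategory 'emojis'
//
// i.e. files sitting directly under content/rest use the file name for both.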
function getDifferences(openAPISchemaCheck, contentCheck) {
const differences = {}
for (const version in openAPISchemaCheck) {
const diffOpenApiContent = difference(openAPISchemaCheck[version], contentCheck[version])
if (Object.keys(diffOpenApiContent).length > 0) differences[version] = diffOpenApiContent
}
return differences
}
function getSchemaName(version, versionValues) {
const versions = []
if (version === 'fpt') {
if (versionValues === '*') versions.push('api.github.com')
} else if (version === 'ghec') {
if (versionValues === '*') versions.push('ghec.github.com')
} else if (version === 'ghae') {
if (versionValues === '*') versions.push('github.ae')
} else if (version === 'ghes') {
if (versionValues === '*') {
versions.push('ghes-3.1', 'ghes-3.2', 'ghes-3.3', 'ghes-3.4')
} else {
let ver = ''
let includeVersion = false
let goUp
for (const char of versionValues) {
if ((char >= '0' && char <= '9') || char === '.') {
ver += char
} else if (char === '=') {
includeVersion = true
} else if (char === '>') {
goUp = true
} else if (char === '<') {
goUp = false
}
}
let numVersion = parseFloat(ver).toFixed(1)
if (!includeVersion) {
numVersion = goUp
? (parseFloat(numVersion) + 0.1).toFixed(1)
: (parseFloat(numVersion) - 0.1).toFixed(1)
}
while (
numVersion <= HIGHEST_SUPPORTED_GHES_VERSION &&
numVersion >= LOWEST_SUPPORTED_GHES_VERSION
) {
numVersion = parseFloat(numVersion).toFixed(1)
versions.push('ghes-' + numVersion)
numVersion = goUp
? (parseFloat(numVersion) + 0.1).toFixed(1)
: (parseFloat(numVersion) - 0.1).toFixed(1)
}
}
}
return versions
}
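// A few worked examples, assuming GHES 3.1 through 3.4 is the supported range:
//
//   getSchemaName('fpt', '*')      -> ['api.github.com']
//   getSchemaName('ghes', '*')     -> ['ghes-3.1', 'ghes-3.2', 'ghes-3.3', 'ghes-3.4']
//   getSchemaName('ghes', '>=3.3') -> ['ghes-3.3', 'ghes-3.4']
//   getSchemaName('ghes', '<3.3')  -> ['ghes-3.2', 'ghes-3.1'] (walks downward)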
function throughDirectory(directory) {
fs.readdirSync(directory).forEach((file) => {
const absolute = path.join(directory, file)
if (fs.statSync(absolute).isDirectory()) {
return throughDirectory(absolute)
} else if (
!directory.includes('rest/guides') &&
!directory.includes('rest/overview') &&
!file.includes('index.md') &&
!file.includes('README.md')
) {
return contentFiles.push(absolute)
}
})
}
function difference(obj1, obj2) {
const diff = Object.keys(obj1).reduce((result, key) => {
if (!Object.prototype.hasOwnProperty.call(obj2, key)) {
result.push(key)
} else if (_.isEqual(obj1[key], obj2[key])) {
const resultKeyIndex = result.indexOf(key)
result.splice(resultKeyIndex, 1)
}
return result
}, Object.keys(obj2))
return diff
}
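// Worked example: keys on only one side survive, as do keys present on both
// sides with unequal values; keys with deeply-equal values are dropped.
//
//   difference(
//     { actions: ['artifacts'], meta: ['meta'] },
//     { actions: ['artifacts'], emojis: ['emojis'] }
//   )
//   // -> ['emojis', 'meta']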


@@ -79,8 +79,6 @@ async function main() {
await decorate()
}
console.log(
'\n🏁 The static REST API files are now up-to-date with your local `github/github` checkout. To revert uncommitted changes, run `git checkout lib/rest/static/*`.\n\n'
)
@@ -135,26 +133,27 @@ async function getDereferencedFiles() {
}
}
async function getCategoryOverrideRedirects() {
const { operationUrls, sectionUrls } = JSON.parse(
await readFile('script/rest/utils/rest-api-overrides.json', 'utf8')
)
const operationRedirects = {}
console.log('\n➡ Updating REST API redirect exception list.\n')
Object.values(operationUrls).forEach((value) => {
const oldUrl = value.originalUrl.replace('/rest/reference', '/rest')
const anchor = oldUrl.split('#')[1]
const subcategory = value.subcategory
const redirectTo = subcategory
? `/rest/${value.category}/${subcategory}#${anchor}`
: `/rest/${value.category}#${anchor}`
operationRedirects[oldUrl] = redirectTo
})
const redirects = {
...operationRedirects,
...sectionUrls,
}
return redirects
}
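// For example, a hypothetical operationUrls entry such as:
//
//   "actions/list-artifacts-for-repo": {
//     "originalUrl": "/rest/reference/actions#list-artifacts-for-a-repository",
//     "category": "actions",
//     "subcategory": "artifacts"
//   }
//
// becomes the redirect pair:
//
//   '/rest/actions#list-artifacts-for-a-repository'
//     -> '/rest/actions/artifacts#list-artifacts-for-a-repository'
//
// while sectionUrls is presumably already a plain from -> to map, which is
// why it can be spread into the result unchanged.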
async function decorate() {
@@ -167,12 +166,78 @@ async function decorate() {
}
const operationsEnabledForGitHubApps = {}
const clientSideRedirects = await getCategoryOverrideRedirects()
const skipCategory = [
'billing',
'code-scanning',
'codes-of-conduct',
'deploy-keys',
'emojis',
'gitignore',
'licenses',
'markdown',
'meta',
'oauth-authorizations',
'packages',
'pages',
'rate-limit',
'reactions',
'scim',
'search',
'secret-scanning',
]
for (const [schemaName, schema] of Object.entries(dereferencedSchemas)) {
try {
// munge the OpenAPI definitions object into an array of operations objects,
// i.e. all of the operations for a particular version of the OpenAPI schema
const operations = await getOperations(schema)
// process each operation, asynchronously rendering markdown and stuff
await Promise.all(operations.map((operation) => operation.process()))
// For each rest operation that doesn't have an override defined
// in script/rest/utils/rest-api-overrides.json,
// add a client-side redirect
operations.forEach((operation) => {
// A handful of operations don't have external docs properties
const externalDocs = operation.getExternalDocs()
if (!externalDocs) {
return
}
const oldUrl = `/rest${
externalDocs.url.replace('/rest/reference', '/rest').split('/rest')[1]
}`
if (!(oldUrl in clientSideRedirects)) {
// There are some operations that aren't nested in the sidebar.
// For these, we don't need to add a client-side redirect; the
// frontmatter redirect will handle it for us.
if (skipCategory.includes(operation.category)) {
return
}
const anchor = oldUrl.split('#')[1]
const subcategory = operation.subcategory
// If there is no subcategory, a new page with the same name as the
// category was created. That page name may change going forward.
const redirectTo = subcategory
? `/rest/${operation.category}/${subcategory}#${anchor}`
: `/rest/${operation.category}/${operation.category}#${anchor}`
clientSideRedirects[oldUrl] = redirectTo
}
// There are a lot of section headings that we'll want to redirect, too,
// now that subcategories are on their own page. For example,
// /rest/reference/actions#artifacts should redirect to
// /rest/actions/artifacts
if (operation.subcategory) {
const sectionRedirectFrom = `/rest/${operation.category}#${operation.subcategory}`
const sectionRedirectTo = `/rest/${operation.category}/${operation.subcategory}`
if (!(sectionRedirectFrom in clientSideRedirects)) {
clientSideRedirects[sectionRedirectFrom] = sectionRedirectTo
}
}
})
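// To make the URL munging above concrete: a hypothetical operation with
//
//   externalDocs.url === 'https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository'
//
// (category 'actions', subcategory 'artifacts') yields
//
//   oldUrl     -> '/rest/actions#list-artifacts-for-a-repository'
//   redirectTo -> '/rest/actions/artifacts#list-artifacts-for-a-repository'
//
// plus the section redirect '/rest/actions#artifacts' -> '/rest/actions/artifacts'.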
const categories = [...new Set(operations.map((operation) => operation.category))].sort()
// Orders the operations by their category and subcategories.
@@ -258,6 +323,15 @@ async function decorate() {
JSON.stringify(operationsEnabledForGitHubApps, null, 2)
)
console.log('Wrote', path.relative(process.cwd(), `${appsStaticPath}/enabled-for-apps.json`))
await writeFile(
'lib/redirects/static/client-side-rest-api-redirects.json',
JSON.stringify(clientSideRedirects, null, 2),
'utf8'
)
console.log(
'Wrote',
path.relative(process.cwd(), `lib/redirects/static/client-side-rest-api-redirects.json`)
)
}
async function validateInputParameters(schemas) {


@@ -1,15 +1,16 @@
#!/usr/bin/env node
import { readFile } from 'fs/promises'
import { get, flatten, isPlainObject } from 'lodash-es'
import Ajv from 'ajv'
import GitHubSlugger from 'github-slugger'
import httpStatusCodes from 'http-status-code'
import { parseTemplate } from 'url-template'
import renderContent from '../../../lib/render-content/index.js'
import getCodeSamples from './create-rest-examples.js'
import operationSchema from './operation-schema.js'
const { operationUrls } = JSON.parse(
await readFile('script/rest/utils/rest-api-overrides.json', 'utf8')
)
const slugger = new GitHubSlugger()
@@ -62,8 +63,8 @@ export default class Operation {
// the openapi schema. Without it, we'd have to update several
// @documentation_urls in the api code every time we move
// an endpoint to a new page.
this.category = operationUrls[operationId]
? operationUrls[operationId].category
: xGithub.category
// Set subcategory
@@ -71,9 +72,9 @@ export default class Operation {
// defined in the openapi schema. Without it, we'd have to update several
// @documentation_urls in the api code every time we move
// an endpoint to a new page.
if (operationUrls[operationId]) {
if (operationUrls[operationId].subcategory) {
this.subcategory = operationUrls[operationId].subcategory
}
} else if (xGithub.subcategory) {
this.subcategory = xGithub.subcategory
@@ -108,6 +109,10 @@ export default class Operation {
}
}
getExternalDocs() {
return this.#operation.externalDocs
}
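// When present, externalDocs is the standard OpenAPI External Documentation
// object, e.g. (hypothetical values):
//
//   { description: 'API method documentation',
//     url: 'https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository' }
//
// The decorate() step shown earlier reads this url to build client-side redirects.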
async renderDescription() {
this.descriptionHTML = await renderContent(this.#operation.description)
return this

File diff suppressed because it is too large