
Merge branch 'main' into pwsh-commands

This commit is contained in:
Martin Lopes
2022-02-14 15:55:56 +10:00
committed by GitHub
453 changed files with 1630 additions and 4660 deletions

.github/CODEOWNERS vendored
View File

@@ -11,7 +11,6 @@
/script/ @github/docs-engineering
/includes/ @github/docs-engineering
/lib/search/popular-pages.json @github/docs-engineering
app.json @github/docs-engineering
Dockerfile @github/docs-engineering
package-lock.json @github/docs-engineering
package.json @github/docs-engineering

View File

@@ -1,51 +0,0 @@
#!/usr/bin/env node
import getOctokit from '../../script/helpers/github.js'
import deployToProduction from '../../script/deployment/deploy-to-production.js'
const {
GITHUB_TOKEN,
HEROKU_API_TOKEN,
HEROKU_PRODUCTION_APP_NAME,
SOURCE_BLOB_URL,
DELAY_FOR_PREBOOT,
RUN_ID,
} = process.env
// Exit if GitHub Actions PAT is not found
if (!GITHUB_TOKEN) {
throw new Error('You must supply a GITHUB_TOKEN environment variable!')
}
// Exit if Heroku API token is not found
if (!HEROKU_API_TOKEN) {
throw new Error('You must supply a HEROKU_API_TOKEN environment variable!')
}
// Exit if Heroku App name is not found
if (!HEROKU_PRODUCTION_APP_NAME) {
throw new Error('You must supply a HEROKU_PRODUCTION_APP_NAME environment variable!')
}
if (!RUN_ID) {
throw new Error('$RUN_ID not set')
}
// This helper uses the `GITHUB_TOKEN` implicitly!
// We're using our usual version of Octokit vs. the provided `github`
// instance to avoid versioning discrepancies.
const octokit = getOctokit()
try {
await deployToProduction({
octokit,
includeDelayForPreboot: DELAY_FOR_PREBOOT !== 'false',
// These parameters will ONLY be set by Actions
sourceBlobUrl: SOURCE_BLOB_URL,
runId: RUN_ID,
})
} catch (error) {
console.error(`Failed to deploy to production: ${error.message}`)
console.error(error)
throw error
}
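
The `getOctokit` helper imported above lives in script/helpers/github.js, which is not part of this diff. As a rough sketch, assuming it simply wraps the standard @octokit/rest client (the real helper may differ):

```js
// Hypothetical sketch of script/helpers/github.js (not shown in this commit).
// It presumably reads GITHUB_TOKEN from the environment and returns an
// authenticated Octokit instance, which is why the scripts above only need
// to check that the variable is set.
import { Octokit } from '@octokit/rest'

export default function getOctokit() {
  const { GITHUB_TOKEN } = process.env
  if (!GITHUB_TOKEN) {
    throw new Error('You must supply a GITHUB_TOKEN environment variable!')
  }
  return new Octokit({ auth: GITHUB_TOKEN })
}
```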

View File

@@ -1,42 +0,0 @@
#!/usr/bin/env node
import * as github from '@actions/github'
import getOctokit from '../../script/helpers/github.js'
const { GITHUB_TOKEN } = process.env
// Exit if GitHub Actions PAT is not found
if (!GITHUB_TOKEN) {
throw new Error('You must supply a GITHUB_TOKEN environment variable!')
}
// This helper uses the `GITHUB_TOKEN` implicitly!
// We're using our usual version of Octokit vs. the provided `github`
// instance to avoid versioning discrepancies.
const octokit = getOctokit()
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
if (!CONTEXT_NAME) {
throw new Error('$CONTEXT_NAME not set')
}
if (!ACTIONS_RUN_LOG) {
throw new Error('$ACTIONS_RUN_LOG not set')
}
if (!HEAD_SHA) {
throw new Error('$HEAD_SHA not set')
}
const { context } = github
const owner = context.repo.owner
const repo = context.payload.repository.name
await octokit.repos.createCommitStatus({
owner,
repo,
sha: HEAD_SHA,
context: CONTEXT_NAME,
state: 'success',
description: 'Successfully deployed! See logs.',
target_url: ACTIONS_RUN_LOG,
})

View File

@@ -1,55 +0,0 @@
#!/usr/bin/env node
import parsePrUrl from '../../script/deployment/parse-pr-url.js'
import getOctokit from '../../script/helpers/github.js'
import deployToStaging from '../../script/deployment/deploy-to-staging.js'
const { GITHUB_TOKEN, HEROKU_API_TOKEN } = process.env
// Exit if GitHub Actions PAT is not found
if (!GITHUB_TOKEN) {
throw new Error('You must supply a GITHUB_TOKEN environment variable!')
}
// Exit if Heroku API token is not found
if (!HEROKU_API_TOKEN) {
throw new Error('You must supply a HEROKU_API_TOKEN environment variable!')
}
// This helper uses the `GITHUB_TOKEN` implicitly!
// We're using our usual version of Octokit vs. the provided `github`
// instance to avoid versioning discrepancies.
const octokit = getOctokit()
const { RUN_ID, PR_URL, SOURCE_BLOB_URL } = process.env
if (!RUN_ID) {
throw new Error('$RUN_ID not set')
}
if (!PR_URL) {
throw new Error('$PR_URL not set')
}
if (!SOURCE_BLOB_URL) {
throw new Error('$SOURCE_BLOB_URL not set')
}
const { owner, repo, pullNumber } = parsePrUrl(PR_URL)
if (!owner || !repo || !pullNumber) {
throw new Error(
`'pullRequestUrl' input must match URL format 'https://github.com/github/(docs|docs-internal)/pull/123' but was '${PR_URL}'`
)
}
const { data: pullRequest } = await octokit.pulls.get({
owner,
repo,
pull_number: pullNumber,
})
await deployToStaging({
octokit,
pullRequest,
forceRebuild: false,
// These parameters will ONLY be set by Actions
sourceBlobUrl: SOURCE_BLOB_URL,
runId: RUN_ID,
})
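
The `parsePrUrl` helper is also absent from this commit. Judging by the error message above and how its result is destructured, a minimal sketch might be:

```js
// Hypothetical sketch of script/deployment/parse-pr-url.js (not shown in this commit):
// extract owner, repo, and pull request number from a URL such as
// https://github.com/github/docs-internal/pull/123, returning an empty object otherwise.
const PR_URL_PATTERN = /^https:\/\/github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/

export default function parsePrUrl(url) {
  const match = PR_URL_PATTERN.exec(url || '')
  if (!match) return {}
  const [, owner, repo, pullNumber] = match
  return { owner, repo, pullNumber: parseInt(pullNumber, 10) }
}
```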

View File

@@ -157,6 +157,8 @@ jobs:
- if: ${{ env.IS_PUBLIC_BUILD == 'true' }}
name: Move acceptable user changes
run: |
# Make sure recursive path expansion is enabled
shopt -s globstar
rsync -rptovR ./user-code/content/./**/*.md ./content
rsync -rptovR ./user-code/assets/./**/*.png ./assets
rsync -rptovR ./user-code/data/./**/*.{yml,md} ./data

View File

@@ -41,7 +41,7 @@ jobs:
run: |
gh pr comment $PR --body "Thanks so much for opening this PR and contributing to GitHub Docs!
- When you're ready for the Docs team to review this PR, apply the **ready-for-doc-review** label and your PR will be added to the [Docs Content review board](https://github.com/orgs/github/memexes/901?layout=table&groupedBy%5BcolumnId%5D=11024). **Please factor in at least 72 hours for a review, even longer if this is a substantial change.**
- When you're ready for the Docs team to review this PR, request a review by *docs-content* and your PR will be added to the [Docs Content review board](https://github.com/orgs/github/memexes/901?layout=table&groupedBy%5BcolumnId%5D=11024). **Please factor in at least 72 hours for a review, even longer if this is a substantial change.**
- If this is a major update to the docs, you might want to go back and open an [issue](https://github.com/github/docs-content/issues/new/choose) to ensure we've covered all areas of the docs in these updates. Not doing so may result in delays or inaccurate documentation."
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,31 +0,0 @@
name: Ping staging apps
# **What it does**: This keeps our staging applications from automatically spinning down.
# **Why we have it**: Staging applications can hibernate without use.
# **Who does it impact**: Anyone with a pull request in docs-internal.
on:
schedule:
- cron: '10,30,50 * * * *' # every twenty minutes
permissions:
contents: read
jobs:
ping_staging_apps:
name: Ping
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
env:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
steps:
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
- name: npm ci
run: npm ci
- name: Run script
run: script/ping-staging-apps.js

View File

@@ -1,199 +0,0 @@
name: Production - Build and Deploy
# **What it does**: Builds and deploys the default branch to production
# **Why we have it**: To enable us to deploy the latest to production whenever necessary rather than relying on PR merges.
# **Who does it impact**: All contributors.
on:
push:
branches:
- main
workflow_dispatch:
permissions:
contents: read
deployments: write
# This allows a subsequently queued workflow run to take priority over
# previously queued runs but NOT interrupt currently executing runs
concurrency:
group: '${{ github.workflow }}'
cancel-in-progress: false
jobs:
build-and-deploy:
if: ${{ github.repository == 'github/docs-internal'}}
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Check out repo
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
with:
persist-credentials: 'false'
lfs: 'true'
- name: Check out LFS objects
run: git lfs checkout
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
# Required for `npm pkg ...` command support
- name: Update to npm@^7.20.0
run: npm install --global npm@^7.20.0
- name: Install dependencies
run: npm ci
- name: Clone early access
run: node script/early-access/clone-for-build.js
env:
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
GIT_BRANCH: main
- name: Cache nextjs build
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- name: Build
run: npm run build
- name: Remove development-only dependencies
run: npm prune --production
- name: Remove all npm scripts
run: npm pkg delete scripts
- name: Set npm script for Heroku build to noop
run: npm set-script heroku-postbuild "echo 'Application was pre-built!'"
- name: Create a gzipped archive
run: |
tar -cz --file=app.tar.gz \
node_modules/ \
.next/ \
assets/ \
content/ \
data/ \
includes/ \
lib/ \
middleware/ \
translations/ \
server.mjs \
package*.json \
.npmrc \
feature-flags.json \
next.config.js \
app.json \
Procfile
- name: Install the development dependencies again
run: npm install
- name: Create a Heroku build source
id: build-source
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
with:
script: |
const { owner, repo } = context.repo
if (owner !== 'github') {
throw new Error(`Repository owner must be 'github' but was: ${owner}`)
}
if (repo !== 'docs-internal') {
throw new Error(`Repository name must be 'docs-internal' but was: ${repo}`)
}
const Heroku = require('heroku-client')
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
try {
const { source_blob: sourceBlob } = await heroku.post('/sources')
const { put_url: uploadUrl, get_url: downloadUrl } = sourceBlob
core.setOutput('upload_url', uploadUrl)
core.setOutput('download_url', downloadUrl)
} catch (error) {
if (error.statusCode === 503) {
console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
}
throw error
}
# See: https://devcenter.heroku.com/articles/build-and-release-using-the-api#sources-endpoint
- name: Upload to the Heroku build source
env:
UPLOAD_URL: ${{ steps.build-source.outputs.upload_url }}
run: |
curl "$UPLOAD_URL" \
-X PUT \
-H 'Content-Type:' \
--data-binary @app.tar.gz
- name: Deploy
id: deploy
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
HEROKU_PRODUCTION_APP_NAME: ${{ secrets.HEROKU_PRODUCTION_APP_NAME }}
HYDRO_ENDPOINT: ${{ secrets.HYDRO_ENDPOINT }}
HYDRO_SECRET: ${{ secrets.HYDRO_SECRET }}
SOURCE_BLOB_URL: ${{ steps.build-source.outputs.download_url }}
DELAY_FOR_PREBOOT: 'true'
ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
RUN_ID: ${{ github.run_id }}
run: .github/actions-scripts/prod-deploy.js
- name: Mark the deployment as inactive if timed out
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
if: ${{ steps.deploy.outcome == 'cancelled' }}
env:
DEPLOYMENT_ID: ${{ steps.deploy.outputs.deploymentId }}
LOG_URL: ${{ steps.deploy.outputs.logUrl }}
with:
script: |
const { DEPLOYMENT_ID, LOG_URL } = process.env
const { owner, repo } = context.repo
if (!DEPLOYMENT_ID) {
throw new Error('A deployment wasn\'t created before a timeout occurred!')
}
await github.repos.createDeploymentStatus({
owner,
repo,
deployment_id: DEPLOYMENT_ID,
state: 'error',
description: 'The deployment step timed out. See workflow logs.',
log_url: LOG_URL,
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
mediaType: {
previews: ['ant-man', 'flash'],
},
})
console.log('⏲️ Deployment status: error - The deployment timed out...')
# - name: Purge Fastly edge cache
# env:
# FASTLY_TOKEN: ${{ secrets.FASTLY_TOKEN }}
# FASTLY_SERVICE_ID: ${{ secrets.FASTLY_SERVICE_ID }}
# FASTLY_SURROGATE_KEY: 'every-deployment'
# run: .github/actions-scripts/purge-fastly-edge-cache.js
- name: Send Slack notification if workflow failed
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
if: ${{ failure() }}
with:
channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
color: failure
text: Production deployment failed at commit ${{ github.sha }}. See https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
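
The deploy step above hands SOURCE_BLOB_URL to .github/actions-scripts/prod-deploy.js (shown earlier in this commit), which delegates to script/deployment/deploy-to-production.js. That module is not in this diff, but following the Heroku "build and release using the API" flow the workflow links to, its core is presumably a build request against the uploaded source blob plus polling. A rough sketch only, using the same heroku-client package the workflow already relies on:

```js
// Hypothetical sketch, NOT the actual script/deployment/deploy-to-production.js:
// ask Heroku to build the tarball that was uploaded to the source blob, then
// poll the build until it succeeds or fails.
import Heroku from 'heroku-client'

export default async function deployToProduction({ sourceBlobUrl, runId }) {
  const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
  const appName = process.env.HEROKU_PRODUCTION_APP_NAME

  // Kick off a build from the source blob's download URL (get_url)
  const build = await heroku.post(`/apps/${appName}/builds`, {
    body: { source_blob: { url: sourceBlobUrl, version: String(runId) } },
  })

  // Poll until the build leaves the 'pending' state
  let status = build.status
  while (status === 'pending') {
    await new Promise((resolve) => setTimeout(resolve, 10000))
    const latest = await heroku.get(`/apps/${appName}/builds/${build.id}`)
    status = latest.status
  }
  if (status !== 'succeeded') {
    throw new Error(`Heroku build ${build.id} finished with status: ${status}`)
  }
  return build
}
```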

View File

@@ -1,12 +1,12 @@
name: Ready for docs-content review
# **What it does**: Adds pull requests in the docs-internal repository to the docs-content review board when the "ready-for-doc-review" label is added
# **What it does**: Adds pull requests in the docs-internal repository to the docs-content review board when the "ready-for-doc-review" label is added or when a review by docs-content is requested
# **Why we have it**: So that other GitHub teams can easily request reviews from the docs-content team, and so that writers can see when a PR is ready for review
# **Who does it impact**: Writers working in the docs-internal repository
on:
pull_request_target:
types: [labeled]
types: [labeled, review_requested]
permissions:
contents: read
@@ -14,7 +14,9 @@ permissions:
jobs:
request_doc_review:
name: Request a review from the docs-content team
if: github.event.label.name == 'ready-for-doc-review' && github.repository == 'github/docs-internal'
if: >-
github.repository == 'github/docs-internal' &&
(github.event.label.name == 'ready-for-doc-review' || github.event.requested_team.name == 'docs-content')
runs-on: ubuntu-latest
steps:
- name: Check out repo content

View File

@@ -1,69 +0,0 @@
name: Remove stale staging resources
# **What it does**:
# This cleans up any rogue staging applications and environments that outlasted
# the closure of their corresponding pull requests.
# **Why we have it**:
# Staging applications and environments should be destroyed after their
# corresponding pull request is closed or merged, especially to save money spent
# on Heroku App staging deployments for closed PRs.
# **Who does it impact**:
# Anyone with a closed, spammy, or deleted pull request in docs or docs-internal.
on:
schedule:
- cron: '15,45 * * * *' # every thirty minutes at :15 and :45
permissions:
actions: read
contents: read
deployments: write
pull-requests: write
jobs:
remove_stale_staging_apps:
name: Remove stale staging apps
if: ${{ github.repository == 'github/docs-internal' }}
runs-on: ubuntu-latest
steps:
- name: Check out repo's default branch
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
- name: Setup Node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Run script
run: script/remove-stale-staging-apps.js
env:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
remove_stale_staging_envs:
name: Remove stale staging environments
runs-on: ubuntu-latest
steps:
- name: Check out repo's default branch
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
- name: Setup Node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Run script
run: script/remove-stale-staging-envs.js
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ELEVATED_TOKEN: ${{ secrets.DOCS_BOT_FR }}
REPO: ${{ github.repository }}
RUN_ID: ${{ github.run_id }}

View File

@@ -1,210 +0,0 @@
name: Staging - Build and Deploy PR (fast and private-only)
# **What it does**: Builds and deploys PRs to staging but ONLY for docs-internal
# **Why we have it**: Most PRs are made on the private repo. Let's make those extra fast if we can worry less about security.
# **Who does it impact**: All staff.
# This whole workflow is only guaranteed to be secure in the *private
# repo* and because we repo-sync these files over to the public one,
# IT'S IMPORTANT THAT THIS WORKFLOW IS ONLY ENABLED IN docs-internal!
on:
# The advantage of 'pull_request' over 'pull_request_target' is that we
# can make changes to this file and test them in a pull request, instead
# of relying on landing it in 'main' first.
# From a security point of view, it's arguably safer this way because
# unlike 'pull_request_target', these only have secrets if the pull
# request creator has permission to access secrets.
pull_request:
permissions:
actions: read
contents: read
deployments: write
pull-requests: read
statuses: write
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
cancel-in-progress: true
jobs:
build-and-deploy-pr:
# Important. This whole file is only supposed to run in the PRIVATE repo.
if: ${{ github.repository == 'github/docs-internal' }}
# The assumption here is that self-hosted is faster (e.g. CPU power)
# than the regular ones. And it matters in this workflow because
# we do heavy CPU stuff with `npm run build` and `tar`
# runs-on: ubuntu-latest
runs-on: self-hosted
timeout-minutes: 5
steps:
- name: Check out repo
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
with:
lfs: 'true'
# To prevent issues with cloning early access content later
persist-credentials: 'false'
- name: Check out LFS objects
run: git lfs checkout
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Cache nextjs build
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- name: Build
run: npm run build
- name: Clone early access
run: node script/early-access/clone-for-build.js
env:
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
GIT_BRANCH: ${{ github.head_ref || github.ref }}
- name: Create a Heroku build source
id: build-source
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
with:
script: |
const { owner, repo } = context.repo
if (owner !== 'github') {
throw new Error(`Repository owner must be 'github' but was: ${owner}`)
}
if (repo !== 'docs-internal') {
throw new Error(`Repository name must be 'docs-internal' but was: ${repo}`)
}
const Heroku = require('heroku-client')
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
try {
const { source_blob: sourceBlob } = await heroku.post('/sources')
const { put_url: uploadUrl, get_url: downloadUrl } = sourceBlob
core.setOutput('upload_url', uploadUrl)
core.setOutput('download_url', downloadUrl)
} catch (error) {
if (error.statusCode === 503) {
console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
}
throw error
}
- name: Remove development-only dependencies
run: npm prune --production
- name: Remove all npm scripts
run: npm pkg delete scripts
- name: Set npm script for Heroku build to noop
run: npm set-script heroku-postbuild "echo 'Application was pre-built!'"
- name: Delete heavy things we won't need deployed
run: |
# The dereferenced file is not used in runtime once the
# decorated file has been created from it.
rm -rf lib/rest/static/dereferenced
# Translations are never tested in Staging builds
# but let's keep the empty directory.
rm -rf translations
mkdir translations
# Delete all the big search indexes that are NOT English (`*-en-*`)
pushd lib/search/indexes
ls | grep -ve '\-en\b' | xargs rm
popd
# Note! Some day it would be nice to be able to delete
# all the heavy assets because they bloat the tarball.
# But it's not obvious how to test it then. For now, we'll have
# to accept that every staging build has a copy of the images.
# The assumption here is that a staging build will not
# need these legacy redirects. Only the redirects from
# front-matter will be at play.
# These static redirects json files are notoriously large
# and they make the tarball unnecessarily large.
echo '[]' > lib/redirects/static/archived-frontmatter-fallbacks.json
echo '{}' > lib/redirects/static/developer.json
echo '{}' > lib/redirects/static/archived-redirects-from-213-to-217.json
# This will turn every `lib/**/static/*.json` into
# an equivalent `lib/**/static/*.json.br` file.
# Once the server starts, it'll know to fall back to reading
# the `.br` equivalent if the `.json` file isn't present.
node .github/actions-scripts/compress-large-files.js
- name: Make the tarball for Heroku
run: |
# We can't delete the .next/cache directory from the workflow
# because it's needed for caching, but we can at least exclude it
# from the tarball. Then it can be cached but not weigh down the
# tarball we intend to deploy.
tar -zc --exclude=.next/cache --file=app.tar.gz \
node_modules/ \
.next/ \
assets/ \
content/ \
data/ \
includes/ \
lib/ \
middleware/ \
translations/ \
server.mjs \
package*.json \
.npmrc \
feature-flags.json \
next.config.js \
app.json \
Procfile
du -sh app.tar.gz
# See: https://devcenter.heroku.com/articles/build-and-release-using-the-api#sources-endpoint
- name: Upload to the Heroku build source
env:
UPLOAD_URL: ${{ steps.build-source.outputs.upload_url }}
run: |
curl "$UPLOAD_URL" \
-X PUT \
-H 'Content-Type:' \
--data-binary @app.tar.gz
# 'npm install' is faster than 'npm ci' because it only needs to
# *append* what's missing from ./node_modules/
- name: Re-install dependencies so we get devDependencies back
run: npm install --no-audit --no-fund --only=dev
- name: Deploy
id: deploy
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
HYDRO_ENDPOINT: ${{ secrets.HYDRO_ENDPOINT }}
HYDRO_SECRET: ${{ secrets.HYDRO_SECRET }}
PR_URL: ${{ github.event.pull_request.html_url }}
SOURCE_BLOB_URL: ${{ steps.build-source.outputs.download_url }}
ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
RUN_ID: ${{ github.run_id }}
run: .github/actions-scripts/staging-deploy.js
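
The "Delete heavy things we won't need deployed" step above finishes by running .github/actions-scripts/compress-large-files.js, which is not included in this commit. A hypothetical sketch, assuming it walks lib/**/static/*.json and replaces each file with a Brotli-compressed .br sibling as the step's comments describe:

```js
#!/usr/bin/env node
// Hypothetical sketch of .github/actions-scripts/compress-large-files.js (not in this commit).
import fs from 'fs'
import path from 'path'
import zlib from 'zlib'

// Recursively list every file under a directory
function walk(dir) {
  return fs.readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
    const full = path.join(dir, entry.name)
    return entry.isDirectory() ? walk(full) : [full]
  })
}

for (const file of walk('lib')) {
  // Only the lib/**/static/*.json payloads are pre-compressed in this sketch
  if (!file.includes(`${path.sep}static${path.sep}`) || !file.endsWith('.json')) continue
  fs.writeFileSync(`${file}.br`, zlib.brotliCompressSync(fs.readFileSync(file)))
  // Remove the original so only the .br copy ships in the tarball (assumed behavior)
  fs.unlinkSync(file)
  console.log(`Compressed ${file} -> ${file}.br`)
}
```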

View File

@@ -1,135 +0,0 @@
name: Staging - Build PR
# **What it does**: Builds PRs before deploying them.
# **Why we have it**: Because it's not safe to share our deploy secrets with forked repos: https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
# **Who does it impact**: All contributors.
# IT'S CRUCIALLY IMPORTANT THAT THIS WORKFLOW IS ONLY ENABLED IN docs!
on:
pull_request:
permissions:
contents: read
# This allows a subsequently queued workflow run to interrupt previous runs
# These are different from the concurrency in that here it checks if the
# whole workflow runs again. The "inner concurrency" is used for
# undeployments, to clean up resources.
concurrency:
group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
cancel-in-progress: true
jobs:
build-pr:
# Important. This whole file is only supposed to run in the PUBLIC repo.
if: ${{ github.repository == 'github/docs' }}
runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
timeout-minutes: 5
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
concurrency:
group: 'PR Staging @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
cancel-in-progress: true
steps:
- name: Check out repo
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
# Make sure only approved files are changed if it's in github/docs
- name: Check changed files
if: ${{ github.event.pull_request.user.login != 'Octomerger' }}
uses: dorny/paths-filter@eb75a1edc117d3756a18ef89958ee59f9500ba58
id: filter
with:
# Base branch used to get changed files
base: 'main'
# Enables setting an output in the format `${FILTER_NAME}_files`
# with the names of the matching files formatted as a JSON array
list-files: json
# Returns list of changed files matching each filter
filters: |
notAllowed:
- '*.js'
- '*.mjs'
- '*.cjs'
- '*.ts'
- '*.tsx'
- '*.json'
- '.npmrc'
- '.babelrc*'
- '.env*'
- 'script/**'
- 'Procfile'
# When there are changes to files we can't accept
- name: Fail when disallowed files are changed
if: ${{ steps.filter.outputs.notAllowed == 'true' }}
run: exit 1
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
# Required for `npm pkg ...` command support
- name: Update to npm@^7.20.0
run: npm install --global npm@^7.20.0
- name: Install dependencies
run: npm ci
- name: Cache nextjs build
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- name: Build
run: npm run build
- name: Remove development-only dependencies
run: npm prune --production
- name: Remove all npm scripts
run: npm pkg delete scripts
- name: Set npm script for Heroku build to noop
run: npm set-script heroku-postbuild "echo 'Application was pre-built!'"
- name: Create an archive
run: |
tar -c --file=app.tar \
node_modules/ \
.next/ \
assets/ \
content/ \
data/ \
includes/ \
lib/ \
middleware/ \
translations/ \
server.mjs \
package*.json \
.npmrc \
feature-flags.json \
next.config.js \
app.json \
Procfile
# We can't delete the .next/cache directory from the workflow
# because it's needed for caching, but we can at least delete it
# from within the tarball. Then it can be cached but not
# weigh down the tarball we intend to deploy.
tar --delete --file=app.tar .next/cache
# Upload only the files needed to run this application.
# We are not willing to trust the rest (e.g. script/) for the remainder
# of the deployment process.
- name: Upload build artifact
uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2
with:
name: pr_build
path: app.tar

View File

@@ -1,466 +0,0 @@
name: Staging - Deploy PR
# **What it does**: To deploy PRs to a Heroku staging environment.
# **Why we have it**: To deploy with high visibility in case of failures.
# **Who does it impact**: All contributors.
# IT'S CRUCIALLY IMPORTANT THAT THIS WORKFLOW IS ONLY ENABLED IN docs!
on:
workflow_run:
workflows:
- 'Staging - Build PR'
types:
- completed
permissions:
actions: read
contents: read
deployments: write
pull-requests: read
statuses: write
# IMPORTANT: Intentionally OMIT a `concurrency` configuration from this workflow's
# top-level as we do not have any guarantee of identifying values being available
# within the `github.event` context for PRs from forked repos!
#
# The implication of this shortcoming is that we may have multiple workflow runs
# of this running at the same time for different commits within the same PR.
# However, once they reach the `concurrency` configurations deeper down within
# this workflow's jobs, then we can expect concurrent short-circuiting to begin.
env:
CONTEXT_NAME: '${{ github.workflow }} / deploy (${{ github.event.workflow_run.event }})'
ACTIONS_RUN_LOG: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
BUILD_ACTIONS_RUN_ID: ${{ github.event.workflow_run.id }}
BUILD_ACTIONS_RUN_LOG: https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}
jobs:
pr-metadata:
# This is needed because the workflow we depend on
# (see on.workflow_run.workflows) might be running from pushes on
# main. That's because it needs to do that to populate the cache.
if: >-
${{
github.repository == 'github/docs' &&
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
}}
runs-on: ubuntu-latest
outputs:
number: ${{ steps.pr.outputs.number }}
url: ${{ steps.pr.outputs.url }}
state: ${{ steps.pr.outputs.state }}
head_sha: ${{ steps.pr.outputs.head_sha }}
head_branch: ${{ steps.pr.outputs.head_branch }}
head_label: ${{ steps.pr.outputs.head_label }}
head_ref: ${{ steps.pr.outputs.head_ref }}
steps:
- name: Find the originating pull request
id: pr
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
BUILD_ACTIONS_RUN_ID: ${{ env.BUILD_ACTIONS_RUN_ID }}
with:
script: |
// Curious about what version of node you get
console.log('Node version:', process.version)
// In order to find out the PR info for a forked repo, we must query
// the API for more info based on the originating workflow run
const { BUILD_ACTIONS_RUN_ID } = process.env
const { owner, repo } = context.repo
const { data: run } = await github.actions.getWorkflowRun({
owner,
repo,
run_id: BUILD_ACTIONS_RUN_ID,
})
// Gather PR-identifying information from the workflow run
const {
head_branch: headBranch,
head_sha: headSha,
head_repository: {
owner: { login: prRepoOwner },
name: prRepoName
}
} = run
const prIsInternal = owner === prRepoOwner && repo === prRepoName
let headLabel = `${prRepoOwner}:${headBranch}`
// If the PR is external, prefix its head branch name with the
// forked repo owner's login and their fork repo name e.g.
// "octocat/my-fork:docs". We need to include the fork repo
// name as well to account for an API issue (this will work fine
// if they don't have a different fork repo name).
if (!prIsInternal) {
headLabel = `${prRepoOwner}/${prRepoName}:${headBranch}`
}
// If the PR is external, prefix its head branch name with the
// forked repo owner's login, e.g. "octocat:docs"
const headRef = prIsInternal ? headBranch : headLabel
// Retrieve matching PRs (up to 30)
const { data: pulls } = await github.pulls.list({
owner,
repo,
head: headLabel,
sort: 'updated',
direction: 'desc',
per_page: 30
})
// Find the open PR, if any, otherwise choose the most recently updated
const targetPull = pulls.find(pr => pr.state === 'open') || pulls[0] || {}
const pullNumber = targetPull.number || 0
const pullUrl = targetPull.html_url || 'about:blank'
const pullState = targetPull.state || 'closed'
core.setOutput('number', pullNumber.toString())
core.setOutput('url', pullUrl)
core.setOutput('state', pullState)
core.setOutput('head_sha', headSha)
core.setOutput('head_branch', headBranch)
core.setOutput('head_label', headLabel)
core.setOutput('head_ref', headRef)
debug-originating-trigger:
needs: pr-metadata
runs-on: ubuntu-latest
steps:
- name: Dump info about the originating workflow run
env:
PR_NUMBER: ${{ needs.pr-metadata.outputs.number }}
PR_URL: ${{ needs.pr-metadata.outputs.url }}
PR_STATE: ${{ needs.pr-metadata.outputs.state }}
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
HEAD_BRANCH: ${{ needs.pr-metadata.outputs.head_branch }}
HEAD_LABEL: ${{ needs.pr-metadata.outputs.head_label }}
HEAD_REF: ${{ needs.pr-metadata.outputs.head_ref }}
BUILD_ACTIONS_RUN_ID: ${{ env.BUILD_ACTIONS_RUN_ID }}
BUILD_ACTIONS_RUN_LOG: ${{ env.BUILD_ACTIONS_RUN_LOG }}
run: |
echo "Originating workflow info:"
echo " - PR_NUMBER = $PR_NUMBER"
echo " - PR_URL = $PR_URL"
echo " - PR_STATE = $PR_STATE"
echo " - HEAD_SHA = $HEAD_SHA"
echo " - HEAD_BRANCH = $HEAD_BRANCH"
echo " - HEAD_LABEL = $HEAD_LABEL"
echo " - HEAD_REF = $HEAD_REF"
echo " - BUILD_ACTIONS_RUN_ID = $BUILD_ACTIONS_RUN_ID"
echo " - BUILD_ACTIONS_RUN_LOG = $BUILD_ACTIONS_RUN_LOG"
notify-of-failed-builds:
needs: pr-metadata
if: >-
${{
needs.pr-metadata.outputs.number != '0' &&
github.event.workflow_run.conclusion == 'failure'
}}
runs-on: ubuntu-latest
timeout-minutes: 1
# Specifically omitting a concurrency group here in case the build was not
# successful BECAUSE a subsequent build already canceled it
steps:
- name: Verify build workflow run was not cancelled
id: check-workflow-run
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
BUILD_ACTIONS_RUN_ID: ${{ env.BUILD_ACTIONS_RUN_ID }}
with:
script: |
const { owner, repo } = context.repo
const { data: { jobs: buildJobs } } = await github.actions.listJobsForWorkflowRun({
owner,
repo,
run_id: process.env.BUILD_ACTIONS_RUN_ID,
filter: 'latest'
})
const wasCancelled = (
buildJobs.length > 0 &&
buildJobs.every(({ status, conclusion }) => {
return status === 'completed' && conclusion === 'cancelled'
})
)
core.setOutput('cancelled', wasCancelled.toString())
- if: ${{ steps.check-workflow-run.outputs.cancelled == 'false' }}
name: Send Slack notification if build workflow failed
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
with:
channel: ${{ secrets.DOCS_STAGING_DEPLOYMENT_FAILURES_SLACK_CHANNEL_ID }}
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
color: failure
text: Staging build failed for PR ${{ needs.pr-metadata.outputs.url }} at commit ${{ needs.pr-metadata.outputs.head_sha }}. See ${{ env.BUILD_ACTIONS_RUN_LOG }}. This run was ${{ env.ACTIONS_RUN_LOG }}.
prepare-for-deploy:
needs: pr-metadata
if: ${{ needs.pr-metadata.outputs.state == 'open' }}
runs-on: ubuntu-latest
timeout-minutes: 5
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
concurrency:
group: 'PR Staging @ ${{ needs.pr-metadata.outputs.head_label }}'
cancel-in-progress: true
outputs:
source_blob_url: ${{ steps.build-source.outputs.download_url }}
steps:
- name: Create initial status
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
with:
script: |
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
const { owner, repo } = context.repo
await github.repos.createCommitStatus({
owner,
repo,
sha: HEAD_SHA,
context: CONTEXT_NAME,
state: 'pending',
description: 'The app is being deployed. See logs.',
target_url: ACTIONS_RUN_LOG
})
- name: Check out repo's default branch
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
with:
# To prevent issues with cloning early access content later
persist-credentials: 'false'
lfs: 'true'
- name: Check out LFS objects
run: git lfs checkout
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
# Install any additional dependencies *before* downloading the build artifact
- name: Install Heroku client development-only dependency
run: npm install --no-save heroku-client
# Download the previously built "app.tar"
- name: Download build artifact
uses: dawidd6/action-download-artifact@af92a8455a59214b7b932932f2662fdefbd78126
with:
workflow: ${{ github.event.workflow_run.workflow_id }}
run_id: ${{ env.BUILD_ACTIONS_RUN_ID }}
name: pr_build
path: ${{ runner.temp }}
# gzip the app.tar to meet Heroku's expected format
- name: Create a gzipped archive (docs)
run: gzip -9 < "$RUNNER_TEMP/app.tar" > app.tar.gz
- name: Create a Heroku build source
id: build-source
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
with:
script: |
const { owner, repo } = context.repo
if (owner !== 'github') {
throw new Error(`Repository owner must be 'github' but was: ${owner}`)
}
if (repo !== 'docs') {
throw new Error(`Repository name must be 'docs' but was: ${repo}`)
}
const Heroku = require('heroku-client')
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
try {
const { source_blob: sourceBlob } = await heroku.post('/sources')
const { put_url: uploadUrl, get_url: downloadUrl } = sourceBlob
core.setOutput('upload_url', uploadUrl)
core.setOutput('download_url', downloadUrl)
} catch (error) {
if (error.statusCode === 503) {
console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
}
throw error
}
# See: https://devcenter.heroku.com/articles/build-and-release-using-the-api#sources-endpoint
- name: Upload to the Heroku build source
env:
UPLOAD_URL: ${{ steps.build-source.outputs.upload_url }}
run: |
curl "$UPLOAD_URL" \
-X PUT \
-H 'Content-Type:' \
--data-binary @app.tar.gz
- name: Create failure status
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
if: ${{ failure() }}
env:
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
with:
script: |
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
const { owner, repo } = context.repo
await github.repos.createCommitStatus({
owner,
repo,
sha: HEAD_SHA,
context: CONTEXT_NAME,
state: 'error',
description: 'Failed to deploy. See logs.',
target_url: ACTIONS_RUN_LOG
})
- name: Send Slack notification if deployment preparation job failed
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
if: ${{ failure() }}
with:
channel: ${{ secrets.DOCS_STAGING_DEPLOYMENT_FAILURES_SLACK_CHANNEL_ID }}
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
color: failure
text: Staging preparation failed for PR ${{ needs.pr-metadata.outputs.url }} at commit ${{ needs.pr-metadata.outputs.head_sha }}. See ${{ env.ACTIONS_RUN_LOG }}.
check-pr-before-deploy:
needs: [pr-metadata, prepare-for-deploy]
runs-on: ubuntu-latest
timeout-minutes: 1
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
concurrency:
group: 'PR Staging @ ${{ needs.pr-metadata.outputs.head_label }}'
cancel-in-progress: true
outputs:
pull_request_state: ${{ steps.check-pr.outputs.state }}
steps:
- name: Check pull request state
id: check-pr
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
env:
PR_NUMBER: ${{ needs.pr-metadata.outputs.number }}
with:
script: |
const { owner, repo } = context.repo
const { data: pullRequest } = await github.pulls.get({
owner,
repo,
pull_number: process.env.PR_NUMBER
})
core.setOutput('state', pullRequest.state)
deploy:
needs: [pr-metadata, prepare-for-deploy, check-pr-before-deploy]
if: ${{ needs.check-pr-before-deploy.outputs.pull_request_state == 'open' }}
runs-on: ubuntu-latest
timeout-minutes: 10
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
concurrency:
group: 'PR Staging @ ${{ needs.pr-metadata.outputs.head_label }}'
cancel-in-progress: true
steps:
- name: Check out repo's default branch
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Deploy
id: deploy
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
HYDRO_ENDPOINT: ${{ secrets.HYDRO_ENDPOINT }}
HYDRO_SECRET: ${{ secrets.HYDRO_SECRET }}
PR_URL: ${{ needs.pr-metadata.outputs.url }}
SOURCE_BLOB_URL: ${{ needs.prepare-for-deploy.outputs.source_blob_url }}
ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
RUN_ID: ${{ github.run_id }}
run: .github/actions-scripts/staging-deploy.js
- name: Create successful commit status
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
run: .github/actions-scripts/staging-commit-status-success.js
- name: Mark the deployment as inactive if timed out
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
if: ${{ steps.deploy.outcome == 'cancelled' }}
env:
DEPLOYMENT_ID: ${{ steps.deploy.outputs.deploymentId }}
LOG_URL: ${{ steps.deploy.outputs.logUrl }}
with:
script: |
const { DEPLOYMENT_ID, LOG_URL } = process.env
const { owner, repo } = context.repo
if (!DEPLOYMENT_ID) {
throw new Error('A deployment wasn\'t created before a timeout occurred!')
}
await github.repos.createDeploymentStatus({
owner,
repo,
deployment_id: DEPLOYMENT_ID,
state: 'error',
description: 'The deployment step timed out. See workflow logs.',
log_url: LOG_URL,
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
mediaType: {
previews: ['ant-man', 'flash'],
},
})
console.log('⏲️ Deployment status: error - The deployment timed out...')
- name: Create failure status
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
if: ${{ failure() }}
env:
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
with:
script: |
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
const { owner, repo } = context.repo
await github.repos.createCommitStatus({
owner,
repo,
sha: HEAD_SHA,
context: CONTEXT_NAME,
state: 'error',
description: 'Failed to deploy. See logs.',
target_url: ACTIONS_RUN_LOG
})
- name: Send Slack notification if deployment job failed
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
if: ${{ failure() }}
with:
channel: ${{ secrets.DOCS_STAGING_DEPLOYMENT_FAILURES_SLACK_CHANNEL_ID }}
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
color: failure
text: Staging deployment failed for PR ${{ needs.pr-metadata.outputs.url }} at commit ${{ needs.pr-metadata.outputs.head_sha }}. See ${{ env.ACTIONS_RUN_LOG }}.

View File

@@ -1,76 +0,0 @@
# NOTE: Changes to this file should also be applied to './test.yml'
name: Node.js Tests - Windows
# **What it does**: This runs our tests on Windows.
# **Why we have it**: We want to support Windows contributors to docs.
# **Who does it impact**: Anyone working on docs on a Windows device.
on:
workflow_dispatch:
pull_request:
schedule:
- cron: '50 19 * * *' # once a day at 19:50 UTC / 11:50 PST
permissions:
contents: read
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
cancel-in-progress: true
jobs:
test:
runs-on: windows-latest
if: (github.event_name != 'pull_request') || (github.event_name == 'pull_request' && (contains(github.event.pull_request.labels.*.name, 'Windows') || contains(github.event.pull_request.labels.*.name, 'windows')))
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
test-group:
[
content,
graphql,
meta,
rendering,
routing,
unit,
linting,
translations,
]
steps:
- name: Check out repo
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
with:
# Enables cloning the Early Access repo later with the relevant PAT
persist-credentials: 'false'
- name: Setup node
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
with:
node-version: 16.13.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Cache nextjs build
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- if: ${{ github.repository == 'github/docs-internal' }}
name: Clone early access
run: npm run heroku-postbuild
env:
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
GIT_BRANCH: ${{ github.head_ref || github.ref }}
- if: ${{ github.repository != 'github/docs-internal' }}
name: Run build script
run: npm run build
- name: Run tests
run: npm test -- tests/${{ matrix.test-group }}/

View File

@@ -1,5 +1,3 @@
# NOTE: Changes to this file should also be applied to './test-windows.yml'
name: Node.js Tests
# **What it does**: Runs our tests.

View File

@@ -10,7 +10,6 @@ on:
- '.github/actions-scripts/**'
- '.github/workflows/**'
- '.github/CODEOWNERS'
- 'app.json'
- 'assets/fonts/**'
- 'data/graphql/**'
- 'Dockerfile*'
@@ -20,7 +19,6 @@ on:
- 'lib/webhooks/**'
- 'lib/search/indexes/**'
- 'package*.json'
- 'Procfile'
- 'script/**'
- 'translations/**'
@@ -58,7 +56,6 @@ jobs:
- '.github/actions-scripts/**'
- '.github/workflows/**'
- '.github/CODEOWNERS'
- 'app.json'
- 'assets/fonts/**'
- 'data/graphql/**'
- 'Dockerfile*'
@@ -68,7 +65,6 @@ jobs:
- 'lib/webhooks/**'
- 'lib/search/indexes/**'
- 'package*.json'
- 'Procfile'
- 'scripts/**'
- 'translations/**'
@@ -83,7 +79,6 @@ jobs:
'.github/actions-scripts/**',
'.github/workflows/**',
'.github/CODEOWNERS',
'app.json',
'assets/fonts/**',
'data/graphql/**',
'Dockerfile*',
@@ -93,7 +88,6 @@ jobs:
'lib/webhooks/**',
'lib/search/indexes/**',
'package*.json',
'Procfile',
'scripts/**',
'translations/**',
]
@@ -111,7 +105,7 @@ jobs:
body: reviewMessage,
})
workflowFailMessage = `${workflowFailMessage} Please see ${createdComment.data.html_url} for details.`
workflowFailMessage = `${workflowFailMessage} Please see ${createdComment.data.html_url} for details.`
} catch(err) {
console.log("Error creating comment.", err)
}

View File

@@ -1,7 +1,7 @@
name: Check for unallowed internal changes
# **What it does**: If someone changes app.json or search indexes, we fail the check.
# **Why we have it**: app.json should rarely be edited, so we'll require an admin merge if the file really needs to be changed. The search indexes are synced every 4 hours, so changes should not need to be made.
# **What it does**: If someone changes search indexes, we fail the check.
# **Why we have it**: The search indexes are synced every 4 hours, so changes should not need to be made.
# **Who does it impact**: Docs engineering and content writers.
on:
@@ -44,19 +44,8 @@ jobs:
# Returns list of changed files matching each filter
filters: |
notAllowed:
- 'app.json'
notAllowedSearchSyncLabel:
- 'lib/search/indexes/**'
notAllowed:
needs: check-internal-changes
if: ${{ needs.check-internal-changes.outputs.notAllowed == 'true' }}
runs-on: ubuntu-latest
steps:
- name: Fail if unallowed changes were made
run: |
echo "Please admin merge if you really need to update app.json!"
exit 1
notAllowedSearchSyncLabel:
needs: check-internal-changes
if: ${{ needs.check-internal-changes.outputs.notAllowedSearchSyncLabel == 'true' }}

View File

@@ -1,6 +1,4 @@
# This Dockerfile can be used for docker-based deployments to platforms
# like Now or Moda, but it is currently _not_ used by our Heroku deployments
# It uses two multi-stage builds: `install` and the main build to keep the image size down.
# This Dockerfile is used for docker-based deployments to Azure for both preview environments and production
# --------------------------------------------------------------------------------
# BASE IMAGE

View File

@@ -1 +0,0 @@
web: NODE_ENV=production node server.mjs

View File

@@ -1,17 +0,0 @@
{
"name": "docs.github.com",
"env": {
"NODE_ENV": "production",
"ENABLED_LANGUAGES": "en",
"WEB_CONCURRENCY": "1"
},
"buildpacks": [
{ "url": "heroku/nodejs" }
],
"formation": {
"web": {
"quantity": 1,
"size": "standard-2x"
}
}
}

Binary files not shown. Six image assets were updated (size before → after): 34 KiB → 146 KiB, 134 KiB → 488 KiB, (new) → 54 KiB, 68 KiB → 208 KiB, 12 KiB → 67 KiB, 9.9 KiB → 26 KiB.

View File

@@ -89,10 +89,14 @@ This procedure demonstrates how to create the service account for your GKE integ
{% raw %}
```
$ gcloud projects add-iam-policy-binding $GKE_PROJECT \
--member=serviceAccount:$SA_EMAIL \
--role=roles/container.admin \
--role=roles/storage.admin \
--role=roles/container.clusterViewer
--member=serviceAccount:$SA_EMAIL \
--role=roles/container.admin
$ gcloud projects add-iam-policy-binding $GKE_PROJECT \
--member=serviceAccount:$SA_EMAIL \
--role=roles/storage.admin
$ gcloud projects add-iam-policy-binding $GKE_PROJECT \
--member=serviceAccount:$SA_EMAIL \
--role=roles/container.clusterViewer
```
{% endraw %}
1. Download the JSON keyfile for the service account:

View File

@@ -76,7 +76,7 @@ The following table indicates where each context and special function can be use
| <code>concurrency</code> | <code>github, inputs</code> | |
| <code>env</code> | <code>github, secrets, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.concurrency</code> | <code>github, needs, strategy, matrix, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.container</code> | <code>github, needs, strategy, matrix, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.container</code> | <code>github, needs, strategy, matrix, secrets, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.container.credentials</code> | <code>github, needs, strategy, matrix, env, secrets, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.container.env.&lt;env_id&gt;</code> | <code>github, needs, strategy, matrix, job, runner, env, secrets, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.continue-on-error</code> | <code>github, needs, strategy, matrix, inputs</code> | |

View File

@@ -757,6 +757,8 @@ strategy:
## `jobs.<job_id>.container`
{% data reusables.github-actions.docker-container-os-support %}
{% data reusables.actions.jobs.section-running-jobs-in-a-container %}
### `jobs.<job_id>.container.image`

View File

@@ -98,7 +98,7 @@ shortTitle: Export from your enterprise
> logout
> Connection to <em>hostname</em> closed.
```
9. Copy the migration archive to your computer using the [`scp`](https://linuxacademy.com/blog/linux/ssh-and-scp-howto-tips-tricks#scp) command. The archive file will be named with the Migration GUID:
9. Copy the migration archive to your computer using the [`scp`](https://acloudguru.com/blog/engineering/ssh-and-scp-howto-tips-tricks#scp) command. The archive file will be named with the Migration GUID:
```shell
$ scp -P 122 admin@<em>hostname</em>:/data/github/current/tmp/<em>MIGRATION_GUID</em>.tar.gz ~/Desktop
```

View File

@@ -19,7 +19,7 @@ shortTitle: Prepare to migrate data
---
## Preparing the migrated data for import to {% data variables.product.prodname_ghe_server %}
1. Using the [`scp`](https://linuxacademy.com/blog/linux/ssh-and-scp-howto-tips-tricks#scp) command, copy the migration archive generated from your source instance or organization to your {% data variables.product.prodname_ghe_server %} target:
1. Using the [`scp`](https://acloudguru.com/blog/engineering/ssh-and-scp-howto-tips-tricks#scp) command, copy the migration archive generated from your source instance or organization to your {% data variables.product.prodname_ghe_server %} target:
```shell
$ scp -P 122 <em>/path/to/archive/MIGRATION_GUID.tar.gz</em> admin@<em>hostname</em>:/home/admin/
@@ -43,7 +43,7 @@ shortTitle: Prepare to migrate data
$ ghe-migrator conflicts -g <em>MIGRATION_GUID</em> > conflicts.csv
```
- If no conflicts are reported, you can safely import the data by following the steps in "[Migrating data to your enterprise](/enterprise/admin/guides/migrations/applying-the-imported-data-on-github-enterprise-server/)".
2. If there are conflicts, using the [`scp`](https://linuxacademy.com/blog/linux/ssh-and-scp-howto-tips-tricks#scp) command, copy *conflicts.csv* to your local computer:
2. If there are conflicts, using the [`scp`](https://acloudguru.com/blog/engineering/ssh-and-scp-howto-tips-tricks#scp) command, copy *conflicts.csv* to your local computer:
```shell
$ scp -P 122 admin@<em>hostname</em>:conflicts.csv ~/Desktop
```
@@ -133,7 +133,7 @@ The same process can be used to create mappings for each record that supports cu
### Applying modified migration data
1. After making changes, use the [`scp`](https://linuxacademy.com/blog/linux/ssh-and-scp-howto-tips-tricks#scp) command to apply your modified *conflicts.csv* (or any other mapping *.csv* file in the correct format) to the target instance:
1. After making changes, use the [`scp`](https://acloudguru.com/blog/engineering/ssh-and-scp-howto-tips-tricks#scp) command to apply your modified *conflicts.csv* (or any other mapping *.csv* file in the correct format) to the target instance:
```shell
$ scp -P 122 ~/Desktop/conflicts.csv admin@<em>hostname</em>:/home/admin/

View File

@@ -29,7 +29,7 @@ You can retrieve a list of {% data variables.product.prodname_dotcom %}'s IP add
These IP addresses are used by {% data variables.product.prodname_dotcom %} to serve our content, deliver webhooks, and perform hosted {% data variables.product.prodname_actions %} builds.
These ranges are in [CIDR notation](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation). You can use an online conversion tool such as this [CIDR / VLSM Supernet Calculator](http://www.subnet-calculator.com/cidr.php) to convert from CIDR notation to IP address ranges.
These ranges are in [CIDR notation](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation). You can use an online conversion tool to convert from CIDR notation to IP address ranges, for example: [CIDR to IPv4 conversion site](https://www.ipaddressguide.com/cidr).
We make changes to our IP addresses from time to time. We do not recommend allowing by IP address, however if you use these IP ranges we strongly encourage regular monitoring of our API.
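
To illustrate the conversion the article mentions, a short script can expand an IPv4 CIDR block into its first and last addresses (the 192.30.252.0/22 block below is just an example value):

```js
// Expand an IPv4 CIDR block (e.g. "192.30.252.0/22") into its first and last addresses.
function cidrToRange(cidr) {
  const [base, bits] = cidr.split('/')
  const prefix = Number(bits)
  const ip = base.split('.').reduce((acc, octet) => (acc << 8) + Number(octet), 0) >>> 0
  const mask = prefix === 0 ? 0 : (0xffffffff << (32 - prefix)) >>> 0
  const first = (ip & mask) >>> 0
  const last = (first | (~mask >>> 0)) >>> 0
  const toDotted = (n) => [24, 16, 8, 0].map((shift) => (n >>> shift) & 0xff).join('.')
  return `${toDotted(first)} - ${toDotted(last)}`
}

console.log(cidrToRange('192.30.252.0/22')) // 192.30.252.0 - 192.30.255.255
```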

View File

@@ -60,4 +60,4 @@ For an example of an incident response workflow, see the "Data Breach Response P
You should conduct regular vulnerability scans of production infrastructure. You should triage the results of vulnerability scans and define a period of time in which you agree to remediate the vulnerability.
If you are not ready to set up a full vulnerability management program, it's useful to start by creating a patching process. For guidance in creating a patch management policy, see this TechRepublic article "[Establish a patch management policy](https://www.techrepublic.com/blog/it-security/establish-a-patch-management-policy-87756/)."
If you are not ready to set up a full vulnerability management program, it's useful to start by creating a patching process. For guidance in creating a patch management policy, see this TechRepublic article "[Establish a patch management policy](https://www.techrepublic.com/article/establish-a-patch-management-policy-87756/)."

View File

@@ -24,6 +24,11 @@ To create a heading, add one to six <kbd>#</kbd> symbols before your heading tex
![Rendered H1, H2, and H6 headings](/assets/images/help/writing/headings-rendered.png)
When you use two or more headings, GitHub automatically generates a table of contents which you can access by clicking {% octicon "list-unordered" aria-label="The unordered list icon" %} within the file header. Each heading title is listed in the table of contents and you can click a title to navigate to the selected section.
![Screenshot highlighting the table of contents icon](/assets/images/help/repository/headings_toc.png)
## Styling text
You can indicate emphasis with bold, italic, or strikethrough text in comment fields and `.md` files.

View File

@@ -17,7 +17,7 @@ topics:
---
People with admin or owner permissions can set up a CODEOWNERS file in a repository.
The people you choose as code owners must have write permissions for the repository. When the code owner is a team, that team must be visible and it must have write permissions, even if all the individual members of the team already have write permissions directly, through organization membership, or through another team membership.
The people you choose as code owners must have read permissions for the repository. When the code owner is a team, that team must be visible and it must have write permissions, even if all the individual members of the team already have write permissions directly, through organization membership, or through another team membership.
## About code owners
@@ -42,7 +42,7 @@ For code owners to receive review requests, the CODEOWNERS file must be on the b
CODEOWNERS files must be under 3 MB in size. A CODEOWNERS file over this limit will not be loaded, which means that code owner information is not shown and the appropriate code owners will not be requested to review changes in a pull request.
To reduce the size of your CODEOWNERS file, consider using wildcard patterns to consolidate multiple entries into a single entry.
To reduce the size of your CODEOWNERS file, consider using wildcard patterns to consolidate multiple entries into a single entry.
{% endif %}
## CODEOWNERS syntax
@@ -103,11 +103,11 @@ apps/ @octocat
# will require approval from @doctocat or @octocat.
/scripts/ @doctocat @octocat
# In this example, @octocat owns any file in the `/apps`
# directory in the root of your repository except for the `/apps/github`
# In this example, @octocat owns any file in the `/apps`
# directory in the root of your repository except for the `/apps/github`
# subdirectory, as its owners are left empty.
/apps/ @octocat
/apps/github
/apps/github
```
### Syntax exceptions
There are some syntax rules for gitignore files that do not work in CODEOWNERS files:
@@ -116,7 +116,7 @@ There are some syntax rules for gitignore files that do not work in CODEOWNERS f
- Using `[ ]` to define a character range
## CODEOWNERS and branch protection
Repository owners can add branch protection rules to ensure that changed code is reviewed by the owners of the changed files. For more information, see "[About protected branches](/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)."
## Further reading

View File

@@ -1,6 +1,6 @@
{% note %}
**Note:** If your workflows use Docker container actions or service containers, then you must use a Linux runner:
**Note:** If your workflows use Docker container actions, job containers, or service containers, then you must use a Linux runner:
* If you are using {% data variables.product.prodname_dotcom %}-hosted runners, you must use an Ubuntu runner.
* If you are using self-hosted runners, you must use a Linux machine as your runner and Docker must be installed.

View File

@@ -118,5 +118,13 @@
"3.6": {
"releaseDate": "2022-07-19",
"deprecationDate": "2023-08-16"
},
"3.7": {
"releaseDate": "2022-10-04",
"deprecationDate": "2023-11-08"
},
"3.8": {
"releaseDate": "2023-02-07",
"deprecationDate": "2024-03-07"
}
}
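The map above pairs each release with its release and deprecation dates. A minimal sketch of how such a map could be consumed, assuming a hypothetical `isDeprecated` helper; the names below are illustrative and not code from this repository:

```javascript
// Illustrative sketch only: `enterpriseDates` mirrors the shape of the JSON
// above, and `isDeprecated` is a hypothetical helper, not code from this repo.
const enterpriseDates = {
  '3.6': { releaseDate: '2022-07-19', deprecationDate: '2023-08-16' },
  '3.7': { releaseDate: '2022-10-04', deprecationDate: '2023-11-08' },
  '3.8': { releaseDate: '2023-02-07', deprecationDate: '2024-03-07' },
}

// A release counts as deprecated once the current date passes its deprecationDate.
function isDeprecated(version, now = new Date()) {
  const entry = enterpriseDates[version]
  if (!entry) throw new Error(`Unknown release: ${version}`)
  return now >= new Date(entry.deprecationDate)
}

console.log(isDeprecated('3.6')) // true for any date after 2023-08-16
```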

View File

@@ -31,5 +31,4 @@ export default [
'https://www.vmware.com/products/esxi-and-esx.html',
'https://www.ecfr.gov/cgi-bin/text-idx?SID=ad384e1f1e017076f8c0136f322f0a4c&mc=true&node=pt15.2.744&rgn=div5',
'https://wiki.oasis-open.org/security',
'https://www.techrepublic.com/article/establish-a-patch-management-policy-87756/',
]
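The array above appears to act as an exclusion list for outbound links. A minimal sketch of how a checker might apply it, assuming hypothetical `excludedLinks` and `shouldCheck` names rather than this repository's actual implementation:

```javascript
// Illustrative sketch only: the names below are assumptions, not the
// repository's real link-checker code.
const excludedLinks = [
  'https://www.vmware.com/products/esxi-and-esx.html',
  'https://wiki.oasis-open.org/security',
]

// Only URLs that are not on the exclusion list get checked.
function shouldCheck(url) {
  return !excludedLinks.includes(url)
}

const candidates = ['https://wiki.oasis-open.org/security', 'https://docs.github.com/en']
console.log(candidates.filter(shouldCheck)) // [ 'https://docs.github.com/en' ]
```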

View File

@@ -34221,86 +34221,6 @@
}
]
},
{
"verb": "delete",
"requestPath": "/reactions/{reaction_id}",
"serverUrl": "https://api.github.com",
"parameters": [
{
"name": "reaction_id",
"in": "path",
"required": true,
"schema": {
"type": "integer"
},
"descriptionHTML": ""
}
],
"x-codeSamples": [
{
"lang": "Shell",
"source": "curl \\\n -X DELETE \\\n -H \"Accept: application/vnd.github.v3+json\" \\\n https://api.github.com/reactions/42",
"html": "<pre><code class=\"hljs language-shell\">curl \\\n -X DELETE \\\n -H \"Accept: application/vnd.github.v3+json\" \\\n https://api.github.com/reactions/42</code></pre>"
},
{
"lang": "JavaScript",
"source": "await octokit.request('DELETE /reactions/{reaction_id}', {\n reaction_id: 42\n})",
"html": "<pre><code class=\"hljs language-javascript\"><span class=\"hljs-keyword\">await</span> octokit.<span class=\"hljs-title hljs-function\">request</span>(<span class=\"hljs-string\">'DELETE /reactions/{reaction_id}'</span>, {\n <span class=\"hljs-attr\">reaction_id</span>: <span class=\"hljs-number\">42</span>\n})\n</code></pre>"
}
],
"summary": "Delete a reaction (Legacy)",
"description": "**Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/).\n\nOAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments).",
"tags": [
"reactions"
],
"operationId": "reactions/delete-legacy",
"externalDocs": {
"description": "API method documentation",
"url": "https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy"
},
"x-github": {
"githubCloudOnly": false,
"enabledForGitHubApps": true,
"removalDate": "2021-02-21",
"deprecationDate": "2020-02-26",
"category": "reactions",
"subcategory": null
},
"deprecated": true,
"slug": "delete-a-reaction-legacy",
"category": "reactions",
"categoryLabel": "Reactions",
"notes": [],
"bodyParameters": [],
"descriptionHTML": "<p><strong>Deprecation Notice:</strong> This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this <a href=\"https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/\">blog post</a>.</p>\n<p>OAuth access tokens require the <code>write:discussion</code> <a href=\"https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/\">scope</a>, when deleting a <a href=\"https://docs.github.com/rest/reference/teams#discussions\">team discussion</a> or <a href=\"https://docs.github.com/rest/reference/teams#discussion-comments\">team discussion comment</a>.</p>",
"responses": [
{
"httpStatusCode": "204",
"httpStatusMessage": "No Content",
"description": "<p>Response</p>"
},
{
"httpStatusCode": "304",
"httpStatusMessage": "Not Modified",
"description": "<p>Not modified</p>"
},
{
"httpStatusCode": "401",
"httpStatusMessage": "Unauthorized",
"description": "<p>Requires authentication</p>"
},
{
"httpStatusCode": "403",
"httpStatusMessage": "Forbidden",
"description": "<p>Forbidden</p>"
},
{
"httpStatusCode": "410",
"httpStatusMessage": "Gone",
"description": "<p>Gone</p>"
}
]
},
{
"verb": "get",
"requestPath": "/repos/{owner}/{repo}",

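The deprecation notice above recommends migrating to the resource-scoped delete endpoints. A minimal sketch of that migration with `octokit.request`, assuming an authenticated Octokit instance; the issue-reaction route is one of the documented replacements, and the owner, repository, and ID values are placeholders:

```javascript
// Before: the legacy route removed in the diff above.
await octokit.request('DELETE /reactions/{reaction_id}', {
  reaction_id: 42
})

// After: delete the reaction through the resource it belongs to, here an
// issue reaction. Owner, repository, issue number, and IDs are placeholders.
await octokit.request('DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}', {
  owner: 'octocat',
  repo: 'hello-world',
  issue_number: 1,
  reaction_id: 42
})
```

The REST API documents equivalent resource-scoped routes for the other reaction targets, such as comments and team discussions.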
View File

@@ -24966,86 +24966,6 @@
}
]
},
{
"verb": "delete",
"requestPath": "/reactions/{reaction_id}",
"serverUrl": "https://{hostname}/api/v3",
"parameters": [
{
"name": "reaction_id",
"in": "path",
"required": true,
"schema": {
"type": "integer"
},
"descriptionHTML": ""
}
],
"x-codeSamples": [
{
"lang": "Shell",
"source": "curl \\\n -X DELETE \\\n -H \"Accept: application/vnd.github.v3+json\" \\\n https://{hostname}/api/v3/reactions/42",
"html": "<pre><code class=\"hljs language-shell\">curl \\\n -X DELETE \\\n -H \"Accept: application/vnd.github.v3+json\" \\\n https://{hostname}/api/v3/reactions/42</code></pre>"
},
{
"lang": "JavaScript",
"source": "await octokit.request('DELETE /reactions/{reaction_id}', {\n reaction_id: 42\n})",
"html": "<pre><code class=\"hljs language-javascript\"><span class=\"hljs-keyword\">await</span> octokit.<span class=\"hljs-title hljs-function\">request</span>(<span class=\"hljs-string\">'DELETE /reactions/{reaction_id}'</span>, {\n <span class=\"hljs-attr\">reaction_id</span>: <span class=\"hljs-number\">42</span>\n})\n</code></pre>"
}
],
"summary": "Delete a reaction (Legacy)",
"description": "**Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/).\n\nOAuth access tokens require the `write:discussion` [scope](https://docs.github.com/github-ae@latest/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/github-ae@latest/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/github-ae@latest/rest/reference/teams#discussion-comments).",
"tags": [
"reactions"
],
"operationId": "reactions/delete-legacy",
"externalDocs": {
"description": "API method documentation",
"url": "https://docs.github.com/github-ae@latest/rest/reference/reactions/#delete-a-reaction-legacy"
},
"x-github": {
"githubCloudOnly": false,
"enabledForGitHubApps": true,
"removalDate": "2021-02-21",
"deprecationDate": "2020-02-26",
"category": "reactions",
"subcategory": null
},
"deprecated": true,
"slug": "delete-a-reaction-legacy",
"category": "reactions",
"categoryLabel": "Reactions",
"notes": [],
"bodyParameters": [],
"descriptionHTML": "<p><strong>Deprecation Notice:</strong> This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this <a href=\"https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/\">blog post</a>.</p>\n<p>OAuth access tokens require the <code>write:discussion</code> <a href=\"https://docs.github.com/github-ae@latest/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/\">scope</a>, when deleting a <a href=\"https://docs.github.com/github-ae@latest/rest/reference/teams#discussions\">team discussion</a> or <a href=\"https://docs.github.com/github-ae@latest/rest/reference/teams#discussion-comments\">team discussion comment</a>.</p>",
"responses": [
{
"httpStatusCode": "204",
"httpStatusMessage": "No Content",
"description": "<p>Response</p>"
},
{
"httpStatusCode": "304",
"httpStatusMessage": "Not Modified",
"description": "<p>Not modified</p>"
},
{
"httpStatusCode": "401",
"httpStatusMessage": "Unauthorized",
"description": "<p>Requires authentication</p>"
},
{
"httpStatusCode": "403",
"httpStatusMessage": "Forbidden",
"description": "<p>Forbidden</p>"
},
{
"httpStatusCode": "410",
"httpStatusMessage": "Gone",
"description": "<p>Gone</p>"
}
]
},
{
"verb": "get",
"requestPath": "/repos/{owner}/{repo}",

View File

@@ -125023,125 +125023,6 @@
}
}
},
"/reactions/{reaction_id}": {
"delete": {
"summary": "Delete a reaction (Legacy)",
"description": "**Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/).\n\nOAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments).",
"tags": [
"reactions"
],
"operationId": "reactions/delete-legacy",
"externalDocs": {
"description": "API method documentation",
"url": "https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy"
},
"parameters": [
{
"name": "reaction_id",
"in": "path",
"required": true,
"schema": {
"type": "integer"
}
}
],
"responses": {
"204": {
"description": "Response"
},
"304": {
"description": "Not modified"
},
"403": {
"description": "Forbidden",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
},
"401": {
"description": "Requires authentication",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
},
"410": {
"description": "Gone",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
}
},
"x-github": {
"githubCloudOnly": false,
"enabledForGitHubApps": true,
"removalDate": "2021-02-21",
"deprecationDate": "2020-02-26",
"category": "reactions",
"subcategory": null
},
"deprecated": true
}
},
"/repos/{owner}/{repo}": {
"get": {
"summary": "Get a repository",

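The responses above document 204 on success and Basic Error bodies for 401, 403, and 410. A minimal sketch of handling those statuses, assuming an authenticated Octokit instance that rejects non-2xx responses with an error carrying a `status` property:

```javascript
// Illustrative sketch only: `octokit` is assumed to be an authenticated
// Octokit instance. A 204 resolves normally; 401, 403, and 410 reject with
// an error whose `status` matches the codes documented above.
try {
  await octokit.request('DELETE /reactions/{reaction_id}', {
    reaction_id: 42
  })
  console.log('Reaction deleted (204 No Content)')
} catch (error) {
  if (error.status === 410) {
    console.log('Reaction is already gone')
  } else if (error.status === 401 || error.status === 403) {
    console.log('Check the token and its write:discussion scope')
  } else {
    throw error
  }
}
```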
View File

@@ -77292,125 +77292,6 @@
}
}
},
"/reactions/{reaction_id}": {
"delete": {
"summary": "Delete a reaction (Legacy)",
"description": "**Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/).\n\nOAuth access tokens require the `write:discussion` [scope](https://docs.github.com/github-ae@latest/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/github-ae@latest/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/github-ae@latest/rest/reference/teams#discussion-comments).",
"tags": [
"reactions"
],
"operationId": "reactions/delete-legacy",
"externalDocs": {
"description": "API method documentation",
"url": "https://docs.github.com/github-ae@latest/rest/reference/reactions/#delete-a-reaction-legacy"
},
"parameters": [
{
"name": "reaction_id",
"in": "path",
"required": true,
"schema": {
"type": "integer"
}
}
],
"responses": {
"204": {
"description": "Response"
},
"304": {
"description": "Not modified"
},
"403": {
"description": "Forbidden",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
},
"401": {
"description": "Requires authentication",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
},
"410": {
"description": "Gone",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
}
},
"x-github": {
"githubCloudOnly": false,
"enabledForGitHubApps": true,
"removalDate": "2021-02-21",
"deprecationDate": "2020-02-26",
"category": "reactions",
"subcategory": null
},
"deprecated": true
}
},
"/repos/{owner}/{repo}": {
"get": {
"summary": "Get a repository",

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:47f2a1bb71c44eb3c14690449ffc37eafb1684500109ff16eb534a993a1ab4e2
size 660732
oid sha256:ad495955bf4dbf471ff73384117657af07d4febfd430d332410e6487a4601867
size 661523

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4a9da08bd9e586ce171f93454eac32c2885c741e7ac774ed397e0ea3fb0c9308
size 1377623
oid sha256:5ad03c624fd895682493a613f0d52d592cc28f45f0157394c24c6bcb34d49b57
size 1377641

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c8b98f298f8c276301a1eb4f0d599b794d8dc950ec13c229ed53d431b387374c
size 980831
oid sha256:55092e613eab6c38284ddc5f695466d16fe67f28eb662881ce138a09c3bdab5b
size 979739

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b1ca84b937c9dd0aa0eb580638c887f7af43135dde7700fe9a797a79eeaacbb5
size 3989912
oid sha256:e46fc8b08c5dafa39171231434e113181e481e70bc936c26f4af77be35192a34
size 3989151

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:32978fe3141fee7148015cc9051fe4f9c550faaf1b4bc42c8bc181c2bf7ebe2d
size 614498
oid sha256:d9a44cad36399623e67812bd04fadd4b301506dba1f704e66746f918dae9aa29
size 614734

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a1b13e1411ad4db2b7592dd952ab4702c7be942635045b9ce09fab80108e06b7
size 2572359
oid sha256:666bb6ea14b3285420cb53ddfd61b96e8c54bfdee5586e557909d52306081db8
size 2571661

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2e41390628d2a6af9fa323fcc78b96b1fa867f2337a8a1d68a9a414916cfa0a8
size 680711
oid sha256:8a5f856f1cd6f32b5ccc30c7c9700e78391d0620dbc71ed6b219fef207d25e7d
size 680096

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4056ffa70370838f88b9fe2694e257c4692bcd29a0a6693a0fe969b5d03d68d4
size 3602301
oid sha256:28717e93df6608c3ce5d73722a0fe28096888320057932f266fead61e0c56e8a
size 3598394

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1318284320faf8b349cc340d983cab28fc11ef18e0698d81c5fa2fa93566a4c6
size 605075
oid sha256:bc5c4b1ac7bd72100f98078b8f1d69e776471a24e9bc0514141eb3e98c8b3023
size 606078

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f95b862e592737ec7372226f74f6e88576a6819715de17641238337fcc6a990
size 2468070
oid sha256:f1f741dd3dcda753c9307bd336f0c4a00ca300375155d51e7d0db51e0918bd25
size 2466806

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eddf78f62bffcadb7942c66be544a78c076e5cabc04c4e208f84e9f22ce8224b
size 675183
oid sha256:2401c7797a3c30fba923d0ed54331df67911e2d85292628a3327e288d6e3d3b2
size 675282

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4d84604ee31476e632a44a9424a1b1f5beb7db2cdae9d4550ae11b84e5251d38
size 1413778
oid sha256:95c2321b8587ad217d4c2558498615bcbe186fcff1de15d950ca484f037060d8
size 1413900

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:488e795fdf2c77ec061c6ec97623e297e3c657a02321538a9ec59d1f3ca98f69
size 1006261
oid sha256:f0c6b6f75fc051cefc0f5a1c5307c8c3d4e0d8d20c87f65dc39977997b389560
size 1005018

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:923e43b3a50c22d686834c514dd02c381ded234cdc53f5deefec92806fb2e285
size 4079033
oid sha256:b8d854bb453c129c459229683f1f386273f8ac642e371b85aa8716f542974cb9
size 4077692

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0011455a5d11bc33ba2af5c5703da6698ae6b9d746a89ae88a68c4cca1664185
size 627075
oid sha256:f590dc84c133e2e733cadde4788fd36bbd4992495a44c05e8e9641d4f0c04315
size 626202

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f6b547cf671384d5e632f5c75a289a462b4b4db8d8c417ce2469bf88a3bcd503
size 2632887
oid sha256:47ac7e471aed6604b9edcce596f17501cb2741bf4182e7c42c84503a25d83d7b
size 2632609

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:30794b66196dca5d7cd844e7a1ec9c7ff64fec8d566db0860fdda0c4db2ac500
size 694092
oid sha256:a6726d64418ea8c768e9a5e72f0f377e49b48a2f4ed8e2905a8d6eef016677cf
size 694174

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bafc02f6e8e424b0cf7be57da2083b8e91f809698e3d44e835d5c80d662fa88c
size 3680366
oid sha256:7033475bd2828f6236c887b3746d8a93ec52d550c56422e646ae55063d632c94
size 3679134

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:92d1fa4bd98655c1957297d7206be6953527255bae9740c851fea9412144958e
size 617448
oid sha256:188b07f5c8c04bccd252676415a3ced2cb4b968f4f578056b70dbc6b4865ad33
size 617214

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d767f63307ac25a99e08d07ef484fe69c7e3805ebe9959d67f636847cb6876fc
size 2519795
oid sha256:fd658c5fb357920bd1f49349f79097d59f130670e913ed009e0885b0dc34b861
size 2519431

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:34829eb8612d1f810af323f5581eb62e33cd0763ca0787d76847bd923674c818
size 691980
oid sha256:915a06d90236340a4b4372b93e44a55434acd531eca3ded771defbd13e4aba4b
size 690251

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1e9af186991894f3b9dfaadb97e5ae577944ce8bf918c4a342e1f5b5e57154a7
size 1446142
oid sha256:233423a48886ea0fb7bd542f40cc7f45b2e41eb9f730882e9e1a428ac9e0e55a
size 1446159

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bf7e6e4e93c01d6345b1a08da2750b2253fda1ba9c0510aa6238edd2ed708d38
size 1039550
oid sha256:869eacd89cb812dfb884e06ce77f5335a2aa73069cc38a2f776363cf168eb9da
size 1037730

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3223e5113f0e3b18e10aa0a9b350609495aa608291e285a3457c1d0f37db0e8d
size 4214510
oid sha256:df7de3478533adc4e4e6f7586d7ac25acac20cd642f7abd92d2525addc1909d6
size 4210326

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d2701762992d04ae5a4256235dc5a8322028ca01071f9b947253ee5e66dc6676
size 641530
oid sha256:c417de5bc9d32897568c66608db3fab9a532fb626f2272c58a95ec1c647ef669
size 640659

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e24e0061ac20fb620e949c00c7fdbbc063236400cc8e7074ff1a5160873eea5e
size 2696420
oid sha256:6f447a6f7f487b5beb870e10a4744753a2acef614a0c6fba62ad1e55894af2b0
size 2692986

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:78b5354fd197efc35df0c812472479e001e29d8e307fccb5a54c7f734c3d7632
size 708963
oid sha256:b4479845ed1bdc56a665303b5e228dfbbe19e048c883b240d177eebccafd564f
size 707720

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ca0907e1983c6cad844d4f51413a8e836900f3c5535e082cfce9b251e782b974
size 3765008
oid sha256:805de48302d421d89cb7de5ed87f563b7c28e6bc31856d884073221228911db5
size 3760558

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:beab35c298cf0ceaaae95fcf8a0366c0610f58368f79851ff835c7d1dd9279d6
size 632967
oid sha256:bdf9412b1ee1727d2ef8ec8a8dae728eebe5a64509f1e4198ea1ae046de3bf2b
size 630968

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1fe6ecafeb217e7ce74fe7cf2a38b15e263f04a0ba823cbae20f4caeb116edea
size 2580079
oid sha256:950db6f7dcd9d2ee5d0ac691126c7066e8640f0e00cdaa0e38acd654257cccb2
size 2578754

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3bbf2f35668d8050bf520aca714d861fbc22fd4a102895280b395985466deaa4
size 712820
oid sha256:0a72f5dd062b0e1628016f706906342a3e198f248bfb0951ed0c4ba8472c5330
size 712899

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0cef26bbf55015e1be4e51a5ac11530b9611633cbf6985e3e2562f2ba605f0b0
size 1505929
oid sha256:b4e6db2740dbb871cfbcdc730b845d1276a0af00a22e66eaa30839dc2706933b
size 1505664

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1e124cd6fcaa95e2df0edd42426c82d395b9f74a69e49ab4d64db9d70b9d9235
size 1079134
oid sha256:96e6d0ac4483be6d5a2b0567f17a1ddb414058703c7607e080fb23fe045e7d64
size 1073502

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c98b85d478d58207e212583715b6e6c1b4048c27316f5c6af1a3eca6dad52198
size 4315518
oid sha256:06d26640d0668fce291b6b99c92b0a43ab7f2fc36843112423923a0d52e864ab
size 4313205

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bd30526d5517c1c32711708f15073af206a954fd870b21fb1bb5ae672331beba
size 661016
oid sha256:ed3ef73ec189760efa247b361d00d011925d70b25aa3f67754ef9d52ca735cd6
size 660641

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:de49151c9f250aa5aa6ae7aa10eb82a86c49a5c3f60e7450658cee4bec699b25
size 2797541
oid sha256:832bd101501946de847558f5048bf61f9b3286f6079e8f9445101b3bbf41e6d6
size 2795859

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:13330f28c9e496df248bd006e3b0bb3d825bf1578b62c604e70e2e3ee00da218
size 732092
oid sha256:0aa05ef0f2c91bda47f3b13a415191efa2053ef0d478ce5f0363e1c547d34a28
size 730845

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f9f5e19dd7f1c7780e4f9aab9647cb98de82c4d7d958556b665f0ff73084cbf4
size 3891349
oid sha256:c6b3477463aa7ce1aa714f23cfb62a755c3c6000cfd16ca26461808ffd3300b0
size 3885440

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f56744f881b0257b612aeec6fe657870327d53ce5d69e9582a9c863f3bdb9b8a
size 650798
oid sha256:64124d66a630fec0d14d2078d44c12b6a0e805a86404a92bf9efeea16d0b4a2a
size 650744

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:11cc0508cc1ad069ffe751d3e0ae676ae949aeaa0e57fd2b5ba002a6a2ba5ce5
size 2661406
oid sha256:d8d6e54b1b2efde0044ec8e5a13ae4f9e15b13b8c0162119de864be22330eb33
size 2659878

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:751deeb265410a437d6aaea7b61d1ef8f8ea761e08231c13ad6608add4d610c2
size 914158
oid sha256:b6b70df7657927142c3044994d780da0232e81bf5c81d92f29228be2d286cda9
size 913561

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2774a524a7d307eb599fec438272dcbba4620ed18fc419103efa4df8388146e5
size 1681024
oid sha256:d22d36852b06fa338883bc71f72e2865fbb38a3be87b4e6894c1355737935f79
size 1683070

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:76a3fabbb2c816e60d0f1856ae8a4f7b63aa657790209871367af7ebd33995b1
size 1366262
oid sha256:5b2df0016b9d815939adb5fad570038317bd55ee5feed797c20a71386bd8de4f
size 1362871

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:831f2c630befa95f7b0dc1195244bede9c1242a8d240ae033ad4c475dabc88b6
size 5205576
oid sha256:096b3892b67ffd9137c3eb7b230e30e1386d53aa267da609136cb1fc20f0cb96
size 5202022

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8c256c303a2065ef015f03b4726cef00d7516f948f38896b5acdfea23664867a
size 830646
oid sha256:7b5609f12f94015845b5c548081c549a2c98fc248271c5a5338bea5a1ce563c2
size 829425

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:820e67288c8ede26cafa5481db87db665d8c2c0d505985a75d62d4455ad671fb
size 3348903
oid sha256:b34a6c0bc4b9c4a46e1ef559a760ae13674435787aef9eed54bc893ed8aca0f2
size 3352707

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5d789fb3a6eeda67b45e715cfa15764e605fc0a2c1fd0e211b7c2d82446fa4c9
size 933494
oid sha256:3d33f9a7393939783e4d3e3096e6ca5503870b34433938f08a03fd0542a34292
size 932489

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e20f510633d1a7c158b6d7bf53c80538713d92f66aa79310c17e1b412a5a9426
size 4751551
oid sha256:3ab3c7ddb0245fc369a36ea67ce0042c532fc022b2f2a5b4726ee4d2218e60e4
size 4746133

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb79a4c7712539d04ffcf6723f5a72c348aad81b363a8993a6b95552d788d87c
size 820063
oid sha256:ba48313cf5878d45ce342758356c028dde6c019d767998d7df71906730385c46
size 820358

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:da8a5dc9ac23bc19b0ba2201c8b19bc252a7d92a4db02a0d9fb708f62aba2afd
size 3199105
oid sha256:4be930827ff2cab6ad6c18da618498d8da5445e4abd0c73e56ee6dc6deac9ec4
size 3195820

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:523c008ead84776d546b2295dc26933ded47bcc1be4bbd1d4ae28f77f580550a
size 559246
oid sha256:96a76ad2ae2e23d2cafc71fbe5a760e6e7ede83bef7635cc03ba4906c9eb1f1e
size 559467

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e9706d9c59162558b254fc5813830ac8ca11ed4611c6cc19af1e360612daf1f8
size 1127281
oid sha256:3754c6bda2308aedba5d66acc6ea156f8f64969b462a733146c26d8d3e93b8de
size 1127318

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dbd9c5ab249a73d044d984f08977aeadfcaf6c001fa598f7d29e27158c98db68
size 857900
oid sha256:728017aa0e305bca0e37bef4f85e8e42fb7e4a2e8e08814713a665945aa72c49
size 858190

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:be65f4807bdd555bd89024d1077a6e7e7de6c56af3f49c00ede7b26b9998a751
size 3436123
oid sha256:d1227d56b3eb198f83640056bd743b4f234f1b2ad20644ea03f06eff34529284
size 3434582

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:10bbd20fd9ffea263a4a6a0a5ae846c0981743c0eaa301d6984c6689039f61f4
size 520267
oid sha256:c4fd97bb4345d12f0e063e70b4b376f1d53b63f1aabf95387e7ec355b601da61
size 519495

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a7333febe7ffe6bcb0176ddad6a57dab5d1c36017e0f0934f614a2c32f7e1fd8
size 2117042
oid sha256:811e8f249e5c028e8a09fda609c702d42940de21060152bd8e65f7abc6d5480d
size 2115662

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:adad47de10961bcfe088d7192c6d29f7719bc95a6b1fb109d1de99080f43a807
size 572337
oid sha256:ad63cf60d4399157b2aa910d81520aeacde42dc5552aa4199bf82f067434a0a6
size 571996

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:af9b793135c26c3bef76a83fe5675817354ecec19ddb0cc61ebb1d54eebbe47f
size 2933511
oid sha256:7764209b0698957bd0975e15b3436412dcc820545a621cf5114c0c3ba50b9980
size 2931596

Some files were not shown because too many files have changed in this diff.