Merge branch 'main' into repo-sync
This commit is contained in:
1
.github/CODEOWNERS
vendored
1
.github/CODEOWNERS
vendored
@@ -11,7 +11,6 @@
|
||||
/script/ @github/docs-engineering
|
||||
/includes/ @github/docs-engineering
|
||||
/lib/search/popular-pages.json @github/docs-engineering
|
||||
app.json @github/docs-engineering
|
||||
Dockerfile @github/docs-engineering
|
||||
package-lock.json @github/docs-engineering
|
||||
package.json @github/docs-engineering
|
||||
|
||||
51
.github/actions-scripts/prod-deploy.js
vendored
51
.github/actions-scripts/prod-deploy.js
vendored
@@ -1,51 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import getOctokit from '../../script/helpers/github.js'
|
||||
import deployToProduction from '../../script/deployment/deploy-to-production.js'
|
||||
|
||||
const {
|
||||
GITHUB_TOKEN,
|
||||
HEROKU_API_TOKEN,
|
||||
HEROKU_PRODUCTION_APP_NAME,
|
||||
SOURCE_BLOB_URL,
|
||||
DELAY_FOR_PREBOOT,
|
||||
RUN_ID,
|
||||
} = process.env
|
||||
|
||||
// Exit if GitHub Actions PAT is not found
|
||||
if (!GITHUB_TOKEN) {
|
||||
throw new Error('You must supply a GITHUB_TOKEN environment variable!')
|
||||
}
|
||||
|
||||
// Exit if Heroku API token is not found
|
||||
if (!HEROKU_API_TOKEN) {
|
||||
throw new Error('You must supply a HEROKU_API_TOKEN environment variable!')
|
||||
}
|
||||
|
||||
// Exit if Heroku App name is not found
|
||||
if (!HEROKU_PRODUCTION_APP_NAME) {
|
||||
throw new Error('You must supply a HEROKU_PRODUCTION_APP_NAME environment variable!')
|
||||
}
|
||||
|
||||
if (!RUN_ID) {
|
||||
throw new Error('$RUN_ID not set')
|
||||
}
|
||||
|
||||
// This helper uses the `GITHUB_TOKEN` implicitly!
|
||||
// We're using our usual version of Octokit vs. the provided `github`
|
||||
// instance to avoid versioning discrepancies.
|
||||
const octokit = getOctokit()
|
||||
|
||||
try {
|
||||
await deployToProduction({
|
||||
octokit,
|
||||
includeDelayForPreboot: DELAY_FOR_PREBOOT !== 'false',
|
||||
// These parameters will ONLY be set by Actions
|
||||
sourceBlobUrl: SOURCE_BLOB_URL,
|
||||
runId: RUN_ID,
|
||||
})
|
||||
} catch (error) {
|
||||
console.error(`Failed to deploy to production: ${error.message}`)
|
||||
console.error(error)
|
||||
throw error
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import * as github from '@actions/github'
|
||||
|
||||
import getOctokit from '../../script/helpers/github.js'
|
||||
|
||||
const { GITHUB_TOKEN } = process.env
|
||||
|
||||
// Exit if GitHub Actions PAT is not found
|
||||
if (!GITHUB_TOKEN) {
|
||||
throw new Error('You must supply a GITHUB_TOKEN environment variable!')
|
||||
}
|
||||
|
||||
// This helper uses the `GITHUB_TOKEN` implicitly!
|
||||
// We're using our usual version of Octokit vs. the provided `github`
|
||||
// instance to avoid versioning discrepancies.
|
||||
const octokit = getOctokit()
|
||||
|
||||
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
|
||||
if (!CONTEXT_NAME) {
|
||||
throw new Error('$CONTEXT_NAME not set')
|
||||
}
|
||||
if (!ACTIONS_RUN_LOG) {
|
||||
throw new Error('$ACTIONS_RUN_LOG not set')
|
||||
}
|
||||
if (!HEAD_SHA) {
|
||||
throw new Error('$HEAD_SHA not set')
|
||||
}
|
||||
|
||||
const { context } = github
|
||||
const owner = context.repo.owner
|
||||
const repo = context.payload.repository.name
|
||||
|
||||
await octokit.repos.createCommitStatus({
|
||||
owner,
|
||||
repo,
|
||||
sha: HEAD_SHA,
|
||||
context: CONTEXT_NAME,
|
||||
state: 'success',
|
||||
description: 'Successfully deployed! See logs.',
|
||||
target_url: ACTIONS_RUN_LOG,
|
||||
})
|
||||
55
.github/actions-scripts/staging-deploy.js
vendored
55
.github/actions-scripts/staging-deploy.js
vendored
@@ -1,55 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import parsePrUrl from '../../script/deployment/parse-pr-url.js'
|
||||
import getOctokit from '../../script/helpers/github.js'
|
||||
import deployToStaging from '../../script/deployment/deploy-to-staging.js'
|
||||
|
||||
const { GITHUB_TOKEN, HEROKU_API_TOKEN } = process.env
|
||||
|
||||
// Exit if GitHub Actions PAT is not found
|
||||
if (!GITHUB_TOKEN) {
|
||||
throw new Error('You must supply a GITHUB_TOKEN environment variable!')
|
||||
}
|
||||
|
||||
// Exit if Heroku API token is not found
|
||||
if (!HEROKU_API_TOKEN) {
|
||||
throw new Error('You must supply a HEROKU_API_TOKEN environment variable!')
|
||||
}
|
||||
|
||||
// This helper uses the `GITHUB_TOKEN` implicitly!
|
||||
// We're using our usual version of Octokit vs. the provided `github`
|
||||
// instance to avoid versioning discrepancies.
|
||||
const octokit = getOctokit()
|
||||
|
||||
const { RUN_ID, PR_URL, SOURCE_BLOB_URL } = process.env
|
||||
if (!RUN_ID) {
|
||||
throw new Error('$RUN_ID not set')
|
||||
}
|
||||
if (!PR_URL) {
|
||||
throw new Error('$PR_URL not set')
|
||||
}
|
||||
if (!SOURCE_BLOB_URL) {
|
||||
throw new Error('$SOURCE_BLOB_URL not set')
|
||||
}
|
||||
|
||||
const { owner, repo, pullNumber } = parsePrUrl(PR_URL)
|
||||
if (!owner || !repo || !pullNumber) {
|
||||
throw new Error(
|
||||
`'pullRequestUrl' input must match URL format 'https://github.com/github/(docs|docs-internal)/pull/123' but was '${PR_URL}'`
|
||||
)
|
||||
}
|
||||
|
||||
const { data: pullRequest } = await octokit.pulls.get({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pullNumber,
|
||||
})
|
||||
|
||||
await deployToStaging({
|
||||
octokit,
|
||||
pullRequest,
|
||||
forceRebuild: false,
|
||||
// These parameters will ONLY be set by Actions
|
||||
sourceBlobUrl: SOURCE_BLOB_URL,
|
||||
runId: RUN_ID,
|
||||
})
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
run: |
|
||||
gh pr comment $PR --body "Thanks so much for opening this PR and contributing to GitHub Docs!
|
||||
|
||||
- When you're ready for the Docs team to review this PR, apply the **ready-for-doc-review** label and your PR will be added to the [Docs Content review board](https://github.com/orgs/github/memexes/901?layout=table&groupedBy%5BcolumnId%5D=11024). **Please factor in at least 72 hours for a review, even longer if this is a substantial change.**
|
||||
- When you're ready for the Docs team to review this PR, request a review by *docs-content* and your PR will be added to the [Docs Content review board](https://github.com/orgs/github/memexes/901?layout=table&groupedBy%5BcolumnId%5D=11024). **Please factor in at least 72 hours for a review, even longer if this is a substantial change.**
|
||||
- If this is a major update to the docs, you might want to go back and open an [issue](https://github.com/github/docs-content/issues/new/choose) to ensure we've covered all areas of the docs in these updates. Not doing so may result in delays or inaccurate documentation."
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
31
.github/workflows/ping-staging-apps.yml
vendored
31
.github/workflows/ping-staging-apps.yml
vendored
@@ -1,31 +0,0 @@
|
||||
name: Ping staging apps
|
||||
|
||||
# **What it does**: This keeps our staging applications from automatically spinning down.
|
||||
# **Why we have it**: Staging applications can hiberate without use.
|
||||
# **Who does it impact**: Anyone with a pull request in docs-internal.
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '10,30,50 * * * *' # every twenty minutes
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
ping_staging_apps:
|
||||
name: Ping
|
||||
if: github.repository == 'github/docs-internal'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
- name: npm ci
|
||||
run: npm ci
|
||||
- name: Run script
|
||||
run: script/ping-staging-apps.js
|
||||
199
.github/workflows/prod-build-deploy.yml
vendored
199
.github/workflows/prod-build-deploy.yml
vendored
@@ -1,199 +0,0 @@
|
||||
name: Production - Build and Deploy
|
||||
|
||||
# **What it does**: Builds and deploys the default branch to production
|
||||
# **Why we have it**: To enable us to deploy the latest to production whenever necessary rather than relying on PR merges.
|
||||
# **Who does it impact**: All contributors.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
deployments: write
|
||||
|
||||
# This allows a subsequently queued workflow run to take priority over
|
||||
# previously queued runs but NOT interrupt currently executing runs
|
||||
concurrency:
|
||||
group: '${{ github.workflow }}'
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
if: ${{ github.repository == 'github/docs-internal'}}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
with:
|
||||
persist-credentials: 'false'
|
||||
lfs: 'true'
|
||||
|
||||
- name: Check out LFS objects
|
||||
run: git lfs checkout
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
# Required for `npm pkg ...` command support
|
||||
- name: Update to npm@^7.20.0
|
||||
run: npm install --global npm@^7.20.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Clone early access
|
||||
run: node script/early-access/clone-for-build.js
|
||||
env:
|
||||
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
|
||||
GIT_BRANCH: main
|
||||
|
||||
- name: Cache nextjs build
|
||||
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
|
||||
with:
|
||||
path: .next/cache
|
||||
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
- name: Remove development-only dependencies
|
||||
run: npm prune --production
|
||||
|
||||
- name: Remove all npm scripts
|
||||
run: npm pkg delete scripts
|
||||
|
||||
- name: Set npm script for Heroku build to noop
|
||||
run: npm set-script heroku-postbuild "echo 'Application was pre-built!'"
|
||||
|
||||
- name: Create a gzipped archive
|
||||
run: |
|
||||
tar -cz --file=app.tar.gz \
|
||||
node_modules/ \
|
||||
.next/ \
|
||||
assets/ \
|
||||
content/ \
|
||||
data/ \
|
||||
includes/ \
|
||||
lib/ \
|
||||
middleware/ \
|
||||
translations/ \
|
||||
server.mjs \
|
||||
package*.json \
|
||||
.npmrc \
|
||||
feature-flags.json \
|
||||
next.config.js \
|
||||
app.json \
|
||||
Procfile
|
||||
|
||||
- name: Install the development dependencies again
|
||||
run: npm install
|
||||
|
||||
- name: Create a Heroku build source
|
||||
id: build-source
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
with:
|
||||
script: |
|
||||
const { owner, repo } = context.repo
|
||||
|
||||
if (owner !== 'github') {
|
||||
throw new Error(`Repository owner must be 'github' but was: ${owner}`)
|
||||
}
|
||||
if (repo !== 'docs-internal') {
|
||||
throw new Error(`Repository name must be 'docs-internal' but was: ${repo}`)
|
||||
}
|
||||
|
||||
const Heroku = require('heroku-client')
|
||||
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
|
||||
|
||||
try {
|
||||
const { source_blob: sourceBlob } = await heroku.post('/sources')
|
||||
const { put_url: uploadUrl, get_url: downloadUrl } = sourceBlob
|
||||
|
||||
core.setOutput('upload_url', uploadUrl)
|
||||
core.setOutput('download_url', downloadUrl)
|
||||
} catch (error) {
|
||||
if (error.statusCode === 503) {
|
||||
console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
|
||||
# See: https://devcenter.heroku.com/articles/build-and-release-using-the-api#sources-endpoint
|
||||
- name: Upload to the Heroku build source
|
||||
env:
|
||||
UPLOAD_URL: ${{ steps.build-source.outputs.upload_url }}
|
||||
run: |
|
||||
curl "$UPLOAD_URL" \
|
||||
-X PUT \
|
||||
-H 'Content-Type:' \
|
||||
--data-binary @app.tar.gz
|
||||
|
||||
- name: Deploy
|
||||
id: deploy
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
HEROKU_PRODUCTION_APP_NAME: ${{ secrets.HEROKU_PRODUCTION_APP_NAME }}
|
||||
HYDRO_ENDPOINT: ${{ secrets.HYDRO_ENDPOINT }}
|
||||
HYDRO_SECRET: ${{ secrets.HYDRO_SECRET }}
|
||||
SOURCE_BLOB_URL: ${{ steps.build-source.outputs.download_url }}
|
||||
DELAY_FOR_PREBOOT: 'true'
|
||||
ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
run: .github/actions-scripts/prod-deploy.js
|
||||
|
||||
- name: Mark the deployment as inactive if timed out
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
if: ${{ steps.deploy.outcome == 'cancelled' }}
|
||||
env:
|
||||
DEPLOYMENT_ID: ${{ steps.deploy.outputs.deploymentId }}
|
||||
LOG_URL: ${{ steps.deploy.outputs.logUrl }}
|
||||
with:
|
||||
script: |
|
||||
const { DEPLOYMENT_ID, LOG_URL } = process.env
|
||||
const { owner, repo } = context.repo
|
||||
|
||||
if (!DEPLOYMENT_ID) {
|
||||
throw new Error('A deployment wasn't created before a timeout occurred!')
|
||||
}
|
||||
|
||||
await github.repos.createDeploymentStatus({
|
||||
owner,
|
||||
repo,
|
||||
deployment_id: DEPLOYMENT_ID,
|
||||
state: 'error',
|
||||
description: 'The deployment step timed out. See workflow logs.',
|
||||
log_url: LOG_URL,
|
||||
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
|
||||
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
|
||||
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
|
||||
mediaType: {
|
||||
previews: ['ant-man', 'flash'],
|
||||
},
|
||||
})
|
||||
console.log('⏲️ Deployment status: error - The deployment timed out...')
|
||||
|
||||
# - name: Purge Fastly edge cache
|
||||
# env:
|
||||
# FASTLY_TOKEN: ${{ secrets.FASTLY_TOKEN }}
|
||||
# FASTLY_SERVICE_ID: ${{ secrets.FASTLY_SERVICE_ID }}
|
||||
# FASTLY_SURROGATE_KEY: 'every-deployment'
|
||||
# run: .github/actions-scripts/purge-fastly-edge-cache.js
|
||||
|
||||
- name: Send Slack notification if workflow failed
|
||||
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
|
||||
if: ${{ failure() }}
|
||||
with:
|
||||
channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
|
||||
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
|
||||
color: failure
|
||||
text: Production deployment failed at commit ${{ github.sha }}. See https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
8
.github/workflows/ready-for-doc-review.yml
vendored
8
.github/workflows/ready-for-doc-review.yml
vendored
@@ -1,12 +1,12 @@
|
||||
name: Ready for docs-content review
|
||||
|
||||
# **What it does**: Adds pull requests in the docs-internal repository to the docs-content review board when the "ready-for-doc-review" label is added
|
||||
# **What it does**: Adds pull requests in the docs-internal repository to the docs-content review board when the "ready-for-doc-review" label is added or when a review by docs-content is requested
|
||||
# **Why we have it**: So that other GitHub teams can easily request reviews from the docs-content team, and so that writers can see when a PR is ready for review
|
||||
# **Who does it impact**: Writers working in the docs-internal repository
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [labeled]
|
||||
types: [labeled, review_requested]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -14,7 +14,9 @@ permissions:
|
||||
jobs:
|
||||
request_doc_review:
|
||||
name: Request a review from the docs-content team
|
||||
if: github.event.label.name == 'ready-for-doc-review' && github.repository == 'github/docs-internal'
|
||||
if: >-
|
||||
github.repository == 'github/docs-internal' &&
|
||||
(github.event.label.name == 'ready-for-doc-review' || github.event.requested_team.name == 'docs-content')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repo content
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
name: Remove stale staging resources
|
||||
|
||||
# **What it does**:
|
||||
# This cleans up any rogue staging applications and environments that outlasted
|
||||
# the closure of their corresponding pull requests.
|
||||
# **Why we have it**:
|
||||
# Staging applications and environments should be destroyed after their
|
||||
# corresponding pull request is closed or merged, especially to save money spent
|
||||
# on Heroku App staging deployments for closed PRs.
|
||||
# **Who does it impact**:
|
||||
# Anyone with a closed, spammy, or deleted pull request in docs or docs-internal.
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '15,45 * * * *' # every thirty minutes at :15 and :45
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
deployments: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
remove_stale_staging_apps:
|
||||
name: Remove stale staging apps
|
||||
if: ${{ github.repository == 'github/docs-internal' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repo's default branch
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Run script
|
||||
run: script/remove-stale-staging-apps.js
|
||||
env:
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
remove_stale_staging_envs:
|
||||
name: Remove stale staging environments
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repo's default branch
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Run script
|
||||
run: script/remove-stale-staging-envs.js
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
ELEVATED_TOKEN: ${{ secrets.DOCS_BOT_FR }}
|
||||
REPO: ${{ github.repository }}
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
210
.github/workflows/staging-build-and-deploy-pr.yml
vendored
210
.github/workflows/staging-build-and-deploy-pr.yml
vendored
@@ -1,210 +0,0 @@
|
||||
name: Staging - Build and Deploy PR (fast and private-only)
|
||||
|
||||
# **What it does**: Builds and deploys PRs to staging but ONLY for docs-internal
|
||||
# **Why we have it**: Most PRs are made on the private repo. Let's make those extra fast if we can worry less about security.
|
||||
# **Who does it impact**: All staff.
|
||||
|
||||
# This whole workflow is only guaranteed to be secure in the *private
|
||||
# repo* and because we repo-sync these files over the to the public one,
|
||||
# IT'S IMPORTANT THAT THIS WORKFLOW IS ONLY ENABLED IN docs-internal!
|
||||
|
||||
on:
|
||||
# The advantage of 'pull_request' over 'pull_request_target' is that we
|
||||
# can make changes to this file and test them in a pull request, instead
|
||||
# of relying on landing it in 'main' first.
|
||||
# From a security point of view, its arguably safer this way because
|
||||
# unlike 'pull_request_target', these only have secrets if the pull
|
||||
# request creator has permission to access secrets.
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
deployments: write
|
||||
pull-requests: read
|
||||
statuses: write
|
||||
|
||||
# This allows a subsequently queued workflow run to interrupt previous runs
|
||||
concurrency:
|
||||
group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-and-deploy-pr:
|
||||
# Important. This whole file is only supposed to run in the PRIVATE repo.
|
||||
if: ${{ github.repository == 'github/docs-internal' }}
|
||||
|
||||
# The assumption here is that self-hosted is faster (e.g CPU power)
|
||||
# that the regular ones. And it matters in this workflow because
|
||||
# we do heavy CPU stuff with `npm run build` and `tar`
|
||||
# runs-on: ubuntu-latest
|
||||
runs-on: self-hosted
|
||||
|
||||
timeout-minutes: 5
|
||||
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
with:
|
||||
lfs: 'true'
|
||||
# To prevent issues with cloning early access content later
|
||||
persist-credentials: 'false'
|
||||
|
||||
- name: Check out LFS objects
|
||||
run: git lfs checkout
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Cache nextjs build
|
||||
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
|
||||
with:
|
||||
path: .next/cache
|
||||
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
- name: Clone early access
|
||||
run: node script/early-access/clone-for-build.js
|
||||
env:
|
||||
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
|
||||
GIT_BRANCH: ${{ github.head_ref || github.ref }}
|
||||
|
||||
- name: Create a Heroku build source
|
||||
id: build-source
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
with:
|
||||
script: |
|
||||
const { owner, repo } = context.repo
|
||||
|
||||
if (owner !== 'github') {
|
||||
throw new Error(`Repository owner must be 'github' but was: ${owner}`)
|
||||
}
|
||||
if (repo !== 'docs-internal') {
|
||||
throw new Error(`Repository name must be 'docs-internal' but was: ${repo}`)
|
||||
}
|
||||
|
||||
const Heroku = require('heroku-client')
|
||||
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
|
||||
|
||||
try {
|
||||
const { source_blob: sourceBlob } = await heroku.post('/sources')
|
||||
const { put_url: uploadUrl, get_url: downloadUrl } = sourceBlob
|
||||
|
||||
core.setOutput('upload_url', uploadUrl)
|
||||
core.setOutput('download_url', downloadUrl)
|
||||
} catch (error) {
|
||||
if (error.statusCode === 503) {
|
||||
console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
|
||||
- name: Remove development-only dependencies
|
||||
run: npm prune --production
|
||||
|
||||
- name: Remove all npm scripts
|
||||
run: npm pkg delete scripts
|
||||
|
||||
- name: Set npm script for Heroku build to noop
|
||||
run: npm set-script heroku-postbuild "echo 'Application was pre-built!'"
|
||||
|
||||
- name: Delete heavy things we won't need deployed
|
||||
run: |
|
||||
|
||||
# The dereferenced file is not used in runtime once the
|
||||
# decorated file has been created from it.
|
||||
rm -rf lib/rest/static/dereferenced
|
||||
|
||||
# Translations are never tested in Staging builds
|
||||
# but let's keep the empty directory.
|
||||
rm -rf translations
|
||||
mkdir translations
|
||||
|
||||
# Delete all the big search indexes that are NOT English (`*-en-*`)
|
||||
pushd lib/search/indexes
|
||||
ls | grep -ve '\-en\b' | xargs rm
|
||||
popd
|
||||
|
||||
# Note! Some day it would be nice to be able to delete
|
||||
# all the heavy assets because they bloat the tarball.
|
||||
# But it's not obvious how to test it then. For now, we'll have
|
||||
# to accept that every staging build has a copy of the images.
|
||||
|
||||
# The assumption here is that a staging build will not
|
||||
# need these legacy redirects. Only the redirects from
|
||||
# front-matter will be at play.
|
||||
# These static redirects json files are notoriously large
|
||||
# and they make the tarball unnecessarily large.
|
||||
echo '[]' > lib/redirects/static/archived-frontmatter-fallbacks.json
|
||||
echo '{}' > lib/redirects/static/developer.json
|
||||
echo '{}' > lib/redirects/static/archived-redirects-from-213-to-217.json
|
||||
|
||||
# This will turn every `lib/**/static/*.json` into
|
||||
# an equivalent `lib/**/static/*.json.br` file.
|
||||
# Once the server starts, it'll know to fall back to reading
|
||||
# the `.br` equivalent if the `.json` file isn't present.
|
||||
node .github/actions-scripts/compress-large-files.js
|
||||
|
||||
- name: Make the tarball for Heroku
|
||||
run: |
|
||||
# We can't delete the .next/cache directory from the workflow
|
||||
# because it's needed for caching, but we can at least exclude it
|
||||
# from the tarball. Then it can be cached but not weigh down the
|
||||
# tarball we intend to deploy.
|
||||
tar -zc --exclude=.next/cache --file=app.tar.gz \
|
||||
node_modules/ \
|
||||
.next/ \
|
||||
assets/ \
|
||||
content/ \
|
||||
data/ \
|
||||
includes/ \
|
||||
lib/ \
|
||||
middleware/ \
|
||||
translations/ \
|
||||
server.mjs \
|
||||
package*.json \
|
||||
.npmrc \
|
||||
feature-flags.json \
|
||||
next.config.js \
|
||||
app.json \
|
||||
Procfile
|
||||
|
||||
du -sh app.tar.gz
|
||||
|
||||
# See: https://devcenter.heroku.com/articles/build-and-release-using-the-api#sources-endpoint
|
||||
- name: Upload to the Heroku build source
|
||||
env:
|
||||
UPLOAD_URL: ${{ steps.build-source.outputs.upload_url }}
|
||||
run: |
|
||||
curl "$UPLOAD_URL" \
|
||||
-X PUT \
|
||||
-H 'Content-Type:' \
|
||||
--data-binary @app.tar.gz
|
||||
|
||||
# 'npm install' is faster than 'npm ci' because it only needs to
|
||||
# *append* what's missing from ./node_modules/
|
||||
- name: Re-install dependencies so we get devDependencies back
|
||||
run: npm install --no-audit --no-fund --only=dev
|
||||
|
||||
- name: Deploy
|
||||
id: deploy
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
HYDRO_ENDPOINT: ${{ secrets.HYDRO_ENDPOINT }}
|
||||
HYDRO_SECRET: ${{ secrets.HYDRO_SECRET }}
|
||||
PR_URL: ${{ github.event.pull_request.html_url }}
|
||||
SOURCE_BLOB_URL: ${{ steps.build-source.outputs.download_url }}
|
||||
ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
run: .github/actions-scripts/staging-deploy.js
|
||||
135
.github/workflows/staging-build-pr.yml
vendored
135
.github/workflows/staging-build-pr.yml
vendored
@@ -1,135 +0,0 @@
|
||||
name: Staging - Build PR
|
||||
|
||||
# **What it does**: Builds PRs before deploying them.
|
||||
# **Why we have it**: Because it's not safe to share our deploy secrets with forked repos: https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
|
||||
# **Who does it impact**: All contributors.
|
||||
|
||||
# IT'S CRUCIALLY IMPORTANT THAT THIS WORKFLOW IS ONLY ENABLED IN docs!
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# This allows a subsequently queued workflow run to interrupt previous runs
|
||||
# These are different from the concurrency in that here it checks if the
|
||||
# whole workflow runs again. The "inner concurrency" is used for
|
||||
# undeployments to cleaning up resources.
|
||||
concurrency:
|
||||
group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-pr:
|
||||
# Important. This whole file is only supposed to run in the PUBLIC repo.
|
||||
if: ${{ github.repository == 'github/docs' }}
|
||||
|
||||
runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
|
||||
timeout-minutes: 5
|
||||
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
|
||||
concurrency:
|
||||
group: 'PR Staging @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
|
||||
# Make sure only approved files are changed if it's in github/docs
|
||||
- name: Check changed files
|
||||
if: ${{ github.event.pull_request.user.login != 'Octomerger' }}
|
||||
uses: dorny/paths-filter@eb75a1edc117d3756a18ef89958ee59f9500ba58
|
||||
id: filter
|
||||
with:
|
||||
# Base branch used to get changed files
|
||||
base: 'main'
|
||||
|
||||
# Enables setting an output in the format in `${FILTER_NAME}_files
|
||||
# with the names of the matching files formatted as JSON array
|
||||
list-files: json
|
||||
|
||||
# Returns list of changed files matching each filter
|
||||
filters: |
|
||||
notAllowed:
|
||||
- '*.js'
|
||||
- '*.mjs'
|
||||
- '*.cjs'
|
||||
- '*.ts'
|
||||
- '*.tsx'
|
||||
- '*.json'
|
||||
- '.npmrc'
|
||||
- '.babelrc*'
|
||||
- '.env*'
|
||||
- 'script/**'
|
||||
- 'Procfile'
|
||||
|
||||
# When there are changes to files we can't accept
|
||||
- name: Fail when disallowed files are changed
|
||||
if: ${{ steps.filter.outputs.notAllowed == 'true' }}
|
||||
run: exit 1
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
# Required for `npm pkg ...` command support
|
||||
- name: Update to npm@^7.20.0
|
||||
run: npm install --global npm@^7.20.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Cache nextjs build
|
||||
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
|
||||
with:
|
||||
path: .next/cache
|
||||
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
- name: Remove development-only dependencies
|
||||
run: npm prune --production
|
||||
|
||||
- name: Remove all npm scripts
|
||||
run: npm pkg delete scripts
|
||||
|
||||
- name: Set npm script for Heroku build to noop
|
||||
run: npm set-script heroku-postbuild "echo 'Application was pre-built!'"
|
||||
|
||||
- name: Create an archive
|
||||
run: |
|
||||
tar -c --file=app.tar \
|
||||
node_modules/ \
|
||||
.next/ \
|
||||
assets/ \
|
||||
content/ \
|
||||
data/ \
|
||||
includes/ \
|
||||
lib/ \
|
||||
middleware/ \
|
||||
translations/ \
|
||||
server.mjs \
|
||||
package*.json \
|
||||
.npmrc \
|
||||
feature-flags.json \
|
||||
next.config.js \
|
||||
app.json \
|
||||
Procfile
|
||||
|
||||
# We can't delete the .next/cache directory from the workflow
|
||||
# because it's needed for caching, but we can at least delete it
|
||||
# from within the tarball. Then it can be cached but not
|
||||
# weigh down the tarball we intend to deploy.
|
||||
tar --delete --file=app.tar .next/cache
|
||||
|
||||
# Upload only the files needed to run this application.
|
||||
# We are not willing to trust the rest (e.g. script/) for the remainder
|
||||
# of the deployment process.
|
||||
- name: Upload build artifact
|
||||
uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2
|
||||
with:
|
||||
name: pr_build
|
||||
path: app.tar
|
||||
466
.github/workflows/staging-deploy-pr.yml
vendored
466
.github/workflows/staging-deploy-pr.yml
vendored
@@ -1,466 +0,0 @@
|
||||
name: Staging - Deploy PR
|
||||
|
||||
# **What it does**: To deploy PRs to a Heroku staging environment.
|
||||
# **Why we have it**: To deploy with high visibility in case of failures.
|
||||
# **Who does it impact**: All contributors.
|
||||
|
||||
# IT'S CRUCIALLY IMPORTANT THAT THIS WORKFLOW IS ONLY ENABLED IN docs!
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- 'Staging - Build PR'
|
||||
types:
|
||||
- completed
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
deployments: write
|
||||
pull-requests: read
|
||||
statuses: write
|
||||
|
||||
# IMPORTANT: Intentionally OMIT a `concurrency` configuration from this workflow's
|
||||
# top-level as we do not have any guarantee of identifying values being available
|
||||
# within the `github.event` context for PRs from forked repos!
|
||||
#
|
||||
# The implication of this shortcoming is that we may have multiple workflow runs
|
||||
# of this running at the same time for different commits within the same PR.
|
||||
# However, once they reach the `concurrency` configurations deeper down within
|
||||
# this workflow's jobs, then we can expect concurrent short-circuiting to begin.
|
||||
|
||||
env:
|
||||
CONTEXT_NAME: '${{ github.workflow }} / deploy (${{ github.event.workflow_run.event }})'
|
||||
ACTIONS_RUN_LOG: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
BUILD_ACTIONS_RUN_ID: ${{ github.event.workflow_run.id }}
|
||||
BUILD_ACTIONS_RUN_LOG: https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}
|
||||
|
||||
jobs:
|
||||
pr-metadata:
|
||||
# This is needed because the workflow we depend on
|
||||
# (see on.workflow_run.workflows) might be running from pushes on
|
||||
# main. That's because it needs to do that to populate the cache.
|
||||
if: >-
|
||||
${{
|
||||
github.repository == 'github/docs' &&
|
||||
github.event.workflow_run.event == 'pull_request' &&
|
||||
github.event.workflow_run.conclusion == 'success'
|
||||
}}
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
number: ${{ steps.pr.outputs.number }}
|
||||
url: ${{ steps.pr.outputs.url }}
|
||||
state: ${{ steps.pr.outputs.state }}
|
||||
head_sha: ${{ steps.pr.outputs.head_sha }}
|
||||
head_branch: ${{ steps.pr.outputs.head_branch }}
|
||||
head_label: ${{ steps.pr.outputs.head_label }}
|
||||
head_ref: ${{ steps.pr.outputs.head_ref }}
|
||||
steps:
|
||||
- name: Find the originating pull request
|
||||
id: pr
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
BUILD_ACTIONS_RUN_ID: ${{ env.BUILD_ACTIONS_RUN_ID }}
|
||||
with:
|
||||
script: |
|
||||
|
||||
// Curious about what version of node you get
|
||||
console.log('Node version:', process.version)
|
||||
|
||||
// In order to find out the PR info for a forked repo, we must query
|
||||
// the API for more info based on the originating workflow run
|
||||
const { BUILD_ACTIONS_RUN_ID } = process.env
|
||||
const { owner, repo } = context.repo
|
||||
const { data: run } = await github.actions.getWorkflowRun({
|
||||
owner,
|
||||
repo,
|
||||
run_id: BUILD_ACTIONS_RUN_ID,
|
||||
})
|
||||
|
||||
// Gather PR-identifying information from the workflow run
|
||||
const {
|
||||
head_branch: headBranch,
|
||||
head_sha: headSha,
|
||||
head_repository: {
|
||||
owner: { login: prRepoOwner },
|
||||
name: prRepoName
|
||||
}
|
||||
} = run
|
||||
|
||||
const prIsInternal = owner === prRepoOwner && repo === prRepoName
|
||||
let headLabel = `${prRepoOwner}:${headBranch}`
|
||||
|
||||
// If the PR is external, prefix its head branch name with the
|
||||
// forked repo owner's login and their fork repo name e.g.
|
||||
// "octocat/my-fork:docs". We need to include the fork repo
|
||||
// name as well to account for an API issue (this will work fine
|
||||
// if they don't have a different fork repo name).
|
||||
if (!prIsInternal) {
|
||||
headLabel = `${prRepoOwner}/${prRepoName}:${headBranch}`
|
||||
}
|
||||
|
||||
// If the PR is external, prefix its head branch name with the
|
||||
// forked repo owner's login, e.g. "octocat:docs"
|
||||
const headRef = prIsInternal ? headBranch : headLabel
|
||||
|
||||
// Retrieve matching PRs (up to 30)
|
||||
const { data: pulls } = await github.pulls.list({
|
||||
owner,
|
||||
repo,
|
||||
head: headLabel,
|
||||
sort: 'updated',
|
||||
direction: 'desc',
|
||||
per_page: 30
|
||||
})
|
||||
|
||||
// Find the open PR, if any, otherwise choose the most recently updated
|
||||
const targetPull = pulls.find(pr => pr.state === 'open') || pulls[0] || {}
|
||||
|
||||
const pullNumber = targetPull.number || 0
|
||||
const pullUrl = targetPull.html_url || 'about:blank'
|
||||
const pullState = targetPull.state || 'closed'
|
||||
|
||||
core.setOutput('number', pullNumber.toString())
|
||||
core.setOutput('url', pullUrl)
|
||||
core.setOutput('state', pullState)
|
||||
core.setOutput('head_sha', headSha)
|
||||
core.setOutput('head_branch', headBranch)
|
||||
core.setOutput('head_label', headLabel)
|
||||
core.setOutput('head_ref', headRef)
|
||||
|
||||
debug-originating-trigger:
|
||||
needs: pr-metadata
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dump info about the originating workflow run
|
||||
env:
|
||||
PR_NUMBER: ${{ needs.pr-metadata.outputs.number }}
|
||||
PR_URL: ${{ needs.pr-metadata.outputs.url }}
|
||||
PR_STATE: ${{ needs.pr-metadata.outputs.state }}
|
||||
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
|
||||
HEAD_BRANCH: ${{ needs.pr-metadata.outputs.head_branch }}
|
||||
HEAD_LABEL: ${{ needs.pr-metadata.outputs.head_label }}
|
||||
HEAD_REF: ${{ needs.pr-metadata.outputs.head_ref }}
|
||||
BUILD_ACTIONS_RUN_ID: ${{ env.BUILD_ACTIONS_RUN_ID }}
|
||||
BUILD_ACTIONS_RUN_LOG: ${{ env.BUILD_ACTIONS_RUN_LOG }}
|
||||
run: |
|
||||
echo "Originating workflow info:"
|
||||
echo " - PR_NUMBER = $PR_NUMBER"
|
||||
echo " - PR_URL = $PR_URL"
|
||||
echo " - PR_STATE = $PR_STATE"
|
||||
echo " - HEAD_SHA = $HEAD_SHA"
|
||||
echo " - HEAD_BRANCH = $HEAD_BRANCH"
|
||||
echo " - HEAD_LABEL = $HEAD_LABEL"
|
||||
echo " - HEAD_REF = $HEAD_REF"
|
||||
echo " - BUILD_ACTIONS_RUN_ID = $BUILD_ACTIONS_RUN_ID"
|
||||
echo " - BUILD_ACTIONS_RUN_LOG = $BUILD_ACTIONS_RUN_LOG"
|
||||
|
||||
notify-of-failed-builds:
|
||||
needs: pr-metadata
|
||||
if: >-
|
||||
${{
|
||||
needs.pr-metadata.outputs.number != '0' &&
|
||||
github.event.workflow_run.conclusion == 'failure'
|
||||
}}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 1
|
||||
# Specifically omitting a concurrency group here in case the build was not
|
||||
# successful BECAUSE a subsequent build already canceled it
|
||||
steps:
|
||||
- name: Verify build workflow run was not cancelled
|
||||
id: check-workflow-run
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
BUILD_ACTIONS_RUN_ID: ${{ env.BUILD_ACTIONS_RUN_ID }}
|
||||
with:
|
||||
script: |
|
||||
const { owner, repo } = context.repo
|
||||
const { data: { jobs: buildJobs } } = await github.actions.listJobsForWorkflowRun({
|
||||
owner,
|
||||
repo,
|
||||
run_id: process.env.BUILD_ACTIONS_RUN_ID,
|
||||
filter: 'latest'
|
||||
})
|
||||
const wasCancelled = (
|
||||
buildJobs.length > 0 &&
|
||||
buildJobs.every(({ status, conclusion }) => {
|
||||
return status === 'completed' && conclusion === 'cancelled'
|
||||
})
|
||||
)
|
||||
core.setOutput('cancelled', wasCancelled.toString())
|
||||
|
||||
- if: ${{ steps.check-workflow-run.outputs.cancelled == 'false' }}
|
||||
name: Send Slack notification if build workflow failed
|
||||
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
|
||||
with:
|
||||
channel: ${{ secrets.DOCS_STAGING_DEPLOYMENT_FAILURES_SLACK_CHANNEL_ID }}
|
||||
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
|
||||
color: failure
|
||||
text: Staging build failed for PR ${{ needs.pr-metadata.outputs.url }} at commit ${{ needs.pr-metadata.outputs.head_sha }}. See ${{ env.BUILD_ACTIONS_RUN_LOG }}. This run was ${{ env.ACTIONS_RUN_LOG }}.
|
||||
|
||||
prepare-for-deploy:
|
||||
needs: pr-metadata
|
||||
if: ${{ needs.pr-metadata.outputs.state == 'open' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
|
||||
concurrency:
|
||||
group: 'PR Staging @ ${{ needs.pr-metadata.outputs.head_label }}'
|
||||
cancel-in-progress: true
|
||||
outputs:
|
||||
source_blob_url: ${{ steps.build-source.outputs.download_url }}
|
||||
steps:
|
||||
- name: Create initial status
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
|
||||
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
|
||||
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
|
||||
with:
|
||||
script: |
|
||||
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
|
||||
const { owner, repo } = context.repo
|
||||
await github.repos.createCommitStatus({
|
||||
owner,
|
||||
repo,
|
||||
sha: HEAD_SHA,
|
||||
context: CONTEXT_NAME,
|
||||
state: 'pending',
|
||||
description: 'The app is being deployed. See logs.',
|
||||
target_url: ACTIONS_RUN_LOG
|
||||
})
|
||||
|
||||
- name: Check out repo's default branch
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
with:
|
||||
# To prevent issues with cloning early access content later
|
||||
persist-credentials: 'false'
|
||||
lfs: 'true'
|
||||
|
||||
- name: Check out LFS objects
|
||||
run: git lfs checkout
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
# Install any additional dependencies *before* downloading the build artifact
|
||||
- name: Install Heroku client development-only dependency
|
||||
run: npm install --no-save heroku-client
|
||||
|
||||
# Download the previously built "app.tar"
|
||||
- name: Download build artifact
|
||||
uses: dawidd6/action-download-artifact@af92a8455a59214b7b932932f2662fdefbd78126
|
||||
with:
|
||||
workflow: ${{ github.event.workflow_run.workflow_id }}
|
||||
run_id: ${{ env.BUILD_ACTIONS_RUN_ID }}
|
||||
name: pr_build
|
||||
path: ${{ runner.temp }}
|
||||
|
||||
# gzip the app.tar to meet Heroku's expected format
|
||||
- name: Create a gzipped archive (docs)
|
||||
run: gzip -9 < "$RUNNER_TEMP/app.tar" > app.tar.gz
|
||||
|
||||
- name: Create a Heroku build source
|
||||
id: build-source
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
with:
|
||||
script: |
|
||||
const { owner, repo } = context.repo
|
||||
|
||||
if (owner !== 'github') {
|
||||
throw new Error(`Repository owner must be 'github' but was: ${owner}`)
|
||||
}
|
||||
if (repo !== 'docs') {
|
||||
throw new Error(`Repository name must be 'docs' but was: ${repo}`)
|
||||
}
|
||||
|
||||
const Heroku = require('heroku-client')
|
||||
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
|
||||
|
||||
try {
|
||||
const { source_blob: sourceBlob } = await heroku.post('/sources')
|
||||
const { put_url: uploadUrl, get_url: downloadUrl } = sourceBlob
|
||||
|
||||
core.setOutput('upload_url', uploadUrl)
|
||||
core.setOutput('download_url', downloadUrl)
|
||||
} catch (error) {
|
||||
if (error.statusCode === 503) {
|
||||
console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
|
||||
# See: https://devcenter.heroku.com/articles/build-and-release-using-the-api#sources-endpoint
|
||||
- name: Upload to the Heroku build source
|
||||
env:
|
||||
UPLOAD_URL: ${{ steps.build-source.outputs.upload_url }}
|
||||
run: |
|
||||
curl "$UPLOAD_URL" \
|
||||
-X PUT \
|
||||
-H 'Content-Type:' \
|
||||
--data-binary @app.tar.gz
|
||||
|
||||
- name: Create failure status
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
if: ${{ failure() }}
|
||||
env:
|
||||
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
|
||||
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
|
||||
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
|
||||
with:
|
||||
script: |
|
||||
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
|
||||
const { owner, repo } = context.repo
|
||||
await github.repos.createCommitStatus({
|
||||
owner,
|
||||
repo,
|
||||
sha: HEAD_SHA,
|
||||
context: CONTEXT_NAME,
|
||||
state: 'error',
|
||||
description: 'Failed to deploy. See logs.',
|
||||
target_url: ACTIONS_RUN_LOG
|
||||
})
|
||||
|
||||
- name: Send Slack notification if deployment preparation job failed
|
||||
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
|
||||
if: ${{ failure() }}
|
||||
with:
|
||||
channel: ${{ secrets.DOCS_STAGING_DEPLOYMENT_FAILURES_SLACK_CHANNEL_ID }}
|
||||
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
|
||||
color: failure
|
||||
text: Staging preparation failed for PR ${{ needs.pr-metadata.outputs.url }} at commit ${{ needs.pr-metadata.outputs.head_sha }}. See ${{ env.ACTIONS_RUN_LOG }}.
|
||||
|
||||
check-pr-before-deploy:
|
||||
needs: [pr-metadata, prepare-for-deploy]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 1
|
||||
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
|
||||
concurrency:
|
||||
group: 'PR Staging @ ${{ needs.pr-metadata.outputs.head_label }}'
|
||||
cancel-in-progress: true
|
||||
outputs:
|
||||
pull_request_state: ${{ steps.check-pr.outputs.state }}
|
||||
steps:
|
||||
- name: Check pull request state
|
||||
id: check-pr
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
env:
|
||||
PR_NUMBER: ${{ needs.pr-metadata.outputs.number }}
|
||||
with:
|
||||
script: |
|
||||
const { owner, repo } = context.repo
|
||||
const { data: pullRequest } = await github.pulls.get({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: process.env.PR_NUMBER
|
||||
})
|
||||
core.setOutput('state', pullRequest.state)
|
||||
|
||||
deploy:
|
||||
needs: [pr-metadata, prepare-for-deploy, check-pr-before-deploy]
|
||||
if: ${{ needs.check-pr-before-deploy.outputs.pull_request_state == 'open' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
# This interrupts Build and Deploy workflow runs in progress for this PR branch.
|
||||
concurrency:
|
||||
group: 'PR Staging @ ${{ needs.pr-metadata.outputs.head_label }}'
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- name: Check out repo's default branch
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Deploy
|
||||
id: deploy
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
|
||||
HYDRO_ENDPOINT: ${{ secrets.HYDRO_ENDPOINT }}
|
||||
HYDRO_SECRET: ${{ secrets.HYDRO_SECRET }}
|
||||
PR_URL: ${{ needs.pr-metadata.outputs.url }}
|
||||
SOURCE_BLOB_URL: ${{ needs.prepare-for-deploy.outputs.source_blob_url }}
|
||||
ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
run: .github/actions-scripts/staging-deploy.js
|
||||
|
||||
- name: Create successful commit status
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
|
||||
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
|
||||
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
|
||||
run: .github/actions-scripts/staging-commit-status-success.js
|
||||
|
||||
- name: Mark the deployment as inactive if timed out
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
if: ${{ steps.deploy.outcome == 'cancelled' }}
|
||||
env:
|
||||
DEPLOYMENT_ID: ${{ steps.deploy.outputs.deploymentId }}
|
||||
LOG_URL: ${{ steps.deploy.outputs.logUrl }}
|
||||
with:
|
||||
script: |
|
||||
const { DEPLOYMENT_ID, LOG_URL } = process.env
|
||||
const { owner, repo } = context.repo
|
||||
|
||||
if (!DEPLOYMENT_ID) {
|
||||
throw new Error("A deployment wasn't created before a timeout occurred!")
|
||||
}
|
||||
|
||||
await github.repos.createDeploymentStatus({
|
||||
owner,
|
||||
repo,
|
||||
deployment_id: DEPLOYMENT_ID,
|
||||
state: 'error',
|
||||
description: 'The deployment step timed out. See workflow logs.',
|
||||
log_url: LOG_URL,
|
||||
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
|
||||
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
|
||||
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
|
||||
mediaType: {
|
||||
previews: ['ant-man', 'flash'],
|
||||
},
|
||||
})
|
||||
console.log('⏲️ Deployment status: error - The deployment timed out...')
|
||||
|
||||
- name: Create failure status
|
||||
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
|
||||
if: ${{ failure() }}
|
||||
env:
|
||||
CONTEXT_NAME: ${{ env.CONTEXT_NAME }}
|
||||
ACTIONS_RUN_LOG: ${{ env.ACTIONS_RUN_LOG }}
|
||||
HEAD_SHA: ${{ needs.pr-metadata.outputs.head_sha }}
|
||||
with:
|
||||
script: |
|
||||
const { CONTEXT_NAME, ACTIONS_RUN_LOG, HEAD_SHA } = process.env
|
||||
const { owner, repo } = context.repo
|
||||
await github.repos.createCommitStatus({
|
||||
owner,
|
||||
repo,
|
||||
sha: HEAD_SHA,
|
||||
context: CONTEXT_NAME,
|
||||
state: 'error',
|
||||
description: 'Failed to deploy. See logs.',
|
||||
target_url: ACTIONS_RUN_LOG
|
||||
})
|
||||
|
||||
- name: Send Slack notification if deployment job failed
|
||||
uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
|
||||
if: ${{ failure() }}
|
||||
with:
|
||||
channel: ${{ secrets.DOCS_STAGING_DEPLOYMENT_FAILURES_SLACK_CHANNEL_ID }}
|
||||
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
|
||||
color: failure
|
||||
text: Staging deployment failed for PR ${{ needs.pr-metadata.outputs.url }} at commit ${{ needs.pr-metadata.outputs.head_sha }}. See ${{ env.ACTIONS_RUN_LOG }}.
|
||||
76
.github/workflows/test-windows.yml
vendored
76
.github/workflows/test-windows.yml
vendored
@@ -1,76 +0,0 @@
|
||||
# NOTE: Changes to this file should also be applied to './test.yml'
|
||||
|
||||
name: Node.js Tests - Windows
|
||||
|
||||
# **What it does**: This runs our tests on Windows.
|
||||
# **Why we have it**: We want to support Windows contributors to docs.
|
||||
# **Who does it impact**: Anyone working on docs on a Windows device.
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
schedule:
|
||||
- cron: '50 19 * * *' # once a day at 19:50 UTC / 11:50 PST
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# This allows a subsequently queued workflow run to interrupt previous runs
|
||||
concurrency:
|
||||
group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: windows-latest
|
||||
if: (github.event_name != 'pull_request') || (github.event_name == 'pull_request' && (contains(github.event.pull_request.labels.*.name, 'Windows') || contains(github.event.pull_request.labels.*.name, 'windows')))
|
||||
timeout-minutes: 60
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test-group:
|
||||
[
|
||||
content,
|
||||
graphql,
|
||||
meta,
|
||||
rendering,
|
||||
routing,
|
||||
unit,
|
||||
linting,
|
||||
translations,
|
||||
]
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
with:
|
||||
# Enables cloning the Early Access repo later with the relevant PAT
|
||||
persist-credentials: 'false'
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561
|
||||
with:
|
||||
node-version: 16.13.x
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Cache nextjs build
|
||||
uses: actions/cache@937d24475381cd9c75ae6db12cb4e79714b926ed
|
||||
with:
|
||||
path: .next/cache
|
||||
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
|
||||
|
||||
- if: ${{ github.repository == 'github/docs-internal' }}
|
||||
name: Clone early access
|
||||
run: npm run heroku-postbuild
|
||||
env:
|
||||
DOCUBOT_REPO_PAT: ${{ secrets.DOCUBOT_REPO_PAT }}
|
||||
GIT_BRANCH: ${{ github.head_ref || github.ref }}
|
||||
|
||||
- if: ${{ github.repository != 'github/docs-internal' }}
|
||||
name: Run build script
|
||||
run: npm run build
|
||||
|
||||
- name: Run tests
|
||||
run: npm test -- tests/${{ matrix.test-group }}/
|
||||
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
@@ -1,5 +1,3 @@
|
||||
# NOTE: Changes to this file should also be applied to './test-windows.yml'
|
||||
|
||||
name: Node.js Tests
|
||||
|
||||
# **What it does**: Runs our tests.
|
||||
|
||||
@@ -10,7 +10,6 @@ on:
|
||||
- '.github/actions-scripts/**'
|
||||
- '.github/workflows/**'
|
||||
- '.github/CODEOWNERS'
|
||||
- 'app.json'
|
||||
- 'assets/fonts/**'
|
||||
- 'data/graphql/**'
|
||||
- 'Dockerfile*'
|
||||
@@ -20,7 +19,6 @@ on:
|
||||
- 'lib/webhooks/**'
|
||||
- 'lib/search/indexes/**'
|
||||
- 'package*.json'
|
||||
- 'Procfile'
|
||||
- 'script/**'
|
||||
- 'translations/**'
|
||||
|
||||
@@ -58,7 +56,6 @@ jobs:
|
||||
- '.github/actions-scripts/**'
|
||||
- '.github/workflows/**'
|
||||
- '.github/CODEOWNERS'
|
||||
- 'app.json'
|
||||
- 'assets/fonts/**'
|
||||
- 'data/graphql/**'
|
||||
- 'Dockerfile*'
|
||||
@@ -68,7 +65,6 @@ jobs:
|
||||
- 'lib/webhooks/**'
|
||||
- 'lib/search/indexes/**'
|
||||
- 'package*.json'
|
||||
- 'Procfile'
|
||||
- 'scripts/**'
|
||||
- 'translations/**'
|
||||
|
||||
@@ -83,7 +79,6 @@ jobs:
|
||||
'.github/actions-scripts/**',
|
||||
'.github/workflows/**',
|
||||
'.github/CODEOWNERS',
|
||||
'app.json',
|
||||
'assets/fonts/**',
|
||||
'data/graphql/**',
|
||||
'Dockerfile*',
|
||||
@@ -93,7 +88,6 @@ jobs:
|
||||
'lib/webhooks/**',
|
||||
'lib/search/indexes/**',
|
||||
'package*.json',
|
||||
'Procfile',
|
||||
'scripts/**',
|
||||
'translations/**',
|
||||
]
|
||||
@@ -111,7 +105,7 @@ jobs:
|
||||
body: reviewMessage,
|
||||
})
|
||||
|
||||
workflowFailMessage = `${workflowFailMessage} Please see ${createdComment.data.html_url} for details.`
|
||||
workflowFailMessage = `${workflowFailMessage} Please see ${createdComment.data.html_url} for details.`
|
||||
} catch(err) {
|
||||
console.log("Error creating comment.", err)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
name: Check for unallowed internal changes
|
||||
|
||||
# **What it does**: If someone changes app.json or search indexes, we fail the check.
|
||||
# **Why we have it**: app.json should rarely be edited, so we'll require an admin merge if the file really needs to be changed. The search indexes are synced every 4 hours, so changes should not need to be made.
|
||||
# **What it does**: If someone changes search indexes, we fail the check.
|
||||
# **Why we have it**: The search indexes are synced every 4 hours, so changes should not need to be made.
|
||||
# **Who does it impact**: Docs engineering and content writers.
|
||||
|
||||
on:
|
||||
@@ -44,19 +44,8 @@ jobs:
|
||||
|
||||
# Returns list of changed files matching each filter
|
||||
filters: |
|
||||
notAllowed:
|
||||
- 'app.json'
|
||||
notAllowedSearchSyncLabel:
|
||||
- 'lib/search/indexes/**'
|
||||
notAllowed:
|
||||
needs: check-internal-changes
|
||||
if: ${{ needs.check-internal-changes.outputs.notAllowed == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Fail if unallowed changes were made
|
||||
run: |
|
||||
echo "Please admin merge if you really need to update app.json!"
|
||||
exit 1
|
||||
notAllowedSearchSyncLabel:
|
||||
needs: check-internal-changes
|
||||
if: ${{ needs.check-internal-changes.outputs.notAllowedSearchSyncLabel == 'true' }}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
# This Dockerfile can be used for docker-based deployments to platforms
|
||||
# like Now or Moda, but it is currently _not_ used by our Heroku deployments
|
||||
# It uses two multi-stage builds: `install` and the main build to keep the image size down.
|
||||
# This Dockerfile is used for docker-based deployments to Azure for both preview environments and production
|
||||
|
||||
# --------------------------------------------------------------------------------
|
||||
# BASE IMAGE
|
||||
|
||||
17
app.json
17
app.json
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "docs.github.com",
|
||||
"env": {
|
||||
"NODE_ENV": "production",
|
||||
"ENABLED_LANGUAGES": "en",
|
||||
"WEB_CONCURRENCY": "1"
|
||||
},
|
||||
"buildpacks": [
|
||||
{ "url": "heroku/nodejs" }
|
||||
],
|
||||
"formation": {
|
||||
"web": {
|
||||
"quantity": 1,
|
||||
"size": "standard-2x"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:47f2a1bb71c44eb3c14690449ffc37eafb1684500109ff16eb534a993a1ab4e2
|
||||
size 660732
|
||||
oid sha256:7356142eb5f683543608aa6945677669a8f5d590371f97055a4c1168b66721bc
|
||||
size 662137
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:4a9da08bd9e586ce171f93454eac32c2885c741e7ac774ed397e0ea3fb0c9308
|
||||
size 1377623
|
||||
oid sha256:b36331649995ed8e01abb4ad255a2bd657382b5eeac8f4511ac2aa28013af577
|
||||
size 1377774
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:c8b98f298f8c276301a1eb4f0d599b794d8dc950ec13c229ed53d431b387374c
|
||||
size 980831
|
||||
oid sha256:b795db885b235044454d19063c5cba86b89f9ce829a5bbe59830dbb83ce1a6ee
|
||||
size 981612
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b1ca84b937c9dd0aa0eb580638c887f7af43135dde7700fe9a797a79eeaacbb5
|
||||
size 3989912
|
||||
oid sha256:a053ec609ff920be2a0b2769c0b334a4b5128d5ec792edab6a089fc3b5f57d26
|
||||
size 3990367
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:32978fe3141fee7148015cc9051fe4f9c550faaf1b4bc42c8bc181c2bf7ebe2d
|
||||
size 614498
|
||||
oid sha256:12cdfa4b8bc08fbd2cef74464481de9ea4a0c74cc959435dc6c3e861d76d305d
|
||||
size 615099
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:a1b13e1411ad4db2b7592dd952ab4702c7be942635045b9ce09fab80108e06b7
|
||||
size 2572359
|
||||
oid sha256:67346d89fc6d3c46de1bee915bd92d5f607a5a8cc91a2c1ecf0b15897ef296d9
|
||||
size 2574490
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:2e41390628d2a6af9fa323fcc78b96b1fa867f2337a8a1d68a9a414916cfa0a8
|
||||
size 680711
|
||||
oid sha256:d1a2f6836e7728d29ff8464579ce31025448ab2c1ef5593b85db87b57c94226a
|
||||
size 680421
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:4056ffa70370838f88b9fe2694e257c4692bcd29a0a6693a0fe969b5d03d68d4
|
||||
size 3602301
|
||||
oid sha256:f26bf5dd1f95ae8723041295c10d1b4d04b5d76bb3a9a8b3f6ee00ef876cc26d
|
||||
size 3600871
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1318284320faf8b349cc340d983cab28fc11ef18e0698d81c5fa2fa93566a4c6
|
||||
size 605075
|
||||
oid sha256:4348b4e119bce443c6568bb74edc7b8319e64a06c6f678a45e375a64bbdf71ab
|
||||
size 606348
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:5f95b862e592737ec7372226f74f6e88576a6819715de17641238337fcc6a990
|
||||
size 2468070
|
||||
oid sha256:3da10d0fa14bf33a18c03678d2face01b4dc448e458dc9c9890fe66ea3cc342b
|
||||
size 2471041
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:eddf78f62bffcadb7942c66be544a78c076e5cabc04c4e208f84e9f22ce8224b
|
||||
size 675183
|
||||
oid sha256:ccc6983ef150a64d1652bac036ff580de022de3e292b3783b09056d8c73b49ec
|
||||
size 675779
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:4d84604ee31476e632a44a9424a1b1f5beb7db2cdae9d4550ae11b84e5251d38
|
||||
size 1413778
|
||||
oid sha256:1a637aae29d1cab8b290b561bd883775dd395d30125927f60613658a803b8265
|
||||
size 1414600
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:488e795fdf2c77ec061c6ec97623e297e3c657a02321538a9ec59d1f3ca98f69
|
||||
size 1006261
|
||||
oid sha256:96ae5da8a4c8cfda98315233e50558168599f472ab80b370735ec3cac16a2966
|
||||
size 1005190
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:923e43b3a50c22d686834c514dd02c381ded234cdc53f5deefec92806fb2e285
|
||||
size 4079033
|
||||
oid sha256:dacbee2d29f41affa66845c48f197b4e846909386b93bb01573b5e5d9fbceaf5
|
||||
size 4080098
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:0011455a5d11bc33ba2af5c5703da6698ae6b9d746a89ae88a68c4cca1664185
|
||||
size 627075
|
||||
oid sha256:d3a995ea1c11379384c0036dda12cd766519a9103455c59bb44fa22a55de54d4
|
||||
size 627171
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f6b547cf671384d5e632f5c75a289a462b4b4db8d8c417ce2469bf88a3bcd503
|
||||
size 2632887
|
||||
oid sha256:defcc4883b7253bf806e281d857c8cfde0c381d413592c3bd29cf454abc6868a
|
||||
size 2633152
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:30794b66196dca5d7cd844e7a1ec9c7ff64fec8d566db0860fdda0c4db2ac500
|
||||
size 694092
|
||||
oid sha256:8e13999abeac41661730a2b4dab9a44c12dd6ad938340c46e3639e33f8521ca0
|
||||
size 694630
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:bafc02f6e8e424b0cf7be57da2083b8e91f809698e3d44e835d5c80d662fa88c
|
||||
size 3680366
|
||||
oid sha256:0e8dc0b27b360b2e8199810845238143ace60fe3ef42ada56cfc4fa2d817ce78
|
||||
size 3682104
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:92d1fa4bd98655c1957297d7206be6953527255bae9740c851fea9412144958e
|
||||
size 617448
|
||||
oid sha256:8d8a2779291b95ca1d4940cc421ef65748fe815216231c832d201e8f3fd0454e
|
||||
size 618252
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d767f63307ac25a99e08d07ef484fe69c7e3805ebe9959d67f636847cb6876fc
|
||||
size 2519795
|
||||
oid sha256:fed218f56f9daf0a504c87343c3396355e5c290a3f8a8a4e0ead1fd431994a2a
|
||||
size 2522625
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:34829eb8612d1f810af323f5581eb62e33cd0763ca0787d76847bd923674c818
|
||||
size 691980
|
||||
oid sha256:482755e1168d9e73d8df073ec79560ffc5f69fa882e07edff6134b6ca315e7f4
|
||||
size 691594
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1e9af186991894f3b9dfaadb97e5ae577944ce8bf918c4a342e1f5b5e57154a7
|
||||
size 1446142
|
||||
oid sha256:c4b89668c97dd7ef21ab342468892dfa52fb09d9ee51d2b145891a763ff501ed
|
||||
size 1447911
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:bf7e6e4e93c01d6345b1a08da2750b2253fda1ba9c0510aa6238edd2ed708d38
|
||||
size 1039550
|
||||
oid sha256:8b89722fb30f6f53033127057725c6639fb332daca518a3e4163451f65944739
|
||||
size 1038865
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3223e5113f0e3b18e10aa0a9b350609495aa608291e285a3457c1d0f37db0e8d
|
||||
size 4214510
|
||||
oid sha256:8711bebcd7f090825807a47e0218e999c68c038808d9a1c9941622afbc90841b
|
||||
size 4211921
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d2701762992d04ae5a4256235dc5a8322028ca01071f9b947253ee5e66dc6676
|
||||
size 641530
|
||||
oid sha256:9dbefa51ee57ccb221d93e8f5a6cbc6fdbd53ee5d711a81740eaa7e49d55d348
|
||||
size 641099
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e24e0061ac20fb620e949c00c7fdbbc063236400cc8e7074ff1a5160873eea5e
|
||||
size 2696420
|
||||
oid sha256:c949ea4d0a2482442ef195ba163a0c5ad7e59d176b4a3506f3bfdb4121a0e1ab
|
||||
size 2696601
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:78b5354fd197efc35df0c812472479e001e29d8e307fccb5a54c7f734c3d7632
|
||||
size 708963
|
||||
oid sha256:4aac6fb7f729a01138ccc200b0493f56627f99b5f674c7a2af56480de5e78189
|
||||
size 708562
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:ca0907e1983c6cad844d4f51413a8e836900f3c5535e082cfce9b251e782b974
|
||||
size 3765008
|
||||
oid sha256:f774d8575eb04036558c179ac0ca2a158db142d3463fa0f34aa36ca01fa4fec4
|
||||
size 3765281
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:beab35c298cf0ceaaae95fcf8a0366c0610f58368f79851ff835c7d1dd9279d6
|
||||
size 632967
|
||||
oid sha256:0774cb1fecdfea4aeb4f286f8ea24058d91bece75846786c7a4b801afe769ca7
|
||||
size 631909
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1fe6ecafeb217e7ce74fe7cf2a38b15e263f04a0ba823cbae20f4caeb116edea
|
||||
size 2580079
|
||||
oid sha256:a1b4086a220ca1f3e9d98970451d06c48ba9ab5c866539751ea01dc2e6ea3857
|
||||
size 2581243
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3bbf2f35668d8050bf520aca714d861fbc22fd4a102895280b395985466deaa4
|
||||
size 712820
|
||||
oid sha256:f08a8b7c6e655ee64fb99accadf7f5a176bad0beb88931456fb4d44eef420b94
|
||||
size 713733
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:0cef26bbf55015e1be4e51a5ac11530b9611633cbf6985e3e2562f2ba605f0b0
|
||||
size 1505929
|
||||
oid sha256:2471af115b4077bb54c4f01aaf517ba8d00b7a1c23ac66b08cb0ba23e1c7d67b
|
||||
size 1507298
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1e124cd6fcaa95e2df0edd42426c82d395b9f74a69e49ab4d64db9d70b9d9235
|
||||
size 1079134
|
||||
oid sha256:4d52fe248c8b398e4367c25eabfc03a457594388a4351558fb86eb5f3e61a90b
|
||||
size 1077256
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:c98b85d478d58207e212583715b6e6c1b4048c27316f5c6af1a3eca6dad52198
|
||||
size 4315518
|
||||
oid sha256:10b00bee0d8617c73b9d30d89e3ea4fed8331080ab0185fb5dc23804205581ce
|
||||
size 4312929
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:bd30526d5517c1c32711708f15073af206a954fd870b21fb1bb5ae672331beba
|
||||
size 661016
|
||||
oid sha256:3cb1af9567e9cda9aff6c9a91520e1e673b3e8c7091f57dc2c5603e103fde55a
|
||||
size 661535
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:de49151c9f250aa5aa6ae7aa10eb82a86c49a5c3f60e7450658cee4bec699b25
|
||||
size 2797541
|
||||
oid sha256:d9112a43e1f06143ed7a7a7a443533231eab5670c4225fcf58b1cb8bb80bda61
|
||||
size 2798892
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:13330f28c9e496df248bd006e3b0bb3d825bf1578b62c604e70e2e3ee00da218
|
||||
size 732092
|
||||
oid sha256:ad37553f8d2069a29a2e2b8e7f75ab7817f0db0e36108f76027525b4474d02d7
|
||||
size 731797
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f9f5e19dd7f1c7780e4f9aab9647cb98de82c4d7d958556b665f0ff73084cbf4
|
||||
size 3891349
|
||||
oid sha256:e65dceda472aa770c190d24b63896d9a0d6dadfa7cf761c3843388ba99de6930
|
||||
size 3889921
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f56744f881b0257b612aeec6fe657870327d53ce5d69e9582a9c863f3bdb9b8a
|
||||
size 650798
|
||||
oid sha256:e289e8ac79e2add7f6c65ba0432ae8193b71d3838bedf718617d5b21311dbf15
|
||||
size 651325
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:11cc0508cc1ad069ffe751d3e0ae676ae949aeaa0e57fd2b5ba002a6a2ba5ce5
|
||||
size 2661406
|
||||
oid sha256:c5d6824b506fb9682657a28b48dde0939dec0f23edee1996c7bb1f0282bbfcdb
|
||||
size 2664654
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:751deeb265410a437d6aaea7b61d1ef8f8ea761e08231c13ad6608add4d610c2
|
||||
size 914158
|
||||
oid sha256:de353673fab57fc2465cc4c4e745086dd197403d78eee3f67caaac1554515d8b
|
||||
size 914893
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:2774a524a7d307eb599fec438272dcbba4620ed18fc419103efa4df8388146e5
|
||||
size 1681024
|
||||
oid sha256:dfa36a6a4d35f775e88c41f4421164fc86ccd633642a0ff493fcc7a7ebaa6570
|
||||
size 1685433
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:76a3fabbb2c816e60d0f1856ae8a4f7b63aa657790209871367af7ebd33995b1
|
||||
size 1366262
|
||||
oid sha256:4a133dfef252d9601f7cce4b9d5d53674c107f9c159e8042033dbe6808fdf004
|
||||
size 1363989
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:831f2c630befa95f7b0dc1195244bede9c1242a8d240ae033ad4c475dabc88b6
|
||||
size 5205576
|
||||
oid sha256:db5a9e90c56db3a7885de4afbed18522575a6b6f7918ae50a542f36389da0748
|
||||
size 5203168
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:8c256c303a2065ef015f03b4726cef00d7516f948f38896b5acdfea23664867a
|
||||
size 830646
|
||||
oid sha256:a04e41be3bf8f2a98d09da880a845045db112e00363e7d467fcaeb9b740c2c7a
|
||||
size 831469
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:820e67288c8ede26cafa5481db87db665d8c2c0d505985a75d62d4455ad671fb
|
||||
size 3348903
|
||||
oid sha256:8193d4bf1f0bdb5d98fa2f7ef4d81f7c555c25208c715fbdd9e31ff42763c0fe
|
||||
size 3353235
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:5d789fb3a6eeda67b45e715cfa15764e605fc0a2c1fd0e211b7c2d82446fa4c9
|
||||
size 933494
|
||||
oid sha256:2cd262884ec326f03d314107fbcf564a25e6e596a757ac213f0a5f06c1db78aa
|
||||
size 934090
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e20f510633d1a7c158b6d7bf53c80538713d92f66aa79310c17e1b412a5a9426
|
||||
size 4751551
|
||||
oid sha256:3ac0ecf879ca198f7b2f127b3c4311944e7d34901ff69aec0c7c914991f45308
|
||||
size 4751090
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:eb79a4c7712539d04ffcf6723f5a72c348aad81b363a8993a6b95552d788d87c
|
||||
size 820063
|
||||
oid sha256:c9dba70d670b6958d1d903e53f0592d0a88e260c1fb3c6fd453b78548b54506d
|
||||
size 821229
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:da8a5dc9ac23bc19b0ba2201c8b19bc252a7d92a4db02a0d9fb708f62aba2afd
|
||||
size 3199105
|
||||
oid sha256:9e818f759277d2f8b0cc6aa0cd5988c299f8e0a6c45a162320825fbb46d977da
|
||||
size 3203205
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:523c008ead84776d546b2295dc26933ded47bcc1be4bbd1d4ae28f77f580550a
|
||||
size 559246
|
||||
oid sha256:eca4697702566b320d2b4380f0dff07f0719b73f4fc4b5e1d4afcac724f122b0
|
||||
size 559235
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e9706d9c59162558b254fc5813830ac8ca11ed4611c6cc19af1e360612daf1f8
|
||||
size 1127281
|
||||
oid sha256:75d838f241103a19812be7f45b14fef065cfbf2c837f61826c4ca9e3acd70285
|
||||
size 1128046
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:dbd9c5ab249a73d044d984f08977aeadfcaf6c001fa598f7d29e27158c98db68
|
||||
size 857900
|
||||
oid sha256:8c6e7617aedc957bf91483c0f4acf39932ebbf738fd6d5f52f989414ec233a9d
|
||||
size 858470
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:be65f4807bdd555bd89024d1077a6e7e7de6c56af3f49c00ede7b26b9998a751
|
||||
size 3436123
|
||||
oid sha256:37ca3069f02f462c6ce3c0ff11d0f3b36c9d1f9857b6a459957f123d09dd5827
|
||||
size 3435122
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:10bbd20fd9ffea263a4a6a0a5ae846c0981743c0eaa301d6984c6689039f61f4
|
||||
size 520267
|
||||
oid sha256:4de094eae68079cd4dd2750ed64cceee5450bd9bd3d9783c5837b1cd5f3dd8e2
|
||||
size 520464
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:a7333febe7ffe6bcb0176ddad6a57dab5d1c36017e0f0934f614a2c32f7e1fd8
|
||||
size 2117042
|
||||
oid sha256:22ce992544e111667dd2ce917f375cdf45318f94ecd93776a2d4bfdc4c656c1d
|
||||
size 2117652
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:adad47de10961bcfe088d7192c6d29f7719bc95a6b1fb109d1de99080f43a807
|
||||
size 572337
|
||||
oid sha256:c0462ff5d136c0472ddc0ab18011881c20ab2128fa1b7db51a5246843a7c87c1
|
||||
size 573186
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:af9b793135c26c3bef76a83fe5675817354ecec19ddb0cc61ebb1d54eebbe47f
|
||||
size 2933511
|
||||
oid sha256:c123c5b6489b5677ea74163e0d1d94dbd84c48273f1a719a04cebc4c08552a0f
|
||||
size 2934307
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:04a911707894a670345c525a13e99285f954e969f7d24ec72be4177f7ca9c9b1
|
||||
size 513120
|
||||
oid sha256:ff49a5872ae2e7274c1cbe4dfb753e70d9c2fc3edc3f7a2a7387b3e30fbfc6cc
|
||||
size 512919
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f01d21d21ecabfb4a22bb46d77d04c4ede421de43dc97133daad3586f8b5d202
|
||||
size 2011354
|
||||
oid sha256:33c11cdbf43cd58fd9ce69f6462d66ff0ffb0c23ac9c004a4ec699243e002909
|
||||
size 2010917
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:2d158e4655e7dc176ccfdbb332127244e5db2e4714109786527a9b4899a2aeb5
|
||||
size 841139
|
||||
oid sha256:2ab19f1c1360e3e481c8b831537f1449c8a82bb2ea6ee3ce8c60be169be43e9d
|
||||
size 840211
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:4a12223ff160d45727ca3e8e34f0037d4eee674e2aed5ddd38b86daf47f510a7
|
||||
size 1750468
|
||||
oid sha256:bbb7aa7a87808cc2efb28b2d8ed3e3998548af9fc6178ac1f5d50044ab2f8030
|
||||
size 1752585
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:8c18c7caaa4ee265bdab64886f3bcf5ee8dd7cf8035b8412a27a82e05dca82ec
|
||||
size 1240503
|
||||
oid sha256:1e9ea551626c7eb1c2108a8f15d83cb4ad328c4c00f2a0485dc4a455354fc0a8
|
||||
size 1240842
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e45f77dcc6138aba633fb7ac1d8cf78b8673e3b0f86bff1eacba2a4d07548b03
|
||||
size 4986557
|
||||
oid sha256:e216157635ab5494fa125996a442445cbfc4b6824a60fad1d62b9affbfc3009e
|
||||
size 4988077
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6b44f7963559dcaaea15d840de2ba347f4c0c7f6b55df6097b70d20b0769d7d4
|
||||
size 788407
|
||||
oid sha256:51aa67b19215044c97a15096027e0e42182d42aa5b011969986b3464b1a2796f
|
||||
size 789421
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:7aa2764125d915c8a7456dba422f7d948e6dcdc6995609b6f629d860d9811f9d
|
||||
size 3338450
|
||||
oid sha256:116faa0d8ae25bb8ce6c2bb3fded65f2d8b1f9c58caa0b9c1102203d1050accd
|
||||
size 3342070
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b7d793bd277c6ef95acff91b5ed05b545f78378c01b34ec3b33ace9e8f10dc2f
|
||||
size 863368
|
||||
oid sha256:55894044b4033924a0e197315d48cc1921f81661f96586b952930eb5febb1807
|
||||
size 862776
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9407d83306f92fdeedfe304a7eb6faab2166dcb854e760002108ae4c14591b55
|
||||
size 4619826
|
||||
oid sha256:ef645d4b12584cd96a1057a029a58016c905fa1f6e8d928444786e5597140271
|
||||
size 4619997
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f97d40dd41595c76a7de608873e70795dab88038dd784973c08b243322e98de0
|
||||
size 778310
|
||||
oid sha256:89467e79729c853cfef26d15fba1e2892cd64adf7ab12f626678763d017863a4
|
||||
size 778203
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:40f8fdfc6fe07fb00a40e3268c43db774ff7fca8edc4b1c4d3bf0a9602502010
|
||||
size 3178281
|
||||
oid sha256:9920d55016900c54d3d785b44989d3bed3e88294d5e084d31ac93ccbb6e3780d
|
||||
size 3181671
|
||||
|
||||
60
package-lock.json
generated
60
package-lock.json
generated
@@ -135,7 +135,6 @@
|
||||
"git-diff": "^2.0.6",
|
||||
"glob": "^7.2.0",
|
||||
"graphql": "^16.3.0",
|
||||
"heroku-client": "^3.1.0",
|
||||
"http-status-code": "^2.1.0",
|
||||
"husky": "^7.0.4",
|
||||
"japanese-characters": "^1.1.0",
|
||||
@@ -10576,19 +10575,6 @@
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/heroku-client": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/heroku-client/-/heroku-client-3.1.0.tgz",
|
||||
"integrity": "sha512-UfGKwUm5duzzSVI8uUXlNAE1mus6uPxmZPji4vuG1ArV5DYL1rXsZShp0OoxraWdEwYoxCUrM6KGztC68x5EZQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-retry-allowed": "^1.0.0",
|
||||
"tunnel-agent": "^0.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/hexoid": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz",
|
||||
@@ -11433,15 +11419,6 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-retry-allowed": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz",
|
||||
"integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-shared-array-buffer": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz",
|
||||
@@ -21037,18 +21014,6 @@
|
||||
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
|
||||
}
|
||||
},
|
||||
"node_modules/tunnel-agent": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
|
||||
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/type-check": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
|
||||
@@ -30703,16 +30668,6 @@
|
||||
"resolved": "https://registry.npmjs.org/helmet/-/helmet-4.6.0.tgz",
|
||||
"integrity": "sha512-HVqALKZlR95ROkrnesdhbbZJFi/rIVSoNq6f3jA/9u6MIbTsPh3xZwihjeI5+DO/2sOV6HMHooXcEOuwskHpTg=="
|
||||
},
|
||||
"heroku-client": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/heroku-client/-/heroku-client-3.1.0.tgz",
|
||||
"integrity": "sha512-UfGKwUm5duzzSVI8uUXlNAE1mus6uPxmZPji4vuG1ArV5DYL1rXsZShp0OoxraWdEwYoxCUrM6KGztC68x5EZQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-retry-allowed": "^1.0.0",
|
||||
"tunnel-agent": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"hexoid": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz",
|
||||
@@ -31292,12 +31247,6 @@
|
||||
"has-tostringtag": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"is-retry-allowed": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz",
|
||||
"integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==",
|
||||
"dev": true
|
||||
},
|
||||
"is-shared-array-buffer": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz",
|
||||
@@ -38575,15 +38524,6 @@
|
||||
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
|
||||
"dev": true
|
||||
},
|
||||
"tunnel-agent": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
|
||||
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"type-check": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
|
||||
|
||||
@@ -137,7 +137,6 @@
|
||||
"git-diff": "^2.0.6",
|
||||
"glob": "^7.2.0",
|
||||
"graphql": "^16.3.0",
|
||||
"heroku-client": "^3.1.0",
|
||||
"http-status-code": "^2.1.0",
|
||||
"husky": "^7.0.4",
|
||||
"japanese-characters": "^1.1.0",
|
||||
@@ -193,7 +192,6 @@
|
||||
"build": "next build",
|
||||
"debug": "cross-env NODE_ENV=development ENABLED_LANGUAGES='en,ja' nodemon --inspect server.mjs",
|
||||
"dev": "npm start",
|
||||
"heroku-postbuild": "node script/early-access/clone-for-build.js && npm run build",
|
||||
"lint": "eslint '**/*.{js,mjs,ts,tsx}'",
|
||||
"lint-translation": "cross-env NODE_OPTIONS=--experimental-vm-modules TEST_TRANSLATION=true jest tests/linting/lint-files.js",
|
||||
"pa11y-ci": "pa11y-ci",
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
import GithubSlugger from 'github-slugger'
|
||||
const slugify = GithubSlugger.slug
|
||||
|
||||
const APP_NAME_MAX_LENGTH = 30
|
||||
|
||||
export default function ({ prefix = '', repo, pullNumber, branch }) {
|
||||
return (
|
||||
`${prefix}${repo}-${pullNumber}--${slugify(branch)}`
|
||||
// Shorten the string to the max allowed length
|
||||
.slice(0, APP_NAME_MAX_LENGTH)
|
||||
// Convert underscores to dashes
|
||||
.replace(/_/g, '-')
|
||||
// Remove trailing dashes
|
||||
.replace(/-+$/, '')
|
||||
// Make it all lowercase
|
||||
.toLowerCase()
|
||||
)
|
||||
}
|
||||
@@ -1,465 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import got from 'got'
|
||||
import Heroku from 'heroku-client'
|
||||
import { setOutput } from '@actions/core'
|
||||
|
||||
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
|
||||
|
||||
const SLEEP_INTERVAL = 5000
|
||||
const HEROKU_LOG_LINES_TO_SHOW = 25
|
||||
const DELAY_FOR_PREBOOT_SWAP = 135000 // 2:15
|
||||
|
||||
// Allow for a few 404 (Not Found), 429 (Too Many Requests), etc. responses from
|
||||
// the semi-unreliable Heroku API when we're polling for status updates
|
||||
const ALLOWED_MISSING_RESPONSE_COUNT =
|
||||
parseInt(process.env.ALLOWED_POLLING_FAILURES_PER_PHASE, 10) || 10
|
||||
const ALLOWABLE_ERROR_CODES = [404, 429, 500, 503]
|
||||
|
||||
export default async function deployToProduction({
|
||||
octokit,
|
||||
includeDelayForPreboot = true,
|
||||
// These parameters will only be set by Actions
|
||||
sourceBlobUrl = null,
|
||||
runId = null,
|
||||
}) {
|
||||
// Start a timer so we can report how long the deployment takes
|
||||
const startTime = Date.now()
|
||||
const [owner, repo, branch] = ['github', 'docs-internal', 'main']
|
||||
|
||||
let sha
|
||||
try {
|
||||
const {
|
||||
data: { sha: latestSha },
|
||||
} = await octokit.repos.getCommit({
|
||||
owner,
|
||||
repo,
|
||||
ref: branch,
|
||||
})
|
||||
sha = latestSha
|
||||
|
||||
if (!sha) {
|
||||
throw new Error('Latest commit SHA could not be found')
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error: ${error}`)
|
||||
console.log(`🛑 There was an error getting latest commit.`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Put together application configuration variables
|
||||
const isPrebuilt = !!sourceBlobUrl
|
||||
const { DOCUBOT_REPO_PAT } = process.env
|
||||
const appConfigVars = {
|
||||
// Track the git branch
|
||||
GIT_BRANCH: branch,
|
||||
// If prebuilt: prevent the Heroku Node.js buildpack from installing devDependencies
|
||||
NPM_CONFIG_PRODUCTION: isPrebuilt.toString(),
|
||||
// If prebuilt: prevent the Heroku Node.js buildpack from using `npm ci` as it would
|
||||
// delete all of the vendored "node_modules/" directory.
|
||||
USE_NPM_INSTALL: isPrebuilt.toString(),
|
||||
// If not prebuilt, include the PAT required for cloning the `docs-early-access` repo.
|
||||
// Otherwise, set it to `null` to unset it from the environment for security.
|
||||
DOCUBOT_REPO_PAT: (!isPrebuilt && DOCUBOT_REPO_PAT) || null,
|
||||
}
|
||||
|
||||
const workflowRunLog = runId ? `https://github.com/${owner}/${repo}/actions/runs/${runId}` : null
|
||||
let deploymentId = null
|
||||
let logUrl = workflowRunLog
|
||||
|
||||
const appName = process.env.HEROKU_PRODUCTION_APP_NAME
|
||||
const environment = 'production'
|
||||
const homepageUrl = 'https://docs.github.com/'
|
||||
|
||||
try {
|
||||
const title = `branch '${branch}' at commit '${sha}' in the '${environment}' environment`
|
||||
|
||||
console.log(`About to deploy ${title}...`)
|
||||
|
||||
// Kick off a pending GitHub Deployment right away, so the PR author
|
||||
// will have instant feedback that their work is being deployed.
|
||||
const { data: deployment } = await octokit.repos.createDeployment({
|
||||
owner,
|
||||
repo,
|
||||
description: `Deploying ${title}`,
|
||||
ref: sha,
|
||||
|
||||
// In the GitHub API, there can only be one active deployment per environment.
|
||||
environment,
|
||||
|
||||
// The status contexts to verify against commit status checks. If you omit
|
||||
// this parameter, GitHub verifies all unique contexts before creating a
|
||||
// deployment. To bypass checking entirely, pass an empty array. Defaults
|
||||
// to all unique contexts.
|
||||
required_contexts: [],
|
||||
|
||||
// Do not try to merge the base branch into the feature branch
|
||||
auto_merge: false,
|
||||
})
|
||||
console.log('GitHub Deployment created', deployment)
|
||||
|
||||
// Store this ID for later updating
|
||||
deploymentId = deployment.id
|
||||
|
||||
// Set some output variables for workflow steps that run after this script
|
||||
if (process.env.GITHUB_ACTIONS) {
|
||||
setOutput('deploymentId', deploymentId)
|
||||
setOutput('logUrl', logUrl)
|
||||
}
|
||||
|
||||
await octokit.repos.createDeploymentStatus({
|
||||
owner,
|
||||
repo,
|
||||
deployment_id: deploymentId,
|
||||
state: 'in_progress',
|
||||
description: 'Deploying the app...',
|
||||
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
|
||||
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
|
||||
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
|
||||
mediaType: {
|
||||
previews: ['ant-man', 'flash'],
|
||||
},
|
||||
})
|
||||
console.log('🚀 Deployment status: in_progress - Preparing to deploy the app...')
|
||||
|
||||
// Time to talk to Heroku...
|
||||
const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
|
||||
let build = null
|
||||
|
||||
if (!sourceBlobUrl) {
|
||||
try {
|
||||
sourceBlobUrl = await getTarballUrl({
|
||||
octokit,
|
||||
owner,
|
||||
repo,
|
||||
sha,
|
||||
})
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to generate source blob URL. Error: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Updating Heroku app configuration variables...')
|
||||
|
||||
// Reconfigure environment variables
|
||||
// https://devcenter.heroku.com/articles/platform-api-reference#config-vars-update
|
||||
try {
|
||||
await heroku.patch(`/apps/${appName}/config-vars`, {
|
||||
body: appConfigVars,
|
||||
})
|
||||
} catch (error) {
|
||||
announceIfHerokuIsDown(error)
|
||||
throw new Error(`Failed to update Heroku app configuration variables. Error: ${error}`)
|
||||
}
|
||||
|
||||
console.log('Reconfigured')
|
||||
console.log('Building Heroku app...')
|
||||
|
||||
try {
|
||||
build = await heroku.post(`/apps/${appName}/builds`, {
|
||||
body: {
|
||||
source_blob: {
|
||||
url: sourceBlobUrl,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
announceIfHerokuIsDown(error)
|
||||
throw new Error(`Failed to create Heroku build. Error: ${error}`)
|
||||
}
|
||||
|
||||
console.log('Heroku build created', build)
|
||||
|
||||
const buildStartTime = Date.now() // Close enough...
|
||||
const buildId = build.id
|
||||
logUrl = build.output_stream_url
|
||||
|
||||
console.log('🚀 Deployment status: in_progress - Building a new Heroku slug...')
|
||||
|
||||
// Poll until the Build's status changes from "pending" to "succeeded" or "failed".
|
||||
let buildAcceptableErrorCount = 0
|
||||
while (!build || !build.release || !build.release.id) {
|
||||
await sleep(SLEEP_INTERVAL)
|
||||
try {
|
||||
build = await heroku.get(`/apps/${appName}/builds/${buildId}`)
|
||||
} catch (error) {
|
||||
// Allow for a few bad responses from the Heroku API
|
||||
if (isAllowableHerokuError(error)) {
|
||||
buildAcceptableErrorCount += 1
|
||||
if (buildAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
|
||||
console.warn(
|
||||
`Ignoring allowable Heroku error #${buildAcceptableErrorCount}: ${error.statusCode}`
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
announceIfHerokuIsDown(error)
|
||||
throw new Error(`Failed to get build status. Error: ${error}`)
|
||||
}
|
||||
|
||||
if (build && build.status === 'failed') {
|
||||
throw new Error(
|
||||
`Failed to build after ${Math.round(
|
||||
(Date.now() - buildStartTime) / 1000
|
||||
)} seconds. See Heroku logs for more information:\n${logUrl}`
|
||||
)
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Heroku build status: ${(build || {}).status} (after ${Math.round(
|
||||
(Date.now() - buildStartTime) / 1000
|
||||
)} seconds)`
|
||||
)
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Finished Heroku build after ${Math.round((Date.now() - buildStartTime) / 1000)} seconds.`,
|
||||
build
|
||||
)
|
||||
console.log('Heroku release detected', build.release)
|
||||
|
||||
const releaseStartTime = Date.now() // Close enough...
|
||||
const releaseId = build.release.id
|
||||
let release = null
|
||||
|
||||
// Poll until the associated Release's status changes from "pending" to "succeeded" or "failed".
|
||||
let releaseAcceptableErrorCount = 0
|
||||
while (!release || release.status === 'pending') {
|
||||
await sleep(SLEEP_INTERVAL)
|
||||
try {
|
||||
const result = await heroku.get(`/apps/${appName}/releases/${releaseId}`)
|
||||
|
||||
// Update the deployment status but only on the first retrieval
|
||||
if (!release) {
|
||||
logUrl = result.output_stream_url
|
||||
|
||||
console.log('Heroku Release created', result)
|
||||
|
||||
console.log('🚀 Deployment status: in_progress - Releasing the built Heroku slug...')
|
||||
}
|
||||
|
||||
release = result
|
||||
} catch (error) {
|
||||
// Allow for a few bad responses from the Heroku API
|
||||
if (isAllowableHerokuError(error)) {
|
||||
releaseAcceptableErrorCount += 1
|
||||
if (releaseAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
|
||||
console.warn(
|
||||
`Ignoring allowable Heroku error #${releaseAcceptableErrorCount}: ${error.statusCode}`
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
announceIfHerokuIsDown(error)
|
||||
throw new Error(`Failed to get release status. Error: ${error}`)
|
||||
}
|
||||
|
||||
if (release && release.status === 'failed') {
|
||||
throw new Error(
|
||||
`Failed to release after ${Math.round(
|
||||
(Date.now() - releaseStartTime) / 1000
|
||||
)} seconds. See Heroku logs for more information:\n${logUrl}`
|
||||
)
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Release status: ${(release || {}).status} (after ${Math.round(
|
||||
(Date.now() - releaseStartTime) / 1000
|
||||
)} seconds)`
|
||||
)
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Finished Heroku release after ${Math.round(
|
||||
(Date.now() - releaseStartTime) / 1000
|
||||
)} seconds.`,
|
||||
release
|
||||
)
|
||||
|
||||
// Monitor dyno state for this release to ensure it reaches "up" rather than crashing.
|
||||
// This will help us catch issues with faulty startup code and/or the package manifest.
|
||||
const dynoBootStartTime = Date.now()
|
||||
console.log('Checking Heroku dynos...')
|
||||
logUrl = workflowRunLog
|
||||
|
||||
console.log('🚀 Deployment status: in_progress - Monitoring the Heroku dyno start-up...')
|
||||
|
||||
// Keep checking while there are still dynos in non-terminal states
|
||||
let newDynos = []
|
||||
let dynoAcceptableErrorCount = 0
|
||||
while (newDynos.length === 0 || newDynos.some((dyno) => dyno.state === 'starting')) {
|
||||
await sleep(SLEEP_INTERVAL)
|
||||
try {
|
||||
const dynoList = await heroku.get(`/apps/${appName}/dynos`)
|
||||
const dynosForThisRelease = dynoList.filter((dyno) => dyno.release.id === releaseId)
|
||||
|
||||
// To track them afterward
|
||||
newDynos = dynosForThisRelease
|
||||
|
||||
console.log(
|
||||
`Dyno states: ${JSON.stringify(newDynos.map((dyno) => dyno.state))} (after ${Math.round(
|
||||
(Date.now() - dynoBootStartTime) / 1000
|
||||
)} seconds)`
|
||||
)
|
||||
} catch (error) {
|
||||
// Allow for a few bad responses from the Heroku API
|
||||
if (isAllowableHerokuError(error)) {
|
||||
dynoAcceptableErrorCount += 1
|
||||
if (dynoAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
|
||||
console.warn(
|
||||
`Ignoring allowable Heroku error #${dynoAcceptableErrorCount}: ${error.statusCode}`
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
announceIfHerokuIsDown(error)
|
||||
throw new Error(`Failed to find dynos for this release. Error: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
const crashedDynos = newDynos.filter((dyno) => ['crashed', 'restarting'].includes(dyno.state))
|
||||
const runningDynos = newDynos.filter((dyno) => dyno.state === 'up')
|
||||
|
||||
// If any dynos crashed on start-up, fail the deployment
|
||||
if (crashedDynos.length > 0) {
|
||||
const errorMessage = `At least ${crashedDynos.length} Heroku dyno(s) crashed on start-up!`
|
||||
|
||||
console.error(errorMessage)
|
||||
|
||||
// Attempt to dump some of the Heroku log here for debugging
|
||||
try {
|
||||
const logSession = await heroku.post(`/apps/${appName}/log-sessions`, {
|
||||
body: {
|
||||
dyno: crashedDynos[0].name,
|
||||
lines: HEROKU_LOG_LINES_TO_SHOW,
|
||||
tail: false,
|
||||
},
|
||||
})
|
||||
|
||||
logUrl = logSession.logplex_url
|
||||
|
||||
const logText = await got(logUrl).text()
|
||||
console.error(
|
||||
`Here are the last ${HEROKU_LOG_LINES_TO_SHOW} lines of the Heroku log:\n\n${logText}`
|
||||
)
|
||||
} catch (error) {
|
||||
announceIfHerokuIsDown(error)
|
||||
// Don't fail because of this error
|
||||
console.error(`Failed to retrieve the Heroku logs for the crashed dynos. Error: ${error}`)
|
||||
}
|
||||
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
console.log(
|
||||
`At least ${runningDynos.length} Heroku dyno(s) are ready after ${Math.round(
|
||||
(Date.now() - dynoBootStartTime) / 1000
|
||||
)} seconds.`
|
||||
)
|
||||
|
||||
// IMPORTANT:
|
||||
// If Heroku Preboot is enabled, then there is an additional delay of at
|
||||
// least 2 minutes before the new dynos are swapped into active serving.
|
||||
// If we move off Heroku in the future, this should be revisited and
|
||||
// updated/removed as relevant to align with the new hosting platform.
|
||||
if (includeDelayForPreboot) {
|
||||
console.log(`Waiting for Heroku Preboot to swap dynos (${DELAY_FOR_PREBOOT_SWAP} ms)...`)
|
||||
await sleep(DELAY_FOR_PREBOOT_SWAP)
|
||||
|
||||
// TODO:
|
||||
// Is there a faster alternative than this arbitrary delay? For example,
|
||||
// is there some Heroku API we can query to see when this release is
|
||||
// considered to be the live one, or when the old dynos are shut down?
|
||||
} else {
|
||||
console.warn(
|
||||
'⚠️ Bypassing the wait for Heroku Preboot....\nPlease understand that your changes will not be visible for at least another 2 minutes!'
|
||||
)
|
||||
}
|
||||
|
||||
// Report success!
|
||||
const successMessage = `Deployment succeeded after ${Math.round(
|
||||
(Date.now() - startTime) / 1000
|
||||
)} seconds.`
|
||||
console.log(successMessage)
|
||||
|
||||
await octokit.repos.createDeploymentStatus({
|
||||
owner,
|
||||
repo,
|
||||
deployment_id: deploymentId,
|
||||
state: 'success',
|
||||
description: successMessage,
|
||||
...(logUrl && { log_url: logUrl }),
|
||||
environment_url: homepageUrl,
|
||||
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
|
||||
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
|
||||
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
|
||||
mediaType: {
|
||||
previews: ['ant-man', 'flash'],
|
||||
},
|
||||
})
|
||||
|
||||
console.log(`🚀 Deployment status: success - ${successMessage}`)
|
||||
console.log(`Visit the newly deployed app at: ${homepageUrl}`)
|
||||
} catch (error) {
|
||||
// Report failure!
|
||||
const failureMessage = `Deployment failed after ${Math.round(
|
||||
(Date.now() - startTime) / 1000
|
||||
)} seconds. See logs for more information.`
|
||||
console.error(failureMessage)
|
||||
|
||||
try {
|
||||
if (deploymentId) {
|
||||
await octokit.repos.createDeploymentStatus({
|
||||
owner,
|
||||
repo,
|
||||
deployment_id: deploymentId,
|
||||
state: 'error',
|
||||
description: failureMessage,
|
||||
...(logUrl && { log_url: logUrl }),
|
||||
environment_url: homepageUrl,
|
||||
// The 'ant-man' preview is required for `state` values of 'inactive', as well as
|
||||
// the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
|
||||
// The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
|
||||
mediaType: {
|
||||
previews: ['ant-man', 'flash'],
|
||||
},
|
||||
})
|
||||
|
||||
console.log(
|
||||
`🚀 Deployment status: error - ${failureMessage}` + (logUrl ? ` Logs: ${logUrl}` : '')
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to finalize GitHub Deployment Status as a failure. Error: ${error}`)
|
||||
}
|
||||
|
||||
// Re-throw the error to bubble up
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Resolve the URL of the tarballed source code bundle for a commit.
 *
 * @param {object} args
 * @param {object} args.octokit - Authenticated Octokit REST client.
 * @param {string} args.owner - Repository owner login.
 * @param {string} args.repo - Repository name.
 * @param {string} args.sha - Commit SHA to archive.
 * @returns {Promise<string>} The tarball URL from the redirect's Location header.
 */
async function getTarballUrl({ octokit, owner, repo, sha }) {
  // Ask the GitHub API for the tarball without following the redirect, so we
  // can read the bundle's URL straight out of the `Location` header.
  const response = await octokit.repos.downloadTarballArchive({
    owner,
    repo,
    ref: sha,
    // Override the underlying `node-fetch` module's `redirect` option
    // configuration to prevent automatically following redirects.
    request: {
      redirect: 'manual',
    },
  })
  return response.headers.location
}
|
||||
|
||||
/**
 * Report whether a Heroku API error carries one of the transient status
 * codes we tolerate while polling (see ALLOWABLE_ERROR_CODES).
 *
 * @param {Error|null|undefined} error - Error thrown by the Heroku client.
 * @returns {boolean|*} Whether the status code is allowable; when `error`
 *   itself is falsy, that falsy value is returned unchanged (matching the
 *   short-circuit behavior callers rely on only in boolean context).
 */
function isAllowableHerokuError(error) {
  if (!error) return error
  return ALLOWABLE_ERROR_CODES.includes(error.statusCode)
}
|
||||
|
||||
/**
 * Log a loud pointer to the Heroku status page when an error looks like a
 * platform-wide outage rather than a problem with our request.
 *
 * @param {Error|null|undefined} error - Error thrown by the Heroku client.
 * @returns {void}
 */
function announceIfHerokuIsDown(error) {
  // 503 (Service Unavailable) from the Heroku API is our outage heuristic.
  if (error?.statusCode === 503) {
    console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
  }
}
|
||||
@@ -1,680 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import got from 'got'
|
||||
import Heroku from 'heroku-client'
|
||||
import { setOutput } from '@actions/core'
|
||||
import createStagingAppName from './create-staging-app-name.js'
|
||||
|
||||
// Equivalent of the 'await-sleep' module without the install
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))

// Milliseconds to wait between Heroku API polling attempts
const SLEEP_INTERVAL = 5000
// Trailing log lines to dump for debugging when a dyno crashes on boot
const HEROKU_LOG_LINES_TO_SHOW = 25

// Allow for a few 404 (Not Found), 429 (Too Many Requests), etc. responses from
// the semi-unreliable Heroku API when we're polling for status updates
// NOTE: falls back to 10 when the env var is unset or not a positive integer
const ALLOWED_MISSING_RESPONSE_COUNT =
  parseInt(process.env.ALLOWED_POLLING_FAILURES_PER_PHASE, 10) || 10
// HTTP status codes from the Heroku API treated as transient during polling
const ALLOWABLE_ERROR_CODES = [404, 429, 500, 503]
|
||||
|
||||
/**
 * Deploy a pull request branch to a per-PR Heroku staging app, reporting
 * progress via GitHub Deployment statuses along the way.
 *
 * High-level phases (each polled against the semi-reliable Heroku API with a
 * bounded tolerance, ALLOWED_MISSING_RESPONSE_COUNT, for transient errors):
 *   1. Create a GitHub Deployment and mark it 'in_progress'.
 *   2. Create the Heroku app via AppSetup (new app) or trigger a Build
 *      (existing app), optionally destroying the app first on forceRebuild.
 *   3. Poll the Build, then its Release, until success or failure.
 *   4. Monitor dyno boot for this release; dump logs and fail on crash.
 *   5. Warm the app over HTTP, then mark the Deployment 'success'.
 * On any error, a final 'error' Deployment status is attempted (best-effort)
 * and the original error is re-thrown.
 *
 * @param {object} args
 * @param {object} args.octokit - Authenticated Octokit REST client.
 * @param {object} args.pullRequest - Full pull request object from the GitHub API.
 * @param {boolean} [args.forceRebuild=false] - Destroy and re-create an existing app.
 * @param {string|null} [args.sourceBlobUrl=null] - Prebuilt source tarball URL (set by Actions only).
 * @param {string|null} [args.runId=null] - Actions workflow run ID (set by Actions only).
 * @returns {Promise<void>}
 * @throws {Error} When any phase of the deployment fails.
 */
export default async function deployToStaging({
  octokit,
  pullRequest,
  forceRebuild = false,
  // These parameters will only be set by Actions
  sourceBlobUrl = null,
  runId = null,
}) {
  // Start a timer so we can report how long the deployment takes
  const startTime = Date.now()

  // Extract some important properties from the PR
  const {
    number: pullNumber,
    base: {
      repo: {
        name: repo,
        owner: { login: owner },
      },
    },
    state,
    head: { ref: branch, sha },
    user: author,
  } = pullRequest

  // Verify the PR is still open
  if (state !== 'open') {
    throw new Error(`This pull request is not open. State is: '${state}'`)
  }

  // Put together application configuration variables
  const isPrivateRepo = owner === 'github' && repo === 'docs-internal'
  const isPrebuilt = !!sourceBlobUrl
  const { DOCUBOT_REPO_PAT, HYDRO_ENDPOINT, HYDRO_SECRET } = process.env
  const appConfigVars = {
    // Track the git branch
    GIT_BRANCH: branch,
    // If prebuilt: prevent the Heroku Node.js buildpack from installing devDependencies
    NPM_CONFIG_PRODUCTION: isPrebuilt.toString(),
    // If prebuilt: prevent the Heroku Node.js buildpack from using `npm ci` as it would
    // delete all of the vendored "node_modules/" directory.
    USE_NPM_INSTALL: isPrebuilt.toString(),
    // IMPORTANT: This secret should only be set in the private repo!
    // If not prebuilt, include the PAT required for cloning the `docs-early-access` repo.
    // Otherwise, set it to `null` to unset it from the environment for security.
    DOCUBOT_REPO_PAT: (isPrivateRepo && !isPrebuilt && DOCUBOT_REPO_PAT) || null,
    // IMPORTANT: These secrets should only be set in the private repo!
    // These are required for Hydro event tracking
    ...(isPrivateRepo && HYDRO_ENDPOINT && HYDRO_SECRET && { HYDRO_ENDPOINT, HYDRO_SECRET }),
  }

  const workflowRunLog = runId ? `https://github.com/${owner}/${repo}/actions/runs/${runId}` : null
  let deploymentId = null
  // logUrl is re-pointed as the deployment progresses: workflow run log ->
  // build output stream -> release output stream -> back to the run log.
  let logUrl = workflowRunLog
  let appIsNewlyCreated = false

  const appName = createStagingAppName({ repo, pullNumber, branch })
  const environment = appName
  const homepageUrl = `https://${appName}.herokuapp.com/`

  try {
    const title = `branch '${branch}' at commit '${sha}' in the '${environment}' staging environment`

    console.log(`About to deploy ${title}...`)

    // Kick off a pending GitHub Deployment right away, so the PR author
    // will have instant feedback that their work is being deployed.
    const { data: deployment } = await octokit.repos.createDeployment({
      owner,
      repo,

      description: `Deploying ${title}`,

      // Use a commit SHA instead of a branch name as the ref for more precise
      // feedback, and also because the branch may have already been deleted.
      ref: sha,

      // In the GitHub API, there can only be one active deployment per environment.
      // For our many staging apps, we must use the unique appName as the environment.
      environment,

      // The status contexts to verify against commit status checks. If you omit
      // this parameter, GitHub verifies all unique contexts before creating a
      // deployment. To bypass checking entirely, pass an empty array. Defaults
      // to all unique contexts.
      required_contexts: [],

      // Do not try to merge the base branch into the feature branch
      auto_merge: false,
    })
    console.log('GitHub Deployment created', deployment)

    // Store this ID for later updating
    deploymentId = deployment.id

    // Set some output variables for workflow steps that run after this script
    if (process.env.GITHUB_ACTIONS) {
      setOutput('deploymentId', deploymentId)
      setOutput('logUrl', logUrl)
    }

    await octokit.repos.createDeploymentStatus({
      owner,
      repo,
      deployment_id: deploymentId,
      state: 'in_progress',
      description: 'Deploying the app...',
      // The 'ant-man' preview is required for `state` values of 'inactive', as well as
      // the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
      // The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
      mediaType: {
        previews: ['ant-man', 'flash'],
      },
    })
    console.log('🚀 Deployment status: in_progress - Preparing to deploy the app...')

    // Time to talk to Heroku...
    const heroku = new Heroku({ token: process.env.HEROKU_API_TOKEN })
    let appSetup = null
    let build = null

    // Is there already a Heroku App for this PR?
    // (Any error from the GET — including a plain 404 — is treated as "no".)
    let appExists = true
    try {
      await heroku.get(`/apps/${appName}`)
    } catch (error) {
      announceIfHerokuIsDown(error)
      appExists = false
    }

    // If there is an existing app but we want to forcibly rebuild, delete the app first
    if (appExists && forceRebuild) {
      console.log('🚀 Deployment status: in_progress - Destroying existing Heroku app...')

      try {
        await heroku.delete(`/apps/${appName}`)
        appExists = false

        console.log(`Heroku app '${appName}' deleted for forced rebuild`)
      } catch (error) {
        announceIfHerokuIsDown(error)
        throw new Error(
          `Failed to delete Heroku app '${appName}' for forced rebuild. Error: ${error}`
        )
      }
    }

    // Without a prebuilt source blob, fall back to the repo tarball for this SHA
    if (!sourceBlobUrl) {
      try {
        sourceBlobUrl = await getTarballUrl({
          octokit,
          owner,
          repo,
          sha,
        })
      } catch (error) {
        throw new Error(`Failed to generate source blob URL. Error: ${error}`)
      }
    }

    // If an app does not exist, create one!
    // This action will also trigger a build as a by-product.
    if (!appExists) {
      appIsNewlyCreated = true

      console.log(`Heroku app '${appName}' does not exist. Creating a new AppSetup...`)

      console.log('🚀 Deployment status: in_progress - Creating a new Heroku app...')

      const appSetupStartTime = Date.now()
      try {
        appSetup = await heroku.post('/app-setups', {
          body: {
            app: {
              name: appName,
            },
            source_blob: {
              url: sourceBlobUrl,
            },

            // Pass some environment variables to staging apps via Heroku
            // config variables.
            overrides: {
              // AppSetup API cannot handle `null` values for config vars
              env: removeEmptyProperties(appConfigVars),
            },
          },
        })
        console.log('Heroku AppSetup created', appSetup)

        // This probably will not be available yet
        build = appSetup.build
      } catch (error) {
        announceIfHerokuIsDown(error)
        throw new Error(`Failed to create Heroku app '${appName}'. Error: ${error}`)
      }

      // Add PR author (if staff) as a collaborator on the new staging app
      try {
        if (author.site_admin === true) {
          await heroku.post(`/apps/${appName}/collaborators`, {
            body: {
              user: `${author.login}@github.com`,
              // We don't want an email invitation for every new staging app
              silent: true,
            },
          })
          console.log(`Added PR author @${author.login} as a Heroku app collaborator`)
        }
      } catch (error) {
        announceIfHerokuIsDown(error)
        // It's fine if this fails, it shouldn't block the app from deploying!
        console.warn(
          `Warning: failed to add PR author as a Heroku app collaborator. Error: ${error}`
        )
      }

      // A new Build is created as a by-product of creating an AppSetup.
      // Poll until there is a Build object attached to the AppSetup.
      let setupAcceptableErrorCount = 0
      while (!appSetup || !build || !build.id) {
        await sleep(SLEEP_INTERVAL)
        try {
          appSetup = await heroku.get(`/app-setups/${appSetup.id}`)
          build = appSetup.build
        } catch (error) {
          // Allow for a few bad responses from the Heroku API
          if (isAllowableHerokuError(error)) {
            setupAcceptableErrorCount += 1
            if (setupAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
              console.warn(
                `Ignoring allowable Heroku error #${setupAcceptableErrorCount}: ${error.statusCode}`
              )
              continue
            }
          }
          announceIfHerokuIsDown(error)
          throw new Error(`Failed to get AppSetup status. Error: ${error}`)
        }

        if (appSetup && appSetup.status === 'failed') {
          const manifestErrors = appSetup.manifest_errors || []
          const hasManifestErrors = Array.isArray(manifestErrors) && manifestErrors.length > 0
          const manifestErrorMessage = hasManifestErrors
            ? `\nManifest errors:\n - ${manifestErrors.join('\n - ')}`
            : ''
          throw new Error(
            `Failed to setup app after ${Math.round(
              (Date.now() - appSetupStartTime) / 1000
            )} seconds.
Reason: ${appSetup.failure_message}${manifestErrorMessage}
See Heroku logs for more information:\n${logUrl}`
          )
        }

        console.log(
          `AppSetup status: ${appSetup.status} (after ${Math.round(
            (Date.now() - appSetupStartTime) / 1000
          )} seconds)`
        )
      }

      console.log('Heroku AppSetup finished', appSetup)
      console.log('Heroku build detected', build)
    } else {
      // If the app does exist, just manually trigger a new build
      console.log(`Heroku app '${appName}' already exists.`)

      console.log('Updating Heroku app configuration variables...')

      // Reconfigure environment variables
      // https://devcenter.heroku.com/articles/platform-api-reference#config-vars-update
      try {
        await heroku.patch(`/apps/${appName}/config-vars`, {
          body: appConfigVars,
        })
      } catch (error) {
        announceIfHerokuIsDown(error)
        throw new Error(`Failed to update Heroku app configuration variables. Error: ${error}`)
      }

      console.log('Reconfigured')
      console.log('Building Heroku app...')

      try {
        build = await heroku.post(`/apps/${appName}/builds`, {
          body: {
            source_blob: {
              url: sourceBlobUrl,
            },
          },
        })
      } catch (error) {
        announceIfHerokuIsDown(error)
        throw new Error(`Failed to create Heroku build. Error: ${error}`)
      }

      console.log('Heroku build created', build)
    }

    const buildStartTime = Date.now() // Close enough...
    const buildId = build.id
    logUrl = build.output_stream_url

    console.log('🚀 Deployment status: in_progress - Building a new Heroku slug...')

    // Poll until the Build's status changes from "pending" to "succeeded" or "failed".
    let buildAcceptableErrorCount = 0
    while (!build || !build.release || !build.release.id) {
      await sleep(SLEEP_INTERVAL)
      try {
        build = await heroku.get(`/apps/${appName}/builds/${buildId}`)
      } catch (error) {
        // Allow for a few bad responses from the Heroku API
        if (isAllowableHerokuError(error)) {
          buildAcceptableErrorCount += 1
          if (buildAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
            console.warn(
              `Ignoring allowable Heroku error #${buildAcceptableErrorCount}: ${error.statusCode}`
            )
            continue
          }
        }
        announceIfHerokuIsDown(error)
        throw new Error(`Failed to get build status. Error: ${error}`)
      }

      if (build && build.status === 'failed') {
        throw new Error(
          `Failed to build after ${Math.round(
            (Date.now() - buildStartTime) / 1000
          )} seconds. See Heroku logs for more information:\n${logUrl}`
        )
      }

      console.log(
        `Heroku build status: ${(build || {}).status} (after ${Math.round(
          (Date.now() - buildStartTime) / 1000
        )} seconds)`
      )
    }

    console.log(
      `Finished Heroku build after ${Math.round((Date.now() - buildStartTime) / 1000)} seconds.`,
      build
    )
    console.log('Heroku release detected', build.release)

    const releaseStartTime = Date.now() // Close enough...
    let releaseId = build.release.id
    let release = null

    // Poll until the associated Release's status changes from "pending" to "succeeded" or "failed".
    let releaseAcceptableErrorCount = 0
    while (!release || release.status === 'pending') {
      await sleep(SLEEP_INTERVAL)
      try {
        const result = await heroku.get(`/apps/${appName}/releases/${releaseId}`)

        // Update the deployment status but only on the first retrieval
        if (!release) {
          logUrl = result.output_stream_url

          console.log('Heroku Release created', result)

          console.log('🚀 Deployment status: in_progress - Releasing the built Heroku slug...')
        }

        release = result
      } catch (error) {
        // Allow for a few bad responses from the Heroku API
        if (isAllowableHerokuError(error)) {
          releaseAcceptableErrorCount += 1
          if (releaseAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
            console.warn(
              `Ignoring allowable Heroku error #${releaseAcceptableErrorCount}: ${error.statusCode}`
            )
            continue
          }
        }
        announceIfHerokuIsDown(error)
        throw new Error(`Failed to get release status. Error: ${error}`)
      }

      if (release && release.status === 'failed') {
        throw new Error(
          `Failed to release after ${Math.round(
            (Date.now() - releaseStartTime) / 1000
          )} seconds. See Heroku logs for more information:\n${logUrl}`
        )
      }

      console.log(
        `Release status: ${(release || {}).status} (after ${Math.round(
          (Date.now() - releaseStartTime) / 1000
        )} seconds)`
      )
    }

    console.log(
      `Finished Heroku release after ${Math.round(
        (Date.now() - releaseStartTime) / 1000
      )} seconds.`,
      release
    )

    // Monitor dyno state for this release to ensure it reaches "up" rather than crashing.
    // This will help us catch issues with faulty startup code and/or the package manifest.
    const dynoBootStartTime = Date.now()
    console.log('Checking Heroku dynos...')
    logUrl = workflowRunLog

    console.log('🚀 Deployment status: in_progress - Monitoring the Heroku dyno start-up...')

    // Keep checking while there are still dynos in non-terminal states
    let newDynos = []
    let dynoAcceptableErrorCount = 0
    while (newDynos.length === 0 || newDynos.some((dyno) => dyno.state === 'starting')) {
      await sleep(SLEEP_INTERVAL)
      try {
        const dynoList = await heroku.get(`/apps/${appName}/dynos`)
        const dynosForThisRelease = dynoList.filter((dyno) => dyno.release.id === releaseId)

        // To track them afterward
        newDynos = dynosForThisRelease

        // Dynos for this release OR a newer release
        const relevantDynos = dynoList.filter((dyno) => dyno.release.version >= release.version)

        // If this Heroku app was just newly created, often a secondary release
        // is requested to enable automatically managed SSL certificates. The
        // release description will read:
        // "Enable allow-multiple-sni-endpoints feature"
        //
        // If that is the case, we need to update to monitor that secondary
        // release instead.
        if (relevantDynos.length > 0 && dynosForThisRelease.length === 0) {
          // If the app is NOT newly created, fail fast!
          if (!appIsNewlyCreated) {
            throw new Error('The dynos for this release disappeared unexpectedly')
          }

          // Check for the secondary release
          let nextRelease = null
          try {
            nextRelease = await heroku.get(`/apps/${appName}/releases/${release.version + 1}`)
          } catch (error) {
            announceIfHerokuIsDown(error)
            throw new Error(
              `Could not find a secondary release to explain the disappearing dynos. Error: ${error}`
            )
          }

          if (nextRelease) {
            if (nextRelease.description === 'Enable allow-multiple-sni-endpoints feature') {
              // Track dynos for the next release instead
              release = nextRelease
              releaseId = nextRelease.id

              console.warn('Switching to monitor secondary release...')

              // Allow the loop to repeat to fetch the dynos for the secondary release
            } else {
              // Otherwise, assume another release replaced this one but it
              // PROBABLY would've succeeded...?
              newDynos.forEach((dyno) => {
                dyno.state = 'up'
              })
            }
          }
          // else just keep monitoring and hope for the best
        }

        console.log(
          `Dyno states: ${JSON.stringify(newDynos.map((dyno) => dyno.state))} (after ${Math.round(
            (Date.now() - dynoBootStartTime) / 1000
          )} seconds)`
        )
      } catch (error) {
        // Allow for a few bad responses from the Heroku API
        if (isAllowableHerokuError(error)) {
          dynoAcceptableErrorCount += 1
          if (dynoAcceptableErrorCount <= ALLOWED_MISSING_RESPONSE_COUNT) {
            console.warn(
              `Ignoring allowable Heroku error #${dynoAcceptableErrorCount}: ${error.statusCode}`
            )
            continue
          }
        }
        announceIfHerokuIsDown(error)
        throw new Error(`Failed to find dynos for this release. Error: ${error}`)
      }
    }

    const crashedDynos = newDynos.filter((dyno) => ['crashed', 'restarting'].includes(dyno.state))
    const runningDynos = newDynos.filter((dyno) => dyno.state === 'up')

    // If any dynos crashed on start-up, fail the deployment
    if (crashedDynos.length > 0) {
      const errorMessage = `At least ${crashedDynos.length} Heroku dyno(s) crashed on start-up!`

      console.error(errorMessage)

      // Attempt to dump some of the Heroku log here for debugging
      try {
        const logSession = await heroku.post(`/apps/${appName}/log-sessions`, {
          body: {
            dyno: crashedDynos[0].name,
            lines: HEROKU_LOG_LINES_TO_SHOW,
            tail: false,
          },
        })

        logUrl = logSession.logplex_url

        const logText = await got(logUrl).text()
        console.error(
          `Here are the last ${HEROKU_LOG_LINES_TO_SHOW} lines of the Heroku log:\n\n${logText}`
        )
      } catch (error) {
        announceIfHerokuIsDown(error)
        // Don't fail because of this error
        console.error(`Failed to retrieve the Heroku logs for the crashed dynos. Error: ${error}`)
      }

      throw new Error(errorMessage)
    }

    console.log(
      `At least ${runningDynos.length} Heroku dyno(s) are ready after ${Math.round(
        (Date.now() - dynoBootStartTime) / 1000
      )} seconds.`
    )

    // Send a series of requests to trigger the server warmup routines
    console.log('🚀 Deployment status: in_progress - Triggering server warmup routines...')

    const warmupStartTime = Date.now()
    console.log(`Making warmup requests to: ${homepageUrl}`)
    try {
      await got(homepageUrl, {
        timeout: 10000, // Maximum 10 second timeout per request
        retry: {
          limit: 7, // About 2 minutes 7 seconds of delay, plus active request time for 8 requests
          statusCodes: [404, 421].concat(got.defaults.options.retry.statusCodes), // prepend extras
        },
        hooks: {
          beforeRetry: [
            (options, error = {}, retryCount = '?') => {
              const statusCode = error.statusCode || (error.response || {}).statusCode || -1
              console.log(
                `Retrying after warmup request attempt #${retryCount} (${statusCode}) after ${Math.round(
                  (Date.now() - warmupStartTime) / 1000
                )} seconds...`
              )
            },
          ],
        },
      })
      console.log(
        `Warmup requests passed after ${Math.round((Date.now() - warmupStartTime) / 1000)} seconds`
      )
    } catch (error) {
      throw new Error(
        `Warmup requests failed after ${Math.round(
          (Date.now() - warmupStartTime) / 1000
        )} seconds. Error: ${error}`
      )
    }

    // Report success!
    const successMessage = `Deployment succeeded after ${Math.round(
      (Date.now() - startTime) / 1000
    )} seconds.`
    console.log(successMessage)

    await octokit.repos.createDeploymentStatus({
      owner,
      repo,
      deployment_id: deploymentId,
      state: 'success',
      description: successMessage,
      ...(logUrl && { log_url: logUrl }),
      environment_url: homepageUrl,
      // The 'ant-man' preview is required for `state` values of 'inactive', as well as
      // the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
      // The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
      mediaType: {
        previews: ['ant-man', 'flash'],
      },
    })

    console.log(`🚀 Deployment status: success - ${successMessage}`)
    console.log(`Visit the newly deployed app at: ${homepageUrl}`)
  } catch (error) {
    // Report failure!
    const failureMessage = `Deployment failed after ${Math.round(
      (Date.now() - startTime) / 1000
    )} seconds. See logs for more information.`
    console.error(failureMessage)

    // Best-effort: mark the GitHub Deployment as errored (only possible if
    // we got far enough to create one).
    try {
      if (deploymentId) {
        await octokit.repos.createDeploymentStatus({
          owner,
          repo,
          deployment_id: deploymentId,
          state: 'error',
          description: failureMessage,
          ...(logUrl && { log_url: logUrl }),
          environment_url: homepageUrl,
          // The 'ant-man' preview is required for `state` values of 'inactive', as well as
          // the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
          // The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
          mediaType: {
            previews: ['ant-man', 'flash'],
          },
        })

        console.log(
          `🚀 Deployment status: error - ${failureMessage}` + (logUrl ? ` Logs: ${logUrl}` : '')
        )
      }
    } catch (error) {
      console.error(`Failed to finalize GitHub DeploymentStatus as a failure. Error: ${error}`)
    }

    // Re-throw the error to bubble up
    throw error
  }
}
|
||||
|
||||
/**
 * Resolve the URL of the tarballed source code bundle for a commit.
 *
 * @param {object} args
 * @param {object} args.octokit - Authenticated Octokit REST client.
 * @param {string} args.owner - Repository owner login.
 * @param {string} args.repo - Repository name.
 * @param {string} args.sha - Commit SHA to archive.
 * @returns {Promise<string>} The tarball URL from the redirect's Location header.
 */
async function getTarballUrl({ octokit, owner, repo, sha }) {
  // Ask the GitHub API for the tarball without following the redirect, so we
  // can read the bundle's URL straight out of the `Location` header.
  const response = await octokit.repos.downloadTarballArchive({
    owner,
    repo,
    ref: sha,
    // Override the underlying `node-fetch` module's `redirect` option
    // configuration to prevent automatically following redirects.
    request: {
      redirect: 'manual',
    },
  })
  return response.headers.location
}
|
||||
|
||||
/**
 * Report whether a Heroku API error carries one of the transient status
 * codes we tolerate while polling (see ALLOWABLE_ERROR_CODES).
 *
 * @param {Error|null|undefined} error - Error thrown by the Heroku client.
 * @returns {boolean|*} Whether the status code is allowable; when `error`
 *   itself is falsy, that falsy value is returned unchanged (matching the
 *   short-circuit behavior callers rely on only in boolean context).
 */
function isAllowableHerokuError(error) {
  if (!error) return error
  return ALLOWABLE_ERROR_CODES.includes(error.statusCode)
}
|
||||
|
||||
/**
 * Log a loud pointer to the Heroku status page when an error looks like a
 * platform-wide outage rather than a problem with our request.
 *
 * @param {Error|null|undefined} error - Error thrown by the Heroku client.
 * @returns {void}
 */
function announceIfHerokuIsDown(error) {
  // 503 (Service Unavailable) from the Heroku API is our outage heuristic.
  if (error?.statusCode === 503) {
    console.error('💀 Heroku may be down! Please check its Status page: https://status.heroku.com/')
  }
}
|
||||
|
||||
/**
 * Return a shallow copy of an object with every `null`/`undefined` value
 * removed (the Heroku AppSetup API cannot handle `null` config vars).
 *
 * @param {object} obj - Source object; not mutated.
 * @returns {object} New object containing only the non-nullish entries.
 */
function removeEmptyProperties(obj) {
  const cleaned = {}
  for (const [name, value] of Object.entries(obj)) {
    // `!= null` deliberately matches both null and undefined, nothing else
    if (value != null) {
      cleaned[name] = value
    }
  }
  return cleaned
}
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
// Character classes for the pieces of a GitHub pull request URL
const USERNAME_FORMAT = '([A-Za-z0-9-]+)'
const REPO_NAME_FORMAT = '([A-Za-z0-9._-]+)'
const PR_NUMBER_FORMAT = '(\\d+)'

// Matches a full PR URL, optionally wrapped in a stray quote character on
// either side (as can happen when the URL is pasted with shell quoting)
const ALLOWED_PR_URL_FORMAT = new RegExp(
  `^['"]?https://github\\.com/${USERNAME_FORMAT}/${REPO_NAME_FORMAT}/pull/${PR_NUMBER_FORMAT}['"]?$`
)

/**
 * Parse a GitHub pull request URL into its component parts.
 *
 * @param {string|null|undefined} prUrl - The pull request URL to parse.
 * @returns {{owner: string|undefined, repo: string|undefined, pullNumber: number|undefined}}
 *   The owner login, repository name, and PR number; every field is
 *   `undefined` when the input does not match the expected format.
 */
export default function parsePullRequestUrl(prUrl) {
  const match = (prUrl || '').match(ALLOWED_PR_URL_FORMAT) || []
  const [, owner, repo, prNumberText] = match
  return {
    owner,
    repo,
    // parseInt yields NaN on a failed match; fall back to undefined
    pullNumber: parseInt(prNumberText, 10) || undefined,
  }
}
|
||||
@@ -1,140 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script is run as a postbuild script during staging and deployments on Heroku. It clones a branch
|
||||
// in the early-access repo that matches the current branch in the docs repo; if one can't be found, it
|
||||
// clones the `main` branch.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
import dotenv from 'dotenv'
|
||||
import { execSync } from 'child_process'
|
||||
import rimraf from 'rimraf'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import os from 'os'
|
||||
|
||||
dotenv.config()

const {
  // PAT with access to the early-access repo; without it we skip entirely.
  DOCUBOT_REPO_PAT,
  // Presence of this variable signals a production deploy.
  HEROKU_PRODUCTION_APP,
  GIT_BRANCH, // Set by Actions and/or the deployer with the name of the docs-internal branch
} = process.env

// Exit if PAT is not found
if (!DOCUBOT_REPO_PAT) {
  console.log('Skipping early access, not authorized')
  process.exit(0)
}

const EA_PRODUCTION_BRANCH = 'main'

// If a branch name is not provided in the environment, attempt to get
// the local branch name; or default to 'main'
let currentBranch = (GIT_BRANCH || '').replace(/^refs\/heads\//, '')
if (!currentBranch) {
  try {
    // BUGFIX: `git branch --show-current` output ends with a trailing newline.
    // Without trim(), the newline embedded in ${earlyAccessBranch} corrupts the
    // shell commands built below (the `git clone ... --branch <name> <url>`
    // command line would be split at the newline).
    currentBranch = execSync('git branch --show-current').toString().trim()
  } catch (err) {
    // Ignore but log
    console.warn('Error checking for local branch:', err.message)
  }
}
if (!currentBranch) {
  currentBranch = EA_PRODUCTION_BRANCH
}

// Early Access details
const earlyAccessOwner = 'github'
const earlyAccessRepoName = 'docs-early-access'
const earlyAccessDirName = 'early-access'
const earlyAccessFullRepo = `https://${DOCUBOT_REPO_PAT}@github.com/${earlyAccessOwner}/${earlyAccessRepoName}`

// On our Azure self-hosted runners, os.tmpdir() doesn't work reliably. On Heroku, os.homedir doesn't work reliably.
const earlyAccessCloningParentDir = process.env.CI ? os.homedir() : os.tmpdir()
const earlyAccessCloningDir = path.join(earlyAccessCloningParentDir, earlyAccessRepoName)

// Directories whose early-access content lives inside this repo.
const destinationDirNames = ['content', 'data', 'assets/images']
const destinationDirsMap = destinationDirNames.reduce((map, dirName) => {
  map[dirName] = path.join(process.cwd(), dirName, earlyAccessDirName)
  return map
}, {})

// Production vs. staging environment
// TODO test that this works as expected
const environment = HEROKU_PRODUCTION_APP ? 'production' : 'staging'

// Early access branch to clone
let earlyAccessBranch = HEROKU_PRODUCTION_APP ? EA_PRODUCTION_BRANCH : currentBranch

// Confirm that the branch exists in the remote
let branchExists = execSync(
  `git ls-remote --heads ${earlyAccessFullRepo} ${earlyAccessBranch}`
).toString()

// If the branch did NOT exist, try checking for the default branch instead
if (!branchExists && earlyAccessBranch !== EA_PRODUCTION_BRANCH) {
  console.warn(
    `The branch '${earlyAccessBranch}' was not found in ${earlyAccessOwner}/${earlyAccessRepoName}!`
  )
  console.warn(`Attempting the default branch ${EA_PRODUCTION_BRANCH} instead...`)

  earlyAccessBranch = EA_PRODUCTION_BRANCH
  branchExists = execSync(
    `git ls-remote --heads ${earlyAccessFullRepo} ${earlyAccessBranch}`
  ).toString()
}

// If no suitable branch was found, bail out now
if (!branchExists) {
  console.error(
    `The branch '${earlyAccessBranch}' was not found in ${earlyAccessOwner}/${earlyAccessRepoName}!`
  )
  console.error('Exiting!')
  process.exit(1)
}

// Remove any previously cloned copies of the early access repo
rimraf.sync(earlyAccessCloningDir)

// Clone the repo
console.log(`Setting up: ${earlyAccessCloningDir}`)
execSync(
  `git clone --single-branch --branch ${earlyAccessBranch} ${earlyAccessFullRepo} ${earlyAccessRepoName}`,
  {
    cwd: earlyAccessCloningParentDir,
  }
)
console.log(`Using early-access ${environment} branch: '${earlyAccessBranch}'`)

// Remove all existing early access directories from this repo
destinationDirNames.forEach((key) => rimraf.sync(destinationDirsMap[key]))

// Move the latest early access source directories into this repo
destinationDirNames.forEach((dirName) => {
  const sourceDir = path.join(earlyAccessCloningDir, dirName)
  const destDir = destinationDirsMap[dirName]

  // If the source directory doesn't exist, skip it
  if (!fs.existsSync(sourceDir)) {
    console.warn(`Early access directory '${dirName}' does not exist. Skipping...`)
    return
  }

  // Ensure the base directory exists
  fs.mkdirSync(path.join(process.cwd(), dirName), { recursive: true })

  // Move the directory from the cloned source to the destination
  fs.renameSync(sourceDir, destDir)

  // Confirm the newly moved directory exists
  if (fs.existsSync(destDir)) {
    console.log(`Successfully moved early access directory '${dirName}' into this repo`)
  } else {
    throw new Error(`Failed to move early access directory '${dirName}'!`)
  }
})

// Remove the source content again for good hygiene
rimraf.sync(earlyAccessCloningDir)
|
||||
@@ -1,41 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script finds all Heroku staging apps and pings them to make sure they're always "warmed" and responsive to requests.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
import dotenv from 'dotenv'
|
||||
import assert from 'assert'
|
||||
import got from 'got'
|
||||
import { chain } from 'lodash-es'
|
||||
import chalk from 'chalk'
|
||||
import Heroku from 'heroku-client'
|
||||
|
||||
// Load optional overrides from a local .env file.
dotenv.config()

// A deployer-level Heroku API token is required to list apps.
const { HEROKU_API_TOKEN } = process.env
assert(HEROKU_API_TOKEN)

// Authenticated Heroku API client used by main().
const heroku = new Heroku({ token: HEROKU_API_TOKEN })

main()
|
||||
|
||||
/**
 * Fetch every Heroku app and ping each one's /en page so staging apps stay
 * "warmed" and responsive. Successes log the status in green, failures in red.
 */
async function main() {
  // Sort alphabetically for stable, readable log output.
  const apps = chain(await heroku.get('/apps'))
    .orderBy('name')
    .value()

  async function ping(app) {
    // ?warmup param has no effect but makes it easier to find these requests in the logs
    const url = `https://${app.name}.herokuapp.com/en?warmup`
    try {
      const response = await got(url)
      console.log(chalk.green(url, response.statusCode))
    } catch (error) {
      // BUGFIX: network-level failures (DNS errors, timeouts) have no
      // `error.response`; fall back to the error message instead of crashing
      // on `undefined.statusCode`.
      console.log(chalk.red(url, error.response ? error.response.statusCode : error.message))
    }
  }

  // BUGFIX: await the pings so main() doesn't resolve before they finish and
  // so nothing is left as a floating promise.
  await Promise.all(apps.map(ping))
}
|
||||
@@ -1,145 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script removes all stale Heroku staging apps that outlasted the closure
|
||||
// of their corresponding pull requests, or correspond to spammy pull requests.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
import dotenv from 'dotenv'
|
||||
import { chain } from 'lodash-es'
|
||||
import chalk from 'chalk'
|
||||
import Heroku from 'heroku-client'
|
||||
import getOctokit from './helpers/github.js'
|
||||
|
||||
// Load optional overrides from a local .env file.
dotenv.config()

const { HEROKU_API_TOKEN, GITHUB_TOKEN } = process.env

// A deployer-level Heroku token is required to list and delete apps.
if (!HEROKU_API_TOKEN) {
  console.error(
    'Error! You must have a HEROKU_API_TOKEN environment variable for deployer-level access.'
  )
  process.exit(1)
}

// A GitHub PAT is required to look up the pull request behind each app.
if (!GITHUB_TOKEN) {
  console.error('Error! You must have a GITHUB_TOKEN environment variable for repo access.')
  process.exit(1)
}

const heroku = new Heroku({ token: HEROKU_API_TOKEN })

// This helper reads `GITHUB_TOKEN` from the environment implicitly.
const octokit = getOctokit()

// Apps that must never be deleted, even though their names don't match a PR.
const protectedAppNames = ['help-docs']

main()
|
||||
|
||||
/**
 * Sweep all Heroku apps: every app whose name encodes a docs/docs-internal
 * pull request is checked against GitHub, and apps for closed or spammy PRs
 * are deleted. Prints a summary of counts plus any unrecognized app names.
 */
async function main() {
  // All Heroku apps, sorted alphabetically for stable log output.
  const apps = chain(await heroku.get('/apps'))
    .orderBy('name')
    .value()

  // Staging app names look like "<gha-|ghd-?><repo>-<pr number>--<branch slug>".
  const prInfoMatch = /^(?:gha-|ghd-)?(?<repo>docs(?:-internal)?)-(?<pullNumber>\d+)--.*$/

  // Annotate every app with the repo/PR number parsed from its name (or nulls).
  const annotated = apps.map((app) => {
    const found = prInfoMatch.exec(app.name)
    const groups = (found || {}).groups || {}

    return {
      app,
      repo: groups.repo,
      pullNumber: parseInt(groups.pullNumber, 10) || null,
    }
  })

  const isFromPull = (entry) => entry.repo && entry.pullNumber > 0

  const fromPulls = annotated.filter((entry) => isFromPull(entry))

  // Names we can't attribute to a PR, minus the deliberately protected ones.
  const nonMatchingAppNames = annotated
    .filter((entry) => !isFromPull(entry))
    .map((entry) => entry.app.name)
    .filter((name) => !protectedAppNames.includes(name))

  let staleCount = 0
  let spammyCount = 0

  // Assess PRs one at a time (sequentially) rather than in parallel.
  for (const entry of fromPulls) {
    const { isStale, isSpammy } = await assessPullRequest(entry.repo, entry.pullNumber)

    if (isSpammy) spammyCount += 1
    if (isStale) staleCount += 1

    if (isSpammy || isStale) {
      await deleteHerokuApp(entry.app.name)
    }
  }

  const matchingCount = fromPulls.length
  const counts = {
    total: matchingCount,
    alive: matchingCount - staleCount,
    stale: {
      total: staleCount,
      spammy: spammyCount,
      closed: staleCount - spammyCount,
    },
  }
  console.log(`🧮 COUNTS!\n${JSON.stringify(counts, null, 2)}`)

  const nonMatchingCount = nonMatchingAppNames.length
  if (nonMatchingCount > 0) {
    console.log(
      '⚠️ 👀',
      chalk.yellow(
        `Non-matching app names (${nonMatchingCount}):\n - ${nonMatchingAppNames.join('\n - ')}`
      )
    )
  }
}
|
||||
|
||||
/**
 * Format octokit-style pull request params as "owner/repo#number" for log lines.
 * @param {{owner: string, repo: string, pull_number: number}} params
 * @returns {string}
 */
function displayParams({ owner, repo, pull_number: pullNumber }) {
  return `${owner}/${repo}#${pullNumber}`
}
|
||||
|
||||
/**
 * Decide whether the pull request behind a staging app is stale and/or spammy.
 * A PR is stale when it is closed; it is also spammy when the API returns 404
 * (standard PATs cannot see PRs from spammy users). API errors other than 404
 * are logged and treated as neither stale nor spammy.
 * @param {string} repo - Repository name under the github org.
 * @param {number} pullNumber - Pull request number.
 * @returns {Promise<{isStale: boolean, isSpammy: boolean}>}
 */
async function assessPullRequest(repo, pullNumber) {
  const params = { owner: 'github', repo, pull_number: pullNumber }
  const verdict = { isStale: false, isSpammy: false }

  try {
    const { data: pullRequest } = await octokit.pulls.get(params)

    if (pullRequest && pullRequest.state === 'closed') {
      verdict.isStale = true
      console.debug(chalk.green(`STALE: ${displayParams(params)} is closed`))
    }
  } catch (error) {
    // Using a standard GitHub PAT, PRs from spammy users will respond as 404
    if (error.status === 404) {
      verdict.isStale = true
      verdict.isSpammy = true
      console.debug(chalk.yellow(`STALE: ${displayParams(params)} is spammy or deleted`))
    } else {
      console.debug(chalk.red(`ERROR: ${displayParams(params)} - ${error.message}`))
    }
  }

  return verdict
}
|
||||
|
||||
/**
 * Delete a single Heroku app by name, logging the outcome either way.
 * Failures are logged but never thrown, so one bad app cannot stop the sweep.
 * @param {string} appName
 */
async function deleteHerokuApp(appName) {
  try {
    await heroku.delete(`/apps/${appName}`)
    console.log('✅', chalk.green(`Removed stale app "${appName}"`))
  } catch (error) {
    const reason = `ERROR: Failed to remove stale app "${appName}" - ${error.message}`
    console.log('❌', chalk.red(reason))
  }
}
|
||||
@@ -1,266 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
// [start-readme]
|
||||
//
|
||||
// This script removes all stale GitHub deployment environments that outlasted
|
||||
// the closure of their corresponding pull requests, or correspond to spammy
|
||||
// pull requests.
|
||||
//
|
||||
// [end-readme]
|
||||
|
||||
import dotenv from 'dotenv'
|
||||
import chalk from 'chalk'
|
||||
import getOctokit from './helpers/github.js'
|
||||
|
||||
// Load optional overrides from a local .env file.
dotenv.config()

const { GITHUB_TOKEN, ELEVATED_TOKEN, REPO, RUN_ID } = process.env

// Standard PAT, used for reading environments, deployments, and PRs.
if (!GITHUB_TOKEN) {
  console.error('Error! You must have a GITHUB_TOKEN environment variable for repo access.')
  process.exit(1)
}

// Deleting a deployment environment needs more than GITHUB_TOKEN can grant.
if (!ELEVATED_TOKEN) {
  console.error(
    'Error! You must have a ELEVATED_TOKEN environment variable for removing deployment environments.'
  )
  process.exit(1)
}

if (!REPO) {
  console.error('Error! You must have a REPO environment variable.')
  process.exit(1)
}

if (!RUN_ID) {
  console.error('Error! You must have a RUN_ID environment variable.')
  process.exit(1)
}

// This helper reads `GITHUB_TOKEN` from the environment implicitly.
const octokit = getOctokit()

// Environments that must never be swept, regardless of name matching.
const protectedEnvNames = ['production']
// Safety valve: stop after this many stale/spammy environments in one run.
const maxEnvironmentsToProcess = 50

// How long must a PR be closed without being merged to be considered stale?
const ONE_HOUR = 60 * 60 * 1000
const prClosureStaleTime = 2 * ONE_HOUR

main()
|
||||
|
||||
/**
 * Sweep the GitHub deployment environments of $REPO: every environment whose
 * name encodes a pull request is checked against GitHub, and environments for
 * stale (closed/merged) or spammy PRs are deactivated and deleted. Prints a
 * summary of counts plus any unrecognized environment names.
 */
async function main() {
  const owner = 'github'
  const [repoOwner, repo] = (process.env.REPO || '').split('/')

  // This sweeper is only meant to run against github-owned repositories.
  if (repoOwner !== owner) {
    console.error(`Error! The repository owner must be "${owner}" but was "${repoOwner}".`)
    process.exit(1)
  }

  // Links deactivated deployments back to this workflow run.
  const logUrl = `https://github.com/${owner}/${repo}/actions/runs/${process.env.RUN_ID}`

  // Environment names look like "<gha-|ghd-?><repo>-<pr number>--<branch slug>".
  const prInfoMatch = /^(?:gha-|ghd-)?(?<repo>docs(?:-internal)?)-(?<pullNumber>\d+)--.*$/

  let exceededLimit = false
  let matchingCount = 0
  let staleCount = 0
  let spammyCount = 0
  const nonMatchingEnvNames = []

  for await (const page of octokit.paginate.iterator(octokit.repos.getAllEnvironments, {
    owner,
    repo,
  })) {
    const { data: environments } = page

    // Annotate each environment with the repo/PR number parsed from its name.
    const annotated = environments.map((env) => {
      const found = prInfoMatch.exec(env.name)
      const groups = (found || {}).groups || {}

      return {
        env,
        repo: groups.repo,
        pullNumber: parseInt(groups.pullNumber, 10) || null,
      }
    })

    // Only environments from THIS repo with a valid PR number are candidates.
    const candidates = annotated.filter(
      (entry) => entry.repo === repo && entry.pullNumber > 0
    )
    matchingCount += candidates.length

    // Track unexpected names (minus protected ones) so we can report them.
    nonMatchingEnvNames.push(
      ...annotated
        .filter((entry) => !(entry.repo && entry.pullNumber > 0))
        .map((entry) => entry.env.name)
        .filter((name) => !protectedEnvNames.includes(name))
    )

    // Assess PRs one at a time (sequentially) rather than in parallel.
    for (const candidate of candidates) {
      const { isStale, isSpammy } = await assessPullRequest(candidate.pullNumber)

      if (isSpammy) spammyCount += 1
      if (isStale) staleCount += 1

      if (isSpammy || isStale) {
        await deleteEnvironment(candidate.env.name)
      }

      if (spammyCount + staleCount >= maxEnvironmentsToProcess) {
        exceededLimit = true
        break
      }
    }

    if (exceededLimit) {
      console.log(
        '🛑',
        chalk.bgRed(`STOP! Exceeded limit, halting after ${maxEnvironmentsToProcess}.`)
      )
      break
    }
  }

  const counts = {
    total: matchingCount,
    alive: matchingCount - staleCount,
    stale: {
      total: staleCount,
      spammy: spammyCount,
      closed: staleCount - spammyCount,
    },
  }
  console.log(`🧮 COUNTS!\n${JSON.stringify(counts, null, 2)}`)

  const nonMatchingCount = nonMatchingEnvNames.length
  if (nonMatchingCount > 0) {
    console.log(
      '⚠️ 👀',
      chalk.yellow(
        `Non-matching env names (${nonMatchingCount}):\n - ${nonMatchingEnvNames.join('\n - ')}`
      )
    )
  }

  // Format octokit-style pull request params as "owner/repo#number" for logs.
  function displayParams({ owner, repo, pull_number: pullNumber }) {
    return `${owner}/${repo}#${pullNumber}`
  }

  // Decide whether a PR is stale (merged, or closed long enough) and/or spammy
  // (404 under a standard PAT). Other API errors are logged and treated as
  // neither stale nor spammy.
  async function assessPullRequest(pullNumber) {
    const params = { owner, repo, pull_number: pullNumber }
    const verdict = { isStale: false, isSpammy: false }

    try {
      const { data: pullRequest } = await octokit.pulls.get(params)

      if (pullRequest && pullRequest.state === 'closed') {
        const isMerged = pullRequest.merged === true
        const closureAge = Date.now() - Date.parse(pullRequest.closed_at)
        verdict.isStale = isMerged || closureAge >= prClosureStaleTime

        if (verdict.isStale) {
          console.debug(chalk.green(`STALE: ${displayParams(params)} is closed`))
        } else {
          console.debug(
            chalk.blue(`NOT STALE: ${displayParams(params)} is closed but not yet stale`)
          )
        }
      }
    } catch (error) {
      // Using a standard GitHub PAT, PRs from spammy users will respond as 404
      if (error.status === 404) {
        verdict.isStale = true
        verdict.isSpammy = true
        console.debug(chalk.yellow(`STALE: ${displayParams(params)} is spammy or deleted`))
      } else {
        console.debug(chalk.red(`ERROR: ${displayParams(params)} - ${error.message}`))
      }
    }

    return verdict
  }

  // Deactivate and delete every deployment in an environment, then delete the
  // environment itself. Failures are logged but never thrown, so one bad
  // environment cannot stop the sweep.
  async function deleteEnvironment(envName) {
    try {
      let deploymentCount = 0

      // Get all of the Deployments to signal this environment's complete deactivation
      for await (const page of octokit.paginate.iterator(octokit.repos.listDeployments, {
        owner,
        repo,

        // In the GitHub API, there can only be one active deployment per environment.
        // For our many staging apps, we must use the unique appName as the environment.
        environment: envName,
      })) {
        const { data: deployments } = page

        // Deactivate ALL of the deployments
        for (const deployment of deployments) {
          // Deactivate this Deployment with an 'inactive' DeploymentStatus
          await octokit.repos.createDeploymentStatus({
            owner,
            repo,
            deployment_id: deployment.id,
            state: 'inactive',
            description: 'The app was undeployed',
            log_url: logUrl,
            // The 'ant-man' preview is required for `state` values of 'inactive', as well as
            // the use of the `log_url`, `environment_url`, and `auto_inactive` parameters.
            // The 'flash' preview is required for `state` values of 'in_progress' and 'queued'.
            mediaType: {
              previews: ['ant-man', 'flash'],
            },
          })

          // Delete this Deployment
          await octokit.repos.deleteDeployment({
            owner,
            repo,
            deployment_id: deployment.id,
          })

          deploymentCount += 1
        }
      }

      // Delete this Environment; a 404 just means it is already gone.
      try {
        await octokit.repos.deleteAnEnvironment({
          // Must use a PAT with more elevated permissions than GITHUB_TOKEN can achieve!
          headers: {
            authorization: `token ${process.env.ELEVATED_TOKEN}`,
          },
          owner,
          repo,
          environment_name: envName,
        })
      } catch (error) {
        if (error.status !== 404) {
          throw error
        }
      }

      console.log(
        '✅',
        chalk.green(
          `Removed stale deployment environment "${envName}" (${deploymentCount} deployments)`
        )
      )
    } catch (error) {
      console.log(
        '❌',
        chalk.red(
          `ERROR: Failed to remove stale deployment environment "${envName}" - ${error.message}`
        )
      )
    }
  }
}
|
||||
@@ -35,9 +35,9 @@ describe('robots.txt', () => {
|
||||
})
|
||||
})
|
||||
|
||||
it('disallows indexing of herokuapp.com domains', async () => {
|
||||
it('disallows indexing of azurecontainer.io domains', async () => {
|
||||
const req = {
|
||||
hostname: 'docs-internal-12345--my-branch.herokuapp.com',
|
||||
hostname: 'docs-internal-preview-12345-asdfz.azurecontainer.io',
|
||||
path: '/robots.txt',
|
||||
}
|
||||
const res = new MockExpressResponse()
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user