Merge branch 'main' into codewithdev-changes
.github/CODEOWNERS (6 changes, vendored)
@@ -16,10 +16,8 @@ package-lock.json @github/docs-engineering
package.json @github/docs-engineering

# Localization
-/.github/actions-scripts/create-translation-batch-pr.js @github/docs-engineering
+/.github/actions-scripts/msft-create-translation-batch-pr.js @github/docs-engineering
-/.github/workflows/create-translation-batch-pr.yml @github/docs-engineering
+/.github/workflows/msft-create-translation-batch-pr.yml @github/docs-engineering
-/.github/workflows/crowdin.yml @github/docs-engineering
-/crowdin*.yml @github/docs-engineering
/translations/ @Octomerger

# Site Policy
@@ -1,142 +0,0 @@
#!/usr/bin/env node

import fs from 'fs'
import github from '@actions/github'

const OPTIONS = Object.fromEntries(
  ['BASE', 'BODY_FILE', 'GITHUB_TOKEN', 'HEAD', 'LANGUAGE', 'TITLE', 'GITHUB_REPOSITORY'].map(
    (envVarName) => {
      const envVarValue = process.env[envVarName]
      if (!envVarValue) {
        throw new Error(`You must supply a ${envVarName} environment variable`)
      }
      return [envVarName, envVarValue]
    }
  )
)

if (!process.env.GITHUB_REPOSITORY) {
  throw new Error('GITHUB_REPOSITORY environment variable not set')
}

const RETRY_STATUSES = [
  422, // Retry the operation if the PR already exists
  502, // Retry the operation if the API responds with a `502 Bad Gateway` error.
]
const RETRY_ATTEMPTS = 3
const {
  // One of the default environment variables provided by Actions.
  GITHUB_REPOSITORY,

  // These are passed in from the step in the workflow file.
  TITLE,
  BASE,
  HEAD,
  LANGUAGE,
  BODY_FILE,
  GITHUB_TOKEN,
} = OPTIONS
const [OWNER, REPO] = GITHUB_REPOSITORY.split('/')

const octokit = github.getOctokit(GITHUB_TOKEN)

/**
 * @param {object} config Configuration options for finding the PR.
 * @returns {Promise<number | undefined>} The PR number.
 */
async function findPullRequestNumber(config) {
  // Get a list of PRs and see if one already exists.
  const { data: listOfPullRequests } = await octokit.rest.pulls.list({
    owner: config.owner,
    repo: config.repo,
    head: `${config.owner}:${config.head}`,
  })

  return listOfPullRequests[0]?.number
}

/**
 * When this file was first created, we only introduced support for creating a pull request for some translation batch.
 * However, some of our first workflow runs failed during the pull request creation due to a timeout error.
 * There have been cases where, despite the timeout error, the pull request gets created _anyway_.
 * To accommodate this reality, we created this function to look for an existing pull request before a new one is created.
 * Although the "find" check is redundant in the first "cycle", it's designed this way to recursively call the function again via its retry mechanism should that be necessary.
 *
 * @param {object} config Configuration options for creating the pull request.
 * @returns {Promise<number>} The PR number.
 */
async function findOrCreatePullRequest(config) {
  const found = await findPullRequestNumber(config)

  if (found) {
    return found
  }

  try {
    const { data: pullRequest } = await octokit.rest.pulls.create({
      owner: config.owner,
      repo: config.repo,
      base: config.base,
      head: config.head,
      title: config.title,
      body: config.body,
      draft: false,
    })

    return pullRequest.number
  } catch (error) {
    if (!error.response || !config.retryCount) {
      throw error
    }

    if (!config.retryStatuses.includes(error.response.status)) {
      throw error
    }

    console.error(`Error creating pull request: ${error.message}`)
    console.warn(`Retrying in 5 seconds...`)
    await new Promise((resolve) => setTimeout(resolve, 5000))

    config.retryCount -= 1

    return findOrCreatePullRequest(config)
  }
}

/**
 * @param {object} config Configuration options for labeling the PR
 * @returns {Promise<undefined>}
 */
// async function labelPullRequest(config) {
//   await octokit.rest.issues.update({
//     owner: config.owner,
//     repo: config.repo,
//     issue_number: config.issue_number,
//     labels: config.labels,
//   })
// }

async function main() {
  const options = {
    title: TITLE,
    base: BASE,
    head: HEAD,
    body: fs.readFileSync(BODY_FILE, 'utf8'),
    labels: ['translation-batch', `translation-batch-${LANGUAGE}`],
    owner: OWNER,
    repo: REPO,
    retryStatuses: RETRY_STATUSES,
    retryCount: RETRY_ATTEMPTS,
  }

  options.issue_number = await findOrCreatePullRequest(options)
  const pr = `${GITHUB_REPOSITORY}#${options.issue_number}`
  console.log(`Created PR ${pr}`)

  // metadata parameters aren't currently available in `github.rest.pulls.create`,
  // but they are in `github.rest.issues.update`.
  // await labelPullRequest(options)
  // console.log(`Updated ${pr} with these labels: ${options.labels.join(', ')}`)
}

main()
.github/actions-scripts/update-merge-queue-branch.js (158 changes, vendored)
@@ -1,158 +0,0 @@
#!/usr/bin/env node

import { getOctokit } from '@actions/github'
const token = process.env.GITHUB_TOKEN
const github = getOctokit(token)

// Mergeable status documentation here:
// https://docs.github.com/en/graphql/reference/enums#mergestatestatus
// https://docs.github.com/en/graphql/reference/enums#mergeablestate

/*
  This script gets a list of automerge-enabled PRs and sorts them
  by priority. The PRs with the skip-to-front-of-merge-queue label
  are prioritized first. The rest of the PRs are sorted by the date
  they were updated. This is basically a FIFO queue, while allowing
  writers the ability to skip the line when high-priority ships are
  needed but a freeze isn't necessary.
*/

const DRY_RUN = Boolean(JSON.parse(process.env.DRY_RUN || 'false'))

main()

async function main() {
  const [org, repo] = process.env.GITHUB_REPOSITORY.split('/')
  if (!org || !repo) {
    throw new Error('GITHUB_REPOSITORY environment variable not set')
  }
  // Get a list of open PRs and order them from oldest to newest
  const query = `query ($first: Int, $after: String, $firstLabels: Int, $repo: String!, $org: String!) {
    organization(login: $org) {
      repository(name: $repo) {
        pullRequests(first: $first, after: $after, states: OPEN, orderBy: {field: UPDATED_AT, direction: ASC}) {
          edges{
            node {
              number
              url
              updatedAt
              mergeable
              mergeStateStatus
              autoMergeRequest {
                enabledBy {
                  login
                }
                enabledAt
              }
              labels (first:$firstLabels){
                nodes {
                  name
                }
              }
              commits(last: 1) {
                nodes {
                  commit {
                    statusCheckRollup {
                      state
                    }
                  }
                }
              }
            }
          }
          pageInfo {
            hasNextPage
            endCursor
          }
        }
      }
    }
  }`

  const queryVariables = {
    repo,
    org,
    first: 100,
    after: null, // when pagination in null it will get first page
    firstLabels: 100,
    headers: {
      // required for the mergeStateStatus enum
      accept: 'application/vnd.github.merge-info-preview+json',
    },
  }
  let hasNextPage = true
  const autoMergeEnabledPRs = []

  // we need to get all the paginated results in the case that
  // there are more than 100 PRs
  while (hasNextPage) {
    const graph = await github.graphql(query, queryVariables)
    const dataRoot = graph.organization.repository.pullRequests
    const pullRequests = dataRoot.edges
    // update pagination variables
    hasNextPage = dataRoot.pageInfo.hasNextPage
    // the endCursor is the start cursor for the next page
    queryVariables.after = dataRoot.pageInfo.endCursor

    const filteredPrs = pullRequests
      // this simplifies the format received from the graphql query to
      // remove the unnecessary nested objects
      .map((pr) => {
        // make the labels object just an array of the label names
        const labelArray = pr.node.labels.nodes.map((label) => label.name)
        pr.node.labels = labelArray
        // return the pr object and ✂️ the node property
        return pr.node
      })
      .filter((pr) => pr.autoMergeRequest !== null)
      .filter((pr) => pr.mergeable === 'MERGEABLE')
      // filter out prs that don't have a calculated mergeable state yet
      .filter((pr) => pr.mergeStateStatus !== 'UNKNOWN')
      // filter out prs that still need a review, have merge conflicts,
      // or have failing ci tests
      .filter((pr) => pr.mergeStateStatus !== 'BLOCKED')
      // **NOTE**: In the future we may want to send slack message to initiators
      // of PRs with the following merge states because these can happen after
      // a PR is green and the automerge is enabled
      .filter((pr) => pr.mergeStateStatus !== 'DIRTY')
      .filter((pr) => pr.mergeStateStatus !== 'UNSTABLE')
      .filter((pr) => {
        const nodes = pr.commits.nodes
        if (!nodes || !nodes.length) {
          // If it has no commits, why is it even here? Anyway, skip it.
          return false
        }
        return nodes[0].commit.statusCheckRollup.state !== 'FAILURE'
      })

    autoMergeEnabledPRs.push(...filteredPrs)
  }

  // Get the list of prs with the skip label so they can
  // be put at the beginning of the list
  const prioritizedPrList = autoMergeEnabledPRs.sort(
    (a, b) =>
      Number(b.labels.includes('skip-to-front-of-merge-queue')) -
      Number(a.labels.includes('skip-to-front-of-merge-queue'))
  )

  if (prioritizedPrList.length) {
    const nextInQueue = prioritizedPrList.shift()
    // Update the branch for the next PR in the merge queue
    if (DRY_RUN) {
      console.log('DRY RUN! But *would* update on next-in-queue')
    } else {
      github.rest.pulls.updateBranch({
        owner: org,
        repo,
        pull_number: nextInQueue.number,
      })
    }
    console.log(`⏱ Total PRs in the merge queue: ${prioritizedPrList.length + 1}`)
    console.log(`🚂 Updated branch for PR #${JSON.stringify(nextInQueue, null, 2)}`)
  }

  prioritizedPrList.length
    ? console.log(`🚏 Next up in the queue: \n ${JSON.stringify(prioritizedPrList, null, 2)}`)
    : console.log(`⚡ The merge queue is empty`)
}
.github/workflows/autoupdate-branch.yml (57 changes, vendored)
@@ -1,57 +0,0 @@
name: Autoupdate branch

# **What it does**: The next pull request in the merge queue will get its
# branch updated with main. Only updating one branch ensures that pull requests
# in the queue are merged sequentially.
# **Why we have it**: So we don't have to watch pull requests and click
# update branch 1000x.
# **Who does it impact**: Our health.
#
# The merge queue consists of any pull requests with automerge enabled and
# are mergeable. There is a label that can be used to skip to the front of
# the queue (`skip-to-front-of-merge-queue`).
#
# This workflow is triggered when a `push` event occurs ON the `main` branch
# (e.g. a PR was merged or a force-push was done).
#
# This workflow runs on all PRs created from source branches within the
# public and private docs repos but is won't work for PRs created from
# forked repos.
#

on:
  push:
    branches:
      - main

permissions:
  contents: read

# This allows a subsequently queued workflow run to take priority over
# previously queued runs but NOT interrupt currently executing runs
concurrency:
  group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
  cancel-in-progress: false

jobs:
  autoupdate:
    if: github.repository == 'github/docs-internal' || github.repository == 'github/docs'
    name: autoupdate
    runs-on: ubuntu-latest
    steps:
      - name: Check out repo content
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748

      - name: Setup Node
        uses: actions/setup-node@17f8bd926464a1afa4c6a11669539e9c1ba77048
        with:
          node-version: '16.15.0'
          cache: npm

      - name: Install dependencies
        run: npm ci

      - name: Update next PR in queue
        env:
          GITHUB_TOKEN: ${{ secrets.DOCUBOT_REPO_PAT }}
        run: node .github/actions-scripts/update-merge-queue-branch.js
.github/workflows/azure-preview-env-deploy.yml (28 changes, vendored)
@@ -42,6 +42,10 @@ jobs:
  build-and-deploy-azure-preview:
    name: Build and deploy Azure preview environment
    runs-on: ubuntu-latest
+    # Ensure this is actually a pull request and not a merge group
+    # If its a merge group, report success without doing anything
+    # See https://bit.ly/3qB9nZW > If a job in a workflow is skipped due to a conditional, it will report its status as "Success".
+    if: (github.event.pull_request.head.sha || github.event.inputs.COMMIT_REF) && (github.event.number || github.event.inputs.PR_NUMBER || github.run_id)
    timeout-minutes: 15
    environment:
      name: preview-env-${{ github.event.number }}
@@ -73,7 +77,7 @@ jobs:
          password: ${{ secrets.NONPROD_REGISTRY_PASSWORD }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25
+        uses: docker/setup-buildx-action@f211e3e9ded2d9377c8cadc4489a4e38014bc4c9

      - if: ${{ env.IS_PUBLIC_BUILD == 'true' }}
        name: Check out main branch
@@ -196,16 +200,6 @@ jobs:
      # Deploy ARM template is idempotent
      # Note: once the resources exist the image tag must change for a new deployment to occur (the image tag includes workflow run number, run attempt, as well as sha)
      - name: Run ARM deploy
-        # This 'if' will be truth, if this workflow is...
-        # - run as a workflow_dispatch
-        # - run because of a push to main (or when added to a merge queue)
-        # - run as a regular pull request
-        # But if it's a pull request, *and* for whatever reason, the pull
-        # request has "Auto-merge" enabled, don't bother.
-        # The idea is that if auto-merge has been abled, by humans or by
-        # bots, they have no intention of viewing the deployed preview anyway.
-        # This saves time because the PR can merge sooner.
-        if: ${{ !github.event.pull_request.auto_merge }}
        uses: azure/arm-deploy@841b12551939c88af8f6df767c24c38a5620fd0d
        with:
          resourceGroupName: ${{ secrets.PREVIEW_ENV_RESOURCE_GROUP }}
@@ -217,3 +211,15 @@ jobs:
            dockerRegistryUrl="${{ secrets.NONPROD_REGISTRY_SERVER }}"
            dockerRegistryUsername="${{ env.NONPROD_REGISTRY_USERNAME }}"
            dockerRegistryPassword="${{ secrets.NONPROD_REGISTRY_PASSWORD }}"
+
+      - name: Check that it can reached
+        # This introduces a necessary delay. Because the preview evironment
+        # URL is announced to the pull request as soon as all the steps
+        # finish, what sometimes happens is that a viewer of the PR clicks
+        # that link too fast and are confronted with a broken page.
+        # It's because there's a delay between the `azure/arm-deploy`
+        # and when the server is actually started and can receive and
+        # process requests.
+        # By introducing a slight "delay" here we avoid announcing a
+        # preview environment URL that isn't actually working just yet.
+        run: curl --retry-connrefused --retry 5 -I ${{ env.APP_URL }}
@@ -46,7 +46,7 @@ jobs:
          password: ${{ secrets.PROD_REGISTRY_PASSWORD }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25
+        uses: docker/setup-buildx-action@f211e3e9ded2d9377c8cadc4489a4e38014bc4c9

      - name: Check out repo
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748
@@ -57,7 +57,7 @@ jobs:
          password: ${{ secrets.NONPROD_REGISTRY_PASSWORD }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25
+        uses: docker/setup-buildx-action@f211e3e9ded2d9377c8cadc4489a4e38014bc4c9

      - name: Check out repo
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748
@@ -83,7 +83,7 @@ jobs:
          body-includes: '<!-- MODIFIED_CONTENT_LINKING_COMMENT -->'

      - name: Update comment
-        uses: peter-evans/create-or-update-comment@5221bf4aa615e5c6e95bb142f9673a9c791be2cd
+        uses: peter-evans/create-or-update-comment@c9fcb64660bc90ec1cc535646af190c992007c32
        with:
          comment-id: ${{ steps.findComment.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
.github/workflows/create-translation-batch-pr.yml (222 changes, vendored)
@@ -1,222 +0,0 @@
name: Create translation Batch Pull Request

# **What it does**:
#  - Creates one pull request per language after running a series of automated checks,
#    removing translations that are broken in any known way
# **Why we have it**:
#  - To deploy translations
# **Who does it impact**: It automates what would otherwise be manual work,
#    helping docs engineering focus on higher value work

on:
  workflow_dispatch:
  # schedule:
  #   - cron: '02 17 * * *' # Once a day at 17:02 UTC / 9:02 PST

permissions:
  contents: write

jobs:
  create-translation-batch:
    name: Create translation batch
    if: github.repository == 'github/docs-internal'
    runs-on: ubuntu-latest
    # A sync's average run time is ~3.2 hours.
    # This sets a maximum execution time of 300 minutes (5 hours) to prevent the workflow from running longer than necessary.
    timeout-minutes: 300
    strategy:
      fail-fast: false
      max-parallel: 1
      matrix:
        include:
          - language: pt
            crowdin_language: pt-BR
            language_dir: translations/pt-BR

          - language: es
            crowdin_language: es-ES
            language_dir: translations/es-ES

          - language: cn
            crowdin_language: zh-CN
            language_dir: translations/zh-CN

          - language_dir: translations/ja-JP
            crowdin_language: ja
            language: ja

    steps:
      - name: Set branch name
        id: set-branch
        run: |
          echo "::set-output name=BRANCH_NAME::translation-batch-${{ matrix.language }}-$(date +%Y-%m-%d__%H-%M)"

      - run: git config --global user.name "docubot"
      - run: git config --global user.email "67483024+docubot@users.noreply.github.com"

      - name: Checkout
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748
        with:
          fetch-depth: 0
          lfs: true

      - run: git checkout -b ${{ steps.set-branch.outputs.BRANCH_NAME }}

      - name: Remove unwanted git hooks
        run: rm .git/hooks/post-checkout

      # https://support.crowdin.com/cli-tool/#debian
      - name: Download and install the public signing key
        run: wget -qO - https://artifacts.crowdin.com/repo/GPG-KEY-crowdin | sudo apt-key add -
      - name: Create the crowdin.list file in the /etc/apt/sources.list.d directory
        run: |
          sudo touch /etc/apt/sources.list.d/crowdin.list
          echo "deb https://artifacts.crowdin.com/repo/deb/ /" | sudo tee -a /etc/apt/sources.list.d/crowdin.list
      - name: Install the Crowdin CLI Debian Package
        run: sudo apt-get update && sudo apt-get install crowdin3

      # Delete empty source files that would be rejected by Crowdin breaking the workflow
      - name: Remove empty source files
        run: |
          find content -type f -empty -delete
          find data -type f -empty -delete

      - name: Upload files to crowdin
        run: crowdin upload sources --delete-obsolete --no-progress --no-colors --verbose --debug '--branch=main' '--config=crowdin.yml'
        env:
          # This is a numeric id, not to be confused with Crowdin API v1 "project identifier" string
          # See "API v2" on https://crowdin.com/project/<your-project>/settings#api
          CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}

          # A personal access token, not to be confused with Crowdin API v1 "API key"
          # See https://crowdin.com/settings#api-key to generate a token
          # This token was created by logging into Crowdin with the octoglot user
          CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

      - name: Remove all language translations
        run: |
          git rm -rf --quiet ${{ matrix.language_dir }}/content
          git rm -rf --quiet ${{ matrix.language_dir }}/data

      - name: Download crowdin translations
        run: crowdin download --no-progress --no-colors --verbose --debug '--branch=main' '--config=crowdin.yml' --language=${{ matrix.crowdin_language }}
        env:
          # This is a numeric id, not to be confused with Crowdin API v1 "project identifier" string
          # See "API v2" on https://crowdin.com/project/<your-project>/settings#api
          CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}

          # A personal access token, not to be confused with Crowdin API v1 "API key"
          # See https://crowdin.com/settings#api-key to generate a token
          # This token was created by logging into Crowdin with the octoglot user
          CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

      - name: Commit crowdin sync
        run: |
          git add ${{ matrix.language_dir }}
          git commit -m "Add crowdin translations" || echo "Nothing to commit"

      - name: 'Setup node'
        uses: actions/setup-node@17f8bd926464a1afa4c6a11669539e9c1ba77048
        with:
          node-version: '16.15.0'

      - run: npm ci

      # step 6 in docs-engineering/crowdin.md
      - name: Homogenize frontmatter
        run: |
          node script/i18n/homogenize-frontmatter.js
          git add ${{ matrix.language_dir }} && git commit -m "Run script/i18n/homogenize-frontmatter.js" || echo "Nothing to commit"

      # step 7 in docs-engineering/crowdin.md
      - name: Fix translation errors
        run: |
          node script/i18n/fix-translation-errors.js
          git add ${{ matrix.language_dir }} && git commit -m "Run script/i18n/fix-translation-errors.js" || echo "Nothing to commit"

      # step 8a in docs-engineering/crowdin.md
      - name: Check parsing
        run: |
          node script/i18n/lint-translation-files.js --check parsing | tee -a /tmp/batch.log | cat
          git add ${{ matrix.language_dir }} && git commit -m "Run script/i18n/lint-translation-files.js --check parsing" || echo "Nothing to commit"

      # step 8b in docs-engineering/crowdin.md
      - name: Check rendering
        run: |
          node script/i18n/lint-translation-files.js --check rendering | tee -a /tmp/batch.log | cat
          git add ${{ matrix.language_dir }} && git commit -m "Run script/i18n/lint-translation-files.js --check rendering" || echo "Nothing to commit"

      - name: Reset files with broken liquid tags
        run: |
          node script/i18n/reset-files-with-broken-liquid-tags.js --language=${{ matrix.language }} | tee -a /tmp/batch.log | cat
          git add ${{ matrix.language_dir }} && git commit -m "run script/i18n/reset-files-with-broken-liquid-tags.js --language=${{ matrix.language }}" || echo "Nothing to commit"

      # step 5 in docs-engineering/crowdin.md using script from docs-internal#22709
      - name: Reset known broken files
        run: |
          node script/i18n/reset-known-broken-translation-files.js | tee -a /tmp/batch.log | cat
          git add ${{ matrix.language_dir }} && git commit -m "run script/i18n/reset-known-broken-translation-files.js" || echo "Nothing to commit"
        env:
          GITHUB_TOKEN: ${{ secrets.DOCUBOT_REPO_PAT }}

      - name: Check in CSV report
        run: |
          mkdir -p translations/log
          csvFile=translations/log/${{ matrix.language }}-resets.csv
          script/i18n/report-reset-files.js --report-type=csv --language=${{ matrix.language }} --log-file=/tmp/batch.log > $csvFile
          git add -f $csvFile && git commit -m "Check in ${{ matrix.language }} CSV report" || echo "Nothing to commit"

      - name: Write the reported files that were reset to /tmp/pr-body.txt
        run: script/i18n/report-reset-files.js --report-type=pull-request-body --language=${{ matrix.language }} --log-file=/tmp/batch.log > /tmp/pr-body.txt

      - name: Push filtered translations
        run: git push origin ${{ steps.set-branch.outputs.BRANCH_NAME }}

      # - name: Close existing stale batches
      #   uses: lee-dohm/close-matching-issues@e9e43aad2fa6f06a058cedfd8fb975fd93b56d8f
      #   with:
      #     token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
      #     query: 'type:pr label:translation-batch-${{ matrix.language }}'

      - name: Create translation batch pull request
        env:
          GITHUB_TOKEN: ${{ secrets.DOCUBOT_REPO_PAT }}
          TITLE: 'New translation batch for ${{ matrix.language }}'
          BASE: 'main'
          HEAD: ${{ steps.set-branch.outputs.BRANCH_NAME }}
          LANGUAGE: ${{ matrix.language }}
          BODY_FILE: '/tmp/pr-body.txt'
        run: .github/actions-scripts/create-translation-batch-pr.js

      # - name: Approve PR
      #   if: github.ref_name == 'main'
      #   env:
      #     GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
      #   run: gh pr review --approve || echo "Nothing to approve"

      # - name: Set auto-merge
      #   if: github.ref_name == 'main'
      #   env:
      #     GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
      #   run: gh pr merge ${{ steps.set-branch.outputs.BRANCH_NAME }} --auto --squash || echo "Nothing to merge"

      # # When the maximum execution time is reached for this job, Actions cancels the workflow run.
      # # This emits a notification for the first responder to triage.
      # - name: Send Slack notification if workflow is cancelled
      #   uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
      #   if: cancelled()
      #   with:
      #     channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
      #     bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
      #     color: failure
      #     text: 'The new translation batch for ${{ matrix.language }} was cancelled.'

      # # Emit a notification for the first responder to triage if the workflow failed.
      # - name: Send Slack notification if workflow failed
      #   uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
      #   if: failure()
      #   with:
      #     channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
      #     bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
      #     color: failure
      #     text: 'The new translation batch for ${{ matrix.language }} failed.'
.github/workflows/crowdin-cleanup.yml (54 changes, vendored)
@@ -1,54 +0,0 @@
name: Crowdin Cleanup

# **What it does**: Homogenizes localized non-translatable frontmatter after every push by the octoglot bot to the translations branch.
# **Why we have it**: So Crowdin doesn't break frontmatter in production.
# **Who does it impact**: Docs engineering and international expansion.

on:
  workflow_dispatch:
  push:
    branches:
      - translations

permissions:
  contents: write

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
  group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
  cancel-in-progress: true

jobs:
  homogenize_frontmatter:
    name: Homogenize frontmatter
    # Only run this after octoglot commits or when a Hubber is running this using the workflow dispatch button.
    if: github.repository == 'github/docs-internal' && (github.event.pusher.name == 'octoglot' || github.event_name == 'workflow_dispatch')
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748

      - name: Setup Node
        uses: actions/setup-node@17f8bd926464a1afa4c6a11669539e9c1ba77048
        with:
          node-version: '16.15.0'
          cache: npm

      - name: Install dependencies
        run: npm ci

      - name: Homogenize frontmatter
        run: script/i18n/homogenize-frontmatter.js

      - name: Check in homogenized files
        uses: EndBug/add-and-commit@050a66787244b10a4874a2a5f682130263edc192
        with:
          # The arguments for the `git add` command
          add: 'translations'

          # The message for the commit
          message: 'Run script/i18n/homogenize-frontmatter.js'

        env:
          # Disable pre-commit hooks; they don't play nicely with add-and-commit
          HUSKY: '0'
@@ -8,7 +8,7 @@ on:
  merge_group:
  pull_request:
    paths:
-      - script/search/index-elasticsearch.js
+      - 'script/search/**'
      - 'package*.json'
      - .github/workflows/dry-run-elasticsearch-indexing.yml

@@ -46,9 +46,57 @@ jobs:
          node-version: 16.15.x
          cache: npm

-      - name: Install
+      - name: Install dependencies
        run: npm ci

+      - name: Cache nextjs build
+        uses: actions/cache@48af2dc4a9e8278b89d7fa154b955c30c6aaab09
+        with:
+          path: .next/cache
+          key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
+
+      - name: Run build scripts
+        run: npm run build
+
+      - name: Start the server in the background
+        env:
+          ENABLE_DEV_LOGGING: false
+        run: |
+          npm run sync-search-server > /tmp/stdout.log 2> /tmp/stderr.log &
+
+          # first sleep to give it a chance to start
+          sleep 6
+          curl --retry-connrefused --retry 4 -I http://localhost:4002/
+
+      - if: ${{ failure() }}
+        name: Debug server outputs on errors
+        run: |
+          echo "____STDOUT____"
+          cat /tmp/stdout.log
+          echo "____STDERR____"
+          cat /tmp/stderr.log
+
+      - name: Scrape records into a temp directory
+        env:
+          # If a reusable, or anything in the `data/*` directory is deleted
+          # you might get a
+          #
+          #   RenderError: Can't find the key 'site.data.reusables...' in the scope
+          #
+          # But that'll get fixed in the next translation pipeline. For now,
+          # let's just accept an empty string instead.
+          THROW_ON_EMPTY: false
+
+        run: |
+          mkdir /tmp/records
+          npm run sync-search-indices -- \
+            --language en \
+            --version dotcom \
+            --out-directory /tmp/records \
+            --no-compression --no-lunr-index
+
+          ls -lh /tmp/records
+
      # Serves two purposes;
      # 1. Be confident that the Elasticsearch server start-up worked at all
      # 2. Sometimes Elasticsearch will bind to the port but still not

@@ -62,8 +110,8 @@ jobs:
          ELASTICSEARCH_URL: 'http://localhost:9200'
        run: |
          ./script/search/index-elasticsearch.js --verbose \
-            -l en -l ja \
-            -V dotcom -V ghes-3.5
+            -l en \
+            -V dotcom -- /tmp/records

      - name: Show created indexes and aliases
        run: |
.github/workflows/link-check-all.yml (40 changes, vendored)
@@ -38,22 +38,46 @@ jobs:
      - name: Install
        run: npm ci

-      # Creates file "${{ env.HOME }}/files.json", among others
      - name: Gather files changed
-        uses: trilom/file-changes-action@a6ca26c14274c33b15e6499323aac178af06ad4b
-        with:
-          fileOutput: 'json'
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PR: ${{ github.event.pull_request.number }}
+          HEAD: ${{ github.event.pull_request.head.ref || github.event.merge_group.head_ref }}
+        run: |
+          # Find the file diff in the pull request or merge group
+          # If its a pull request, use the faster call to the GitHub API
+          # For push, workflow_dispatch, and merge_group, use git diff
+          if [ -n "$PR" ]
+          then
+            echo __ running gh pr diff __
+            DIFF=`gh pr diff $PR --name-only`
+          elif [ -n "$HEAD" ]
+          then
+            echo __ running git fetch main __
+            git fetch origin main --depth 1
+            echo __ running git diff __
+            DIFF=`git diff --name-only origin/main`
+          else
+            echo __ no head, empty diff __
+            DIFF=''
+          fi
+          # So we can inspect the output
+          echo __ DIFF found __
+          echo $DIFF

-      # For verification
-      - name: Show files changed
-        run: cat $HOME/files.json
+          # Formats into single line JSON array, removing any empty strings
+          echo __ format, write to files.json __
+          echo $DIFF | \
+            tr ' ' '\n' | \
+            jq --raw-input | \
+            jq --slurp --compact-output 'map(select(length > 0))' \
+            > $HOME/files.json

      - name: Link check (warnings, changed files)
        env:
          # Don't care about CDN caching image URLs
          DISABLE_REWRITE_ASSET_URLS: true
        run: |

          # Note as of Aug 2022, we *don't* check external links
          # on the pages you touched in the PR. We could enable that
          # but it has the added risk of false positives blocking CI.
@@ -42,7 +42,7 @@ jobs:
          password: ${{ secrets.NONPROD_REGISTRY_PASSWORD }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25
+        uses: docker/setup-buildx-action@f211e3e9ded2d9377c8cadc4489a4e38014bc4c9

      - name: Check out repo
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748
.github/workflows/needs-sme-workflow.yml (4 changes, vendored)
@@ -19,7 +19,7 @@ jobs:
    if: ${{ github.repository == 'github/docs' && (github.event.label.name == 'needs SME' && github.event_name == 'issues') }}
    runs-on: ubuntu-latest
    steps:
-      - uses: peter-evans/create-or-update-comment@a35cf36e5301d70b76f316e867e7788a55a31dae
+      - uses: peter-evans/create-or-update-comment@c9fcb64660bc90ec1cc535646af190c992007c32
        with:
          issue-number: ${{ github.event.issue.number }}
          body: |
@@ -29,7 +29,7 @@ jobs:
    if: ${{ github.repository == 'github/docs' && (github.event.label.name == 'needs SME' && github.event_name == 'pull_request_target') }}
    runs-on: ubuntu-latest
    steps:
-      - uses: peter-evans/create-or-update-comment@a35cf36e5301d70b76f316e867e7788a55a31dae
+      - uses: peter-evans/create-or-update-comment@c9fcb64660bc90ec1cc535646af190c992007c32
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
.github/workflows/repo-sync.yml (7 changes, vendored)
@@ -244,12 +244,13 @@ jobs:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          number: ${{ steps.find-pull-request.outputs.number }}

-      - name: Enable GitHub auto-merge
+      - name: Admin merge the pull request
        if: ${{ steps.find-pull-request.outputs.number && steps.pr-files.outputs.count != '0' }}
        env:
          GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
-          AUTOMERGE_PR_NUMBER: ${{ steps.find-pull-request.outputs.number }}
-        run: node .github/actions-scripts/enable-automerge.js
+          PR_NUMBER: ${{ steps.find-pull-request.outputs.number }}
+        run: |
+          gh pr merge $PR_NUMBER --admin --merge

      - name: Send Slack notification if workflow fails
        uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
.github/workflows/site-policy-reminder.yml (10 changes, vendored)
@@ -1,8 +1,8 @@
name: Site Policy Reminder

-# **What it does**: Automated comment reminder on a PR to change the title for public consumption before merging
-# **Why we have it**: Titles of merged PRs to Site Policies are sent to the public site-policy repo
-# **Who does it impact**: Docs team merging changes to Site Policies
+# **What it does**: Automated comment reminder on a PR to change the title for public consumption before merging and to run the Site Policy repo sync action
+# **Why we have it**: Titles of merged PRs to Site Policies are sent to the public site-policy repo when the repos are synced
+# **Who does it impact**: Everyone merging changes to Site Policies

on:
  pull_request:
@@ -19,10 +19,12 @@ jobs:
      github.repository == 'github/docs-internal'
    runs-on: ubuntu-latest
    steps:
-      - uses: peter-evans/create-or-update-comment@5221bf4aa615e5c6e95bb142f9673a9c791be2cd
+      - uses: peter-evans/create-or-update-comment@c9fcb64660bc90ec1cc535646af190c992007c32
        env:
          GITHUB_TOKEN: ${{ secrets.API_TOKEN_SITEPOLICY }}
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
            Before merging, please remember to change the title of this PR to a description of its changes that is suitable for public viewing on github/site-policy.
+
+            <@github/site-policy-admins>, when these changes are ready to be synced to the site policy repo for the 24-hour or 30-day [review window](https://github.com/github/site-policy#whats-the-process), run the [site policy sync action](https://github.com/github/docs-internal/actions/workflows/site-policy-sync.yml) from this PR's branch. When these changes are ready to be merged in `docs-internal`, let the Docs team know on Slack in #docs-content and a writer will merge this PR.
.github/workflows/site-policy-sync.yml (8 changes, vendored)
@@ -53,7 +53,7 @@ jobs:
          echo "DESCRIPTION=$DESCRIPTION" >> $GITHUB_ENV
          git commit -m "$(echo $DESCRIPTION)"

-      - name: If there are changes to push, create a pull request in the public repo using the gh command line tool, then immediately merge the PR and delete the branch
+      - name: If there are changes to push, create a pull request in the public repo using the gh command line tool, then immediately approve the PR
        id: createAndMergePullRequest
        env:
          GITHUB_TOKEN: ${{ secrets.API_TOKEN_SITEPOLICY }}
@@ -70,11 +70,7 @@ jobs:
            git push --set-upstream origin automated-sync-$GITHUB_RUN_ID
            PR_URL=$(gh pr create --title "${TITLE}" --body-file msg --head automated-sync-$GITHUB_RUN_ID --base main --repo github/site-policy)
            gh pr diff ${PR_URL}
-            gh pr merge ${PR_URL} --merge --delete-branch
+            gh pr review --approve || echo "Nothing to approve"
          else
            echo "No updates to push to the public repo"
          fi
-
-      - name: Delete remote updates branch if previous step failed
-        if: failure() && steps.createAndMergePullRequest.outcome == 'failure'
-        run: git push github/site-policy --delete automated-sync-$GITHUB_RUN_ID
.github/workflows/sync-search-elasticsearch.yml (32 changes, vendored)
@@ -13,6 +13,11 @@ on:
permissions:
  contents: read

+# This allows a subsequently queued workflow run to cancel previous runs
+concurrency:
+  group: '${{ github.workflow }} @ ${{ github.head_ref }}'
+  cancel-in-progress: true
+
env:
  FREEZE: ${{ secrets.FREEZE }}
  ELASTICSEARCH_URL: ${{ secrets.ELASTICSEARCH_URL }}
@@ -36,17 +41,6 @@ jobs:
      - name: Check out repo
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748

-      # TEMPORARY UNTIL WE HAVE A PRODUCTION ELASTICSEARCH
-      - uses: getong/elasticsearch-action@95b501ab0c83dee0aac7c39b7cea3723bef14954
-        with:
-          elasticsearch version: '7.17.5'
-          host port: 9200
-          container port: 9200
-          host node port: 9300
-          node port: 9300
-          discovery type: 'single-node'
-      # END TEMPORARY
-
      - name: Setup Node
        uses: actions/setup-node@17f8bd926464a1afa4c6a11669539e9c1ba77048
        with:
@@ -97,8 +91,8 @@ jobs:
        run: |
          mkdir /tmp/records
          npm run sync-search-indices -- \
-            -l ${{ matrix.language }} \
-            -o /tmp/records \
+            --language ${{ matrix.language }} \
+            --out-directory /tmp/records \
            --no-compression --no-lunr-index

          ls -lh /tmp/records
@@ -110,10 +104,18 @@ jobs:
      - name: Index into Elasticsearch
        run: |
          ./script/search/index-elasticsearch.js \
-            --language ${{ matrix.language }} \
-            --source-directory /tmp/records
+            --language ${{ matrix.language }} -- /tmp/records

      - name: Check created indexes and aliases
        run: |
          curl --fail --retry-connrefused --retry 5 ${{ env.ELASTICSEARCH_URL }}/_cat/indices?v
          curl --fail --retry-connrefused --retry 5 ${{ env.ELASTICSEARCH_URL }}/_cat/indices?v
+
+      - name: Send Slack notification if workflow fails
+        uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340
+        if: failure()
+        with:
+          channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
+          bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
+          color: failure
+          text: The last 'Sync search Elasticsearch' run failed. See https://github.com/${{github.repository}}/actions?query=workflow%3A%22Repo+Sync%22
.github/workflows/sync-search-pr.yml (79 changes, vendored)
@@ -1,6 +1,7 @@
name: Sync search - PR

-# **What it does**: Tries running the sync-search when relevant files change.
+# **What it does**: This does what `sync-sarch-elasticsearch.yml` does but
+# with a localhost Elasticsearch and only for English.
# **Why we have it**: To test that the script works and the popular pages json is valid.
# **Who does it impact**: Docs engineering.

@@ -21,10 +22,26 @@ concurrency:
  group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'
  cancel-in-progress: true

+env:
+  # Yes, it's hardcoded but it makes all the steps look exactly the same
+  # as they do in `sync-search-elasticsearch.yml` where it uses
+  # that `${{ env.ELASTICSEARCH_URL }}`
+  ELASTICSEARCH_URL: http://localhost:9200
+
jobs:
  lint:
-    runs-on: ubuntu-latest
+    runs-on: ${{ fromJSON('["ubuntu-latest", "ubuntu-20.04-xl"]')[github.repository == 'github/docs-internal'] }}
    steps:
+      - uses: getong/elasticsearch-action@95b501ab0c83dee0aac7c39b7cea3723bef14954
+        with:
+          # # Make sure this matches production and `test.yml`
+          elasticsearch version: '7.11.1'
+          host port: 9200
+          container port: 9200
+          host node port: 9300
+          node port: 9300
+          discovery type: 'single-node'
+
      - name: Check out repo
        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748

@@ -46,9 +63,57 @@ jobs:
      - name: Build
        run: npm run build

-      - name: Run sync-search
+      - name: Start the server in the background
        env:
-          # Set filtered to only these so it doesn't run for too long.
-          LANGUAGE: en
-          VERSION: free-pro-team@latest
-        run: npm run sync-search
+          ENABLE_DEV_LOGGING: false
+        run: |
+          npm run sync-search-server > /tmp/stdout.log 2> /tmp/stderr.log &
+
+          # first sleep to give it a chance to start
+          sleep 6
+          curl --retry-connrefused --retry 4 -I http://localhost:4002/
+
+      - if: ${{ failure() }}
+        name: Debug server outputs on errors
+        run: |
+          echo "____STDOUT____"
+          cat /tmp/stdout.log
+          echo "____STDERR____"
+          cat /tmp/stderr.log
+
+      - name: Scrape records into a temp directory
+        env:
+          # If a reusable, or anything in the `data/*` directory is deleted
+          # you might get a
+          #
+          #   RenderError: Can't find the key 'site.data.reusables...' in the scope
+          #
+          # But that'll get fixed in the next translation pipeline. For now,
+          # let's just accept an empty string instead.
+          THROW_ON_EMPTY: false
+
+        run: |
+          mkdir /tmp/records
+          npm run sync-search-indices -- \
+            --language en \
+            --version dotcom \
+            --out-directory /tmp/records \
+            --no-compression --no-lunr-index
+
+          ls -lh /tmp/records
+
+      - name: Check that Elasticsearch is accessible
+        run: |
+          curl --fail --retry-connrefused --retry 5 -I ${{ env.ELASTICSEARCH_URL }}
+
+      - name: Index into Elasticsearch
+        run: |
+          ./script/search/index-elasticsearch.js \
+            --language en \
+            --version dotcom \
+            --source-directory /tmp/records
+
+      - name: Check created indexes and aliases
+        run: |
+          curl --fail --retry-connrefused --retry 5 ${{ env.ELASTICSEARCH_URL }}/_cat/indices?v
+          curl --fail --retry-connrefused --retry 5 ${{ env.ELASTICSEARCH_URL }}/_cat/indices?v
42
.github/workflows/test.yml
vendored
@@ -36,7 +36,6 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
# The same array lives in test-windows.yml, so make any updates there too.
|
|
||||||
test-group:
|
test-group:
|
||||||
[
|
[
|
||||||
content,
|
content,
|
||||||
@@ -55,6 +54,7 @@ jobs:
|
|||||||
if: ${{ matrix.test-group == 'content' }}
|
if: ${{ matrix.test-group == 'content' }}
|
||||||
uses: getong/elasticsearch-action@95b501ab0c83dee0aac7c39b7cea3723bef14954
|
uses: getong/elasticsearch-action@95b501ab0c83dee0aac7c39b7cea3723bef14954
|
||||||
with:
|
with:
|
||||||
|
# Make sure this matches production and `sync-search-pr.yml`
|
||||||
elasticsearch version: '7.11.1'
|
elasticsearch version: '7.11.1'
|
||||||
host port: 9200
|
host port: 9200
|
||||||
container port: 9200
|
container port: 9200
|
||||||
@@ -72,7 +72,9 @@ jobs:
          # only for the test groups that we know need the files.
          lfs: ${{ matrix.test-group == 'content' }}
          # Enables cloning the Early Access repo later with the relevant PAT
-          persist-credentials: 'false'
+          # persist-credentials: 'false'
+          # ->> Do we really need this? actions/checkout doesn't use it for the nested example:
+          # https://github.com/actions/checkout#checkout-multiple-repos-nested

      - name: Figure out which docs-early-access branch to checkout, if internal repo
        if: ${{ github.repository == 'github/docs-internal' }}
@@ -127,19 +129,37 @@ jobs:
        run: git lfs checkout

      - name: Gather files changed
-        uses: trilom/file-changes-action@a6ca26c14274c33b15e6499323aac178af06ad4b
-        id: get_diff_files
-        with:
-          # So that `steps.get_diff_files.outputs.files` becomes
-          # a string like `foo.js path/bar.md`
-          output: ' '
-
-      - name: Insight into changed files
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PR: ${{ github.event.pull_request.number }}
+          HEAD: ${{ github.event.pull_request.head.ref || github.event.merge_group.head_ref }}
        run: |
+          # Find the file diff in the pull request or merge group
+          # If its a pull request, use the faster call to the GitHub API
+          # For push, workflow_dispatch, and merge_group, use git diff
+          if [ -n "$PR" ]
+          then
+            echo __ running gh pr diff __
+            DIFF=`gh pr diff $PR --name-only`
+          elif [ -n "$HEAD" ]
+          then
+            echo __ running git fetch main __
+            git fetch origin main --depth 1
+            echo __ running git diff __
+            DIFF=`git diff --name-only origin/main`
+          else
+            echo __ no head, empty diff __
+            DIFF=''
+          fi
+          # So we can inspect the output
+          echo __ DIFF found __
+          echo $DIFF
+
+          # So that becomes a string like `foo.js path/bar.md`
          # Must to do this because the list of files can be HUGE. Especially
          # in a repo-sync when there are lots of translation files involved.
-          echo "${{ steps.get_diff_files.outputs.files }}" > get_diff_files.txt
+          echo __ format, write to get_diff_files.txt __
+          echo $DIFF | tr '\n' ' ' > get_diff_files.txt

      - name: Setup node
        uses: actions/setup-node@17f8bd926464a1afa4c6a11669539e9c1ba77048
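A later step can consume the space-separated list written to `get_diff_files.txt`. The loop below is a hypothetical illustration (not part of this diff) of filtering that list down to changed content files:

```bash
# Hypothetical consumer of get_diff_files.txt: act only on changed Markdown
# files under content/ (the file contains space-separated paths).
CHANGED_FILES="$(cat get_diff_files.txt)"
for f in $CHANGED_FILES; do
  case "$f" in
    content/*.md) echo "changed content file: $f" ;;
  esac
done
```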
@@ -1,72 +0,0 @@
name: Copy to REST API issue to docs-content

# **What it does**: Copies an issue in the open source repo to the docs-content repo, comments on and closes the original issue
# **Why we have it**: REST API updates cannot be made in the open source repo. Instead, we copy the issue to an internal issue (we do not transfer so that the issue does not disappear for the contributor) and close the original issue.
# **Who does it impact**: Open source and docs-content maintainers

on:
  issues:
    types:
      - labeled

permissions:
  contents: none

jobs:
  transfer-issue:
    name: Transfer issue
    runs-on: ubuntu-latest
    if: (github.event.label.name == 'localization ' && github.repository == 'github/docs')
    steps:
      - name: Check if this run was triggered by a member of the docs team
        uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
        id: triggered-by-member
        with:
          github-token: ${{secrets.DOCUBOT_READORG_REPO_WORKFLOW_SCOPES}}
          result-encoding: string
          script: |
            const triggerer_login = context.payload.sender.login
            const teamMembers = await github.request(
              `/orgs/github/teams/docs/members?per_page=100`
            )
            const logins = teamMembers.data.map(member => member.login)
            if (logins.includes(triggerer_login)) {
              console.log(`This workflow was triggered by ${triggerer_login} (on the docs team).`)
              return 'true'
            }
            console.log(`This workflow was triggered by ${triggerer_login} (not on the docs team), so no action will be taken.`)
            return 'false'

      - name: Exit if not triggered by a docs team member
        if: steps.triggered-by-member.outputs.result == 'false'
        run: |
          echo Aborting. This workflow must be triggered by a member of the docs team.
          exit 1

      - name: Create an issue in the localization repo
        run: |
          new_issue_url="$(gh issue create --title "$ISSUE_TITLE" --body "$ISSUE_BODY" --repo github/localization)"
          echo 'NEW_ISSUE='$new_issue_url >> $GITHUB_ENV
        env:
          GITHUB_TOKEN: ${{secrets.DOCUBOT_READORG_REPO_WORKFLOW_SCOPES}}
          ISSUE_TITLE: ${{ github.event.issue.title }}
          ISSUE_BODY: ${{ github.event.issue.body }}

      - name: Comment on the new issue
        run: gh issue comment $NEW_ISSUE --body "This issue was originally opened in the open source repo as $OLD_ISSUE"
        env:
          GITHUB_TOKEN: ${{secrets.DOCUBOT_READORG_REPO_WORKFLOW_SCOPES}}
          NEW_ISSUE: ${{ env.NEW_ISSUE }}
          OLD_ISSUE: ${{ github.event.issue.html_url }}

      - name: Comment on the old issue
        run: gh issue comment $OLD_ISSUE --body "Thanks for opening this issue! Unfortunately, we are not able to accept issues for translated content. Our translation process involves an integration with an external service at crowdin.com, where all translation activity happens. We hope to eventually open up the translation process to the open-source community, but we're not there yet. See https://github.com/github/docs/blob/main/contributing/types-of-contributions.md#earth_asia-translations for more information."
        env:
          GITHUB_TOKEN: ${{secrets.DOCUBOT_READORG_REPO_WORKFLOW_SCOPES}}
          OLD_ISSUE: ${{ github.event.issue.html_url }}

      - name: Close the old issue
        run: gh issue close $OLD_ISSUE
        env:
          GITHUB_TOKEN: ${{secrets.DOCUBOT_READORG_REPO_WORKFLOW_SCOPES}}
          OLD_ISSUE: ${{ github.event.issue.html_url }}
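The team-membership gate in the deleted workflow can also be expressed with the gh CLI; a rough equivalent (assuming a token with read:org scope is available to `gh`) looks like:

```bash
# Ask the same endpoint the github-script step used and check whether the
# triggering user is on the docs team; abort otherwise.
TRIGGERER="${GITHUB_ACTOR:?}"
if gh api '/orgs/github/teams/docs/members?per_page=100' --jq '.[].login' \
   | grep -qx "$TRIGGERER"; then
  echo "Triggered by $TRIGGERER (on the docs team)."
else
  echo "Triggered by $TRIGGERER (not on the docs team); aborting." >&2
  exit 1
fi
```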
@@ -25,17 +25,24 @@ concurrency:
jobs:
  check-internal-changes:
-    if: github.repository == 'github/docs-internal' && github.event.pull_request.user.login != 'Octomerger'
+    if: github.repository == 'github/docs-internal' && github.event.pull_request && github.event.pull_request.user.login != 'Octomerger'
    runs-on: ubuntu-latest
    outputs:
      notAllowedSearchSyncLabel: ${{ steps.filter.outputs.notAllowedSearchSyncLabel }}
    steps:
+      # This checkout is needed for merge_group
+      - name: Checkout
+        uses: actions/checkout@dcd71f646680f2efd8db4afa5ad64fdcba30e748
+        with:
+          ref: ${{ github.head_ref }}
+          token: ${{ secrets.DOCUBOT_REPO_PAT }}
+
      - name: Get files changed
        uses: dorny/paths-filter@eb75a1edc117d3756a18ef89958ee59f9500ba58
        id: filter
        with:
          # Base branch used to get changed files
-          base: ${{ github.event.pull_request.base.ref || github.base_ref || github.ref }}
+          base: ${{ github.event.pull_request.base.ref || github.base_ref || github.ref || 'main' }}

          # Enables setting an output in the format in `${FILTER_NAME}_files`
          # with the names of the matching files formatted as JSON array
@@ -45,6 +52,7 @@ jobs:
          filters: |
            notAllowedSearchSyncLabel:
              - 'lib/search/indexes/**'

  notAllowedSearchSyncLabel:
    needs: check-internal-changes
    if: ${{ needs.check-internal-changes.outputs.notAllowedSearchSyncLabel == 'true' }}
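The `notAllowedSearchSyncLabel` filter above flags pull requests that touch `lib/search/indexes/**`. Outside of `dorny/paths-filter`, the same check can be approximated locally with plain git (mirroring the fetch/diff pattern used elsewhere in this diff):

```bash
# Approximate the paths-filter output: does the branch change anything under
# lib/search/indexes/ relative to main?
git fetch origin main --depth 1
if git diff --name-only origin/main | grep -q '^lib/search/indexes/'; then
  echo "notAllowedSearchSyncLabel=true"
else
  echo "notAllowedSearchSyncLabel=false"
fi
```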
@@ -1,4 +1,6 @@
-translations/
+/translations/
includes/
data/release-notes/
script/bookmarklets/
+/.next/
+/.coverage
2
.vscode/settings.json
vendored
@@ -1,6 +1,6 @@
{
  "files.exclude": {
-    "**/translations": true
+    "translations/**": true
  },
  "workbench.editor.enablePreview": false,
  "workbench.editor.enablePreviewFromQuickOpen": false
@@ -83,7 +83,7 @@ Once you submit your PR, a Docs team member will review your proposal. We may as
Congratulations :tada::tada: The GitHub team thanks you :sparkles:.

-Once your PR is merged, your contributions will be publicly visible on the [GitHubs docs](https://docs.github.com/en).
+Once your PR is merged, your contributions will be publicly visible on the [GitHub docs](https://docs.github.com/en).

Now that you are part of the GitHub docs community, see how else you can [contribute to the docs](/contributing/types-of-contributions.md).
@@ -3,7 +3,7 @@
# --------------------------------------------------------------------------------
# BASE IMAGE
# --------------------------------------------------------------------------------
-FROM node:16.15.0-alpine@sha256:1a9a71ea86aad332aa7740316d4111ee1bd4e890df47d3b5eff3e5bded3b3d10 as base
+FROM node:16.17.0-alpine@sha256:2c405ed42fc0fd6aacbe5730042640450e5ec030bada7617beac88f742b6997b as base

# This directory is owned by the node user
ARG APP_HOME=/home/node/app
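The base image bump keeps the digest pin. When updating such a pinned `FROM` line, one way to look up the digest for a new tag (values here are illustrative, not taken from this diff) is:

```bash
# Pull the tag, then read back the repo digest to use in FROM ...@sha256:...
docker pull node:16.17.0-alpine
docker inspect --format '{{index .RepoDigests 0}}' node:16.17.0-alpine
```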
Binary image changes (only file sizes were rendered in the diff). New files:

BIN  assets/images/help/business-accounts/csv-report-button.png (9.1 KiB)
BIN  assets/images/help/codespaces/open-codespace-from-vscode.png (10 KiB)
BIN  assets/images/help/codespaces/open-codespace-remote-explorer.png (61 KiB)
BIN  assets/images/help/codespaces/restart-codespace-webui.png (46 KiB)
BIN  assets/images/help/codespaces/stop-codespace-webui.png (142 KiB)
BIN  assets/images/help/discussions/sort-comments-buttons.png (117 KiB)
BIN  assets/images/help/discussions/sort-discussions-buttons.png (53 KiB)
BIN  assets/images/help/discussions/upvote-comment-button.png (124 KiB)
BIN  assets/images/help/discussions/upvote-discussion-button.png (106 KiB)
BIN  assets/images/help/organizations/integration-access-requests.png (10 KiB)
BIN  assets/images/help/organizations/team-project-add-project.png (8.9 KiB)
BIN  assets/images/help/organizations/team-project-search.png (12 KiB)
BIN  assets/images/help/repository/autolink-format-setting.png (88 KiB)
BIN  assets/images/help/repository/secret-scanning-dismiss-alert.png (116 KiB)
BIN  assets/images/help/repository/unarchive-repository-warnings.png (21 KiB)
BIN  assets/images/help/repository/unarchive-repository.png (105 KiB)

The remaining entries in this part of the diff are unnamed binary images that were added, removed, or replaced; only their before/after sizes were rendered.