
Merge branch 'main' into patch-1

ylemkimon authored this commit on 2020-12-02 11:49:27 +09:00; it was committed by GitHub.
3,244 changed files with 1,755,551 additions and 149,618 deletions.

.eslintignore (new file, 1 line)

@@ -0,0 +1 @@
dist/

.eslintrc.js (new file, 28 lines)

@@ -0,0 +1,28 @@
module.exports = {
env: {
browser: true,
commonjs: true,
es2020: true,
node: true
},
parser: 'babel-eslint',
extends: [
'eslint:recommended',
'standard'
],
parserOptions: {
ecmaVersion: 11
},
rules: {
},
overrides: [
{
files: [
'**/tests/**/*.js'
],
env: {
jest: true
}
}
]
}


@@ -22,6 +22,6 @@ Thanks again!
### Check off the following:
- [ ] All of the tests are passing.
- [ ] I have reviewed my changes in staging.
- [ ] I have reviewed my changes in staging. (look for the **deploy-to-heroku** link in your pull request, then click **View deployment**)
- [ ] For content changes, I have reviewed the [localization checklist](https://github.com/github/docs/blob/main/contributing/localization-checklist.md)
- [ ] For content changes, I have reviewed the [Content style guide for GitHub Docs](https://github.com/github/docs/blob/main/contributing/content-style-guide.md).


@@ -0,0 +1,36 @@
#!/usr/bin/env node
const fs = require('fs')
const core = require('@actions/core')
const eventPayload = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, 'utf8'))
// This workflow-run script does the following:
// 1. Gets an array of labels on a PR.
// 2. Finds one with the relevant Algolia text; if none found, exits early.
// 3. Gets the version substring from the label string.
const labelText = 'sync-english-index-for-'
const labelsArray = eventPayload.pull_request.labels
// Exit early if no labels are on this PR
if (!(labelsArray && labelsArray.length)) {
process.exit(0)
}
// Find the relevant label
const algoliaLabel = labelsArray
.map(label => label.name)
.find(label => label.startsWith(labelText))
// Exit early if no relevant label is found
if (!algoliaLabel) {
process.exit(0)
}
// Given: sync-english-index-for-enterprise-server@3.0
// Returns: enterprise-server@3.0
const versionToSync = algoliaLabel.split(labelText)[1]
// Store the version so we can access it later in the workflow
core.setOutput('versionToSync', versionToSync)
process.exit(0)
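
The comments above show the intended input and output of the prefix split. A minimal standalone sketch of that extraction (the label value is taken from the example in the comment):

// Illustrative sketch only; mirrors the split logic in the script above.
const labelText = 'sync-english-index-for-'
const algoliaLabel = 'sync-english-index-for-enterprise-server@3.0'
// Splitting on the prefix yields ['', 'enterprise-server@3.0'], so index 1 is the version.
const versionToSync = algoliaLabel.split(labelText)[1]
console.log(versionToSync) // => enterprise-server@3.0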


@@ -0,0 +1,41 @@
#!/usr/bin/env node
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
const semver = require('semver')
/*
* This script performs two checks to prevent shipping development mode OpenAPI schemas:
* - Ensures the `info.version` property is a semantic version.
* In development mode, the `info.version` property is a string
* containing the `github/github` branch name.
* - Ensures the decorated schema matches the dereferenced schema.
* The workflow that calls this script runs `script/rest/update-files.js`
* with the `--decorate-only` switch then checks to see if files changed.
*
*/
// Check that the `info.version` property is a semantic version
const dereferencedDir = path.join(process.cwd(), 'lib/rest/static/dereferenced')
const schemas = fs.readdirSync(dereferencedDir)
schemas.forEach(filename => {
const schema = require(path.join(dereferencedDir, filename))
if (!semver.valid(schema.info.version)) {
console.log(`🚧⚠️ Your branch contains a development mode OpenAPI schema: ${schema.info.version}. This check is a reminder to not 🚢 OpenAPI files in development mode. 🛑`)
process.exit(1)
}
})
// Check that the decorated schema matches the dereferenced schema
const changedFiles = execSync('git diff --name-only HEAD').toString()
if (changedFiles !== '') {
console.log(`These files were changed:\n${changedFiles}`)
console.log(`🚧⚠️ Your decorated and dereferenced schema files don't match. Ensure you're using decorated and dereferenced schemas from the automatically created pull requests by the 'github-openapi-bot' user. For more information, see 'script/rest/README.md'. 🛑`)
process.exit(1)
}
// All checks pass, ready to ship
console.log('All good 👍')
process.exit(0)
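
As the header comment explains, the first check relies on `semver.valid` to tell a real release version from a `github/github` branch name. A small illustrative sketch (the example values are hypothetical):

// Illustrative only: semver.valid returns the parsed version string, or null if invalid.
const semver = require('semver')
console.log(semver.valid('3.0.1'))             // => '3.0.1'  (passes the check)
console.log(semver.valid('my-feature-branch')) // => null     (development mode, fails the check)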


@@ -21,13 +21,16 @@ module.exports = [
'juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8',
'juliangruber/find-pull-request-action@64d55773c959748ad30a4184f4dc102af1669f7b',
'juliangruber/read-file-action@e0a316da496006ffd19142f0fd594a1783f3b512',
'lee-dohm/close-matching-issues@22002609b2555fe18f52b8e2e7c07cbf5529e8a8',
'pascalgn/automerge-action@c9bd182',
'peter-evans/create-issue-from-file@35e304e2a12caac08c568247a2cb46ecd0c3ecc5',
'peter-evans/create-issue-from-file@a04ce672e3acedb1f8e416b46716ddfd09905326',
'peter-evans/create-or-update-comment@5221bf4aa615e5c6e95bb142f9673a9c791be2cd',
'peter-evans/create-pull-request@938e6aea6f8dbdaced2064e948cb806c77fe87b8',
'rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9',
'rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e',
'repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88',
'repo-sync/pull-request@33777245b1aace1a58c87a29c90321aa7a74bd7d',
'rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815',
'tjenkinson/gh-action-auto-merge-dependency-updates@cee2ac0'
'tjenkinson/gh-action-auto-merge-dependency-updates@cee2ac0',
'EndBug/add-and-commit@9358097a71ad9fb9e2f9624c6098c89193d83575'
]


@@ -1,20 +1,21 @@
name: 60 Days Stale Check
on:
schedule:
- cron: "40 16 * * *" # Run each day at 16:40 UTC / 8:40 PST
- cron: '40 16 * * *' # Run each day at 16:40 UTC / 8:40 PST
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@af4072615903a8b031f986d25b1ae3bf45ec44d4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'This issue is stale because it has been open 60 days with no activity.'
stale-pr-message: 'This PR is stale because it has been open 60 days with no activity.'
days-before-stale: 60
days-before-close: -1
only-labels: 'engineering'
stale-issue-label: 'stale'
stale-pr-label: 'stale'
- uses: actions/stale@af4072615903a8b031f986d25b1ae3bf45ec44d4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'This issue is stale because it has been open 60 days with no activity.'
stale-pr-message: 'This PR is stale because it has been open 60 days with no activity.'
days-before-stale: 60
days-before-close: -1
only-labels: 'engineering'
stale-issue-label: 'stale'
stale-pr-label: 'stale'
exempt-pr-labels: 'never-stale'
exempt-issue-labels: 'never-stale'


@@ -1,12 +1,12 @@
name: Auto label Pull Requests
on:
- pull_request
pull_request:
jobs:
triage:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@5f867a63be70efff62b767459b009290364495eb
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
- uses: actions/labeler@5f867a63be70efff62b767459b009290364495eb
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'


@@ -3,10 +3,10 @@ name: Auto Merge Dependency Updates
on:
pull_request:
paths:
- "package*.json"
- "Gemfile*"
- "Dockerfile"
- ".github/workflows/**"
- 'package*.json'
- 'Gemfile*'
- 'Dockerfile'
- '.github/workflows/**'
pull_request_review:
types:
- edited


@@ -23,14 +23,14 @@ jobs:
if: contains(github.event.pull_request.labels.*.name, 'automerge') || contains(github.event.pull_request.labels.*.name, 'autosquash')
steps:
- name: automerge
uses: "pascalgn/automerge-action@c9bd182"
uses: 'pascalgn/automerge-action@c9bd182'
env:
GITHUB_TOKEN: "${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}"
MERGE_METHOD_LABELS: "automerge=merge,autosquash=squash"
MERGE_COMMIT_MESSAGE: "pull-request-title"
MERGE_METHOD: "merge"
MERGE_FORKS: "true"
MERGE_RETRIES: "50"
MERGE_RETRY_SLEEP: "10000" # ten seconds
UPDATE_LABELS: "automerge,autosquash"
UPDATE_METHOD: "merge"
GITHUB_TOKEN: '${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}'
MERGE_METHOD_LABELS: 'automerge=merge,autosquash=squash'
MERGE_COMMIT_MESSAGE: 'pull-request-title'
MERGE_METHOD: 'merge'
MERGE_FORKS: 'true'
MERGE_RETRIES: '50'
MERGE_RETRY_SLEEP: '10000' # ten seconds
UPDATE_LABELS: 'automerge,autosquash'
UPDATE_METHOD: 'merge'


@@ -20,18 +20,22 @@ jobs:
paths: '[".github/workflows/browser-test.yml","assets/**", "content/**", "data/**", "includes/**", "javascripts/**", "jest-puppeteer.config.js", "jest.config.js", "layouts/**", "lib/**", "middleware/**", "package-lock.json", "package.json", "server.js", "translations/**", "webpack.config.js"]'
build:
needs: see_if_should_skip
if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
# Each of these ifs needs to be repeated at each step to make sure the required check still runs
# Even if it doesn't do anything
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Install
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Install
uses: ianwalter/puppeteer@12728ddef82390d1ecd4732fb543f62177392fbb
with:
args: npm ci
- name: Test
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Test
uses: ianwalter/puppeteer@12728ddef82390d1ecd4732fb543f62177392fbb
with:
args: npm run browser-test


@@ -3,7 +3,7 @@ name: Check all English links
on:
workflow_dispatch:
schedule:
- cron: "40 19 * * *" # once a day at 19:40 UTC / 11:40 PST
- cron: '40 19 * * *' # once a day at 19:40 UTC / 11:40 PST
jobs:
check_all_english_links:
@@ -11,22 +11,38 @@ jobs:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: npm ci
run: npm ci
- name: npm run build
run: npm run build
- name: Run script
run: script/check-english-links.js > broken_links.md
- if: ${{ failure() }}
name: Get title for issue
id: check
run: echo "::set-output name=title::$(head -1 broken_links.md)"
- if: ${{ failure() }}
name: Create issue from file
uses: peter-evans/create-issue-from-file@35e304e2a12caac08c568247a2cb46ecd0c3ecc5
with:
token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
title: ${{ steps.check.outputs.title }}
content-filepath: ./broken_links.md
labels: broken link report
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: npm ci
run: npm ci
- name: npm run build
run: npm run build
- name: Run script
run: |
script/check-english-links.js > broken_links.md
- if: ${{ failure() }}
name: Get title for issue
id: check
run: echo "::set-output name=title::$(head -1 broken_links.md)"
- if: ${{ failure() }}
name: Close previous report
uses: lee-dohm/close-matching-issues@22002609b2555fe18f52b8e2e7c07cbf5529e8a8
with:
query: 'label:"broken link report"'
token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
- if: ${{ failure() }}
name: Create issue from file
id: broken-link-report
uses: peter-evans/create-issue-from-file@a04ce672e3acedb1f8e416b46716ddfd09905326
with:
token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
title: ${{ steps.check.outputs.title }}
content-filepath: ./broken_links.md
labels: broken link report
- if: ${{ failure() }}
name: Add comment to issue
uses: peter-evans/create-or-update-comment@5221bf4aa615e5c6e95bb142f9673a9c791be2cd
with:
body: |
cc @github/docs-content
issue-number: ${{ steps.broken-link-report.outputs.issue-number }}
token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}


@@ -1,19 +1,19 @@
name: "CodeQL analysis"
name: CodeQL analysis
on:
push:
paths:
- '**/*.js'
- '.github/workflows/codeql.yml'
- '**/*.js'
- '.github/workflows/codeql.yml'
jobs:
build:
runs-on: ubuntu-latest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: github/codeql-action/init@v1
with:
languages: javascript # comma separated list of values from {go, python, javascript, java, cpp, csharp} (not YET ruby, sorry!)
- uses: github/codeql-action/analyze@v1
continue-on-error: true
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: github/codeql-action/init@v1
with:
languages: javascript # comma separated list of values from {go, python, javascript, java, cpp, csharp} (not YET ruby, sorry!)
- uses: github/codeql-action/analyze@v1
continue-on-error: true


@@ -3,7 +3,7 @@ name: Crowdin Sync
on:
workflow_dispatch:
schedule:
- cron: "33 2 * * *" # every day at 2:33 UTC at least until automerge is working
- cron: '33 2 * * *' # every day at 2:33 UTC at least until automerge is working
jobs:
sync_with_crowdin:
@@ -20,7 +20,7 @@ jobs:
upload_translations: false
download_translations: true
create_pull_request: true
# Using a custom config temporarily to avoid clobbering the existing crowdin.yml
# that is used by the github-help-docs OAuth integration.
config: 'crowdin.yml'
@@ -35,17 +35,15 @@ jobs:
crowdin_branch_name: main
env:
# Using an @octoglot token instead of the default Actions-provided GITHUB_TOKEN here
# Using an @octoglot token instead of the default Actions-provided GITHUB_TOKEN here
# so that subsequent workflows will be able to run on the pull request created by this workflow.
GITHUB_TOKEN: ${{ secrets.OCTOGLOT_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
# This is a numeric id, not to be confused with Crowdin API v1 "project identifier" string
# See "API v2" on https://crowdin.com/project/<your-project>/settings#api
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
# A personal access token, not to be confused with Crowdin API v1 "API key"
# See https://crowdin.com/settings#api-key to generate a token
# This token was created by logging into Crowdin with the octoglot user
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}


@@ -9,23 +9,23 @@ jobs:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: npm ci
run: npm ci
- name: (Dry run) sync indices
env:
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search-dry-run
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: npm ci
run: npm ci
- name: (Dry run) sync indices
env:
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search-dry-run


@@ -1,7 +1,12 @@
name: First responder docs-content
on:
pull_request:
types: [reopened, opened, ready_for_review, closed, unlabeled]
types:
- reopened
- opened
- ready_for_review
- closed
- unlabeled
jobs:
first-responder-triage-pr:
@@ -10,46 +15,46 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check if the event originated from a team member
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
id: set-result
with:
github-token: ${{secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES}}
result-encoding: string
script: |
const repoName = context.payload.repository.name
const ownerName = context.payload.repository.owner.login
const issueNumber = (context.eventName === "issues") ? context.payload.issue.number : context.payload.number
const updatedIssueInformation = await github.issues.get({
owner: ownerName,
repo: repoName,
issue_number: issueNumber
})
const teamMembers = await github.request(
`/orgs/github/teams/docs/members`
)
const logins = teamMembers.data.map(member => member.login)
// ignore PRs opened by docs bot accounts
logins.push('Octomerger', 'octoglot')
if (logins.some(login => login === updatedIssueInformation.data.user.login)) {
console.log(`This issue or pull request was authored by a member of the github/docs team.`)
return 'true'
}
console.log(`This issue or pull request was authored by an external contributor.`)
return 'false'
- name: Label external contributor pull requests with docs-content-fr
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
if: steps.set-result.outputs.result == 'false'
with:
repo-token: "${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}"
add-labels: "docs-content-fr"
- name: Triage to FR PR project column
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
if: steps.set-result.outputs.result == 'false'
with:
action-token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
project-url: "https://github.com/orgs/github/projects/1367"
column-name: "Docs-internal external contributor PRs"
- name: Check if the event originated from a team member
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
id: set-result
with:
github-token: ${{secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES}}
result-encoding: string
script: |
const repoName = context.payload.repository.name
const ownerName = context.payload.repository.owner.login
const issueNumber = (context.eventName === "issues") ? context.payload.issue.number : context.payload.number
const updatedIssueInformation = await github.issues.get({
owner: ownerName,
repo: repoName,
issue_number: issueNumber
})
const teamMembers = await github.request(
`/orgs/github/teams/docs/members`
)
const logins = teamMembers.data.map(member => member.login)
// ignore PRs opened by docs bot accounts
logins.push('Octomerger', 'octoglot')
if (logins.some(login => login === updatedIssueInformation.data.user.login)) {
console.log(`This issue or pull request was authored by a member of the github/docs team.`)
return 'true'
}
console.log(`This issue or pull request was authored by an external contributor.`)
return 'false'
- name: Label external contributor pull requests with docs-content-fr
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
if: steps.set-result.outputs.result == 'false'
with:
repo-token: '${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}'
add-labels: 'docs-content-fr'
- name: Triage to FR PR project column
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
if: steps.set-result.outputs.result == 'false'
with:
action-token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
project-url: 'https://github.com/orgs/github/projects/1367'
column-name: 'Docs-internal external contributor PRs'
first-responder-remove-pr:
name: Remove PR from FR project board
@@ -57,29 +62,29 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Remove card from project
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES}}
result-encoding: string
script: |
const issueToRemove = context.payload.number
const cards = await github.projects.listCards({
column_id: 11130889
})
cards.data.forEach(card => {
if (card.content_url) {
const cardIssueNumber = parseInt(card.content_url.split('/').pop(), 10)
if (cardIssueNumber === issueToRemove) {
const cards = github.projects.deleteCard({
card_id: card.id
})
- name: Remove card from project
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES}}
result-encoding: string
script: |
const issueToRemove = context.payload.number
const cards = await github.projects.listCards({
column_id: 11130889
})
cards.data.forEach(card => {
if (card.content_url) {
const cardIssueNumber = parseInt(card.content_url.split('/').pop(), 10)
if (cardIssueNumber === issueToRemove) {
const cards = github.projects.deleteCard({
card_id: card.id
})
}
}
}
})
- name: Remove docs-content-fr label if not already removed
if: github.event.action == 'closed'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
with:
repo-token: "${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}"
remove-labels: "docs-content-fr"
})
- name: Remove docs-content-fr label if not already removed
if: github.event.action == 'closed'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
with:
repo-token: '${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}'
remove-labels: 'docs-content-fr'


@@ -21,7 +21,7 @@ jobs:
with:
cancel_others: 'false'
github_token: ${{ github.token }}
paths: '["**/*.js", "package*.json", ".github/workflows/js-lint.yml"]'
paths: '["**/*.js", "package*.json", ".github/workflows/js-lint.yml", ".eslint*"]'
lint:
runs-on: ubuntu-latest
@@ -53,7 +53,7 @@ jobs:
run: npm ci
- name: Run linter
run: npx standard
run: npx eslint .
- name: Check dependencies
run: npm run check-deps


@@ -1,7 +1,8 @@
name: Merged notification
on:
pull_request_target:
types: ['closed']
types:
- 'closed'
jobs:
comment:

.github/workflows/openapi-decorate.yml (new file, 32 lines)

@@ -0,0 +1,32 @@
name: OpenAPI generate decorated schema files
on:
workflow_dispatch:
pull_request:
types: [opened]
jobs:
generate-decorated-files:
if: github.event.pull_request.user.login == 'github-openapi-bot'
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Install dependencies
run: npm ci
- name: Decorate the dereferenced OpenAPI schemas
run: script/rest/update-files.js --decorate-only
- name: Check in the decorated files
uses: EndBug/add-and-commit@9358097a71ad9fb9e2f9624c6098c89193d83575
with:
# The arguments for the `git add` command
add: 'lib/rest/static/decorated'
# The message for the commit
message: 'Add decorated OpenAPI schema files'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Leave this line unchanged


@@ -0,0 +1,22 @@
name: OpenAPI dev mode check
on:
workflow_dispatch:
push:
jobs:
check-schema-versions:
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Install dependencies
run: npm ci
# Differences between decorated and dereferenced files indicate a problem
- name: Generate decorated files to check that there are no differences
run: script/rest/update-files.js --decorate-only
- name: Check if deref/decorated schemas are dev mode and that they match
run: .github/actions-scripts/openapi-schema-branch.js


@@ -1,8 +1,8 @@
name: "Pa11y"
name: Pa11y
on:
workflow_dispatch:
schedule:
- cron: "25 17 * * *" # once a day at 17:25 UTC / 11:50 PST
- cron: '25 17 * * *' # once a day at 17:25 UTC / 11:50 PST
jobs:
test:
runs-on: ubuntu-latest


@@ -2,7 +2,7 @@ name: Ping staging apps
on:
schedule:
- cron: "*/20 * * * *" # every twenty minutes
- cron: '*/20 * * * *' # every twenty minutes
jobs:
ping_staging_apps:
@@ -12,10 +12,10 @@ jobs:
env:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
steps:
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: npm ci
run: npm ci
- name: npm run build
run: npm run build
- name: Run script
run: script/ping-staging-apps.js
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: npm ci
run: npm ci
- name: npm run build
run: npm run build
- name: Run script
run: script/ping-staging-apps.js


@@ -5,7 +5,7 @@ env:
on:
schedule:
- cron: "20 15 * * 0" # run every Sunday at 20:15 UTC / 12:15 PST
- cron: '20 15 * * 0' # run every Sunday at 20:15 UTC / 12:15 PST
jobs:
remove_unused_assets:
@@ -13,42 +13,43 @@ jobs:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: npm ci
run: npm ci
- name: Run scripts
run: |
script/remove-unused-assets.js > results.md
script/remove-extraneous-translation-files.js
- name: Get script results to use in PR body
id: results
uses: juliangruber/read-file-action@e0a316da496006ffd19142f0fd594a1783f3b512
with:
path: ./results.md
- name: Remove script results file
run: rm -rf ./results.md
- name: Create pull request
uses: peter-evans/create-pull-request@938e6aea6f8dbdaced2064e948cb806c77fe87b8
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
commit-message: Action ran script/remove-unused-assets.js
title: Remove unused assets
body: "Hello! This PR removes some files that exist in the repo but are not used in content or data files:\n\n
${{ steps.results.outputs.content }}
\n\nIf you have any questions, please contact @github/docs-engineering."
labels: unused assets
project: Core docs work for the current week
project-column: Should do
branch: remove-unused-assets
- if: ${{ failure() }}
name: Delete remote branch (if previous steps failed)
uses: dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branches: remove-unused-assets
- if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: npm ci
run: npm ci
- name: Run scripts
run: |
script/remove-unused-assets.js > results.md
script/remove-extraneous-translation-files.js
- name: Get script results to use in PR body
id: results
uses: juliangruber/read-file-action@e0a316da496006ffd19142f0fd594a1783f3b512
with:
path: ./results.md
- name: Remove script results file
run: rm -rf ./results.md
- name: Create pull request
uses: peter-evans/create-pull-request@938e6aea6f8dbdaced2064e948cb806c77fe87b8
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
commit-message: Action ran script/remove-unused-assets.js
title: Remove unused assets
body:
"Hello! This PR removes some files that exist in the repo but are not used in content or data files:\n\n
${{ steps.results.outputs.content }}
\n\nIf you have any questions, please contact @github/docs-engineering."
labels: unused assets
project: Core docs work for the current week
project-column: Should do
branch: remove-unused-assets
- if: ${{ failure() }}
name: Delete remote branch (if previous steps failed)
uses: dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branches: remove-unused-assets


@@ -19,9 +19,8 @@ jobs:
name: Prevent merging during deployment freezes
runs-on: ubuntu-latest
steps:
- name: Fail if repo merges are paused
if: ${{ env.FREEZE == 'true' }}
run: |
echo 'Merges into the "main" branch on this repo are currently paused!'
exit 1
- name: Fail if repo merges are paused
if: ${{ env.FREEZE == 'true' }}
run: |
echo 'Merges into the "main" branch on this repo are currently paused!'
exit 1


@@ -2,7 +2,7 @@ name: Repo Freeze Reminders
on:
schedule:
- cron: "00 11 * * *" # once per day around 11:00am UTC
- cron: '00 11 * * *' # once per day around 11:00am UTC
env:
FREEZE: ${{ secrets.FREEZE }}
@@ -13,13 +13,12 @@ jobs:
runs-on: ubuntu-latest
if: github.repository == 'github/docs-internal'
steps:
- name: Send Slack notification if repo is frozen
if: ${{ env.FREEZE == 'true' }}
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_USERNAME: docs-repo-sync
SLACK_ICON_EMOJI: ':freezing_face:'
SLACK_COLOR: '#51A0D5' # Carolina Blue
SLACK_MESSAGE: All repo-sync runs will fail for ${{ github.repository }} because the repo is currently frozen!
- name: Send Slack notification if repo is frozen
if: ${{ env.FREEZE == 'true' }}
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_USERNAME: docs-repo-sync
SLACK_ICON_EMOJI: ':freezing_face:'
SLACK_COLOR: '#51A0D5' # Carolina Blue
SLACK_MESSAGE: All repo-sync runs will fail for ${{ github.repository }} because the repo is currently frozen!


@@ -1,14 +1,14 @@
# The docs.github.com project has two repositories: github/docs (public) and github/docs-internal (private)
#
#
# This GitHub Actions workflow keeps the main branch of those two repos in sync.
#
#
# For more details, see https://github.com/repo-sync/repo-sync#how-it-works
name: Repo Sync
on:
schedule:
- cron: "*/15 * * * *" # every 15 minutes
- cron: '*/15 * * * *' # every 15 minutes
env:
FREEZE: ${{ secrets.FREEZE }}
@@ -18,65 +18,63 @@ jobs:
name: Check for deployment freezes
runs-on: ubuntu-latest
steps:
- name: Exit if repo is frozen
if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Exit if repo is frozen
if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
repo-sync:
name: Repo Sync
needs: check-freezer
runs-on: ubuntu-latest
steps:
- name: Check out repo
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Check out repo
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Sync repo to branch
uses: repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88
env:
GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
with:
source_repo: ${{ secrets.SOURCE_REPO }} # https://${access_token}@github.com/github/the-other-repo.git
source_branch: main
destination_branch: repo-sync
github_token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
- name: Sync repo to branch
uses: repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88
env:
GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
with:
source_repo: ${{ secrets.SOURCE_REPO }} # https://${access_token}@github.com/github/the-other-repo.git
source_branch: main
destination_branch: repo-sync
github_token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
- name: Create pull request
uses: repo-sync/pull-request@33777245b1aace1a58c87a29c90321aa7a74bd7d
env:
GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
with:
source_branch: repo-sync
destination_branch: main
pr_title: 'repo sync'
pr_body: "This is an automated pull request to sync changes between the public and private repos.\n\n:robot: This pull request should be merged (not squashed) to preserve continuity across repos, so please let a bot do the merging!"
pr_label: automerge,autoupdate
github_token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
- name: Create pull request
uses: repo-sync/pull-request@33777245b1aace1a58c87a29c90321aa7a74bd7d
env:
GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
with:
source_branch: repo-sync
destination_branch: main
pr_title: "repo sync"
pr_body: "This is an automated pull request to sync changes between the public and private repos.\n\n:robot: This pull request should be merged (not squashed) to preserve continuity across repos, so please let a bot do the merging!"
pr_label: automerge,autoupdate
github_token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
- name: Find pull request
uses: juliangruber/find-pull-request-action@64d55773c959748ad30a4184f4dc102af1669f7b
id: find-pull-request
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
branch: repo-sync
base: main
- name: Find pull request
uses: juliangruber/find-pull-request-action@64d55773c959748ad30a4184f4dc102af1669f7b
id: find-pull-request
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
branch: repo-sync
base: main
- name: Approve pull request
if: ${{ steps.find-pull-request.outputs.number }}
uses: juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.find-pull-request.outputs.number }}
- name: Approve pull request
if: ${{ steps.find-pull-request.outputs.number }}
uses: juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.find-pull-request.outputs.number }}
- name: Send Slack notification if workflow fails
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
if: ${{ failure() }}
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_USERNAME: docs-repo-sync
SLACK_ICON_EMOJI: ':ohno:'
SLACK_COLOR: '#B90E0A' # Crimson
SLACK_MESSAGE: The last repo-sync run for ${{github.repository}} failed. See https://github.com/${{github.repository}}/actions?query=workflow%3A%22Repo+Sync%22
- name: Send Slack notification if workflow fails
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
if: ${{ failure() }}
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_USERNAME: docs-repo-sync
SLACK_ICON_EMOJI: ':ohno:'
SLACK_COLOR: '#B90E0A' # Crimson
SLACK_MESSAGE: The last repo-sync run for ${{github.repository}} failed. See https://github.com/${{github.repository}}/actions?query=workflow%3A%22Repo+Sync%22


@@ -1,20 +1,22 @@
name: Send Issue to EPD backlog
on:
issues:
types: [labeled, reopened]
on:
issues:
types:
- labeled
- reopened
jobs:
triage:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
continue-on-error: true
steps:
- name: Add issues with engineering label to project board
if: contains(github.event.issue.labels.*.name, 'engineering') || contains(github.event.issue.labels.*.name, 'design') || contains(github.event.issue.labels.*.name, 'Design')
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
- name: Add issues with engineering label to project board
if: contains(github.event.issue.labels.*.name, 'engineering') || contains(github.event.issue.labels.*.name, 'design') || contains(github.event.issue.labels.*.name, 'Design')
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
script: |
var column_id = 9659080;
try {


@@ -1,9 +1,11 @@
name: Start new engineering PR workflow
on:
on:
pull_request_target:
types: [opened, reopened]
types:
- opened
- reopened
jobs:
triage:
runs-on: ubuntu-latest
@@ -12,52 +14,52 @@ jobs:
DRAFT_COLUMN_ID: 10095775
REGULAR_COLUMN_ID: 10095779
steps:
- uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
continue-on-error: true
with:
- uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
continue-on-error: true
with:
github-token: ${{ secrets.DOCUBOT_FR_PROJECT_BOARD_WORKFLOWS_REPO_ORG_READ_SCOPES }}
script: |
// Only assign the engineering folks
try {
await github.teams.getMembershipForUserInOrg({
org: 'github',
team_slug: 'docs-engineering',
username: context.payload.sender.login,
});
} catch(err) {
return
}
// Only assign the engineering folks
try {
await github.teams.getMembershipForUserInOrg({
org: 'github',
team_slug: 'docs-engineering',
username: context.payload.sender.login,
});
} catch(err) {
return
}
// Set column ID
const column_id = context.payload.pull_request.draft
? process.env.DRAFT_COLUMN_ID
: process.env.REGULAR_COLUMN_ID
// Set column ID
const column_id = context.payload.pull_request.draft
? process.env.DRAFT_COLUMN_ID
: process.env.REGULAR_COLUMN_ID
// Try to create the card on the GitHub Project
try {
await github.projects.createCard({
column_id: column_id,
content_type: 'PullRequest',
content_id: context.payload.pull_request.id
});
} catch(error) {
console.log(error);
}
// Try to create the card on the GitHub Project
try {
await github.projects.createCard({
column_id: column_id,
content_type: 'PullRequest',
content_id: context.payload.pull_request.id
});
} catch(error) {
console.log(error);
}
// Try to set the author as the assignee
const owner = context.payload.repository.owner.login
const repo = context.payload.repository.name
// Try to set the author as the assignee
const owner = context.payload.repository.owner.login
const repo = context.payload.repository.name
try {
await github.issues.addAssignees({
owner: owner,
repo: repo,
issue_number: context.payload.pull_request.number,
assignees: [
context.payload.sender.login
]
});
} catch(error) {
console.log(error);
}
try {
await github.issues.addAssignees({
owner: owner,
repo: repo,
issue_number: context.payload.pull_request.number,
assignees: [
context.payload.sender.login
]
});
} catch(error) {
console.log(error);
}


@@ -4,7 +4,7 @@ on:
workflow_dispatch:
push:
branches:
- main
- main
jobs:
updateIndices:
@@ -12,29 +12,29 @@ jobs:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: npm ci
run: npm ci
- name: sync indices
env:
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search
- name: Send slack notification if workflow run fails
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
if: failure()
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_MESSAGE: The last Algolia workflow run for ${{github.repository}} failed. See https://github.com/github/docs-internal/actions?query=workflow%3AAlgolia
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: npm ci
run: npm ci
- name: sync indices
env:
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search
- name: Send slack notification if workflow run fails
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
if: failure()
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_MESSAGE: The last Algolia workflow run for ${{github.repository}} failed. See https://github.com/github/docs-internal/actions?query=workflow%3AAlgolia


@@ -0,0 +1,46 @@
name: Algolia Sync Single English Index
on:
pull_request:
types:
- labeled
- unlabeled
- opened
- reopened
- synchronize
- ready_for_review
- unlocked
# This workflow requires a label in the format `sync-english-index-for-<PLAN@RELEASE>`
jobs:
updateIndices:
name: Update English index for single version based on a label's version
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: npm ci
run: npm ci
- name: Get version from Algolia label if present; only continue if the label is found.
id: getVersion
run: $GITHUB_WORKSPACE/.github/actions-scripts/enterprise-algolia-label.js
- if: ${{ steps.getVersion.outputs.versionToSync }}
name: Sync English index for single version
env:
VERSION: ${{ steps.getVersion.outputs.versionToSync }}
LANGUAGE: 'en'
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search


@@ -4,7 +4,7 @@ name: Node.js Tests - Translations
on:
schedule:
- cron: "10 20 * * *" # once a day at 20:10 UTC / 12:10 PST
- cron: '10 20 * * *' # once a day at 20:10 UTC / 12:10 PST
jobs:
lint:
@@ -38,7 +38,7 @@ jobs:
run: npm ci
- name: Run linter
run: npx standard
run: npx eslint .
- name: Check dependencies
run: npm run check-deps
@@ -78,4 +78,4 @@ jobs:
- name: Run tests
run: npx jest tests/${{ matrix.test-group }}/
env:
NODE_OPTIONS: "--max_old_space_size=4096"
NODE_OPTIONS: '--max_old_space_size=4096'


@@ -5,10 +5,7 @@ name: Node.js Tests - Windows
on:
workflow_dispatch:
schedule:
- cron: "50 19 * * *" # once a day at 19:50 UTC / 11:50 PST
env:
CI: true
- cron: '50 19 * * *' # once a day at 19:50 UTC / 11:50 PST
jobs:
test:
@@ -48,4 +45,4 @@ jobs:
- name: Run tests
run: npx jest tests/${{ matrix.test-group }}/
env:
NODE_OPTIONS: "--max_old_space_size=4096"
NODE_OPTIONS: '--max_old_space_size=4096'


@@ -31,7 +31,6 @@ jobs:
test:
needs: see_if_should_skip
if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
timeout-minutes: 60
strategy:
@@ -39,20 +38,26 @@ jobs:
matrix:
test-group: [content, meta, rendering, routing, unit, links-and-images]
steps:
- name: Check out repo
# Each of these ifs needs to be repeated at each step to make sure the required check still runs
# Even if it doesn't do anything
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Check out repo
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Setup node
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Setup node
uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: Get npm cache directory
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- name: Cache node modules
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ${{ steps.npm-cache.outputs.dir }}
@@ -60,20 +65,23 @@ jobs:
restore-keys: |
${{ runner.os }}-node-
- name: Install dependencies
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Install dependencies
run: npm ci
- name: Run build script
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Run build script
run: npm run build
- name: Run tests
- if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
name: Run tests
run: npx jest tests/${{ matrix.test-group }}/
env:
NODE_OPTIONS: "--max_old_space_size=4096"
NODE_OPTIONS: '--max_old_space_size=4096'
- name: Send Slack notification if workflow fails
uses: rtCamp/action-slack-notify@e17352feaf9aee300bf0ebc1dfbf467d80438815
if: failure() && github.ref == 'early-access'
env:
SLACK_WEBHOOK: ${{ secrets.DOCS_ALERTS_SLACK_WEBHOOK }}
SLACK_MESSAGE: "Tests are failing on the `early-access` branch. https://github.com/github/docs-internal/tree/early-access"
SLACK_MESSAGE: 'Tests are failing on the `early-access` branch. https://github.com/github/docs-internal/tree/early-access'


@@ -2,7 +2,7 @@ name: Translations
on:
schedule:
- cron: "20 19 * * *" # once a day at 19:20 UTC / 11:20 PST
- cron: '20 19 * * *' # once a day at 19:20 UTC / 11:20 PST
env:
FREEZE: ${{ secrets.FREEZE }}
@@ -12,45 +12,45 @@ jobs:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Find original Pull Request
uses: juliangruber/find-pull-request-action@64d55773c959748ad30a4184f4dc102af1669f7b
id: pr
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
branch: translations
- if: ${{ steps.pr.outputs.number }}
name: Check if already labeled
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
id: has-label
with:
script: |
const { data: labels } = await github.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: ${{ steps.pr.outputs.number }}
})
if (labels.find(label => label.name === 'automerge')) {
return 'ok'
}
- if: ${{ !steps.has-label.outputs.result }}
name: Approve Pull Request
uses: juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.pr.outputs.number }}
- if: ${{ !steps.has-label.outputs.result }}
name: Add automerge label
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: ${{ steps.pr.outputs.number }},
labels: ['automerge']
})
- if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Find original Pull Request
uses: juliangruber/find-pull-request-action@64d55773c959748ad30a4184f4dc102af1669f7b
id: pr
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
branch: translations
- if: ${{ steps.pr.outputs.number }}
name: Check if already labeled
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
id: has-label
with:
script: |
const { data: labels } = await github.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: ${{ steps.pr.outputs.number }}
})
if (labels.find(label => label.name === 'automerge')) {
return 'ok'
}
- if: ${{ !steps.has-label.outputs.result }}
name: Approve Pull Request
uses: juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.pr.outputs.number }}
- if: ${{ !steps.has-label.outputs.result }}
name: Add automerge label
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: ${{ steps.pr.outputs.number }},
labels: ['automerge']
})


@@ -1,7 +1,8 @@
name: Triage new issue comments
on:
issue_comment:
types: [created]
types:
- created
jobs:
triage-issue-comments:
@@ -9,38 +10,38 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check if the event originated from a team member
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
id: is-internal-contributor
with:
github-token: ${{secrets.GITHUB_TOKEN}}
result-encoding: string
script: |
const repo = context.payload.repository.name
const org = context.payload.repository.owner.login
const actor = context.actor
let collaboratorStatus = ''
try {
collaboratorStatus = await github.request('GET /repos/{owner}/{repo}/collaborators/{username}', {
owner: org,
repo: repo,
username: actor
})
console.log(`This issue was commented on by a Hubber.`)
return 'true'
} catch (error) {
console.log(`This issue was commented on by an external contributor.`)
return 'false'
}
- name: Label issues with new comments with 'triage'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
if: (steps.is-internal-contributor.outputs.result == 'false')
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
add-labels: "triage"
- name: Triage to project board
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
with:
action-token: ${{ secrets.GITHUB_TOKEN }}
project-url: "https://github.com/github/docs/projects/1"
column-name: "Triage"
- name: Check if the event originated from a team member
uses: actions/github-script@626af12fe9a53dc2972b48385e7fe7dec79145c9
id: is-internal-contributor
with:
github-token: ${{secrets.GITHUB_TOKEN}}
result-encoding: string
script: |
const repo = context.payload.repository.name
const org = context.payload.repository.owner.login
const actor = context.actor
let collaboratorStatus = ''
try {
collaboratorStatus = await github.request('GET /repos/{owner}/{repo}/collaborators/{username}', {
owner: org,
repo: repo,
username: actor
})
console.log(`This issue was commented on by a Hubber.`)
return 'true'
} catch (error) {
console.log(`This issue was commented on by an external contributor.`)
return 'false'
}
- name: Label issues with new comments with 'triage'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
if: (steps.is-internal-contributor.outputs.result == 'false')
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
add-labels: 'triage'
- name: Triage to project board
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
with:
action-token: ${{ secrets.GITHUB_TOKEN }}
project-url: 'https://github.com/github/docs/projects/1'
column-name: 'Triage'


@@ -1,7 +1,9 @@
name: Triage new issues
on:
issues:
types: [reopened, opened]
types:
- reopened
- opened
jobs:
triage_issues:
@@ -9,14 +11,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Label new issues with 'triage'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
add-labels: "triage"
- name: Triage to project board
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
with:
action-token: ${{ secrets.GITHUB_TOKEN }}
project-url: "https://github.com/github/docs/projects/1"
column-name: "Triage"
- name: Label new issues with 'triage'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
add-labels: 'triage'
- name: Triage to project board
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
with:
action-token: ${{ secrets.GITHUB_TOKEN }}
project-url: 'https://github.com/github/docs/projects/1'
column-name: 'Triage'


@@ -1,7 +1,9 @@
name: Triage new pull requests
on:
pull_request:
types: [reopened, opened]
types:
- reopened
- opened
jobs:
triage_pulls:
@@ -9,14 +11,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Label new pull requests with 'triage'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
add-labels: "triage"
- name: Triage to project board
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
with:
action-token: ${{ secrets.GITHUB_TOKEN }}
project-url: "https://github.com/github/docs/projects/1"
column-name: "Triage"
- name: Label new pull requests with 'triage'
uses: rachmari/labeler@832d42ec5523f3c6d46e8168de71cd54363e3e2e
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
add-labels: 'triage'
- name: Triage to project board
uses: rachmari/actions-add-new-issue-to-column@1a459ef92308ba7c9c9dc2fcdd72f232495574a9
with:
action-token: ${{ secrets.GITHUB_TOKEN }}
project-url: 'https://github.com/github/docs/projects/1'
column-name: 'Triage'


@@ -1,7 +1,7 @@
name: Public Repo Stale Check
on:
schedule:
- cron: "45 16 * * *" # Run each day at 16:45 UTC / 8:45 PST
- cron: '45 16 * * *' # Run each day at 16:45 UTC / 8:45 PST
jobs:
stale:
@@ -9,10 +9,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@af4072615903a8b031f986d25b1ae3bf45ec44d4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-pr-message: 'This PR is stale because it has been open 7 days with no activity and will be automatically closed in 3 days. To keep this PR open, update the PR by adding a comment or pushing a commit.'
days-before-stale: 7
days-before-close: 10
stale-pr-label: 'stale'
- uses: actions/stale@af4072615903a8b031f986d25b1ae3bf45ec44d4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-pr-message: 'This PR is stale because it has been open 7 days with no activity and will be automatically closed in 3 days. To keep this PR open, update the PR by adding a comment or pushing a commit.'
days-before-stale: 7
days-before-close: 10
stale-pr-label: 'stale'
exempt-pr-labels: 'never-stale'
exempt-issue-labels: 'never-stale'


@@ -10,63 +10,64 @@ env:
on:
schedule:
- cron: "20 16 * * *" # run every day at 16:20 UTC / 8:20 PST
- cron: '20 16 * * *' # run every day at 16:20 UTC / 8:20 PST
jobs:
update_graphql_files:
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
- if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Set up Ruby
uses: actions/setup-ruby@5f29a1cd8dfebf420691c4c9a0e832e2fae5a526
with:
ruby-version: '2.4'
- name: Install Ruby dependencies
run: |
gem install bundler
bundle install
- name: Install Node.js dependencies
run: npm ci
- name: Run updater scripts
env:
# need to use a token from a user with access to github/github for this step
GITHUB_TOKEN: ${{ secrets.ZEKE_PAT_WITH_REPO_AND_WORKFLOW_SCOPE_FOR_REPO_SYNC }}
# technically the changelog should only be updated once per day, but we can safely
# run build-changelog-from-markdown.js in its current form once per hour; when we
# rewrite the changelog script, we may need to run it in a separate workflow on a
# once-per-day schedule; see details in https://github.com/github/docs-internal/issues/12722.
run: |
script/graphql/update-files.js
script/graphql/build-changelog-from-markdown.js
- name: Create pull request
id: create-pull-request
uses: peter-evans/create-pull-request@938e6aea6f8dbdaced2064e948cb806c77fe87b8
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
commit-message: 'Action ran graphql scripts "update-files" and "build-changelog-from-markdown"'
title: GraphQL schema update
body: "Hello! Some GraphQL data in github/github was updated recently. This PR
syncs up the GraphQL data in this repo.\n\n
If CI passes, this PR will be auto-merged. :green_heart:\n\n
If CI does not pass or other problems arise, contact #docs-engineering on slack."
labels: automerge
branch: graphql-schema-update
- if: ${{ failure() }}
name: Delete remote branch (if previous steps failed)
uses: dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branches: graphql-schema-update
- if: ${{ steps.create-pull-request.outputs.pr_number }}
name: Approve
uses: juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.create-pull-request.outputs.pr_number }}
- if: ${{ env.FREEZE == 'true' }}
run: |
echo 'The repo is currently frozen! Exiting this workflow.'
exit 1 # prevents further steps from running
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Set up Ruby
uses: actions/setup-ruby@5f29a1cd8dfebf420691c4c9a0e832e2fae5a526
with:
ruby-version: '2.4'
- name: Install Ruby dependencies
run: |
gem install bundler
bundle install
- name: Install Node.js dependencies
run: npm ci
- name: Run updater scripts
env:
# need to use a token from a user with access to github/github for this step
GITHUB_TOKEN: ${{ secrets.ZEKE_PAT_WITH_REPO_AND_WORKFLOW_SCOPE_FOR_REPO_SYNC }}
# technically the changelog should only be updated once per day, but we can safely
# run build-changelog-from-markdown.js in its current form once per hour; when we
# rewrite the changelog script, we may need to run it in a separate workflow on a
# once-per-day schedule; see details in https://github.com/github/docs-internal/issues/12722.
run: |
script/graphql/update-files.js
script/graphql/build-changelog-from-markdown.js
- name: Create pull request
id: create-pull-request
uses: peter-evans/create-pull-request@938e6aea6f8dbdaced2064e948cb806c77fe87b8
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
commit-message: 'Action ran graphql scripts "update-files" and "build-changelog-from-markdown"'
title: GraphQL schema update
body:
"Hello! Some GraphQL data in github/github was updated recently. This PR
syncs up the GraphQL data in this repo.\n\n
If CI passes, this PR will be auto-merged. :green_heart:\n\n
If CI does not pass or other problems arise, contact #docs-engineering on slack."
labels: automerge
branch: graphql-schema-update
- if: ${{ failure() }}
name: Delete remote branch (if previous steps failed)
uses: dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branches: graphql-schema-update
- if: ${{ steps.create-pull-request.outputs.pr_number }}
name: Approve
uses: juliangruber/approve-pull-request-action@c530832d4d346c597332e20e03605aa94fa150a8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.create-pull-request.outputs.pr_number }}

56
.github/workflows/yml-lint.yml vendored Normal file
View File

@@ -0,0 +1,56 @@
name: Lint Yaml
on:
workflow_dispatch:
push:
branches:
- main
pull_request:
branches-ignore:
- translations
jobs:
see_if_should_skip:
runs-on: ubuntu-latest
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
- id: skip_check
uses: fkirc/skip-duplicate-actions@36feb0d8d062137530c2e00bd278d138fe191289
with:
cancel_others: 'false'
github_token: ${{ github.token }}
paths: '["**/*.yml", "**/*.yaml", "package*.json", ".github/workflows/yml-lint.yml"]'
lint:
runs-on: ubuntu-latest
needs: see_if_should_skip
if: ${{ needs.see_if_should_skip.outputs.should_skip != 'true' }}
steps:
- name: Check out repo
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Setup node
uses: actions/setup-node@56899e050abffc08c2b3b61f3ec6a79a9dc3223d
with:
node-version: 14.x
- name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- name: Cache node modules
uses: actions/cache@0781355a23dac32fd3bac414512f4b903437991a
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Install dependencies
run: npm ci
- name: Run linter
run: npx prettier -c "**/*.{yml,yaml}"

1
.prettierignore Normal file
View File

@@ -0,0 +1 @@
translations/

12
.prettierrc.json Normal file
View File

@@ -0,0 +1,12 @@
{
"overrides": [
{
"files":[
"**/*.{yml,yaml}"
],
"options": {
"singleQuote": true
}
}
]
}

View File

@@ -22,6 +22,7 @@ Examples of unacceptable behavior include:
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Contacting individual members, contributors, or leaders privately, outside designated community mechanisms, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Enforcement Responsibilities

View File

@@ -114,7 +114,8 @@ You can browse existing issues to find something that needs help!
### Labels
Labels can help you find an issue you'd like to help with.
- The [`good-first-issue` label](https://github.com/github/docs/issues?q=is%3Aopen+is%3Aissue+label%3Agood-first-issue) is for problems or updates we think are ideal for beginners.
- The [`help wanted` label](https://github.com/github/docs/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) is for problems or updates that anyone in the community can start working on.
- The [`good first issue` label](https://github.com/github/docs/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) is for problems or updates we think are ideal for beginners.
- The [`content` label](https://github.com/github/docs/issues?q=is%3Aopen+is%3Aissue+label%3Acontent) is for problems or updates in the content on docs.github.com. These will usually require some knowledge of Markdown.
- The [`engineering` label](https://github.com/github/docs/issues?q=is%3Aopen+is%3Aissue+label%3Aengineering) is for problems or updates in the docs.github.com website. These will usually require some knowledge of JavaScript/Node.js or YAML to fix.

View File

@@ -5,6 +5,7 @@ This repository contains the documentation website code and Markdown source file
GitHub's Docs team works on pre-production content in a private repo that regularly syncs with this public repo.
In this article:
- [Contributing](#contributing)
- [READMEs](#readmes)
- [License](#license)
@@ -34,6 +35,7 @@ If you have a solution to one of the open issues, you will need to fork the repo
We use GitHub Discussions to talk about all sorts of topics related to documentation and this site. For example: if you'd like help troubleshooting a PR, have a great new idea, or want to share something amazing you've learned in our docs, join us in [discussions](https://github.com/github/docs/discussions).
#### And that's it!
That's how you can get started easily as a member of the GitHub Documentation community. :sparkles:
If you want to know more, or you're making a more complex contribution, check out [Getting Started with Contributing](/CONTRIBUTING.md).

Binary image files not shown (multiple image assets under the docs were updated).

View File

@@ -91,7 +91,7 @@ steps:
### Caching dependencies
You can cache your dependencies to speed up your workflow runs. After a successful run, your local Gradle package cache will be stored on GitHub Actions infrastructure. In future workflow runs, the cache will be restored so that dependencies don't need to be downloaded from remote package repositories. For more information, see "[Caching dependencies to speed up workflows](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)" and the [`cache` action](https://github.com/marketplace/actions/cache).
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can cache your dependencies to speed up your workflow runs. After a successful run, your local Gradle package cache will be stored on GitHub Actions infrastructure. In future workflow runs, the cache will be restored so that dependencies don't need to be downloaded from remote package repositories. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>" and the [`cache` action](https://github.com/marketplace/actions/cache).
{% raw %}
```yaml

View File

@@ -91,7 +91,7 @@ steps:
### Caching dependencies
You can cache your dependencies to speed up your workflow runs. After a successful run, your local Maven repository will be stored on GitHub Actions infrastructure. In future workflow runs, the cache will be restored so that dependencies don't need to be downloaded from remote Maven repositories. For more information, see "[Caching dependencies to speed up workflows](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)" and the [`cache` action](https://github.com/marketplace/actions/cache).
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can cache your dependencies to speed up your workflow runs. After a successful run, your local Maven repository will be stored on GitHub Actions infrastructure. In future workflow runs, the cache will be restored so that dependencies don't need to be downloaded from remote Maven repositories. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>" and the [`cache` action](https://github.com/marketplace/actions/cache).
{% raw %}
```yaml

View File

@@ -129,7 +129,7 @@ If you don't specify a Node.js version, {% data variables.product.prodname_dotco
{% data variables.product.prodname_dotcom %}-hosted runners have npm and Yarn dependency managers installed. You can use npm and Yarn to install dependencies in your workflow before building and testing your code. The Windows and Linux {% data variables.product.prodname_dotcom %}-hosted runners also have Grunt, Gulp, and Bower installed.
You can also cache dependencies to speed up your workflow. For more information, see "[Caching dependencies to speed up your workflow](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)."
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can also cache dependencies to speed up your workflow. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
#### Example using npm
@@ -227,7 +227,7 @@ always-auth=true
#### Example caching dependencies
You can cache dependencies using a unique key, and restore the dependencies when you run future workflows using the `cache` action. For more information, see "[Caching dependencies to speed up workflows](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)" and the [`cache` action](https://github.com/marketplace/actions/cache).
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can cache dependencies using a unique key, and restore the dependencies when you run future workflows using the `cache` action. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>" and the [`cache` action](https://github.com/marketplace/actions/cache).
{% raw %}
```yaml
@@ -241,7 +241,7 @@ steps:
uses: actions/cache@v2
with:
# npm cache files are stored in `~/.npm` on Linux/macOS
path: ~/.npm
path: ~/.npm
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.OS }}-node-

View File

@@ -30,7 +30,7 @@ We recommend that you have a basic understanding of PowerShell and Pester. For m
### Adding a workflow for Pester
To automate your testing with PowerShell and Pester, you can add a workflow that runs every time a change is pushed to your repository. In the following example, `Test-Path` is used to check that a file called `resultsfile.log` is present.
To automate your testing with PowerShell and Pester, you can add a workflow that runs every time a change is pushed to your repository. In the following example, `Test-Path` is used to check that a file called `resultsfile.log` is present.
This example workflow file must be added to your repository's `.github/workflows/` directory:
@@ -57,7 +57,7 @@ jobs:
{% endraw %}
* `shell: pwsh` - Configures the job to use PowerShell when running the `run` commands.
* `run: Test-Path resultsfile.log` - Check whether a file called `resultsfile.log` is present in the repository's root directory.
* `run: Test-Path resultsfile.log` - Check whether a file called `resultsfile.log` is present in the repository's root directory.
* `Should -Be $true` - Uses Pester to define an expected result. If the result is unexpected, then {% data variables.product.prodname_actions %} flags this as a failed test. For example:
![Failed Pester test](/assets/images/help/repository/actions-failed-pester-test.png)
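Put together, a minimal sketch of such a check (the `resultsfile.log` name and the action pin are illustrative) might look like this:
```yaml
jobs:
  pester-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # Run the check with PowerShell so Pester's Should assertion is available
      - name: Check that resultsfile.log is present
        shell: pwsh
        run: Test-Path resultsfile.log | Should -Be $true
```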
@@ -83,7 +83,7 @@ The table below describes the locations for various PowerShell modules in each {
### Installing dependencies
{% data variables.product.prodname_dotcom %}-hosted runners have PowerShell 7 and Pester installed. You can use `Install-Module` to install additional dependencies from the PowerShell Gallery before building and testing your code.
{% data variables.product.prodname_dotcom %}-hosted runners have PowerShell 7 and Pester installed. You can use `Install-Module` to install additional dependencies from the PowerShell Gallery before building and testing your code.
{% note %}
@@ -91,7 +91,7 @@ The table below describes the locations for various PowerShell modules in each {
{% endnote %}
You can also cache dependencies to speed up your workflow. For more information, see "[Caching dependencies to speed up your workflow](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)."
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can also cache dependencies to speed up your workflow. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
For example, the following job installs the `SqlServer` and `PSScriptAnalyzer` modules:
@@ -119,7 +119,7 @@ jobs:
#### Caching dependencies
You can cache PowerShell dependencies using a unique key, which allows you to restore the dependencies for future workflows with the [`cache`](https://github.com/marketplace/actions/cache) action. For more information, see "[Caching dependencies to speed up workflows](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)."
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can cache PowerShell dependencies using a unique key, which allows you to restore the dependencies for future workflows with the [`cache`](https://github.com/marketplace/actions/cache) action. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
PowerShell caches its dependencies in different locations, depending on the runner's operating system. For example, the `path` location used in the following Ubuntu example will be different for a Windows operating system.

View File

@@ -141,9 +141,9 @@ jobs:
uses: actions/setup-python@v2
with:
# Semantic version range syntax or exact version of a Python version
python-version: '3.x'
python-version: '3.x'
# Optional - x64 or x86 architecture, defaults to x64
architecture: 'x64'
architecture: 'x64'
# You can test your matrix by printing the current Python version
- name: Display Python version
run: python -c "import sys; print(sys.version)"
@@ -192,7 +192,7 @@ We recommend using `setup-python` to configure the version of Python used in you
{% data variables.product.prodname_dotcom %}-hosted runners have the pip package manager installed. You can use pip to install dependencies from the PyPI package registry before building and testing your code. For example, the YAML below installs or upgrades the `pip` package installer and the `setuptools` and `wheel` packages.
You can also cache dependencies to speed up your workflow. For more information, see "[Caching dependencies to speed up your workflow](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)."
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can also cache dependencies to speed up your workflow. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
{% raw %}
```yaml
@@ -228,7 +228,7 @@ steps:
#### Caching Dependencies
You can cache pip dependencies using a unique key, and restore the dependencies when you run future workflows using the [`cache`](https://github.com/marketplace/actions/cache) action. For more information, see "[Caching dependencies to speed up workflows](/actions/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows)."
When using {% data variables.product.prodname_dotcom %}-hosted runners, you can cache pip dependencies using a unique key, and restore the dependencies when you run future workflows using the [`cache`](https://github.com/marketplace/actions/cache) action. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
Pip caches dependencies in different locations, depending on the operating system of the runner. The path you'll need to cache may differ from the Ubuntu example below depending on the operating system you use. For more information, see [Python caching examples](https://github.com/actions/cache/blob/main/examples.md#python---pip).

View File

@@ -0,0 +1,318 @@
---
title: Building and testing Ruby
intro: You can create a continuous integration (CI) workflow to build and test your Ruby project.
product: '{% data reusables.gated-features.actions %}'
versions:
free-pro-team: '*'
enterprise-server: '>=2.22'
---
{% data reusables.actions.enterprise-beta %}
{% data reusables.actions.enterprise-github-hosted-runners %}
### Introduction
This guide shows you how to create a continuous integration (CI) workflow that builds and tests a Ruby application. If your CI tests pass, you may want to deploy your code or publish a gem.
### Prerequisites
We recommend that you have a basic understanding of Ruby, YAML, workflow configuration options, and how to create a workflow file. For more information, see:
- [Learn {% data variables.product.prodname_actions %}](/actions/learn-github-actions)
- [Ruby in 20 minutes](https://www.ruby-lang.org/en/documentation/quickstart/)
### Starting with the Ruby workflow template
{% data variables.product.prodname_dotcom %} provides a Ruby workflow template that will work for most Ruby projects. For more information, see the [Ruby workflow template](https://github.com/actions/starter-workflows/blob/master/ci/ruby.yml).
To get started quickly, add the template to the `.github/workflows` directory of your repository.
{% raw %}
```yaml
name: Ruby
on:
push:
branches: [ $default-branch ]
pull_request:
branches: [ $default-branch ]
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Ruby
# To automatically get bug fixes and new Ruby versions for ruby/setup-ruby,
# change this to (see https://github.com/ruby/setup-ruby#versioning):
# uses: ruby/setup-ruby@v1
uses: ruby/setup-ruby@ec106b438a1ff6ff109590de34ddc62c540232e0
with:
ruby-version: 2.6
- name: Install dependencies
run: bundle install
- name: Run tests
run: bundle exec rake
```
{% endraw %}
### Specifying the Ruby version
The easiest way to specify a Ruby version is by using the `ruby/setup-ruby` action provided by the Ruby organization on GitHub. The action adds any supported Ruby version to `PATH` for each job run in a workflow. For more information, see the [`ruby/setup-ruby` action](https://github.com/ruby/setup-ruby).
Using either Ruby's `ruby/setup-ruby` action or GitHub's `actions/setup-ruby` action is the recommended way of using Ruby with GitHub Actions because it ensures consistent behavior across different runners and different versions of Ruby.
The `setup-ruby` action takes a Ruby version as an input and configures that version on the runner.
{% raw %}
```yaml
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6 # Not needed with a .ruby-version file
- run: bundle install
- run: bundle exec rake
```
{% endraw %}
Alternatively, you can check a `.ruby-version` file into the root of your repository and `setup-ruby` will use the version defined in that file.
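For instance, a minimal sketch of a job that relies on a `.ruby-version` file (no `ruby-version` input passed; the step pins are illustrative) could look like this:
```yaml
steps:
  - uses: actions/checkout@v2
  # No ruby-version input: setup-ruby reads the version from .ruby-version
  - uses: ruby/setup-ruby@v1
  - run: bundle install
  - run: bundle exec rake
```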
### Testing with multiple versions of Ruby
You can add a matrix strategy to run your workflow with more than one version of Ruby. For example, you can test your code against the latest patch releases of versions 2.7, 2.6, and 2.5. The 'x' is a wildcard character that matches the latest patch release available for a version.
{% raw %}
```yaml
strategy:
matrix:
ruby-version: [2.7.x, 2.6.x, 2.5.x]
```
{% endraw %}
Each version of Ruby specified in the `ruby-version` array creates a job that runs the same steps. The {% raw %}`${{ matrix.ruby-version }}`{% endraw %} context is used to access the current job's version. For more information about matrix strategies and contexts, see "Workflow syntax for GitHub Actions" and "Context and expression syntax for GitHub Actions."
The full updated workflow with a matrix strategy could look like this:
{% raw %}
```yaml
name: Ruby CI
on:
push:
branches: [ $default-branch ]
pull_request:
branches: [ $default-branch ]
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
ruby-version: [2.7.x, 2.6.x, 2.5.x]
steps:
- uses: actions/checkout@v2
- name: Set up Ruby ${{ matrix.ruby-version }}
# To automatically get bug fixes and new Ruby versions for ruby/setup-ruby,
# change this to (see https://github.com/ruby/setup-ruby#versioning):
# uses: ruby/setup-ruby@v1
uses: ruby/setup-ruby@ec106b438a1ff6ff109590de34ddc62c540232e0
with:
ruby-version: ${{ matrix.ruby-version }}
- name: Install dependencies
run: bundle install
- name: Run tests
run: bundle exec rake
```
{% endraw %}
### Installing dependencies with Bundler
The `setup-ruby` action will automatically install bundler for you. The version is determined by your `Gemfile.lock` file. If no version is present in your lockfile, then the latest compatible version will be installed.
{% raw %}
```yaml
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- run: bundle install
```
{% endraw %}
#### Caching dependencies
If you are using {% data variables.product.prodname_dotcom %}-hosted runners, the `setup-ruby` action provides a way to automatically handle caching of your gems between runs.
To enable caching, set the following option.
{% raw %}
```yaml
steps:
- uses: ruby/setup-ruby@v1
with:
bundler-cache: true
```
{% endraw %}
This will configure bundler to install your gems to `vendor/cache`. For each successful run of your workflow, this folder will be cached by Actions and re-downloaded for subsequent workflow runs. A hash of your `Gemfile.lock` and the Ruby version is used as the cache key. If you install any new gems, or change a version, the cache will be invalidated and bundler will do a fresh install.
**Caching without setup-ruby**
For greater control over caching, if you are using {% data variables.product.prodname_dotcom %}-hosted runners, you can use the `actions/cache` Action directly. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
{% raw %}
```yaml
steps:
- uses: actions/cache@v2
with:
path: vendor/bundle
key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: |
${{ runner.os }}-gems-
- name: Bundle install
run: |
bundle config path vendor/bundle
bundle install --jobs 4 --retry 3
```
{% endraw %}
If you're using a matrix build, you will want to include the matrix variables in your cache key. For example, if you have a matrix strategy for different ruby versions (`matrix.ruby-version`) and different operating systems (`matrix.os`), your workflow steps might look like this:
{% raw %}
```yaml
steps:
- uses: actions/cache@v2
with:
path: vendor/bundle
key: bundle-use-ruby-${{ matrix.os }}-${{ matrix.ruby-version }}-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: |
bundle-use-ruby-${{ matrix.os }}-${{ matrix.ruby-version }}-
- name: Bundle install
run: |
bundle config path vendor/bundle
bundle install --jobs 4 --retry 3
```
{% endraw %}
### Matrix testing your code
The following example matrix tests all stable releases and head versions of MRI, JRuby and TruffleRuby on Ubuntu and macOS.
{% raw %}
```yaml
name: Matrix Testing
on:
push:
branches: [ $default-branch ]
pull_request:
branches: [ $default-branch ]
jobs:
test:
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
os: [ubuntu, macos]
ruby: [2.5, 2.6, 2.7, head, debug, jruby, jruby-head, truffleruby, truffleruby-head]
continue-on-error: ${{ endsWith(matrix.ruby, 'head') || matrix.ruby == 'debug' }}
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ matrix.ruby }}
- run: bundle install
- run: bundle exec rake
```
{% endraw %}
### Linting your code
The following example installs `rubocop` and uses it to lint all files. For more information, see [Rubocop](https://github.com/rubocop-hq/rubocop). You can [configure Rubocop](https://docs.rubocop.org/rubocop/configuration.html) to decide on the specific linting rules.
{% raw %}
```yaml
name: Linting
on: [push]
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- run: bundle install
- name: Rubocop
run: rubocop
```
{% endraw %}
### Publishing Gems
You can configure your workflow to publish your Ruby package to any package registry you'd like when your CI tests pass.
You can store any access tokens or credentials needed to publish your package using repository secrets. The following example creates and publishes a package to `GitHub Package Registry` and `RubyGems`.
{% raw %}
```yaml
name: Ruby Gem
on:
# Manually publish
workflow_dispatch:
# Alternatively, publish whenever changes are merged to the default branch.
push:
branches: [ $default-branch ]
pull_request:
branches: [ $default-branch ]
jobs:
build:
name: Build + Publish
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Ruby 2.6
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- run: bundle install
- name: Publish to GPR
run: |
mkdir -p $HOME/.gem
touch $HOME/.gem/credentials
chmod 0600 $HOME/.gem/credentials
printf -- "---\n:github: ${GEM_HOST_API_KEY}\n" > $HOME/.gem/credentials
gem build *.gemspec
gem push --key github --host https://rubygems.pkg.github.com/${OWNER} *.gem
env:
GEM_HOST_API_KEY: "Bearer ${{secrets.GITHUB_TOKEN}}"
OWNER: ${{ github.repository_owner }}
- name: Publish to RubyGems
run: |
mkdir -p $HOME/.gem
touch $HOME/.gem/credentials
chmod 0600 $HOME/.gem/credentials
printf -- "---\n:rubygems_api_key: ${GEM_HOST_API_KEY}\n" > $HOME/.gem/credentials
gem build *.gemspec
gem push *.gem
env:
GEM_HOST_API_KEY: "${{secrets.RUBYGEMS_AUTH_TOKEN}}"
```
{% endraw %}

View File

@@ -31,6 +31,7 @@ You can use {% data variables.product.prodname_actions %} to create custom conti
{% link_in_list /building-and-testing-nodejs %}
{% link_in_list /building-and-testing-powershell %}
{% link_in_list /building-and-testing-python %}
{% link_in_list /building-and-testing-ruby %}
{% link_in_list /building-and-testing-java-with-maven %}
{% link_in_list /building-and-testing-java-with-gradle %}
{% link_in_list /building-and-testing-java-with-ant %}

View File

@@ -131,7 +131,7 @@ The `retention-days` value cannot exceed the retention limit set by the reposito
During a workflow run, you can use the [`download-artifact`](https://github.com/actions/download-artifact) action to download artifacts that were previously uploaded in the same workflow run.
After a workflow run has been completed, you can download or delete artifacts on {% data variables.product.prodname_dotcom %} or using the REST API. For more information, see "[Downloading workflow artifacts](/actions/managing-workflow-runs/downloading-workflow-artifacts)," "[Removing workflow artifacts](/actions/managing-workflow-runs/removing-workflow-artifacts)," and the "[Artifacts REST API](/v3/actions/artifacts/)."
After a workflow run has been completed, you can download or delete artifacts on {% data variables.product.prodname_dotcom %} or using the REST API. For more information, see "[Downloading workflow artifacts](/actions/managing-workflow-runs/downloading-workflow-artifacts)," "[Removing workflow artifacts](/actions/managing-workflow-runs/removing-workflow-artifacts)," and the "[Artifacts REST API](/rest/reference/actions#artifacts)."
#### Downloading artifacts during a workflow run
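As a minimal sketch (assuming `actions/upload-artifact@v2` and `actions/download-artifact@v2`, with a hypothetical `output.log` file), one job can upload an artifact and a later job in the same run can download it:
```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo "build output" > output.log
      # Upload the file so later jobs in this run can use it
      - uses: actions/upload-artifact@v2
        with:
          name: build-log
          path: output.log
  report:
    needs: build
    runs-on: ubuntu-latest
    steps:
      # Download the artifact uploaded by the build job
      - uses: actions/download-artifact@v2
        with:
          name: build-log
      - run: cat output.log
```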

View File

@@ -33,7 +33,7 @@ All organizations have a single default self-hosted runner group. Organizations
Self-hosted runners are automatically assigned to the default group when created, and can only be members of one group at a time. You can move a runner from the default group to any group you create.
When creating a group, you must choose a policy that defines which repositories have access to the runner group. You can configure a runner group to be accessible to a specific list of repositories, all private repositories, or all repositories in the organization.
When creating a group, you must choose a policy that defines which repositories have access to the runner group.
{% data reusables.organizations.navigate-to-org %}
{% data reusables.organizations.org_settings %}
@@ -41,9 +41,21 @@ When creating a group, you must choose a policy that defines which repositories
1. In the **Self-hosted runners** section, click **Add new**, and then **New group**.
![Add runner group](/assets/images/help/settings/actions-org-add-runner-group.png)
1. Enter a name for your runner group, and select an access policy from the **Repository access** dropdown list.
1. Enter a name for your runner group, and assign a policy for repository access.
![Add runner group options](/assets/images/help/settings/actions-org-add-runner-group-options.png)
{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@2.22" %} You can configure a runner group to be accessible to a specific list of repositories, or to all repositories in the organization. By default, public repositories can't access runners in a runner group, but you can use the **Allow public repositories** option to override this.{% else if currentVersion == "enterprise-server@2.22"%}You can configure a runner group to be accessible to a specific list of repositories, all private repositories, or all repositories in the organization.{% endif %}
{% warning %}
**Warning**
{% indented_data_reference site.data.reusables.github-actions.self-hosted-runner-security spaces=3 %}
For more information, see "[About self-hosted runners](/actions/hosting-your-own-runners/about-self-hosted-runners#self-hosted-runner-security-with-public-repositories)."
{% endwarning %}
![Add runner group options](/assets/images/help/settings/actions-org-add-runner-group-options.png)
1. Click **Save group** to create the group and apply the policy.
### Creating a self-hosted runner group for an enterprise
@@ -52,7 +64,7 @@ Enterprises can add their self-hosted runners to groups for access management. E
Self-hosted runners are automatically assigned to the default group when created, and can only be members of one group at a time. You can assign the runner to a specific group during the registration process, or you can later move the runner from the default group to a custom group.
When creating a group, you must choose a policy that grants access to all organizations in the enterprise or choose specific organizations.
When creating a group, you must choose a policy that defines which organizations have access to the runner group.
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.policies-tab %}
@@ -61,7 +73,19 @@ When creating a group, you must choose a policy that grants access to all organi
1. Click **Add new**, and then **New group**.
![Add runner group](/assets/images/help/settings/actions-enterprise-account-add-runner-group.png)
1. Enter a name for your runner group, and select an access policy from the **Organization access** dropdown list.
1. Enter a name for your runner group, and assign a policy for organization access.
{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@2.22" %} You can configure a runner group to be accessible to a specific list of organizations, or all organizations in the enterprise. By default, public repositories can't access runners in a runner group, but you can use the **Allow public repositories** option to override this.{% else if currentVersion == "enterprise-server@2.22"%}You can configure a runner group to be accessible to all organizations in the enterprise or choose specific organizations.{% endif %}
{% warning %}
**Warning**
{% indented_data_reference site.data.reusables.github-actions.self-hosted-runner-security spaces=3 %}
For more information, see "[About self-hosted runners](/actions/hosting-your-own-runners/about-self-hosted-runners#self-hosted-runner-security-with-public-repositories)."
{% endwarning %}
![Add runner group options](/assets/images/help/settings/actions-enterprise-account-add-runner-group-options.png)
1. Click **Save group** to create the group and apply the policy.

View File

@@ -66,7 +66,7 @@ versions:
<h2 class="mb-2 font-mktg h1">Code examples</h2>
<div class="pr-lg-3 mb-5 mt-3">
<input class="js-code-example-filter input-lg py-2 px-3 col-12 col-lg-8 form-control" placeholder="Search code examples" type="text" autocomplete="off" />
<input class="js-code-example-filter input-lg py-2 px-3 col-12 col-lg-8 form-control" placeholder="Search code examples" type="search" autocomplete="off" aria-label="Search code examples"/>
</div>
<div class="d-flex flex-wrap gutter">
@@ -79,7 +79,7 @@ versions:
<div class="mb-3">{% octicon "search" width="24" %}</div>
<h3 class="text-normal">Sorry, there is no result for <strong class="js-code-example-filter-value"></strong></h3>
<p class="my-3 f4">It looks like we don't have an example that fits your filter.<br>Try another filter or add your code example</p>
<a href="https://github.com/github/docs/blob/HEAD/data/variables/action_code_examples.yml">Learn how to add a code example {% octicon "arrow-right" %}</a>
<a href="https://github.com/github/docs/blob/main/data/variables/action_code_examples.yml">Learn how to add a code example {% octicon "arrow-right" %}</a>
</div>
</div>
{% endif %}

View File

@@ -42,7 +42,7 @@ A job is a set of steps that execute on the same runner. By default, a workflow
#### Steps
A step is an individual task that can run commands (known as _actions_). Each step in a job executes on the same runner, allowing the actions in that job to share data with each other.
A step is an individual task that can run commands in a job. A step can be either an _action_ or a shell command. Each step in a job executes on the same runner, allowing the actions in that job to share data with each other.
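As a small sketch, a job that mixes the two kinds of steps (a prebuilt action and a plain shell command) might look like this:
```yaml
jobs:
  example:
    runs-on: ubuntu-latest
    steps:
      # A step that uses a prebuilt action
      - uses: actions/checkout@v2
      # A step that runs a shell command directly on the runner
      - run: echo "Hello from the runner"
```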
#### Actions
@@ -50,7 +50,7 @@ _Actions_ are standalone commands that are combined into _steps_ to create a _jo
#### Runners
A runner is a server that has the {% data variables.product.prodname_actions %} runner application installed. You can use a runner hosted by {% data variables.product.prodname_dotcom %}, or you can host your own. A runner listens for available jobs, runs one job at a time, and reports the progress, logs, and results back to {% data variables.product.prodname_dotcom %}. For {% data variables.product.prodname_dotcom %}-hosted runners, each job in a workflow runs in a fresh virtual environment.
A runner is a server that has the [{% data variables.product.prodname_actions %} runner application](https://github.com/actions/runner) installed. You can use a runner hosted by {% data variables.product.prodname_dotcom %}, or you can host your own. A runner listens for available jobs, runs one job at a time, and reports the progress, logs, and results back to {% data variables.product.prodname_dotcom %}. For {% data variables.product.prodname_dotcom %}-hosted runners, each job in a workflow runs in a fresh virtual environment.
{% data variables.product.prodname_dotcom %}-hosted runners are based on Ubuntu Linux, Microsoft Windows, and macOS. For information on {% data variables.product.prodname_dotcom %}-hosted runners, see "[Virtual environments for {% data variables.product.prodname_dotcom %}-hosted runners](/actions/reference/virtual-environments-for-github-hosted-runners)." If you need a different operating system or require a specific hardware configuration, you can host your own runners. For information on self-hosted runners, see "[Hosting your own runners](/actions/hosting-your-own-runners)."
@@ -197,7 +197,7 @@ To help you understand how YAML syntax is used to create a workflow file, this s
#### Visualizing the workflow file
In this diagram, you can see the workflow file you just created and how the {% data variables.product.prodname_actions %} components are organized in a hierarchy. Each step executes a single action. Steps 1 and 2 use prebuilt community actions. To find more prebuilt actions for your workflows, see "[Finding and customizing actions](/actions/learn-github-actions/finding-and-customizing-actions)."
In this diagram, you can see the workflow file you just created and how the {% data variables.product.prodname_actions %} components are organized in a hierarchy. Each step executes a single action or shell command. Steps 1 and 2 use prebuilt community actions. Steps 3 and 4 run shell commands directly on the runner. To find more prebuilt actions for your workflows, see "[Finding and customizing actions](/actions/learn-github-actions/finding-and-customizing-actions)."
![Workflow overview](/assets/images/help/images/overview-actions-event.png)

View File

@@ -12,11 +12,11 @@ versions:
### Overview
This article describes some of the advanced features of {% data variables.product.prodname_actions %} that help you create more complex workflows.
This article describes some of the advanced features of {% data variables.product.prodname_actions %} that help you create more complex workflows.
### Storing secrets
If your workflows use sensitive data, such as passwords or certificates, you can save these in {% data variables.product.prodname_dotcom %} as _secrets_ and then use them in your workflows as environment variables. This means that you will be able to create and share workflows without having to embed sensitive values directly in the YAML workflow.
If your workflows use sensitive data, such as passwords or certificates, you can save these in {% data variables.product.prodname_dotcom %} as _secrets_ and then use them in your workflows as environment variables. This means that you will be able to create and share workflows without having to embed sensitive values directly in the YAML workflow.
This example action demonstrates how to reference an existing secret as an environment variable, and send it as a parameter to an example command.
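A rough sketch of that pattern, using a hypothetical secret named `MY_API_TOKEN` and a hypothetical `deploy.sh` script:
{% raw %}
```yaml
steps:
  - name: Call an internal service
    # The secret value comes from the repository's settings, never from the YAML itself
    run: ./deploy.sh "$API_TOKEN"   # hypothetical script and parameter
    env:
      API_TOKEN: ${{ secrets.MY_API_TOKEN }}
```
{% endraw %}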
@@ -57,7 +57,7 @@ jobs:
needs: build
runs-on: ubuntu-latest
steps:
- run: ./test_server.sh
- run: ./test_server.sh
```
For more information, see [`jobs.<job_id>.needs`](/actions/reference/workflow-syntax-for-github-actions#jobsjob_idneeds).
@@ -85,7 +85,7 @@ For more information, see [`jobs.<job_id>.strategy.matrix`](/actions/reference/w
### Caching dependencies
{% data variables.product.prodname_dotcom %}-hosted runners are started as fresh environments for each job, so if your jobs regularly reuse dependencies, you can consider caching these files to help improve performance. Once the cache is created, it is available to all workflows in the same repository.
{% data variables.product.prodname_dotcom %}-hosted runners are started as fresh environments for each job, so if your jobs regularly reuse dependencies, you can consider caching these files to help improve performance. Once the cache is created, it is available to all workflows in the same repository.
This example demonstrates how to cache the ` ~/.npm` directory:
@@ -106,7 +106,7 @@ jobs:
```
{% endraw %}
For more information, see "[Caching dependencies to speed up workflows](/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows)."
For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
### Using databases and service containers
@@ -136,7 +136,7 @@ For more information, see "[Using databases and service containers](/actions/con
### Using labels to route workflows
This feature helps you assign jobs to a specific self-hosted runner. If you want to be sure that a particular type of runner will process your job, you can use labels to control where jobs are executed. You can assign labels to a self-hosted runner, and then refer to these labels in your YAML workflow, ensuring that the job is routed in a predictable way.
This feature helps you assign jobs to a specific self-hosted runner. If you want to be sure that a particular type of runner will process your job, you can use labels to control where jobs are executed. You can assign labels to a self-hosted runner, and then refer to these labels in your YAML workflow, ensuring that the job is routed in a predictable way.
This example shows how a workflow can use labels to specify the required runner:
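One possible shape of that, assuming a self-hosted runner registered with a hypothetical `gpu` label:
```yaml
jobs:
  train:
    # Routed only to self-hosted runners that carry the gpu label
    runs-on: [self-hosted, linux, gpu]
    steps:
      - run: nvidia-smi   # hypothetical command available on that runner
```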

View File

@@ -101,7 +101,7 @@ GitHub Actions
</tr>
</table>
For more information, see "[Caching dependencies to speed up workflows](/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows)."
{% data variables.product.prodname_actions %} caching is only applicable to {% data variables.product.prodname_dotcom %}-hosted runners. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
{% data variables.product.prodname_actions %} does not have an equivalent of CircleCIs Docker Layer Caching (or DLC).

View File

@@ -262,7 +262,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo "This job will be run first, in parallel with build_a"
test_ab:
runs-on: ubuntu-latest
needs: [build_a,build_b]
@@ -346,7 +346,7 @@ jobs:
</tr>
</table>
For more information, see "[Caching dependencies to speed up workflows](/actions/guides/caching-dependencies-to-speed-up-workflows)."
{% data variables.product.prodname_actions %} caching is only applicable to {% data variables.product.prodname_dotcom %}-hosted runners. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
### Artifacts
@@ -367,7 +367,7 @@ GitLab CI/CD
<td class="d-table-cell v-align-top">
{% raw %}
```yaml
script:
script:
artifacts:
paths:
- math-homework.txt
@@ -414,7 +414,7 @@ GitLab CI/CD
container-job:
variables:
POSTGRES_PASSWORD: postgres
# The hostname used to communicate with the
# The hostname used to communicate with the
# PostgreSQL service container
POSTGRES_HOST: postgres
# The default PostgreSQL port
@@ -423,10 +423,10 @@ container-job:
services:
- postgres
script:
# Performs a clean installation of all dependencies
# Performs a clean installation of all dependencies
# in the `package.json` file
- npm ci
# Runs a script that creates a PostgreSQL client,
# Runs a script that creates a PostgreSQL client,
# populates the client with data, and retrieves data
- node client.js
tags:
@@ -452,7 +452,7 @@ jobs:
- name: Check out repository code
uses: actions/checkout@v2
# Performs a clean installation of all dependencies
# Performs a clean installation of all dependencies
# in the `package.json` file
- name: Install dependencies
run: npm ci
@@ -462,7 +462,7 @@ jobs:
# populates the client with data, and retrieves data
run: node client.js
env:
# The hostname used to communicate with the
# The hostname used to communicate with the
# PostgreSQL service container
POSTGRES_HOST: postgres
# The default PostgreSQL port

View File

@@ -164,6 +164,12 @@ git:
</tr>
</table>
#### Using environment variables in a matrix
Travis CI and {% data variables.product.prodname_actions %} can both add custom environment variables to a test matrix, which allows you to refer to the variable in a later step.
In {% data variables.product.prodname_actions %}, you can use the `include` key to add custom environment variables to a matrix. {% data reusables.github-actions.matrix-variable-example %}
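A rough sketch of the `include` pattern (the `experimental_flags` variable is purely illustrative):
{% raw %}
```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        node: [12, 14]
        include:
          # Adds a custom variable only to the node 14 job
          - node: 14
            experimental_flags: '--enable-source-maps'
    steps:
      - run: echo "Extra flags: $FLAGS"
        env:
          FLAGS: ${{ matrix.experimental_flags }}
```
{% endraw %}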
### Key features in {% data variables.product.prodname_actions %}
When migrating from Travis CI, consider the following key features in {% data variables.product.prodname_actions %}:
@@ -178,7 +184,7 @@ When migrating from Travis CI, consider the following key features in {% data va
#### Hosting your own runners
If your jobs require specific hardware or software, {% data variables.product.prodname_actions %} allows you to host your own runners and send your jobs to them for processing. {% data variables.product.prodname_actions %} also lets you use policies to control how these runners are accessed, granting access at the organization or repository level. For more information, see "[Hosting your own runners](/actions/hosting-your-own-runners)."
If your jobs require specific hardware or software, {% data variables.product.prodname_actions %} allows you to host your own runners and send your jobs to them for processing. {% data variables.product.prodname_actions %} also lets you use policies to control how these runners are accessed, granting access at the organization or repository level. For more information, see "[Hosting your own runners](/actions/hosting-your-own-runners)."
#### Concurrent jobs and execution time
@@ -207,7 +213,7 @@ For example:
shell: bash
```
### Error handling in {% data variables.product.prodname_actions %}
### Error handling in {% data variables.product.prodname_actions %}
When migrating to {% data variables.product.prodname_actions %}, there are different approaches to error handling that you might need to be aware of.
@@ -282,7 +288,7 @@ jobs:
### Caching dependencies
Travis CI and {% data variables.product.prodname_actions %} let you manually cache dependencies for later reuse. This example demonstrates the cache syntax for each system.
Travis CI and {% data variables.product.prodname_actions %} let you manually cache dependencies for later reuse. This example demonstrates the cache syntax for each system.
<table>
<tr>
@@ -317,7 +323,7 @@ cache: npm
</tr>
</table>
For more information, see "[Caching dependencies to speed up workflows](/actions/guides/caching-dependencies-to-speed-up-workflows)."
{% data variables.product.prodname_actions %} caching is only applicable to {% data variables.product.prodname_dotcom %}-hosted runners. For more information, see "<a href="/actions/guides/caching-dependencies-to-speed-up-workflows" class="dotcom-only">Caching dependencies to speed up workflows</a>."
### Examples of common tasks

View File

@@ -14,7 +14,7 @@ You can see whether a workflow run is in progress or complete from the workflow
If the run is complete, you can see whether the result was a success, failure, canceled, or neutral. If the run failed, you can view and search the build logs to diagnose the failure and re-run the workflow. You can also view billable job execution minutes, or download logs and build artifacts.
{% data variables.product.prodname_actions %} use the Checks API to output statuses, results, and logs for a workflow. {% data variables.product.prodname_dotcom %} creates a new check suite for each workflow run. The check suite contains a check run for each job in the workflow, and each job includes steps. {% data variables.product.prodname_actions %} are run as a step in a workflow. For more information about the Checks API, see "[Checks](/v3/checks/)."
{% data variables.product.prodname_actions %} use the Checks API to output statuses, results, and logs for a workflow. {% data variables.product.prodname_dotcom %} creates a new check suite for each workflow run. The check suite contains a check run for each job in the workflow, and each job includes steps. {% data variables.product.prodname_actions %} are run as a step in a workflow. For more information about the Checks API, see "[Checks](/rest/reference/checks)."
{% data reusables.github-actions.invalid-workflow-files %}

View File

@@ -79,7 +79,7 @@ You can use the `GITHUB_TOKEN` to make authenticated API calls. This example wor
### Permissions for the `GITHUB_TOKEN`
For information about the API endpoints {% data variables.product.prodname_github_apps %} can access with each permission, see "[{% data variables.product.prodname_github_app %} Permissions](/v3/apps/permissions/)."
For information about the API endpoints {% data variables.product.prodname_github_apps %} can access with each permission, see "[{% data variables.product.prodname_github_app %} Permissions](/rest/reference/permissions-required-for-github-apps)."
| Permission | Access type | Access by forked repos |
|------------|-------------|--------------------------|

View File

@@ -43,11 +43,11 @@ You can use and read encrypted secrets in a workflow file if you have access to
{% endwarning %}
You can also manage secrets using the REST API. For more information, see "[Secrets](/v3/actions/secrets/)."
You can also manage secrets using the REST API. For more information, see "[Secrets](/rest/reference/actions#secrets)."
#### Limiting credential permissions
When generating credentials, we recommend that you grant the minimum permissions possible. For example, instead of using personal credentials, use [deploy keys](/v3/guides/managing-deploy-keys/#deploy-keys) or a service account. Consider granting read-only permissions if that's all that is needed, and limit access as much as possible. When generating a personal access token (PAT), select the fewest scopes necessary.
When generating credentials, we recommend that you grant the minimum permissions possible. For example, instead of using personal credentials, use [deploy keys](/developers/overview/managing-deploy-keys#deploy-keys) or a service account. Consider granting read-only permissions if that's all that is needed, and limit access as much as possible. When generating a personal access token (PAT), select the fewest scopes necessary.
### Creating encrypted secrets for a repository

View File

@@ -143,7 +143,7 @@ jobs:
{% data reusables.github-actions.branch-requirement %}
You can use the {% data variables.product.product_name %} API to trigger a webhook event called [`repository_dispatch`](/webhooks/event-payloads/#repository_dispatch) when you want to trigger a workflow for activity that happens outside of {% data variables.product.prodname_dotcom %}. For more information, see "[Create a repository dispatch event](/v3/repos/#create-a-repository-dispatch-event)."
You can use the {% data variables.product.product_name %} API to trigger a webhook event called [`repository_dispatch`](/webhooks/event-payloads/#repository_dispatch) when you want to trigger a workflow for activity that happens outside of {% data variables.product.prodname_dotcom %}. For more information, see "[Create a repository dispatch event](/rest/reference/repos#create-a-repository-dispatch-event)."
To trigger the custom `repository_dispatch` webhook event, you must send a `POST` request to a {% data variables.product.product_name %} API endpoint and provide an `event_type` name to describe the activity type. To trigger a workflow run, you must also configure your workflow to use the `repository_dispatch` event.
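A minimal sketch of the receiving workflow, assuming a hypothetical `event_type` of `deploy-docs` is sent in the `POST` body:
```yaml
name: External trigger
on:
  repository_dispatch:
    types: [deploy-docs]
jobs:
  respond:
    runs-on: ubuntu-latest
    steps:
      - run: echo "Triggered by an external repository_dispatch event"
```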
@@ -163,7 +163,7 @@ You can configure your workflow to run when webhook events are created on {% dat
#### `check_run`
Runs your workflow anytime the `check_run` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Check runs](/v3/checks/runs/)."
Runs your workflow anytime the `check_run` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Check runs](/rest/reference/checks#runs)."
{% data reusables.github-actions.branch-requirement %}
@@ -183,7 +183,7 @@ on:
#### `check_suite`
Runs your workflow anytime the `check_suite` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Check suites](/v3/checks/suites/)."
Runs your workflow anytime the `check_suite` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Check suites](/rest/reference/checks#suites)."
{% data reusables.github-actions.branch-requirement %}
@@ -209,7 +209,7 @@ on:
#### `create`
Runs your workflow anytime someone creates a branch or tag, which triggers the `create` event. For information about the REST API, see "[Create a reference](/v3/git/refs/#create-a-reference)."
Runs your workflow anytime someone creates a branch or tag, which triggers the `create` event. For information about the REST API, see "[Create a reference](/rest/reference/git#create-a-reference)."
| Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` |
| --------------------- | -------------- | ------------ | -------------|
@@ -224,7 +224,7 @@ on:
#### `delete`
Runs your workflow anytime someone deletes a branch or tag, which triggers the `delete` event. For information about the REST API, see "[Delete a reference](/v3/git/refs/#delete-a-reference)."
Runs your workflow anytime someone deletes a branch or tag, which triggers the `delete` event. For information about the REST API, see "[Delete a reference](/rest/reference/git#delete-a-reference)."
{% data reusables.github-actions.branch-requirement %}
@@ -271,7 +271,7 @@ on:
#### `fork`
Runs your workflow anytime when someone forks a repository, which triggers the `fork` event. For information about the REST API, see "[Create a fork](/v3/repos/forks/#create-a-fork)."
Runs your workflow anytime when someone forks a repository, which triggers the `fork` event. For information about the REST API, see "[Create a fork](/rest/reference/repos#create-a-fork)."
{% data reusables.github-actions.branch-requirement %}
@@ -327,6 +327,7 @@ The `issue_comment` event occurs for comments on both issues and pull requests.
For example, you can choose to run the `pr_commented` job when comment events occur in a pull request, and the `issue_commented` job when comment events occur in an issue.
{% raw %}
```yaml
on: issue_comment
@@ -349,10 +350,11 @@ jobs:
- run: |
echo "Comment on issue #${{ github.event.issue.number }}"
```
{% endraw %}
#### `issues`
Runs your workflow anytime the `issues` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Issues](/v3/issues)."
Runs your workflow anytime the `issues` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Issues](/rest/reference/issues)."
{% data reusables.github-actions.branch-requirement %}
@@ -372,7 +374,7 @@ on:
#### `label`
Runs your workflow anytime the `label` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Labels](/v3/issues/labels/)."
Runs your workflow anytime the `label` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Labels](/rest/reference/issues#labels)."
{% data reusables.github-actions.branch-requirement %}
@@ -392,7 +394,7 @@ on:
#### `milestone`
Runs your workflow anytime the `milestone` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Milestones](/v3/issues/milestones/)."
Runs your workflow anytime the `milestone` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Milestones](/rest/reference/issues#milestones)."
{% data reusables.github-actions.branch-requirement %}
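A short sketch, with activity types chosen purely for illustration:

```yaml
on:
  milestone:
    types: [created, closed]
```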
@@ -429,7 +431,7 @@ on:
#### `project`
Runs your workflow anytime the `project` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Projects](/v3/projects/)."
Runs your workflow anytime the `project` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Projects](/rest/reference/projects)."
{% data reusables.github-actions.branch-requirement %}
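For example (the activity types shown are illustrative):

```yaml
on:
  project:
    types: [created, closed]
```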
@@ -449,7 +451,7 @@ on:
#### `project_card`
Runs your workflow anytime the `project_card` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Project cards](/v3/projects/cards)."
Runs your workflow anytime the `project_card` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Project cards](/rest/reference/projects#cards)."
{% data reusables.github-actions.branch-requirement %}
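An illustrative trigger might be:

```yaml
on:
  project_card:
    types: [created, moved]
```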
@@ -469,7 +471,7 @@ on:
#### `project_column`
Runs your workflow anytime the `project_column` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Project columns](/v3/projects/columns)."
Runs your workflow anytime the `project_column` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Project columns](/rest/reference/projects#columns)."
{% data reusables.github-actions.branch-requirement %}
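For example, a sketch limited to two illustrative activity types:

```yaml
on:
  project_column:
    types: [created, moved]
```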
@@ -489,7 +491,7 @@ on:
#### `public`
Runs your workflow anytime someone makes a private repository public, which triggers the `public` event. For information about the REST API, see "[Edit repositories](/v3/repos/#edit)."
Runs your workflow anytime someone makes a private repository public, which triggers the `public` event. For information about the REST API, see "[Edit repositories](/rest/reference/repos#edit)."
{% data reusables.github-actions.branch-requirement %}
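Since `public` has no activity types, the trigger is a single line, for example:

```yaml
on: public
```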
@@ -506,7 +508,7 @@ on:
#### `pull_request`
Runs your workflow anytime the `pull_request` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Pull requests](/v3/pulls)."
Runs your workflow anytime the `pull_request` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Pull requests](/rest/reference/pulls)."
{% note %}
@@ -532,7 +534,7 @@ on:
#### `pull_request_review`
Runs your workflow anytime the `pull_request_review` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Pull request reviews](/v3/pulls/reviews)."
Runs your workflow anytime the `pull_request_review` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Pull request reviews](/rest/reference/pulls#reviews)."
| Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` |
| --------------------- | -------------- | ------------ | -------------|
@@ -552,7 +554,7 @@ on:
#### `pull_request_review_comment`
Runs your workflow anytime a comment on a pull request's unified diff is modified, which triggers the `pull_request_review_comment` event. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see [Review comments](/v3/pulls/comments).
Runs your workflow anytime a comment on a pull request's unified diff is modified, which triggers the `pull_request_review_comment` event. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see [Review comments](/rest/reference/pulls#comments).
| Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` |
| --------------------- | -------------- | ------------ | -------------|
@@ -570,6 +572,8 @@ on:
{% data reusables.developer-site.pull_request_forked_repos_link %}
{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@2.22" %}
#### `pull_request_target`
This event is similar to `pull_request`, except that it runs in the context of the base repository of the pull request, rather than in the merge commit. This means that you can more safely make your secrets available to the workflows triggered by the pull request, because only workflows defined in the commit on the base repository are run. For example, this event allows you to create workflows that label and comment on pull requests, based on the contents of the event payload.
@@ -593,11 +597,13 @@ on: pull_request_target
types: [assigned, opened, synchronize, reopened]
```
{% endif %}
#### `push`
{% note %}
**Note:** The webhook payload available to GitHub Actions does not include the `added`, `removed`, and `modified` attributes in the `commit` object. You can retrieve the full commit object using the REST API. For more information, see "[Get a single commit](/v3/repos/commits/#get-a-single-commit)"".
**Note:** The webhook payload available to GitHub Actions does not include the `added`, `removed`, and `modified` attributes in the `commit` object. You can retrieve the full commit object using the REST API. For more information, see "[Get a single commit](/rest/reference/repos#get-a-single-commit)."
{% endnote %}
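For example, this sketch narrows `push` runs to one branch and a docs path; the branch name and path pattern are placeholders.

```yaml
on:
  push:
    branches: [main]
    paths:
      - 'docs/**'
```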
@@ -640,7 +646,7 @@ on:
{% endnote %}
Runs your workflow anytime the `release` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Releases](/v3/repos/releases/)."
Runs your workflow anytime the `release` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Releases](/rest/reference/repos#releases)."
| Webhook event payload | Activity types | `GITHUB_SHA` | `GITHUB_REF` |
| --------------------- | -------------- | ------------ | -------------|
@@ -658,7 +664,7 @@ on:
#### `status`
Runs your workflow anytime the status of a Git commit changes, which triggers the `status` event. For information about the REST API, see [Statuses](/v3/repos/statuses/).
Runs your workflow anytime the status of a Git commit changes, which triggers the `status` event. For information about the REST API, see [Statuses](/rest/reference/repos#statuses).
{% data reusables.github-actions.branch-requirement %}
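A minimal sketch of the trigger (`status` has no activity types):

```yaml
on: status
```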
@@ -675,7 +681,7 @@ on:
#### `watch`
Runs your workflow anytime the `watch` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Starring](/v3/activity/starring/)."
Runs your workflow anytime the `watch` event occurs. {% data reusables.developer-site.multiple_activity_types %} For information about the REST API, see "[Starring](/rest/reference/activity#starring)."
{% data reusables.github-actions.branch-requirement %}
@@ -693,6 +699,8 @@ on:
types: [started]
```
{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@2.22" %}
#### `workflow_run`
{% data reusables.webhooks.workflow_run_desc %}
@@ -715,6 +723,8 @@ on:
- requested
```
{% endif %}
### Triggering new workflows using a personal access token
{% data reusables.github-actions.actions-do-not-trigger-workflows %} For more information, see "[Authenticating with the GITHUB_TOKEN](/actions/configuring-and-managing-workflows/authenticating-with-the-github_token)."


@@ -76,6 +76,7 @@ For more information, see:
- "[Disabling or limiting {% data variables.product.prodname_actions %} for your organization](/github/setting-up-and-managing-organizations-and-teams/disabling-or-limiting-github-actions-for-your-organization)"{% if currentVersion == "free-pro-team@latest" %}
- "[Enforcing {% data variables.product.prodname_actions %} policies in your enterprise account](/github/setting-up-and-managing-your-enterprise/enforcing-github-actions-policies-in-your-enterprise-account)" for {% data variables.product.prodname_ghe_cloud %}{% endif %}
{% if currentVersion == "free-pro-team@latest" or currentVersion ver_gt "enterprise-server@2.22" %}
### Disabling and enabling workflows
You can enable and disable individual workflows in your repository on {% data variables.product.prodname_dotcom %}.
@@ -83,3 +84,4 @@ You can enable and disable individual workflows in your repository on {% data va
{% data reusables.actions.scheduled-workflows-disabled %}
For more information, see "[Disabling and enabling a workflow](/actions/managing-workflow-runs/disabling-and-enabling-a-workflow)."
{% endif %}


@@ -227,7 +227,7 @@ Each job runs in an environment specified by `runs-on`.
You can run an unlimited number of jobs as long as you are within the workflow usage limits. For more information, see "[Usage limits and billing](/actions/reference/usage-limits-billing-and-administration)" for {% data variables.product.prodname_dotcom %}-hosted runners and "[About self-hosted runners](/actions/hosting-your-own-runners/about-self-hosted-runners/#usage-limits)" for self-hosted runner usage limits.
If you need to find the unique identifier of a job running in a workflow run, you can use the {% data variables.product.prodname_dotcom %} API. For more information, see "[Workflow Jobs](/v3/actions/workflow-jobs)."
If you need to find the unique identifier of a job running in a workflow run, you can use the {% data variables.product.prodname_dotcom %} API. For more information, see "[Workflow Jobs](/rest/reference/actions#workflow-jobs)."
### **`jobs.<job_id>`**
@@ -446,7 +446,7 @@ steps:
uses: monacorp/action-name@main
- name: My backup step
if: {% raw %}${{ failure() }}{% endraw %}
uses: actions/heroku@master
uses: actions/heroku@1.0.0
```
#### **`jobs.<job_id>.steps.name`**
@@ -492,7 +492,7 @@ jobs:
steps:
- name: My first step
# Uses the default branch of a public repository
uses: actions/heroku@master
uses: actions/heroku@1.0.0
- name: My second step
# Uses a specific version tag of a public repository
uses: actions/aws@v2.0.1
@@ -659,7 +659,7 @@ For built-in shell keywords, we provide the following defaults that are executed
- `cmd`
- There doesn't seem to be a way to fully opt into fail-fast behavior other than writing your script to check each error code and respond accordingly. Because we can't actually provide that behavior by default, you need to write this behavior into your script.
- `cmd.exe` will exit with the error level of the last program it executed, and it will and return the error code to the runner. This behavior is internally consistent with the previous `sh` and `pwsh` default behavior and is the `cmd.exe` default, so this behavior remains intact.
- `cmd.exe` will exit with the error level of the last program it executed, and it will return the error code to the runner. This behavior is internally consistent with the previous `sh` and `pwsh` default behavior and is the `cmd.exe` default, so this behavior remains intact.
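If you do want fail-fast-like behavior with `cmd`, one possible approach is to check the error level after each command yourself. In the sketch below, `build.bat` is a placeholder for your own script.

```yaml
steps:
  - name: Build on Windows (manual fail-fast)
    shell: cmd
    run: |
      rem build.bat is a placeholder for your own script
      call build.bat
      if %errorlevel% neq 0 exit /b %errorlevel%
```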
#### **`jobs.<job_id>.steps.with`**
@@ -718,7 +718,7 @@ steps:
entrypoint: /a/different/executable
```
The `entrypoint` keyword is meant to use with Docker container actions, but you can also use it with JavaScript actions that don't define any inputs.
The `entrypoint` keyword is meant to be used with Docker container actions, but you can also use it with JavaScript actions that don't define any inputs.
#### **`jobs.<job_id>.steps.env`**
@@ -876,6 +876,12 @@ strategy:
{% endnote %}
##### Using environment variables in a matrix
You can add custom environment variables for each test combination by using the `include` key. You can then refer to the custom environment variables in a later step.
{% data reusables.github-actions.matrix-variable-example %}
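As a rough sketch (the `site` variable and the matrix values are purely illustrative), an `include` entry can attach an extra key that a later step reads through `env`:

{% raw %}
```yaml
strategy:
  matrix:
    node: [12, 14]
    include:
      # "site" is an illustrative custom variable, added only for node 14
      - node: 14
        site: production
steps:
  - name: Echo the custom variable
    env:
      SITE: ${{ matrix.site }}
    run: echo "Site is $SITE"
```
{% endraw %}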
### **`jobs.<job_id>.strategy.fail-fast`**
When set to `true`, {% data variables.product.prodname_dotcom %} cancels all in-progress jobs if any `matrix` job fails. Default: `true`
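For example, to keep the remaining matrix jobs running after one combination fails (the matrix values are illustrative):

```yaml
strategy:
  fail-fast: false
  matrix:
    node: [10, 12, 14]
```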


@@ -9,7 +9,7 @@ versions:
---
User accounts on {% data variables.product.product_location %} are preserved when you change the authentication method and users will continue to log into the same account as long as their username doesn't change.
If the new method of authentication changes usernames, new accounts will be created. As an administrator, you can rename users through the site admin settings or by using [the User Administration API](/enterprise/{{currentVersion}}/v3/enterprise-admin/users/#rename-an-existing-user).
If the new method of authentication changes usernames, new accounts will be created. As an administrator, you can rename users through the site admin settings or by using [the User Administration API](/rest/reference/enterprise-admin#update-the-username-for-a-user).
Other issues you should take into consideration include:


@@ -82,7 +82,7 @@ Allows you to find the uuid of your node in `cluster.conf`.
Allows you to exempt a list of users from API rate limits. For more information, see "[Resources in the REST API](/rest/overview/resources-in-the-rest-api#rate-limiting)."
``` shell
$ ghe-config app.github.rate_limiting_exempt_users "<em>hubot</em> <em>github-actions</em>"
$ ghe-config app.github.rate-limiting-exempt-users "<em>hubot</em> <em>github-actions</em>"
# Exempts the users hubot and github-actions from rate limits
```
{% endif %}
@@ -157,7 +157,7 @@ $ ghe-es-index-status -do | column -ts,
#### ghe-legacy-github-services-report
This utility lists repositories on your appliance that use {% data variables.product.prodname_dotcom %} Services, an integration method that will be discontinued on October 1, 2018. Users on your appliance may have set up {% data variables.product.prodname_dotcom %} Services to create notifications for pushes to certain repositories. For more information, see "[Announcing the deprecation of {% data variables.product.prodname_dotcom %} Services](https://developer.github.com/changes/2018-04-25-github-services-deprecation/)" on {% data variables.product.prodname_blog %} or "[Replacing {% data variables.product.prodname_dotcom %} Services](/v3/guides/replacing-github-services/)." For more information about this command or for additional options, use the `-h` flag.
This utility lists repositories on your appliance that use {% data variables.product.prodname_dotcom %} Services, an integration method that will be discontinued on October 1, 2018. Users on your appliance may have set up {% data variables.product.prodname_dotcom %} Services to create notifications for pushes to certain repositories. For more information, see "[Announcing the deprecation of {% data variables.product.prodname_dotcom %} Services](https://developer.github.com/changes/2018-04-25-github-services-deprecation/)" on {% data variables.product.prodname_blog %} or "[Replacing {% data variables.product.prodname_dotcom %} Services](/developers/overview/replacing-github-services)." For more information about this command or for additional options, use the `-h` flag.
```shell
ghe-legacy-github-services-report


@@ -37,7 +37,7 @@ Enabling {% data variables.product.prodname_github_connect %} also creates a {%
Enabling {% data variables.product.prodname_github_connect %} will not allow {% data variables.product.prodname_dotcom_the_website %} users to make changes to {% data variables.product.prodname_ghe_server %}.
For more information about managing enterprise accounts using the GraphQL API, see "[Enterprise accounts](/v4/guides/managing-enterprise-accounts)."
For more information about managing enterprise accounts using the GraphQL API, see "[Enterprise accounts](/graphql/guides/managing-enterprise-accounts)."
### Enabling {% data variables.product.prodname_github_connect %}
1. Sign in to {% data variables.product.product_location_enterprise %} and {% data variables.product.prodname_dotcom_the_website %}.


@@ -31,9 +31,7 @@ versions:
{% link_in_list /enabling-private-mode %}
{% link_in_list /configuring-email-for-notifications %}
{% link_in_list /configuring-rate-limits %}
<!-- if currentVersion ver_gt "enterprise-server@2.15" -->
{% link_in_list /configuring-applications %}
<!-- endif -->
{% link_in_list /troubleshooting-ssl-errors %}
{% link_in_list /configuring-time-synchronization %}
{% link_in_list /command-line-utilities %}
@@ -54,11 +52,7 @@ versions:
{% link_in_list /connecting-github-enterprise-server-to-github-enterprise-cloud %}
{% link_in_list /enabling-unified-search-between-github-enterprise-server-and-githubcom %}
{% link_in_list /enabling-unified-contributions-between-github-enterprise-server-and-githubcom %}
<!-- if currentVersion ver_gt "enterprise-server@2.16" -->
{% link_in_list /enabling-alerts-for-vulnerable-dependencies-on-github-enterprise-server %}
{% link_in_list /enabling-automatic-user-license-sync-between-github-enterprise-server-and-github-enterprise-cloud %}
<!-- endif -->
<!-- if currentVersion ver_gt "enterprise-server@2.21" -->
{% topic_link_in_list /configuring-advanced-security-features %}
{% link_in_list /configuring-code-scanning-for-your-appliance %}
<!-- endif -->


@@ -10,7 +10,7 @@ versions:
---
### Set up an external `collectd` server
If you haven't already set up an external `collectd` server, you will need to do so before enabling `collectd` forwarding on {% data variables.product.product_location %}. Your `collectd` server must by running `collectd` version 5.x or higher.
If you haven't already set up an external `collectd` server, you will need to do so before enabling `collectd` forwarding on {% data variables.product.product_location %}. Your `collectd` server must be running `collectd` version 5.x or higher.
1. Log into your `collectd` server.
2. Create or edit the `collectd` configuration file to load the network plugin and populate the server and port directives with the proper values. On most distributions, this is located at `/etc/collectd/collectd.conf`


@@ -57,32 +57,36 @@ Before you define a secondary datacenter for your passive nodes, ensure that you
mysql-master = <em>HOSTNAME</em>
redis-master = <em>HOSTNAME</em>
<strong>primary-datacenter = default</strong>
```
- Optionally, change the name of the primary datacenter to something more descriptive or accurate by editing the value of `primary-datacenter`.
4. {% data reusables.enterprise_clustering.configuration-file-heading %} Under each node's heading, add a new key-value pair to assign the node to a datacenter. Use the same value as `primary-datacenter` from step 3 above. For example, if you want to use the default name (`default`), add the following key-value pair to the section for each node.
```
datacenter = default
```
When you're done, the section for each node in the cluster configuration file should look like the following example. {% data reusables.enterprise_clustering.key-value-pair-order-irrelevant %}
```shell
[cluster "<em>HOSTNAME</em>"]
<strong>datacenter = default</strong>
hostname = <em>HOSTNAME</em>
ipv4 = <em>IP ADDRESS</em>
...
...
```
{% note %}
**Note**: If you changed the name of the primary datacenter in step 3, find the `consul-datacenter` key-value pair in the section for each node and change the value to the renamed primary datacenter. For example, if you named the primary datacenter `primary`, use the following key-value pair for each node.
```
consul-datacenter = primary
```
{% endnote %}
{% data reusables.enterprise_clustering.apply-configuration %}
@@ -103,31 +107,37 @@ For an example configuration, see "[Example configuration](#example-configuratio
1. For each node in your cluster, provision a matching virtual machine with identical specifications, running the same version of {% data variables.product.prodname_ghe_server %}. Note the IPv4 address and hostname for each new cluster node. For more information, see "[Prerequisites](#prerequisites)."
{% note %}
**Note**: If you're reconfiguring high availability after a failover, you can use the old nodes from the primary datacenter instead.
{% endnote %}
{% data reusables.enterprise_clustering.ssh-to-a-node %}
3. Back up your existing cluster configuration.
```
cp /data/user/common/cluster.conf ~/$(date +%Y-%m-%d)-cluster.conf.backup
```
4. Create a copy of your existing cluster configuration file in a temporary location, like _/home/admin/cluster-passive.conf_. Delete unique key-value pairs for IP addresses (`ipv*`), UUIDs (`uuid`), and public keys for WireGuard (`wireguard-pubkey`).
```
grep -Ev "(?:|ipv|uuid|vpn|wireguard\-pubkey)" /data/user/common/cluster.conf > ~/cluster-passive.conf
```
5. Remove the `[cluster]` section from the temporary cluster configuration file that you copied in the previous step.
```
git config -f ~/cluster-passive.conf --remove-section cluster
```
6. Decide on a name for the secondary datacenter where you provisioned your passive nodes, then update the temporary cluster configuration file with the new datacenter name. Replace `SECONDARY` with the name you choose.
```shell
sed -i 's/datacenter = default/datacenter = <em>SECONDARY</em>/g' ~/cluster-passive.conf
```
7. Decide on a pattern for the passive nodes' hostnames.
@@ -140,7 +150,7 @@ For an example configuration, see "[Example configuration](#example-configuratio
8. Open the temporary cluster configuration file from step 3 in a text editor. For example, you can use Vim.
```shell
sudo vim ~/cluster-passive.conf
```
9. In each section within the temporary cluster configuration file, update the node's configuration. {% data reusables.enterprise_clustering.configuration-file-heading %}
@@ -150,37 +160,37 @@ For an example configuration, see "[Example configuration](#example-configuratio
- Add a new key-value pair, `replica = enabled`.
```shell
[cluster "<em>NEW PASSIVE NODE HOSTNAME</em>"]
...
hostname = <em>NEW PASSIVE NODE HOSTNAME</em>
ipv4 = <em>NEW PASSIVE NODE IPV4 ADDRESS</em>
<strong>replica = enabled</strong>
...
...
```
10. Append the contents of the temporary cluster configuration file that you created in step 4 to the active configuration file.
```shell
cat ~/cluster-passive.conf >> /data/user/common/cluster.conf
```
11. Designate the primary MySQL and Redis nodes in the secondary datacenter. Replace `REPLICA MYSQL PRIMARY HOSTNAME` and `REPLICA REDIS PRIMARY HOSTNAME` with the hostnames of the passive nodes that you provisioned to match your existing MySQL and Redis primaries.
```shell
git config -f /data/user/common/cluster.conf cluster.mysql-master-replica <em>REPLICA MYSQL PRIMARY HOSTNAME</em>
git config -f /data/user/common/cluster.conf cluster.redis-master-replica <em>REPLICA REDIS PRIMARY HOSTNAME</em>
```
12. Enable MySQL to fail over automatically when you fail over to the passive replica nodes.
```shell
git config -f /data/user/common/cluster.conf cluster.mysql-auto-failover true
```
{% warning %}
**Warning**: Review your cluster configuration file before proceeding.
- In the top-level `[cluster]` section, ensure that the values for `mysql-master-replica` and `redis-master-replica` are the correct hostnames for the passive nodes in the secondary datacenter that will serve as the MySQL and Redis primaries after a failover.
- In each section for an active node named <code>[cluster "<em>ACTIVE NODE HOSTNAME</em>"]</code>, double-check the following key-value pairs.
@@ -194,9 +204,9 @@ For an example configuration, see "[Example configuration](#example-configuratio
- `replica` should be configured as `enabled`.
- Take the opportunity to remove sections for offline nodes that are no longer in use.
To review an example configuration, see "[Example configuration](#example-configuration)."
{% endwarning %}
13. Initialize the new cluster configuration. {% data reusables.enterprise.use-a-multiplexer %}
@@ -207,7 +217,7 @@ For an example configuration, see "[Example configuration](#example-configuratio
14. After the initialization finishes, {% data variables.product.prodname_ghe_server %} displays the following message.
```shell
Finished cluster initialization
```
{% data reusables.enterprise_clustering.apply-configuration %}
@@ -294,19 +304,27 @@ You can monitor the progress on any node in the cluster, using command-line tool
- Monitor replication of databases:
```
/usr/local/share/enterprise/ghe-cluster-status-mysql
```
- Monitor replication of repository and Gist data:
```
ghe-spokes status
```
- Monitor replication of attachment and LFS data:
```
ghe-storage replication-status
```
- Monitor replication of Pages data:
```
ghe-dpages replication-status
```
You can use `ghe-cluster-status` to review the overall health of your cluster. For more information, see "[Command-line utilities](/enterprise/admin/configuration/command-line-utilities#ghe-cluster-status)."


@@ -20,6 +20,8 @@ As more users join {% data variables.product.product_location %}, you may need t
{% endnote %}
#### Minimum requirements
{% data reusables.enterprise_installation.hardware-rec-table %}
### Increasing the data partition size


@@ -27,7 +27,7 @@ Before launching {% data variables.product.product_location %} on Google Cloud P
{% data variables.product.prodname_ghe_server %} is supported on the following Google Compute Engine (GCE) machine types. For more information, see [the Google Cloud Platform machine types article](https://cloud.google.com/compute/docs/machine-types).
| High-memory |
------------- |
| ------------- |
| n1-highmem-4 |
| n1-highmem-8 |
| n1-highmem-16 |
@@ -54,7 +54,7 @@ Based on your user license count, we recommend these machine types.
1. Using the [gcloud compute](https://cloud.google.com/compute/docs/gcloud-compute/) command-line tool, list the public {% data variables.product.prodname_ghe_server %} images:
```shell
$ gcloud compute images list --project github-enterprise-public --no-standard-images
```
2. Take note of the image name for the latest GCE image of {% data variables.product.prodname_ghe_server %}.
@@ -63,18 +63,18 @@ Based on your user license count, we recommend these machine types.
GCE virtual machines are created as a member of a network, which has a firewall. For the network associated with the {% data variables.product.prodname_ghe_server %} VM, you'll need to configure the firewall to allow the required ports listed in the table below. For more information about firewall rules on Google Cloud Platform, see the Google guide "[Firewall Rules Overview](https://cloud.google.com/vpc/docs/firewalls)."
1. Using the gcloud compute command-line tool, create the network. For more information, see "[gcloud compute networks create](https://cloud.google.com/sdk/gcloud/reference/compute/networks/create)" in the Google documentation.
```shell
$ gcloud compute networks create <em>NETWORK-NAME</em> --subnet-mode auto
```
2. Create a firewall rule for each of the ports in the table below. For more information, see "[gcloud compute firewall-rules](https://cloud.google.com/sdk/gcloud/reference/compute/firewall-rules/)" in the Google documentation.
```shell
$ gcloud compute firewall-rules create <em>RULE-NAME</em> \
--network <em>NETWORK-NAME</em> \
--allow tcp:22,tcp:25,tcp:80,tcp:122,udp:161,tcp:443,udp:1194,tcp:8080,tcp:8443,tcp:9418,icmp
```
This table identifies the required ports and what each port is used for.
{% data reusables.enterprise_installation.necessary_ports %}
### Allocating a static IP and assigning it to the VM
@@ -87,21 +87,21 @@ In production High Availability configurations, both primary and replica applian
To create the {% data variables.product.prodname_ghe_server %} instance, you'll need to create a GCE instance with your {% data variables.product.prodname_ghe_server %} image and attach an additional storage volume for your instance data. For more information, see "[Hardware considerations](#hardware-considerations)."
1. Using the gcloud compute command-line tool, create a data disk to use as an attached storage volume for your instance data, and configure the size based on your user license count. For more information, see "[gcloud compute disks create](https://cloud.google.com/sdk/gcloud/reference/compute/disks/create)" in the Google documentation.
```shell
$ gcloud compute disks create <em>DATA-DISK-NAME</em> --size <em>DATA-DISK-SIZE</em> --type <em>DATA-DISK-TYPE</em> --zone <em>ZONE</em>
```
2. Then create an instance using the name of the {% data variables.product.prodname_ghe_server %} image you selected, and attach the data disk. For more information, see "[gcloud compute instances create](https://cloud.google.com/sdk/gcloud/reference/compute/instances/create)" in the Google documentation.
```shell
$ gcloud compute instances create <em>INSTANCE-NAME</em> \
--machine-type n1-standard-8 \
--image <em>GITHUB-ENTERPRISE-IMAGE-NAME</em> \
--disk name=<em>DATA-DISK-NAME</em> \
--metadata serial-port-enable=1 \
--zone <em>ZONE</em> \
--network <em>NETWORK-NAME</em> \
--image-project github-enterprise-public
```
### Configuring the instance

Some files were not shown because too many files have changed in this diff.