
Merge branch 'main' into patch-2

Authored by mc on 2021-12-01 05:57:48 +00:00; committed by GitHub
24 changed files with 575 additions and 167 deletions


@@ -0,0 +1,54 @@
#!/usr/bin/env node
import createStagingAppName from '../../script/deployment/create-staging-app-name.js'
import * as github from '@actions/github'
import { setOutput } from '@actions/core'

const context = github.context

const githubToken = process.env.GITHUB_TOKEN
if (!githubToken) {
  throw new Error(`GITHUB_TOKEN environment variable not set`)
}

const stagingPrefix = createStagingAppName({
  repo: context.payload.repository.name,
  pullNumber: context.payload.number,
  branch: context.payload.pull_request.head.ref,
})

const octokit = github.getOctokit(githubToken)

const response = await octokit.rest.repos.compareCommits({
  owner: context.repo.owner,
  repo: context.payload.repository.name,
  base: context.payload.pull_request.base.sha,
  head: context.payload.pull_request.head.sha,
})
const { files } = response.data

let markdownTable =
  '| **Source** | **Staging** | **Production** | **What Changed** |\n|:----------- |:----------- |:----------- |:----------- |\n'

const pathPrefix = 'content/'
const articleFiles = files.filter(
  ({ filename }) => filename.startsWith(pathPrefix) && !filename.endsWith('/index.md')
)
for (const file of articleFiles) {
  const sourceUrl = file.blob_url
  const fileName = file.filename.slice(pathPrefix.length)
  const fileUrl = fileName.slice(0, fileName.lastIndexOf('.'))
  const stagingLink = `https://${stagingPrefix}.herokuapp.com/${fileUrl}`
  const productionLink = `https://docs.github.com/${fileUrl}`
  let markdownLine = ''

  if (file.status === 'modified') {
    markdownLine = `| [content/${fileName}](${sourceUrl}) | [Modified](${stagingLink}) | [Original](${productionLink}) | |\n`
  } else if (file.status === 'added') {
    markdownLine = `| New file: [content/${fileName}](${sourceUrl}) | [Modified](${stagingLink}) | | |\n`
  }

  markdownTable += markdownLine
}

setOutput('changesTable', markdownTable)

.github/actions-scripts/prod-deploy.js vendored Executable file

@@ -0,0 +1,51 @@
#!/usr/bin/env node
import getOctokit from '../../script/helpers/github.js'
import deployToProduction from '../../script/deployment/deploy-to-production.js'

const {
  GITHUB_TOKEN,
  HEROKU_API_TOKEN,
  HEROKU_PRODUCTION_APP_NAME,
  SOURCE_BLOB_URL,
  DELAY_FOR_PREBOOT,
  RUN_ID,
} = process.env

// Exit if GitHub Actions PAT is not found
if (!GITHUB_TOKEN) {
  throw new Error('You must supply a GITHUB_TOKEN environment variable!')
}
// Exit if Heroku API token is not found
if (!HEROKU_API_TOKEN) {
  throw new Error('You must supply a HEROKU_API_TOKEN environment variable!')
}
// Exit if Heroku App name is not found
if (!HEROKU_PRODUCTION_APP_NAME) {
  throw new Error('You must supply a HEROKU_PRODUCTION_APP_NAME environment variable!')
}
if (!RUN_ID) {
  throw new Error('$RUN_ID not set')
}

// This helper uses the `GITHUB_TOKEN` implicitly!
// We're using our usual version of Octokit vs. the provided `github`
// instance to avoid versioning discrepancies.
const octokit = getOctokit()

try {
  await deployToProduction({
    octokit,
    includeDelayForPreboot: DELAY_FOR_PREBOOT !== 'false',
    // These parameters will ONLY be set by Actions
    sourceBlobUrl: SOURCE_BLOB_URL,
    runId: RUN_ID,
  })
} catch (error) {
  console.error(`Failed to deploy to production: ${error.message}`)
  console.error(error)
  throw error
}


@@ -0,0 +1,11 @@
#!/usr/bin/env node
import purgeEdgeCache from '../../script/deployment/purge-edge-cache.js'

try {
  await purgeEdgeCache()
} catch (error) {
  console.error(`Failed to purge the edge cache: ${error.message}`)
  console.error(error)
  throw error
}


@@ -49,57 +49,12 @@ jobs:
      - name: Install temporary dependencies
        run: |
          npm install --no-save github-slugger
          npm install --no-save --include=optional esm
      - name: Get changes table
        uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
        id: changes
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          script: |
            // Workaround to allow us to load ESM files with `require(...)`
            const esm = require('esm')
            require = esm({})
            const { default: createStagingAppName } = require('./script/deployment/create-staging-app-name')
            const stagingPrefix = createStagingAppName({
              repo: context.payload.repository.name,
              pullNumber: context.payload.number,
              branch: context.payload.pull_request.head.ref,
            })
            const response = await github.repos.compareCommits({
              owner: context.repo.owner,
              repo: context.repo.repo,
              base: context.payload.pull_request.base.sha,
              head: context.payload.pull_request.head.sha
            })
            const files = response.data.files
            let markdownTable = '| **Source** | **Staging** | **Production** | **What Changed** |\n|:----------- |:----------- |:----------- |:----------- |\n'
            const pathPrefix = 'content/'
            const articleFiles = files.filter(({ filename }) => filename.startsWith(pathPrefix) && !filename.endsWith('/index.md'))
            for (const file of articleFiles) {
              const sourceUrl = file.blob_url
              const fileName = file.filename.slice(pathPrefix.length)
              const fileUrl = fileName.slice(0, fileName.lastIndexOf('.'))
              const stagingLink = `https://${stagingPrefix}.herokuapp.com/${fileUrl}`
              const productionLink = `https://docs.github.com/${fileUrl}`
              let markdownLine = ''
              if (file.status === 'modified') {
                markdownLine = `| [content/${fileName}](${sourceUrl}) | [Modified](${stagingLink}) | [Original](${productionLink}) | |\n`
              } else if (file.status === 'added') {
                markdownLine = `| New file: [content/${fileName}](${sourceUrl}) | [Modified](${stagingLink}) | | |\n`
              }
              markdownTable += markdownLine
            }
            core.setOutput('changesTable', markdownTable)
        run: .github/actions-scripts/content-changes-table-comment.js
      - name: Find content directory changes comment
        uses: peter-evans/find-comment@d2dae40ed151c634e4189471272b57e76ec19ba8


@@ -138,8 +138,8 @@ jobs:
      - name: Check in CSV report
        run: |
          mkdir -p log
          csvFile=log/${{ matrix.language_code }}-resets.csv
          mkdir -p translations/log
          csvFile=translations/log/${{ matrix.language_code }}-resets.csv
          script/i18n/report-reset-files.js --report-type=csv --language=${{ matrix.language_code }} --log-file=/tmp/batch.log > $csvFile
          git add -f $csvFile && git commit -m "Check in ${{ matrix.language }} CSV report" || echo "Nothing to commit"


@@ -21,7 +21,7 @@ concurrency:
jobs:
  build-and-deploy:
    if: ${{ github.repository == 'github/docs-internal'}}
    runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Check out repo
@@ -129,12 +129,8 @@ jobs:
            -H 'Content-Type:' \
            --data-binary @app.tar.gz
      - name: Install one-off development-only dependencies
        run: npm install --no-save --include=optional esm
      - name: Deploy
        id: deploy
        uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
@@ -144,56 +140,8 @@ jobs:
          SOURCE_BLOB_URL: ${{ steps.build-source.outputs.download_url }}
          DELAY_FOR_PREBOOT: 'true'
          ALLOWED_POLLING_FAILURES_PER_PHASE: '15'
        with:
          script: |
            const {
              GITHUB_TOKEN,
              HEROKU_API_TOKEN,
              HEROKU_PRODUCTION_APP_NAME,
              SOURCE_BLOB_URL,
              DELAY_FOR_PREBOOT
            } = process.env
            // Exit if GitHub Actions PAT is not found
            if (!GITHUB_TOKEN) {
              throw new Error('You must supply a GITHUB_TOKEN environment variable!')
            }
            // Exit if Heroku API token is not found
            if (!HEROKU_API_TOKEN) {
              throw new Error('You must supply a HEROKU_API_TOKEN environment variable!')
            }
            // Exit if Heroku App name is not found
            if (!HEROKU_PRODUCTION_APP_NAME) {
              throw new Error('You must supply a HEROKU_PRODUCTION_APP_NAME environment variable!')
            }
            // Workaround to allow us to load ESM files with `require(...)`
            const esm = require('esm')
            require = esm({})
            const { default: getOctokit } = require('./script/helpers/github')
            const { default: deployToProduction } = require('./script/deployment/deploy-to-production')
            // This helper uses the `GITHUB_TOKEN` implicitly!
            // We're using our usual version of Octokit vs. the provided `github`
            // instance to avoid versioning discrepancies.
            const octokit = getOctokit()
            try {
              await deployToProduction({
                octokit,
                includeDelayForPreboot: DELAY_FOR_PREBOOT !== 'false',
                // These parameters will ONLY be set by Actions
                sourceBlobUrl: SOURCE_BLOB_URL,
                runId: context.runId
              })
            } catch (error) {
              console.error(`Failed to deploy to production: ${error.message}`)
              console.error(error)
              throw error
            }
          RUN_ID: ${{ github.run_id }}
        run: .github/actions-scripts/prod-deploy.js
      - name: Mark the deployment as inactive if timed out
        uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
@@ -227,26 +175,11 @@ jobs:
              console.log('⏲️ Deployment status: error - The deployment timed out...')
      - name: Purge Fastly edge cache
        uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d
        env:
          FASTLY_TOKEN: ${{ secrets.FASTLY_TOKEN }}
          FASTLY_SERVICE_ID: ${{ secrets.FASTLY_SERVICE_ID }}
          FASTLY_SURROGATE_KEY: 'all-the-things'
        with:
          script: |
            // Workaround to allow us to load ESM files with `require(...)`
            const esm = require('esm')
            require = esm({})
            const { default: purgeEdgeCache } = require('./script/deployment/purge-edge-cache')
            try {
              await purgeEdgeCache()
            } catch (error) {
              console.error(`Failed to purge the edge cache: ${error.message}`)
              console.error(error)
              throw error
            }
        run: .github/actions-scripts/purge-fastly-edge-cache.js
      - name: Send Slack notification if workflow failed
        uses: someimportantcompany/github-actions-slack-message@f8d28715e7b8a4717047d23f48c39827cacad340


@@ -80,11 +80,15 @@ export const ToolPicker = ({ variant = 'subnav' }: Props) => {
    }
  }, [])

  // Whenever the currentTool is changed, update the article content
  useEffect(() => {
    preserveAnchorNodePosition(document, () => {
      showToolSpecificContent(currentTool)
    })
  }, [currentTool])

  function onClickTool(tool: string) {
    setCurrentTool(tool)
    preserveAnchorNodePosition(document, () => {
      showToolSpecificContent(tool)
    })
    sendEvent({
      type: EventType.preference,
      preference_name: 'application',


@@ -70,6 +70,7 @@ Called workflows can access self-hosted runners from the caller's context. This means
* Reusable workflows stored within a private repository can only be used by workflows within the same repository.
* Any environment variables set in an `env` context defined at the workflow level in the caller workflow are not propagated to the called workflow. For more information about the `env` context, see "[Context and expression syntax for GitHub Actions](/actions/reference/context-and-expression-syntax-for-github-actions#env-context)."
* You can't set the concurrency of a called workflow from the caller workflow. For more information about `jobs.<job_id>.concurrency`, see "[Workflow syntax for GitHub Actions](/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idconcurrency)."
* The `strategy` property is not supported in any job that calls a reusable workflow; see the sketch after this list.
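To make the last two limitations concrete, here is a minimal caller sketch (the repository and workflow path are hypothetical placeholders). The caller job is just a `uses:` reference, so there is nowhere to attach a `strategy`, and the workflow-level `env` is not visible to the called workflow:

```yaml
name: Caller workflow
on: push

env:
  # NOT propagated to the called workflow (see the limitation above).
  LOG_LEVEL: info

jobs:
  call-reusable-workflow:
    # A `strategy` block is not supported on this job.
    uses: octo-org/example-repo/.github/workflows/reusable-workflow.yml@main
```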
## Creating a reusable workflow


@@ -28,24 +28,91 @@ shortTitle: Use Dependabot with actions
## Responding to events
{% data variables.product.prodname_dependabot %} is able to trigger {% data variables.product.prodname_actions %} workflows on its pull requests and comments; however, due to ["GitHub Actions: Workflows triggered by Dependabot PRs will run with read-only permissions"](https://github.blog/changelog/2021-02-19-github-actions-workflows-triggered-by-dependabot-prs-will-run-with-read-only-permissions/), certain events are treated differently.
{% data variables.product.prodname_dependabot %} is able to trigger {% data variables.product.prodname_actions %} workflows on its pull requests and comments; however, certain events are treated differently.
For workflows initiated by {% data variables.product.prodname_dependabot %} (`github.actor == "dependabot[bot]"`) using the `pull_request`, `pull_request_review`, `pull_request_review_comment`, and `push` events, the following restrictions apply:
- `GITHUB_TOKEN` has read-only permissions.
- Secrets are inaccessible.
- {% ifversion ghes = 3.3 %}`GITHUB_TOKEN` has read-only permissions, unless your administrator has removed restrictions.{% else %}`GITHUB_TOKEN` has read-only permissions by default.{% endif %}
- {% ifversion ghes = 3.3 %}Secrets are inaccessible, unless your administrator has removed restrictions.{% else %}Secrets are populated from {% data variables.product.prodname_dependabot %} secrets. {% data variables.product.prodname_actions %} secrets are not available.{% endif %}
For more information, see ["Keeping your GitHub Actions and workflows secure: Preventing pwn requests"](https://securitylab.github.com/research/github-actions-preventing-pwn-requests/).
{% ifversion ghes > 3.2 %}
{% ifversion fpt or ghec or ghes > 3.3 %}
### Changing `GITHUB_TOKEN` permissions
By default, {% data variables.product.prodname_actions %} workflows triggered by {% data variables.product.prodname_dependabot %} get a `GITHUB_TOKEN` with read-only permissions. You can use the `permissions` key in your workflow to increase the access for the token:
{% raw %}
```yaml
name: CI
on: pull_request

# Set the access for individual scopes, or use permissions: write-all
permissions:
  pull-requests: write
  issues: write
  repository-projects: write
  ...

jobs:
  ...
```
{% endraw %}
For more information, see "[Modifying the permissions for the GITHUB_TOKEN](/actions/security-guides/automatic-token-authentication#modifying-the-permissions-for-the-github_token)."
### Accessing secrets
When a {% data variables.product.prodname_dependabot %} event triggers a workflow, the only secrets available to the workflow are {% data variables.product.prodname_dependabot %} secrets. {% data variables.product.prodname_actions %} secrets are not available. Consequently, you must store any secrets that are used by a workflow triggered by {% data variables.product.prodname_dependabot %} events as {% data variables.product.prodname_dependabot %} secrets. For more information, see "[Managing encrypted secrets for Dependabot](/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/managing-encrypted-secrets-for-dependabot)".
{% data variables.product.prodname_dependabot %} secrets are added to the `secrets` context and referenced using exactly the same syntax as secrets for {% data variables.product.prodname_actions %}. For more information, see "[Encrypted secrets](/actions/security-guides/encrypted-secrets#using-encrypted-secrets-in-a-workflow)."
If you have a workflow that will be triggered by {% data variables.product.prodname_dependabot %} and also by other actors, the simplest solution is to store the token with the required permissions in an {% data variables.product.prodname_actions %} secret and in a {% data variables.product.prodname_dependabot %} secret with identical names. Then the workflow can include a single call to these secrets. If the secret for {% data variables.product.prodname_dependabot %} has a different name, use conditions to specify the correct secret for each actor. For examples that use conditions, see "[Common automations](#common-dependabot-automations)" below.
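As a rough sketch of that conditional pattern, a single step can select between two differently named secrets based on the actor (the secret names `DEPENDABOT_NPM_TOKEN` and `NPM_TOKEN` here are hypothetical placeholders):

{% raw %}
```yaml
steps:
  - name: Install dependencies from a private registry
    env:
      # Dependabot-triggered runs only see Dependabot secrets, so fall
      # back to the Dependabot-scoped copy when Dependabot is the actor.
      NODE_AUTH_TOKEN: ${{ github.actor == 'dependabot[bot]' && secrets.DEPENDABOT_NPM_TOKEN || secrets.NPM_TOKEN }}
    run: npm ci
```
{% endraw %}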
To access a private container registry on AWS with a user name and password, a workflow must include a secret for `username` and `password`. In the example below, when {% data variables.product.prodname_dependabot %} triggers the workflow, the {% data variables.product.prodname_dependabot %} secrets with the names `READONLY_AWS_ACCESS_KEY_ID` and `READONLY_AWS_ACCESS_KEY` are used. If another actor triggers the workflow, the actions secrets with those names are used.
{% raw %}
```yaml
name: CI
on:
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      - name: Login to private container registry for dependencies
        uses: docker/login-action@v1
        with:
          registry: https://1234567890.dkr.ecr.us-east-1.amazonaws.com
          username: ${{ secrets.READONLY_AWS_ACCESS_KEY_ID }}
          password: ${{ secrets.READONLY_AWS_ACCESS_KEY }}
      - name: Build the Docker image
        run: docker build . --file Dockerfile --tag my-image-name:$(date +%s)
```
{% endraw %}
{% endif %}
{% ifversion ghes = 3.3 %}
{% note %}
**Note:** Your site administrator can override these restrictions for {% data variables.product.product_location %}. For more information, see "[Troubleshooting {% data variables.product.prodname_actions %} for your enterprise](/admin/github-actions/advanced-configuration-and-troubleshooting/troubleshooting-github-actions-for-your-enterprise#troubleshooting-failures-when-dependabot-triggers-existing-workflows)."
If the restrictions are removed, when a workflow is triggered by {% data variables.product.prodname_dependabot %} it will have access to any secrets that are normally available. In addition, workflows triggered by {% data variables.product.prodname_dependabot %} can use the `permissions` term to increase the default scope of the `GITHUB_TOKEN` from read-only access.
If the restrictions are removed, when a workflow is triggered by {% data variables.product.prodname_dependabot %} it will have access to {% data variables.product.prodname_actions %} secrets and can use the `permissions` term to increase the default scope of the `GITHUB_TOKEN` from read-only access. You can ignore the specific steps in the "Handling `pull_request` events" and "Handling `push` events" sections, as they no longer apply.
{% endnote %}
{% endif %}
### Handling `pull_request` events
@@ -54,6 +121,7 @@ If your workflow needs access to secrets or a `GITHUB_TOKEN` with write permissi
Below is a simple example of a `pull_request` workflow that might now be failing:
{% raw %}
```yaml
### This workflow now has no secrets and a read-only token
name: Dependabot Workflow
@@ -68,6 +136,7 @@ jobs:
    steps:
      - uses: actions/checkout@v2
```
{% endraw %}
You can replace `pull_request` with `pull_request_target`, which is used for pull requests from forks, and explicitly check out the pull request `HEAD`.
@@ -79,6 +148,7 @@ You can replace `pull_request` with `pull_request_target`, which is used for pul
{% endwarning %}
{% raw %}
```yaml
### This workflow has access to secrets and a read-write token
name: Dependabot Workflow
@@ -99,6 +169,7 @@ jobs:
          ref: ${{ github.event.pull_request.head.sha }}
          github-token: ${{ secrets.GITHUB_TOKEN }}
```
{% endraw %}
It is also strongly recommended that you downscope the permissions granted to the `GITHUB_TOKEN` in order to avoid leaking a token with more privilege than necessary. For more information, see "[Permissions for the `GITHUB_TOKEN`](/actions/reference/authentication-in-a-workflow#permissions-for-the-github_token)."
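For example, here is a minimal sketch of a downscoped token at the job level (the scope shown is illustrative; once a `permissions` block is set, any scope you omit defaults to no access):

{% raw %}
```yaml
jobs:
  dependabot:
    runs-on: ubuntu-latest
    # Grant only what the job needs; all unlisted scopes become "none".
    permissions:
      pull-requests: write
    if: ${{ github.actor == 'dependabot[bot]' }}
```
{% endraw %}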
@@ -110,6 +181,7 @@ As there is no `pull_request_target` equivalent for `push` events, you will have
The first workflow performs any untrusted work:
{% raw %}
```yaml
### This workflow doesn't have access to secrets and has a read-only token
name: Dependabot Untrusted Workflow
@@ -123,11 +195,13 @@ jobs:
    steps:
      - uses: ...
```
{% endraw %}
The second workflow performs trusted work after the first workflow completes successfully:
{% raw %}
```yaml
### This workflow has access to secrets and a read-write token
name: Dependabot Trusted Workflow
@@ -147,8 +221,11 @@ jobs:
    steps:
      - uses: ...
```
{% endraw %}
{% endif %}
### Manually re-running a workflow
You can also manually re-run a failed Dependabot workflow, and it will run with a read-write token and access to secrets. Before manually re-running a failed workflow, you should always check the dependency being updated to ensure that the change doesn't introduce any malicious or unintended behavior.
@@ -157,15 +234,28 @@ You can also manually re-run a failed Dependabot workflow, and it will run with
Here are several common scenarios that can be automated using {% data variables.product.prodname_actions %}.
{% ifversion ghes = 3.3 %}
{% note %}
**Note:** If your site administrator has overridden restrictions for {% data variables.product.prodname_dependabot %} on {% data variables.product.product_location %}, you can use `pull_request` instead of `pull_request_target` in the following workflows.
{% endnote %}
{% endif %}
### Fetch metadata about a pull request
A large amount of automation requires knowing information about the contents of the pull request: what the dependency name is, whether it's a production dependency, and whether it's a major, minor, or patch update.
The `dependabot/fetch-metadata` action provides all that information for you:
{% ifversion ghes = 3.3 %}
{% raw %}
```yaml
name: Dependabot auto-label
name: Dependabot fetch metadata
on: pull_request_target
permissions:
@@ -188,8 +278,42 @@ jobs:
      #  - steps.dependabot-metadata.outputs.dependency-type
      #  - steps.dependabot-metadata.outputs.update-type
```
{% endraw %}
{% else %}
{% raw %}
```yaml
name: Dependabot fetch metadata
on: pull_request

permissions:
  pull-requests: write
  issues: write
  repository-projects: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1.1.1
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      # The following properties are now available:
      #  - steps.metadata.outputs.dependency-names
      #  - steps.metadata.outputs.dependency-type
      #  - steps.metadata.outputs.update-type
```
{% endraw %}
{% endif %}
For more information, see the [`dependabot/fetch-metadata`](https://github.com/dependabot/fetch-metadata) repository.
### Label a pull request
@@ -198,7 +322,10 @@ If you have other automation or triage workflows based on {% data variables.prod
For example, if you want to flag all production dependency updates with a label:
{% ifversion ghes = 3.3 %}
{% raw %}
```yaml
name: Dependabot auto-label
on: pull_request_target
@@ -224,13 +351,51 @@ jobs:
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
```
{% endraw %}
{% else %}
{% raw %}
```yaml
name: Dependabot auto-label
on: pull_request

permissions:
  pull-requests: write
  issues: write
  repository-projects: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1.1.1
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Add a label for all production dependencies
        if: ${{ steps.metadata.outputs.dependency-type == 'direct:production' }}
        run: gh pr edit "$PR_URL" --add-label "production"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
```
{% endraw %}
{% endif %}
### Approve a pull request
If you want to automatically approve Dependabot pull requests, you can use the {% data variables.product.prodname_cli %} in a workflow:
{% ifversion ghes = 3.3 %}
{% raw %}
```yaml
name: Dependabot auto-approve
on: pull_request_target
@@ -254,15 +419,51 @@ jobs:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
```
{% endraw %}
{% else %}
{% raw %}
```yaml
name: Dependabot auto-approve
on: pull_request

permissions:
  pull-requests: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1.1.1
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Approve a PR
        run: gh pr review --approve "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
```
{% endraw %}
{% endif %}
### Enable auto-merge on a pull request
If you want to auto-merge your pull requests, you can use {% data variables.product.prodname_dotcom %}'s auto-merge functionality. This enables the pull request to be merged when all required tests and approvals are met. For more information on auto-merge, see "[Automatically merging a pull request](/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/automatically-merging-a-pull-request)."
Here is an example of enabling auto-merge for all patch updates to `my-dependency`:
{% ifversion ghes = 3.3 %}
{% raw %}
```yaml
name: Dependabot auto-merge
on: pull_request_target
@@ -288,15 +489,61 @@ jobs:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
```
{% endraw %}
{% else %}
{% raw %}
```yaml
name: Dependabot auto-merge
on: pull_request

permissions:
  pull-requests: write
  contents: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1.1.1
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Enable auto-merge for Dependabot PRs
        if: ${{contains(steps.metadata.outputs.dependency-names, 'my-dependency') && steps.metadata.outputs.update-type == 'version-update:semver-patch'}}
        run: gh pr merge --auto --merge "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
```
{% endraw %}
{% endif %}
## Troubleshooting failed workflow runs
If your workflow run fails, check the following:
{% ifversion ghes = 3.3 %}
- You are running the workflow only when the correct actor triggers it.
- You are checking out the correct `ref` for your `pull_request`.
- You aren't trying to access secrets from within a Dependabot-triggered `pull_request`, `pull_request_review`, `pull_request_review_comment`, or `push` event.
- You aren't trying to perform any `write` actions from within a Dependabot-triggered `pull_request`, `pull_request_review`, `pull_request_review_comment`, or `push` event.
{% else %}
- You are running the workflow only when the correct actor triggers it.
- You are checking out the correct `ref` for your `pull_request`.
- Your secrets are available in {% data variables.product.prodname_dependabot %} secrets rather than as {% data variables.product.prodname_actions %} secrets.
- You have a `GITHUB_TOKEN` with the correct permissions.
{% endif %}
For information on writing and debugging {% data variables.product.prodname_actions %}, see "[Learning GitHub Actions](/actions/learn-github-actions)."


@@ -22,6 +22,7 @@ children:
  - /downloading-your-organizations-saml-single-sign-on-recovery-codes
  - /managing-team-synchronization-for-your-organization
  - /accessing-your-organization-if-your-identity-provider-is-unavailable
  - /troubleshooting-identity-and-access-management
shortTitle: Manage SAML single sign-on
---


@@ -20,8 +20,6 @@ shortTitle: Manage team synchronization
{% data reusables.enterprise-accounts.emu-scim-note %}
{% data reusables.gated-features.okta-team-sync %}
## About team synchronization
You can enable team synchronization between your IdP and {% data variables.product.product_name %} to allow organization owners and team maintainers to connect teams in your organization with IdP groups.
@@ -65,12 +63,22 @@ You must have a linked SAML identity. To create a linked identity, you must auth
### Enabling team synchronization for Okta
Okta team synchronization requires that SAML and SCIM with Okta have already been set up for your organization.
To avoid potential team synchronization errors with Okta, we recommend that you confirm that SCIM linked identities are correctly set up for all organization members who are members of your chosen Okta groups, before enabling team synchronization on {% data variables.product.prodname_dotcom %}.
If an organization member does not have a linked SCIM identity, team synchronization will not work for them, and they may not be added to or removed from teams as expected. You will need to reprovision SCIM for any of these users.
For help provisioning users that are missing a SCIM linked identity, see "[Troubleshooting identity and access management](/organizations/managing-saml-single-sign-on-for-your-organization/troubleshooting-identity-and-access-management)."
{% data reusables.identity-and-permissions.team-sync-okta-requirements %}
{% data reusables.profile.access_org %}
{% data reusables.profile.org_settings %}
{% data reusables.organizations.security %}
{% data reusables.identity-and-permissions.team-sync-confirm-saml %}
{% data reusables.identity-and-permissions.team-sync-confirm-scim %}
1. Consider enforcing SAML in your organization to ensure that organization members link their SAML and SCIM identities. For more information, see "[Enforcing SAML single sign-on for your organization](/organizations/managing-saml-single-sign-on-for-your-organization/enforcing-saml-single-sign-on-for-your-organization)."
{% data reusables.identity-and-permissions.enable-team-sync-okta %}
7. Under your organization's name, type a valid SSWS token and the URL to your Okta instance.
![Enable team synchronization Okta organization form](/assets/images/help/teams/confirm-team-synchronization-okta-organization.png)


@@ -0,0 +1,87 @@
---
title: Troubleshooting identity and access management
intro: 'Review and resolve common troubleshooting errors for managing your organization''s SAML SSO, team synchronization, or identity provider (IdP) connection.'
product: '{% data reusables.gated-features.saml-sso %}'
versions:
  fpt: '*'
  ghec: '*'
topics:
  - Organizations
  - Teams
shortTitle: Troubleshooting access
---
## Some users are not provisioned or deprovisioned by SCIM
When you encounter provisioning issues with users, we recommend that you check whether the users are missing SCIM metadata. If an organization member is missing SCIM metadata, you can re-provision SCIM for the user manually through your IdP.
### Auditing users for missing SCIM metadata
If you suspect or notice that any users are not provisioned or deprovisioned as expected, we recommend that you audit all users in your organization.
To check whether users have a SCIM identity (SCIM metadata) in their external identity, you can review SCIM metadata for one organization member at a time on {% data variables.product.prodname_dotcom %}, or you can programmatically check all organization members using the {% data variables.product.prodname_dotcom %} API.
#### Auditing organization members on {% data variables.product.prodname_dotcom %}
As an organization owner, to confirm that SCIM metadata exists for a single organization member, visit this URL, replacing `<organization>` and `<username>`:
> `https://github.com/orgs/<organization>/people/<username>/sso`
If the user's external identity includes SCIM metadata, the organization owner should see a SCIM identity section on that page. If their external identity does not include any SCIM metadata, the SCIM identity section will not exist.
#### Auditing organization members through the {% data variables.product.prodname_dotcom %} API
As an organization owner, you can also query the SCIM REST API or GraphQL to list all SCIM provisioned identities in an organization.
#### Using the REST API
The SCIM REST API will only return data for users that have SCIM metadata populated under their external identities. We recommend you compare a list of SCIM provisioned identities with a list of all your organization members.
For more information, see:
- "[List SCIM provisioned identities](/rest/reference/scim#list-scim-provisioned-identities)"
- "[List organization members](/rest/reference/orgs#list-organization-members)"
#### Using GraphQL
This GraphQL query shows you the SAML `NameId`, the SCIM `UserName` and the {% data variables.product.prodname_dotcom %} username (`login`) for each user in the organization. To use this query, replace `ORG` with your organization name.
```graphql
{
  organization(login: "ORG") {
    samlIdentityProvider {
      ssoUrl
      externalIdentities(first: 100) {
        edges {
          node {
            samlIdentity {
              nameId
            }
            scimIdentity {
              username
            }
            user {
              login
            }
          }
        }
      }
    }
  }
}
```
```shell
curl -X POST -H "Authorization: Bearer <personal access token>" -H "Content-Type: application/json" -d '{ "query": "{ organization(login: \"ORG\") { samlIdentityProvider { externalIdentities(first: 100) { pageInfo { endCursor startCursor hasNextPage } edges { cursor node { samlIdentity { nameId } scimIdentity {username} user { login } } } } } } }" }' https://api.github.com/graphql
```
For more information on using the GraphQL API, see:
- "[GraphQL guides](/graphql/guides)"
- "[GraphQL explorer](/graphql/overview/explorer)"
### Re-provisioning SCIM for users through your identity provider
You can re-provision SCIM for users manually through your IdP. For example, to resolve provisioning errors, in the Okta admin portal, you can unassign and reassign users to the {% data variables.product.prodname_dotcom %} app. This should trigger Okta to make an API call to populate the SCIM metadata for these users on {% data variables.product.prodname_dotcom %}. For more information, see "[Unassign users from applications](https://help.okta.com/en/prod/Content/Topics/users-groups-profiles/usgp-unassign-apps.htm)" or "[Assign users to applications](https://help.okta.com/en/prod/Content/Topics/users-groups-profiles/usgp-assign-apps.htm)" in the Okta documentation.
To confirm that a user's SCIM identity is created, we recommend testing this process with a single organization member who you have confirmed doesn't have a SCIM external identity. After manually updating the users in your IdP, you can check whether the user's SCIM identity was created using the SCIM API or on {% data variables.product.prodname_dotcom %}. For more information, see "[Auditing users for missing SCIM metadata](#auditing-users-for-missing-scim-metadata)" or the REST API endpoint "[Get SCIM provisioning information for a user](/rest/reference/scim#get-scim-provisioning-information-for-a-user)."
If re-provisioning SCIM for users doesn't help, please contact {% data variables.product.prodname_dotcom %} Support.


@@ -15,8 +15,6 @@ topics:
shortTitle: Synchronize with an IdP
---
{% data reusables.gated-features.okta-team-sync %}
{% data reusables.enterprise-accounts.emu-scim-note %}
## About team synchronization


@@ -1,9 +0,0 @@
{% ifversion not ghae %}
{% note %}
**Note:** Team synchronization with Okta is currently in beta and subject to change. Please contact your GitHub Sales account representative to register for the beta.
{% endnote %}
{% endif %}


@@ -1 +1 @@
3. Confirm that SAML SSO is enabled. For more information, see "[Managing SAML single sign-on for your organization](/organizations/managing-saml-single-sign-on-for-your-organization/)."
3. Confirm that SAML SSO is enabled for your organization. For more information, see "[Managing SAML single sign-on for your organization](/organizations/managing-saml-single-sign-on-for-your-organization/)."


@@ -0,0 +1 @@
1. We recommend you confirm that your users have SAML enabled and have a linked SCIM identity to avoid potential provisioning errors. For help auditing your users, see "[Auditing users for missing SCIM metadata](/organizations/managing-saml-single-sign-on-for-your-organization/troubleshooting-identity-and-access-management#auditing-users-for-missing-scim-metadata)." For help resolving unlinked SCIM identities, see "[Troubleshooting identity and access management](/organizations/managing-saml-single-sign-on-for-your-organization/troubleshooting-identity-and-access-management)."


@@ -1,5 +1,5 @@
To enable team synchronization for Okta, you or your IdP administrator must:
Before you enable team synchronization for Okta, you or your IdP administrator must:
- Enable SAML SSO and SCIM for your organization using Okta. For more information, see "[Configuring SAML single sign-on and SCIM using Okta](/organizations/managing-saml-single-sign-on-for-your-organization/configuring-saml-single-sign-on-and-scim-using-okta)."
- Configure the SAML, SSO, and SCIM integration for your organization using Okta. For more information, see "[Configuring SAML single sign-on and SCIM using Okta](/organizations/managing-saml-single-sign-on-for-your-organization/configuring-saml-single-sign-on-and-scim-using-okta)."
- Provide the tenant URL for your Okta instance.
- Generate a valid SSWS token with read-only admin permissions for your Okta installation as a service user. For more information, see [Create the token](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) and [Service users](https://help.okta.com/en/prod/Content/Topics/Adv_Server_Access/docs/service-users.htm) in Okta's documentation.


@@ -1,5 +1,5 @@
{%- ifversion ghae %}
1. In the `plugins` element of the *pom.xml* file, add the [checksum-maven-plugin](http://checksum-maven-plugin.nicoulaj.net/index.html) plugin, and configure the plugin to send at least SHA-256 checksums.
1. In the `plugins` element of the *pom.xml* file, add the [checksum-maven-plugin](https://search.maven.org/artifact/net.nicoulaj.maven.plugins/checksum-maven-plugin) plugin, and configure the plugin to send at least SHA-256 checksums.
```xml
<plugins>
  <plugin>


@@ -1,3 +1,4 @@
9. To avoid syncing errors and confirm that your users have SAML enabled and SCIM linked identities, we recommend you audit your organization's users. For more information, see "[Auditing users for missing SCIM metadata](/organizations/managing-saml-single-sign-on-for-your-organization/troubleshooting-identity-and-access-management#auditing-users-for-missing-scim-metadata)."
10. To the right of "Provisioning to App", click **Edit**.
!["Edit" button for Okta application's provisioning options](/assets/images/help/saml/okta-provisioning-to-app-edit-button.png)
11. To the right of "Create Users", select **Enable**.


@@ -1,18 +1,28 @@
// Linkinator treats the following as regex.
/**
 * This file exports a mix of strings and of regexes. Linkinator relies
 * on this in `script/check-english-links.js` when we encounter external
 * links that we *specifically ignore*. That means that URLs or patterns
 * mentioned in this file might appear within our content, but we don't
 * bother checking that they actually work.
 */
/* eslint-disable prefer-regex-literals */
export default [
  // Skip GitHub search links.
  'https://github.com/search\\?',
  'https://github.com/github/gitignore/search\\?',
  // E.g. https://github.com/search?foo=bar
  new RegExp('https://github\\.com/search\\?'),
  new RegExp('https://github\\.com/github/gitignore/search\\?'),
  // These links require auth.
  'https://github.com/settings/profile',
  'https://github.com/github/docs/edit',
  'https://github.com/github/insights-releases/releases/latest',
  'https://classroom.github.com/videos',
  new RegExp('https://github\\.com/settings/profile'),
  new RegExp('https://github\\.com/github/docs/edit'),
  new RegExp('https://github\\.com/github/insights-releases/releases/latest'),
  new RegExp('https://classroom\\.github.com/videos'),
  // Oneoff links that link checkers think are broken but are not.
  'https://haveibeenpwned.com/',
  'https://www.ilo.org/dyn/normlex/en/f\\?p=NORMLEXPUB:12100:0::NO::P12100_ILO_CODE:P029',
  'https://www.ilo.org/dyn/normlex/en/f?p=NORMLEXPUB:12100:0::NO::P12100_ILO_CODE:P029',
  'https://www.linkedin.com/company/github',
  'https://www.facebook.com/',
  'https://ko-fi.com/',


@@ -1,3 +1,4 @@
import fs from 'fs'
import path from 'path'
import slash from 'slash'
import {
@@ -10,10 +11,24 @@ import isArchivedVersion from '../lib/is-archived-version.js'
import got from 'got'
import readJsonFile from '../lib/read-json-file.js'
import { cacheControlFactory } from './cache-control.js'
const archivedRedirects = readJsonFile(
function readJsonFileLazily(xpath) {
  const cache = new Map()
  // This will throw if the file isn't accessible at all, e.g. ENOENT
  fs.accessSync(xpath)
  return () => {
    if (!cache.has(xpath)) cache.set(xpath, readJsonFile(xpath))
    return cache.get(xpath)
  }
}

// These files are huge so lazy-load them. But note that the
// `readJsonFileLazily()` function will, at import-time, check that
// the path does exist.
const archivedRedirects = readJsonFileLazily(
  './lib/redirects/static/archived-redirects-from-213-to-217.json'
)
const archivedFrontmatterFallbacks = readJsonFile(
const archivedFrontmatterFallbacks = readJsonFileLazily(
  './lib/redirects/static/archived-frontmatter-fallbacks.json'
)
@@ -55,7 +70,9 @@ export default async function archivedEnterpriseVersions(req, res, next) {
    versionSatisfiesRange(requestedVersion, `>=${firstVersionDeprecatedOnNewSite}`) &&
    versionSatisfiesRange(requestedVersion, `<=${lastVersionWithoutArchivedRedirectsFile}`)
  ) {
    const redirect = archivedRedirects[req.path]
    // `archivedRedirects` is a callable because it's a lazy function
    // and memoized so calling it is cheap.
    const redirect = archivedRedirects()[req.path]
    if (redirect && redirect !== req.path) {
      cacheControl(res)
      return res.redirect(301, redirect)
@@ -122,7 +139,9 @@ function getFallbackRedirects(req, requestedVersion) {
  if (versionSatisfiesRange(requestedVersion, `<${firstVersionDeprecatedOnNewSite}`)) return
  if (versionSatisfiesRange(requestedVersion, `>${lastVersionWithoutArchivedRedirectsFile}`)) return

  return archivedFrontmatterFallbacks.find((arrayOfFallbacks) =>
  // `archivedFrontmatterFallbacks` is a callable because it's a lazy function
  // and memoized so calling it is cheap.
  return archivedFrontmatterFallbacks().find((arrayOfFallbacks) =>
    arrayOfFallbacks.includes(req.path)
  )
}


@@ -52,7 +52,7 @@ program
// Skip non-English content.
const languagesToSkip = Object.keys(libLanguages)
  .filter((code) => code !== 'en')
  .map((code) => `${root}/${code}`)
  .map((code) => new RegExp(`${root}/${code}`))
// Skip deprecated Enterprise content.
// Capture the old format https://docs.github.com/enterprise/2.1/
@@ -66,7 +66,19 @@ const config = {
  recurse: !program.opts().dryRun,
  silent: true,
  // The values in this array are treated as regexes.
  linksToSkip: [enterpriseReleasesToSkip, ...languagesToSkip, ...excludedLinks],
  linksToSkip: linksToSkipFactory([enterpriseReleasesToSkip, ...languagesToSkip, ...excludedLinks]),
}

// Return a function that can check, as quickly as possible, whether a certain
// href input should be skipped.
// We do this so we can use a `Set` plus an `Array.some()` over regexes for a
// speedier check. Linkinator's default implementation, if you set the
// `linksToSkip` config to an array, turns every entry into a new regex for
// every single URL it checks.
function linksToSkipFactory(regexAndURLs) {
  const set = new Set(regexAndURLs.filter((regexOrURL) => typeof regexOrURL === 'string'))
  const regexes = regexAndURLs.filter((regexOrURL) => regexOrURL instanceof RegExp)
  return (href) => set.has(href) || regexes.some((regex) => regex.test(href))
}

main()


@@ -71,7 +71,7 @@ function pullRequestBodyReport() {
    body.push(checkBoxes)
  })

  return body.join('\n')
  return body.flat().join('\n')
}
function csvReport() {


@@ -1,5 +1,6 @@
import fs from 'fs'
import walkSync from 'walk-sync'
import readFileAsync from '../../lib/readfile-async.js'
import minimatch from 'minimatch'
/*
@@ -75,33 +76,56 @@ const REPO_REGEXP = /\/\/github\.com\/github\/(?!docs[/'"\n])([\w-.]+)/gi
const IGNORE_PATHS = [
  '.git',
  '.next',
  '.vscode', // Not part of the repo but could be for a developer locally
  'node_modules',
  'translations',
  '.linkinator',
  '**/*.png', // Do not check images or font files.
  '**/*.jpg', // We could just put all of assets/* here, but that would prevent any
  '**/*.gif', // READMEs or other text-based files from being checked.
  '**/*.pdf',
  '**/*.ico',
  '**/*.woff',
  '**/*.csv',
  '**/*.br', // E.g. the search index .json.br files
  '**/*.graphql', // E.g. data/graphql/ghec/schema.docs.graphql
  'package-lock.json', // At the time of writing it's 1.5MB!
  '.linkinator/full.log', // Only present if you've run linkinator
  'lib/search/popular-pages.json', // used to build search indexes
  'tests/**/*.json',
  'content/early-access', // Not committed to public repository.
  'data/early-access', // Not committed to public repository.
  'data/release-notes', // These include links to many internal issues in Liquid comments.
  'lib/redirects/.redirects-cache*',
]

describe('check if a GitHub-owned private repository is referenced', () => {
  const filenames = walkSync(process.cwd(), {
    directories: false,
    ignore: IGNORE_PATHS,
  })
  }).filter(
    (filename) =>
      // Skip the large static json files because they're not code.
      !(
        filename.includes('static') &&
        (filename.endsWith('.json') || filename.endsWith('.json.br'))
      )
  )

  test.each(filenames)('in file %s', async (filename) => {
    const file = await readFileAsync(filename, 'utf8')
    const allowDocs = ALLOW_DOCS_PATHS.some((path) => minimatch(filename, path))
  test.each(filenames)('in file %s', (filename) => {
    // When you're reading many small files, it's faster to do it
    // *synchronously* because the event-loop overhead is less since
    // the disk I/O is sufficiently small.
    const file = fs.readFileSync(filename, 'utf8')
    const matches = Array.from(file.matchAll(REPO_REGEXP))
      .map(([, repoName]) => repoName)
      .filter((repoName) => !PUBLIC_REPOS.has(repoName))
      .filter((repoName) => !(allowDocs && repoName.startsWith('docs')))
      .filter((repoName) => {
        return !(
          repoName.startsWith('docs') && ALLOW_DOCS_PATHS.some((path) => minimatch(filename, path))
        )
      })
    expect(
      matches,
      `Please edit ${filename} to remove references to ${matches.join(', ')}`