1
0
mirror of synced 2025-12-22 11:26:57 -05:00

Merge branch 'jules-11521' of github.com:github/docs-internal into jules-11521

This commit is contained in:
Jules Porter
2023-08-14 10:54:14 +02:00
407 changed files with 909346 additions and 7920 deletions

View File

@@ -1,4 +0,0 @@
{
"presets": ["next/babel"],
"plugins": ["styled-components"]
}

View File

@@ -5,44 +5,53 @@ module.exports = {
es2020: true,
node: true,
},
parser: '@babel/eslint-parser',
extends: ['eslint:recommended', 'standard', 'plugin:import/errors', 'prettier'],
extends: [
"eslint:recommended",
"standard",
"plugin:import/errors",
"prettier",
],
parserOptions: {
ecmaVersion: 11,
requireConfigFile: 'false',
babelOptions: { configFile: './.babelrc' },
sourceType: 'module',
ecmaVersion: 2022,
requireConfigFile: "false",
sourceType: "module",
},
ignorePatterns: ['tmp/*', '!/.*', '/.next/', 'script/bookmarklets/*', 'rest-api-description/'],
ignorePatterns: [
"tmp/*",
"!/.*",
"/.next/",
"script/bookmarklets/*",
"rest-api-description/",
],
rules: {
'import/no-extraneous-dependencies': ['error', { packageDir: '.' }],
"import/no-extraneous-dependencies": ["error", { packageDir: "." }],
},
overrides: [
{
files: ['**/tests/**/*.js'],
files: ["**/tests/**/*.js"],
env: {
jest: true,
},
},
{
files: ['**/*.tsx', '**/*.ts'],
plugins: ['@typescript-eslint', 'jsx-a11y'],
extends: ['plugin:jsx-a11y/recommended'],
parser: '@typescript-eslint/parser',
files: ["**/*.tsx", "**/*.ts"],
plugins: ["@typescript-eslint", "jsx-a11y"],
extends: ["plugin:jsx-a11y/recommended"],
parser: "@typescript-eslint/parser",
rules: {
camelcase: 'off',
'no-unused-vars': 'off',
'no-undef': 'off',
'no-use-before-define': 'off',
'@typescript-eslint/no-unused-vars': ['error'],
'jsx-a11y/no-onchange': 'off',
camelcase: "off",
"no-unused-vars": "off",
"no-undef": "off",
"no-use-before-define": "off",
"@typescript-eslint/no-unused-vars": ["error"],
"jsx-a11y/no-onchange": "off",
},
},
],
settings: {
'import/resolver': {
"import/resolver": {
typescript: true,
node: true
}
}
}
node: true,
},
},
};

View File

@@ -0,0 +1,176 @@
#!/usr/bin/env node
/**
*
 * The only mandatory environment variables for this script are:
*
* - GITHUB_TOKEN
* - GITHUB_REPOSITORY (e.g. "github/docs")
*
* To delete old workflows, it first downloads all the workflows.
* The list of workflows is sorted by: A) does the `workflow.path`
* exist in the repo any more, B) each workflow's `updated_at` date.
*
* Then, one workflow at a time, it searches that workflow for runs.
* The search for runs uses a `created` filter that depends on the
* `MIN_AGE_DAYS` environment variable. The default is 90 days.
*
* For every run found, it deletes its logs and its run.
*
* The total number of deletions is limited by the `MAX_DELETIONS`
* environment variable. The default is 100.
* */
import fs from 'fs'
import assert from 'node:assert/strict'
import { getOctokit } from '@actions/github'
main()
async function main() {
const DRY_RUN = Boolean(JSON.parse(process.env.DRY_RUN || 'false'))
const MAX_DELETIONS = parseInt(JSON.parse(process.env.MAX_DELETIONS || '100'))
const MIN_AGE_DAYS = parseInt(process.env.MIN_AGE_DAYS || '90', 10)
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
if (!owner || !repo) {
throw new Error('GITHUB_REPOSITORY environment variable not set')
}
const token = process.env.GITHUB_TOKEN
if (!token) {
throw new Error(`GITHUB_TOKEN environment variable not set`)
}
const github = getOctokit(token)
// The sort order is not explicitly listed for this API endpoint.
// In practice it appears to list those that are oldest first.
// But to guarantee that it reaches the oldest, we paginate over
// all of them.
const allWorkflows = await github.paginate('GET /repos/{owner}/{repo}/actions/workflows', {
owner,
repo,
})
const sortByDate = (a, b) => a.updated_at.localeCompare(b.updated_at)
const workflows = [
...allWorkflows.filter((w) => !fs.existsSync(w.path)).sort(sortByDate),
...allWorkflows.filter((w) => fs.existsSync(w.path)).sort(sortByDate),
]
let deletions = 0
for (const workflow of workflows) {
console.log('WORKFLOW', workflow)
console.log(
fs.existsSync(workflow.path)
? `${workflow.path} still exists on disk`
: `${workflow.path} no longer exists on disk`,
)
deletions += await deleteWorkflowRuns(github, owner, repo, workflow, {
dryRun: DRY_RUN,
minAgeDays: MIN_AGE_DAYS,
maxDeletions: MAX_DELETIONS - deletions,
})
if (deletions >= MAX_DELETIONS) {
console.log(`Reached max number of deletions: ${MAX_DELETIONS}`)
break
}
}
console.log(`Deleted ${deletions} runs in total`)
}
/**
 * Delete old runs (and their logs) for a single workflow.
 *
 * @param {object} github - an authenticated Octokit-like client (`.request`)
 * @param {string} owner - repository owner
 * @param {string} repo - repository name
 * @param {object} workflow - workflow object with at least `id` and `name`
 * @param {object} options
 * @param {boolean} [options.dryRun=false] - log but don't actually delete
 * @param {number} [options.minAgeDays=100] - only delete runs at least this old
 * @param {number} [options.maxDeletions=1000] - stop after this many deletions
 * @returns {Promise<number>} the number of runs deleted
 */
async function deleteWorkflowRuns(
  github,
  owner,
  repo,
  workflow,
  { dryRun = false, minAgeDays = 100, maxDeletions = 1000 },
) {
  // https://docs.github.com/en/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates
  const minCreated = new Date(Date.now() - minAgeDays * 24 * 60 * 60 * 1000)
  const minCreatedSearch = `<=${minCreated.toISOString().split('T')[0]}`
  // The API's default per_page is 10 and its max is 100. Clamp to the
  // number we actually intend to delete: if we're only going to delete
  // 30, use 30; if only 5, stick with the default of 10.
  // (Was `Math.max(100, ...)`, which always requested at least 100.)
  const perPage = Math.min(100, Math.max(10, maxDeletions))
  // We could use github.paginate(...) but given that we can use a
  // filter on `created` and we can set a decent `per_page`, there's no
  // reason to request data that we're not going to use.
  const { data } = await github.request(
    'GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs',
    {
      owner,
      repo,
      workflow_id: workflow.id,
      per_page: perPage,
      created: minCreatedSearch,
    },
  )
  const runs = data.workflow_runs
  console.log(
    `Total runs in workflow "${
      workflow.name
    }" (${minCreatedSearch}): ${data.total_count.toLocaleString()}`,
  )
  let deletions = 0
  let notDeletions = 0
  for (const run of runs) {
    const created = new Date(run.created_at)
    if (created < minCreated) {
      const ageDays = Math.round((Date.now() - created.getTime()) / (24 * 60 * 60 * 1000))
      console.log(
        'DELETE',
        {
          id: run.id,
          created_at: run.created_at,
          name: run.name,
          display_title: run.display_title,
        },
        `${ageDays} days old`,
      )
      // Delete the run's logs first, then the run itself.
      if (!dryRun) {
        const { status } = await github.request(
          'DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs',
          {
            owner,
            repo,
            run_id: run.id,
          },
        )
        assert(status === 204, `Unexpected status deleting logs for run ${run.id}: ${status}`)
      }
      if (!dryRun) {
        const { status } = await github.request(
          'DELETE /repos/{owner}/{repo}/actions/runs/{run_id}',
          {
            owner,
            repo,
            run_id: run.id,
          },
        )
        // Message was previously a copy-paste of the logs assertion above.
        assert(status === 204, `Unexpected status deleting run ${run.id}: ${status}`)
      }
      deletions++
      if (maxDeletions && deletions >= maxDeletions) {
        console.log(
          `Reached max number of deletions (${maxDeletions}) for one workflow: ${workflow.name}`,
        )
        break
      } else {
        console.log(`Deleted ${deletions} of ${maxDeletions} runs for workflow: ${workflow.name}`)
      }
    } else {
      notDeletions++
    }
  }
  console.log(`Deleted ${deletions} runs in total for workflow: ${workflow.name}`)
  if (notDeletions) {
    console.log(`Skipped ${notDeletions} runs for workflow: ${workflow.name}`)
  }
  return deletions
}

18
.github/actions/cache-nextjs/action.yml vendored Normal file
View File

@@ -0,0 +1,18 @@
# Based on https://nextjs.org/docs/pages/building-your-application/deploying/ci-build-caching#github-actions
name: Cache Nextjs build cache
description: Cache the .next/cache according to best practices
runs:
  using: 'composite'
  steps:
    - name: Cache .next/cache
      uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # pin @3.3.1
      with:
        path: ${{ github.workspace }}/.next/cache
        # Generate a new cache whenever packages or source files change.
        key: ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('**/*.ts', '**/*.tsx') }}
        # If source files changed but packages didn't, rebuild from a prior cache.
        # NOTE: this prefix must match the `key` prefix above; the previous
        # `-nextjs-v13-` prefix could never match keys generated as `-nextjs-`.
        restore-keys: |
          ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}-

View File

@@ -7,8 +7,8 @@ name: Codeowners - Content Strategy
on:
pull_request:
paths:
- '/contributing/content-*.md'
- '/content/contributing/**.md'
- 'contributing/content-*.md'
- 'content/contributing/**.md'
jobs:
codeowners-content-strategy:

View File

@@ -35,11 +35,7 @@ jobs:
- uses: ./.github/actions/node-npm-setup
- name: Cache nextjs build
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- uses: ./.github/actions/cache-nextjs
- name: Run build script
run: npm run build

View File

@@ -25,11 +25,7 @@ jobs:
- uses: ./.github/actions/node-npm-setup
- name: Cache nextjs build
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- uses: ./.github/actions/cache-nextjs
- name: Build
run: npm run build

View File

@@ -21,7 +21,9 @@ jobs:
- name: Check out repo
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- uses: ./.github/actions/node-npm-setup
# - uses: ./.github/actions/node-npm-setup
- name: Install dependencies
run: npm install
# Note that we don't check out docs-early-access, Elasticsearch,
# or any remote translations. Nothing fancy here!

View File

@@ -0,0 +1,40 @@
name: Purge old workflow runs
# **What it does**: Deletes really old workflow runs.
# **Why we have it**: To keep things neat and tidy.
# **Who does it impact**: Docs engineering.
on:
workflow_dispatch:
schedule:
- cron: '20 */3 * * *' # Run every 3 hours at 20 minutes past the hour (UTC)
permissions:
contents: write
jobs:
purge-old-workflow-runs:
if: ${{ github.repository == 'github/docs-internal' || github.repository == 'github/docs' }}
runs-on: ubuntu-latest
steps:
- name: Check out repo
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- uses: ./.github/actions/node-npm-setup
- name: Run purge script
if: ${{ env.FREEZE != 'true' }}
env:
GITHUB_REPOSITORY: ${{ github.repository }}
# Necessary to be able to delete deployment environments
GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WORKFLOW_READORG }}
run: .github/actions-scripts/purge-old-workflow-runs.js
- name: Send Slack notification if workflow fails
uses: someimportantcompany/github-actions-slack-message@1d367080235edfa53df415bd8e0bbab480f29bad
if: ${{ failure() && env.FREEZE != 'true' }}
with:
channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
color: failure
text: The last "Purge old workflow runs" run for ${{github.repository}} failed. See https://github.com/${{github.repository}}/actions/workflows/purge-old-workflow-runs.yml

View File

@@ -126,11 +126,7 @@ jobs:
- uses: ./.github/actions/node-npm-setup
- name: Cache nextjs build
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- uses: ./.github/actions/cache-nextjs
- name: Run build scripts
run: npm run build

View File

@@ -51,11 +51,7 @@ jobs:
- uses: ./.github/actions/node-npm-setup
- name: Cache nextjs build
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- uses: ./.github/actions/cache-nextjs
- name: Build
run: npm run build

View File

@@ -152,11 +152,7 @@ jobs:
echo __ format, write to get_diff_files.txt __
echo $DIFF | tr '\n' ' ' > get_diff_files.txt
- name: Cache nextjs build
uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}
- uses: ./.github/actions/cache-nextjs
- name: Run build script
run: npm run build

3
.npmrc
View File

@@ -1,6 +1,3 @@
# skip installing optional dependencies to avoid issues with troublesome `fsevents` module
omit=optional
# For 15-25% faster npm install
# https://www.peterbe.com/plog/benchmarking-npm-install-with-or-without-audit
# Also we have Dependabot alerts configured in the GitHub repo.

View File

@@ -5,7 +5,7 @@
# --------------------------------------------------------------------------------
# To update the sha, run `docker pull node:$VERSION-alpine`
# look for something like: `Digest: sha256:0123456789abcdef`
FROM node:18.16-alpine@sha256:1ccc70acda680aa4ba47f53e7c40b2d4d6892de74817128e0662d32647dd7f4d as base
FROM node:18-alpine@sha256:58878e9e1ed3911bdd675d576331ed8838fc851607aed3bb91e25dfaffab3267 as base
# This directory is owned by the node user
ARG APP_HOME=/home/node/app
@@ -45,7 +45,6 @@ RUN npm prune --production
FROM all_deps as builder
COPY stylesheets ./stylesheets
COPY pages ./pages
COPY components ./components
COPY lib ./lib
COPY src ./src

View File

@@ -0,0 +1,159 @@
Category,Old Attribute,New Attribute
API,reason,gh.api.error_reason
API,resource,gh.api.resource_type
API,graphql,gh.api.graphql
API,graphql_time,graphql.time
API,graphql_query_byte_size,graphql.query.byte_size
API,graphql_variables_byte_size,graphql.variables.byte_size
API,graphql_operation_name,graphql.operation.name
API,graphql_origin,graphql.origin
API,graphql_success,graphql.success
API,graphql_query_depth,graphql.query.depth
API,graphql_query_complexity,graphql.query.complexity
API,graphql_schema,graphql.schema
API,graphql_query_hash,graphql.query.hash
API,graphql_variables_hash,graphql.variables.hash
API,graphql_query_name,graphql.query.name
API,graphql_operation_id,graphql.operation.id
Repos,rename_id,gh.branch_protection_rule.repository_branch_renamer.id
Repos,end_date,gh.repo.purge.end_date
Repos,deleted_count,gh.repo.mirror.deleted_count
CodeScanning,pull_request_id,gh.pull_request.id
CodeScanning,pull_request_number,gh.pull_request.number
CodeScanning,pull_request_head,gh.pull_request.head_sha
CodeScanning,alerts_count,gh.code_scanning.alert.count
CodeScanning,file_path,gh.code_scanning.alert.file_path
CodeScanning,alert_number,gh.code_scanning.alert.number
CodeScanning,category,gh.code_scanning.analysis.category
CodeScanning,check_run_id,gh.check_run.id
CodeScanning,alerts_in_the_diff,gh.code_scanning.diff.alerts.in
CodeScanning,alerts_out_the_diff,gh.code_scanning.diff.alerts.out
CodeScanning,onboarding_comment_posted,gh.code_scanning.onboarding_comment.posted
CodeScanning,onboarding_comments_count,gh.code_scanning.onboarding_comments.count
CodeScanning,tool_name,gh.code_scanning.tool
CodeScanning,code_scanning_review_comment,gh.code_scanning.review_comment.id
CodeScanning,head_commit_oid,gh.pull_request.head_sha
CodeScanning,merge_commit_oid,gh.pull_request.merge_sha
CodeScanning,checkrun_previously_completed,gh.check_run.previously_completed
CodeScanning,job_reason,gh.code_scanning.job.reason
CodeScanning,time_in_secs,gh.code_scanning.job.time
CodeScanning,code_scanning_check_suite,gh.check_suite.id
CodeScanning,base_ref_name,gh.pull_request.base_ref.name
CodeScanning,pull_request_old,gh.pull_request.id.old
CodeScanning,pull_request_new,gh.pull_request.id.new
CodeScanning,repo_nwo,gh.repo.nwo
CodeScanning,skip_check_runs,gh.code_scanning.skip_check_runs
CodeScanning,refs,git.refs
CodeScanning,ref,git.ref
CodeScanning,commit_oid,git.commit.oid
CodeScanning,sarif_size,gh.code_scanning.sarif.size
CodeScanning,sarif_id,gh.code_scanning.sarif.id
CodeScanning,sarif_uri,gh.code_scanning.sarif.uri
CodeScanning,old_base_ref,gh.pull_request.base_ref.old
CodeScanning,new_base_ref,gh.pull_request.base_ref.new
CodeScanning,replication_lag,gh.freno.replication_delay
CodeScanning,key,gh.kv.key
External Identities,external_id,gh.external_identities.external_id
External Identities,oid,gh.external_identities.oid
External Identities,refresh_token,gh.external_identities.refresh_token
External Identities,email,gh.external_identities.email
External Identities,type,gh.external_identities.type
External Identities,key,gh.external_identities.cache_key
External Identities,body,gh.external_identities.cache_body
External Identities,expires,gh.external_identities.cache_expires
External Identities,cap_message,gh.external_identities.cap_message
External Identities,token_url,gh.external_identities.token_url
External Identities,credential_auth_org_id,gh.external_identities.credential_auth_org_id
External Identities,credential_auth_exists_for_target_org,gh.external_identities.credential_auth_exists_for_target_org
External Identities,resource_type,gh.external_identities.resource_type
External Identities,can_self_identify_internal_or_public,gh.external_identities.can_self_identify_internal_or_public
Memex,column_id,gh.memex.column.id
Memex,class_name,code.namespace
Memex,actor_id,gh.actor.id
Memex,project_item_id,gh.memex.item.id
Memex,value,gh.memex.column.value
Memex,result,gh.memex.column.update_result
Memex,memex_id,gh.memex.id
Memex,ns,code.namespace
Memex,fn,code.function
Memex,result,gh.job.result
Memex,on_tasklist_waitlist,gh.memex.tasklist_waitlist
Memex,id,gh.membership.id
Memex,member_id,gh.membership.member.id
Notifyd,subject_type,gh.notifyd.subject.type
Webhooks,file,code.filepath
Webhooks,catalog_service,gh.catalog_service
Webhooks,request_id,gh.request_id
Webhooks,fn,code.namespace & code.function
Webhooks,event,gh.webhook.event_type
Webhooks,action,gh.webhook.action
Webhooks,method,code.function
Webhooks,event_type,gh.webhook.event_type
Webhooks,model_name,code.namespace
Webhooks,id,gh.webhook.id
Webhooks,push_sha,gh.webhook.push_sha
Webhooks,parent,gh.webhook.parent
Webhooks,guid,gh.webhook.delivery_guid
Webhooks,hook_ids,gh.webhooks
Webhooks,repo_id,gh.repo.id
Webhooks,org_id,gh.org.id
Webhooks,user_id,gh.user.id
Webhooks,webhook_delivery_id,gh.webhook.delivery_guid
Webhooks,repo_database_id,gh.repo.id
Webhooks,repo_global_id,gh.repo.global_id
Webhooks,event_at,gh.webhook.reminder_event.event_at
Webhooks,event_type_db,gh.webhook.reminder_event.event_type_db
Webhooks,personal,gh.webhook.reminder_event.reminder_event.personal
Webhooks,pull_request_ids,gh.pull_request.id
Webhooks,pull_request_ids_for_author,pull_request_ids_for_author
Webhooks,actor_id,gh.actor.id
Webhooks,actor_login,gh.actor.login
Webhooks,user_login,gh.user.login
Webhooks,path,code.filepath
Webhooks,enterprise,gh.webhook.is_enterprise
Webhooks,job,gh.job.name
Webhooks,class,exception.type
Webhooks,payload_size,gh.webhook.payload_size
Webhooks,target_repository_nwo,gh.repo.name_with_owner
Webhooks,target_repository_id,gh.repo.id
Webhooks,target_organization_id,gh.org.id
Webhooks,target_organization_name,gh.org.name
Scheduled Reminders,transaction_id,gh.scheduled_reminders.transaction_id
Camo,request_id,gh.request.id
Camo,hmac,gh.camo.request_hmac
Camo,url,gh.camo.encoded_url
Camo,referer,gh.request.referer
Camo,error,gh.camo.error
Camo,dns-time,gh.camo.dns.time
Camo,resp,gh.camo.upstream.response
Camo,len,gh.camo.upstream.response.content_length
Camo,request,gh.camo.upstream.request_buf
Camo,response,gh.camo.upstream.response_buf
Camo,code,gh.camo.upstream.response.code
Camo,resp,gh.camo.response
Camo,ctype,gh.camo.upstream.response.content_type
Camo,pem,gh.camo.certfile.name
Notifications,fn,code.function
Notifications,id,gh.notifications.rollup_summary.id
Notifications,fn,code.function
repo migration,fn,code.namespace
repo migration,migration_guid,gh.repo_migration.migration_guid
repo migration,source_url,gh.repo_migration.model_source_url
repo migration,resolution,gh.repo_migration.resolution
repo migration,model_name,gh.repo_migration.model_name
repo migration,migratable_resource_id,gh.repo_migration.migratable_resource_id
repo migration,model_id,gh.repo_migration.model_id
repo migration,source_owner,gh.repo_migration.source_owner
repo migration,source_repository,gh.repo_migration.source_repository
repo migration,target_url,gh.repo_migration.model_target_url
repo migration,translator_url,gh.repo_migration.model_translator_url
repo migration,state,gh.repo_migration.model_state
repo migration,asset_storage,gh.repo_migration.asset_storage.type
repo migration,asset_type,gh.repo_migration.asset_storage.asset_type
repo migration,asset_id,gh.repo_migration.asset_storage.asset_id
repo migration,http_response_code,gh.repo_migration.asset_storage.http_response_code
repo migration,field,gh.repo_migration.field
repo migration,state,gh.repo_migration.state
repo migration,url,gh.repo_migration.repository.repository_url
repo migration,validation_error,validation_error
repo migration,code,code
1 Category Old Attribute New Attribute
2 API reason gh.api.error_reason
3 API resource gh.api.resource_type
4 API graphql gh.api.graphql
5 API graphql_time graphql.time
6 API graphql_query_byte_size graphql.query.byte_size
7 API graphql_variables_byte_size graphql.variables.byte_size
8 API graphql_operation_name graphql.operation.name
9 API graphql_origin graphql.origin
10 API graphql_success graphql.success
11 API graphql_query_depth graphql.query.depth
12 API graphql_query_complexity graphql.query.complexity
13 API graphql_schema graphql.schema
14 API graphql_query_hash graphql.query.hash
15 API graphql_variables_hash graphql.variables.hash
16 API graphql_query_name graphql.query.name
17 API graphql_operation_id graphql.operation.id
18 Repos rename_id gh.branch_protection_rule.repository_branch_renamer.id
19 Repos end_date gh.repo.purge.end_date
20 Repos deleted_count gh.repo.mirror.deleted_count
21 CodeScanning pull_request_id gh.pull_request.id
22 CodeScanning pull_request_number gh.pull_request.number
23 CodeScanning pull_request_head gh.pull_request.head_sha
24 CodeScanning alerts_count gh.code_scanning.alert.count
25 CodeScanning file_path gh.code_scanning.alert.file_path
26 CodeScanning alert_number gh.code_scanning.alert.number
27 CodeScanning category gh.code_scanning.analysis.category
28 CodeScanning check_run_id gh.check_run.id
29 CodeScanning alerts_in_the_diff gh.code_scanning.diff.alerts.in
30 CodeScanning alerts_out_the_diff gh.code_scanning.diff.alerts.out
31 CodeScanning onboarding_comment_posted gh.code_scanning.onboarding_comment.posted
32 CodeScanning onboarding_comments_count gh.code_scanning.onboarding_comments.count
33 CodeScanning tool_name gh.code_scanning.tool
34 CodeScanning code_scanning_review_comment gh.code_scanning.review_comment.id
35 CodeScanning head_commit_oid gh.pull_request.head_sha
36 CodeScanning merge_commit_oid gh.pull_request.merge_sha
37 CodeScanning checkrun_previously_completed gh.check_run.previously_completed
38 CodeScanning job_reason gh.code_scanning.job.reason
39 CodeScanning time_in_secs gh.code_scanning.job.time
40 CodeScanning code_scanning_check_suite gh.check_suite.id
41 CodeScanning base_ref_name gh.pull_request.base_ref.name
42 CodeScanning pull_request_old gh.pull_request.id.old
43 CodeScanning pull_request_new gh.pull_request.id.new
44 CodeScanning repo_nwo gh.repo.nwo
45 CodeScanning skip_check_runs gh.code_scanning.skip_check_runs
46 CodeScanning refs git.refs
47 CodeScanning ref git.ref
48 CodeScanning commit_oid git.commit.oid
49 CodeScanning sarif_size gh.code_scanning.sarif.size
50 CodeScanning sarif_id gh.code_scanning.sarif.id
51 CodeScanning sarif_uri gh.code_scanning.sarif.uri
52 CodeScanning old_base_ref gh.pull_request.base_ref.old
53 CodeScanning new_base_ref gh.pull_request.base_ref.new
54 CodeScanning replication_lag gh.freno.replication_delay
55 CodeScanning key gh.kv.key
56 External Identities external_id gh.external_identities.external_id
57 External Identities oid gh.external_identities.oid
58 External Identities refresh_token gh.external_identities.refresh_token
59 External Identities email gh.external_identities.email
60 External Identities type gh.external_identities.type
61 External Identities key gh.external_identities.cache_key
62 External Identities body gh.external_identities.cache_body
63 External Identities expires gh.external_identities.cache_expires
64 External Identities cap_message gh.external_identities.cap_message
65 External Identities token_url gh.external_identities.token_url
66 External Identities credential_auth_org_id gh.external_identities.credential_auth_org_id
67 External Identities credential_auth_exists_for_target_org gh.external_identities.credential_auth_exists_for_target_org
68 External Identities resource_type gh.external_identities.resource_type
69 External Identities can_self_identify_internal_or_public gh.external_identities.can_self_identify_internal_or_public
70 Memex column_id gh.memex.column.id
71 Memex class_name code.namespace
72 Memex actor_id gh.actor.id
73 Memex project_item_id gh.memex.item.id
74 Memex value gh.memex.column.value
75 Memex result gh.memex.column.update_result
76 Memex memex_id gh.memex.id
77 Memex ns code.namespace
78 Memex fn code.function
79 Memex result gh.job.result
80 Memex on_tasklist_waitlist gh.memex.tasklist_waitlist
81 Memex id gh.membership.id
82 Memex member_id gh.membership.member.id
83 Notifyd subject_type gh.notifyd.subject.type
84 Webhooks file code.filepath
85 Webhooks catalog_service gh.catalog_service
86 Webhooks request_id gh.request_id
87 Webhooks fn code.namespace & code.function
88 Webhooks event gh.webhook.event_type
89 Webhooks action gh.webhook.action
90 Webhooks method code.function
91 Webhooks event_type gh.webhook.event_type
92 Webhooks model_name code.namespace
93 Webhooks id gh.webhook.id
94 Webhooks push_sha gh.webhook.push_sha
95 Webhooks parent gh.webhook.parent
96 Webhooks guid gh.webhook.delivery_guid
97 Webhooks hook_ids gh.webhooks
98 Webhooks repo_id gh.repo.id
99 Webhooks org_id gh.org.id
100 Webhooks user_id gh.user.id
101 Webhooks webhook_delivery_id gh.webhook.delivery_guid
102 Webhooks repo_database_id gh.repo.id
103 Webhooks repo_global_id gh.repo.global_id
104 Webhooks event_at gh.webhook.reminder_event.event_at
105 Webhooks event_type_db gh.webhook.reminder_event.event_type_db
106 Webhooks personal gh.webhook.reminder_event.reminder_event.personal
107 Webhooks pull_request_ids gh.pull_request.id
108 Webhooks pull_request_ids_for_author pull_request_ids_for_author
109 Webhooks actor_id gh.actor.id
110 Webhooks actor_login gh.actor.login
111 Webhooks user_login gh.user.login
112 Webhooks path code.filepath
113 Webhooks enterprise gh.webhook.is_enterprise
114 Webhooks job gh.job.name
115 Webhooks class exception.type
116 Webhooks payload_size gh.webhook.payload_size
117 Webhooks target_repository_nwo gh.repo.name_with_owner
118 Webhooks target_repository_id gh.repo.id
119 Webhooks target_organization_id gh.org.id
120 Webhooks target_organization_name gh.org.name
121 Scheduled Reminders transaction_id gh.scheduled_reminders.transaction_id
122 Camo request_id gh.request.id
123 Camo hmac gh.camo.request_hmac
124 Camo url gh.camo.encoded_url
125 Camo referer gh.request.referer
126 Camo error gh.camo.error
127 Camo dns-time gh.camo.dns.time
128 Camo resp gh.camo.upstream.response
129 Camo len gh.camo.upstream.response.content_length
130 Camo request gh.camo.upstream.request_buf
131 Camo response gh.camo.upstream.response_buf
132 Camo code gh.camo.upstream.response.code
133 Camo resp gh.camo.response
134 Camo ctype gh.camo.upstream.response.content_type
135 Camo pem gh.camo.certfile.name
136 Notifications fn code.function
137 Notifications id gh.notifications.rollup_summary.id
138 Notifications fn code.function
139 repo migration fn code.namespace
140 repo migration migration_guid gh.repo_migration.migration_guid
141 repo migration source_url gh.repo_migration.model_source_url
142 repo migration resolution gh.repo_migration.resolution
143 repo migration model_name gh.repo_migration.model_name
144 repo migration migratable_resource_id gh.repo_migration.migratable_resource_id
145 repo migration model_id gh.repo_migration.model_id
146 repo migration source_owner gh.repo_migration.source_owner
147 repo migration source_repository gh.repo_migration.source_repository
148 repo migration target_url gh.repo_migration.model_target_url
149 repo migration translator_url gh.repo_migration.model_translator_url
150 repo migration state gh.repo_migration.model_state
151 repo migration asset_storage gh.repo_migration.asset_storage.type
152 repo migration asset_type gh.repo_migration.asset_storage.asset_type
153 repo migration asset_id gh.repo_migration.asset_storage.asset_id
154 repo migration http_response_code gh.repo_migration.asset_storage.http_response_code
155 repo migration field gh.repo_migration.field
156 repo migration state gh.repo_migration.state
157 repo migration url gh.repo_migration.repository.repository_url
158 repo migration validation_error validation_error
159 repo migration code code

Binary file not shown.

Before

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 141 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 34 KiB

File diff suppressed because it is too large Load Diff

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.0 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 31 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 76 KiB

After

Width:  |  Height:  |  Size: 78 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 34 KiB

After

Width:  |  Height:  |  Size: 42 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 80 KiB

After

Width:  |  Height:  |  Size: 103 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 49 KiB

After

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 40 KiB

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

After

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 88 KiB

After

Width:  |  Height:  |  Size: 95 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 77 KiB

After

Width:  |  Height:  |  Size: 103 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 60 KiB

After

Width:  |  Height:  |  Size: 71 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 115 KiB

After

Width:  |  Height:  |  Size: 102 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 59 KiB

After

Width:  |  Height:  |  Size: 63 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 127 KiB

After

Width:  |  Height:  |  Size: 119 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 48 KiB

After

Width:  |  Height:  |  Size: 56 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 48 KiB

After

Width:  |  Height:  |  Size: 60 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 68 KiB

After

Width:  |  Height:  |  Size: 104 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 60 KiB

After

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 77 KiB

After

Width:  |  Height:  |  Size: 104 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 83 KiB

After

Width:  |  Height:  |  Size: 98 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 72 KiB

After

Width:  |  Height:  |  Size: 84 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 63 KiB

After

Width:  |  Height:  |  Size: 75 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 110 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 50 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 97 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 139 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 193 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 45 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 85 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 165 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 31 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 139 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 72 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 63 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 20 KiB

View File

@@ -123,7 +123,7 @@ export const DefaultLayout = (props: Props) => {
{props.children}
</main>
<footer>
<footer data-container="footer">
<SupportSection />
<LegalFooter />
<ScrollButton

View File

@@ -45,14 +45,14 @@ export const SimpleHeader = () => {
aria-label="Main"
>
<div className="d-flex flex-items-center" style={{ zIndex: 3 }} id="github-logo-mobile">
<Link href={`/${router.locale}`}>
<Link href={`/${router.locale}`} passHref legacyBehavior>
{/* eslint-disable-next-line jsx-a11y/anchor-is-valid */}
<a aria-hidden="true" tabIndex={-1}>
<MarkGithubIcon size={32} className="color-fg-default" />
</a>
</Link>
<Link href={`/${router.locale}`}>
<Link href={`/${router.locale}`} passHref legacyBehavior>
{/* eslint-disable-next-line jsx-a11y/anchor-is-valid */}
<a className="h4 color-fg-default no-underline no-wrap pl-2">GitHub Docs</a>
</Link>

View File

@@ -25,7 +25,12 @@ export function Link(props: Props) {
}
return (
<NextLink href={locale ? `/${locale}${href}` : href || ''} locale={locale || false}>
<NextLink
href={locale ? `/${locale}${href}` : href || ''}
locale={locale || false}
passHref
legacyBehavior
>
{/* eslint-disable-next-line jsx-a11y/anchor-has-content */}
<a rel={isExternal ? 'noopener' : ''} {...restProps} />
</NextLink>

View File

@@ -26,6 +26,7 @@ export const ArticleGridLayout = ({
{topper && <Box gridArea="topper">{topper}</Box>}
{toc && (
<Box
data-container="toc"
gridArea="sidebar"
alignSelf="flex-start"
className={cx(styles.sidebarBox, 'border-bottom border-lg-0 pb-4 mb-5 pb-xl-0 mb-xl-0')}
@@ -40,7 +41,7 @@ export const ArticleGridLayout = ({
</Box>
)}
<Box gridArea="content" data-search="article-body">
<Box data-container="article" gridArea="content" data-search="article-body">
{supportPortalVaIframeProps &&
supportPortalVaIframeProps.supportPortalUrl &&
supportPortalVaIframeProps.vaFlowUrlParameter && (

View File

@@ -41,6 +41,7 @@ export const ArticleInlineLayout = ({
{toc && (
<Box
data-container="toc"
gridArea="sidebar"
alignSelf="flex-start"
className={cx(styles.sidebarBox, 'border-bottom border-lg-0 pb-4 mb-5 pb-xl-0 mb-xl-0')}
@@ -50,6 +51,7 @@ export const ArticleInlineLayout = ({
)}
<Box
data-container="article"
gridArea="content"
data-search="article-body"
className={cx(styles.articleContainer, className)}

View File

@@ -9,7 +9,6 @@ export type ProductT = {
href: string
id: string
name: string
versions?: Array<string>
}
type VersionItem = {
@@ -70,7 +69,6 @@ export type MainContextT = {
currentProduct?: ProductT
currentLayoutName: string
isHomepageVersion: boolean
isFPT: boolean
data: DataT
error: string
currentCategory?: string
@@ -132,7 +130,6 @@ export const getMainContext = async (req: any, res: any): Promise<MainContextT>
currentProduct: req.context.productMap[req.context.currentProduct] || null,
currentLayoutName: req.context.currentLayoutName,
isHomepageVersion: req.context.page?.documentType === 'homepage',
isFPT: req.context.currentVersion === 'free-pro-team@latest',
error: req.context.error ? req.context.error.toString() : '',
data: {
ui: req.context.site.data.ui,

View File

@@ -142,6 +142,7 @@ export const Header = () => {
return (
<>
<div
data-container="header"
className={cx(
'border-bottom d-unset color-border-muted no-print z-3 color-bg-default',
styles.header,

View File

@@ -3,7 +3,6 @@ import { TreeView } from '@primer/react'
import { Link } from 'components/Link'
import { ProductTreeNode } from 'components/context/MainContext'
import { EventType, sendEvent } from 'src/events/components/events'
type SectionProps = {
routePath: string
@@ -25,11 +24,6 @@ export const ProductCollapsibleSection = (props: SectionProps) => {
current={isCurrent}
defaultExpanded={isCurrent}
onSelect={(e) => {
sendEvent({
type: EventType.navigate,
navigate_label: `product page navigate to: ${page.href}`,
})
if (e.nativeEvent instanceof KeyboardEvent && e.nativeEvent.code === 'Enter') {
document.getElementById(page.href)?.click()
e?.stopPropagation()

View File

@@ -20,6 +20,7 @@ export const SidebarNav = ({ variant = 'full' }: Props) => {
return (
<div
data-container="nav"
className={cx(variant === 'full' ? 'position-sticky d-none border-right d-xxl-block' : '')}
style={{ width: 326, height: 'calc(100vh - 65px)', top: '65px' }}
>

View File

@@ -36,4 +36,4 @@ While forks of private repositories are deleted when a collaborator is removed,
## Further reading
- "[AUTOTITLE](/organizations/organizing-members-into-teams/removing-organization-members-from-a-team)"
- "[AUTOTITLE](/organizations/managing-user-access-to-your-organizations-repositories/removing-an-outside-collaborator-from-an-organization-repository)"
- "[AUTOTITLE](/organizations/managing-user-access-to-your-organizations-repositories/managing-outside-collaborators/removing-an-outside-collaborator-from-an-organization-repository)"

View File

@@ -85,7 +85,7 @@ You can use the `git config` command to change the email address you associate w
```shell
$ git config --global user.email
<span class="output">email@example.com</span>
email@example.com
```
1. {% data reusables.user-settings.link_email_with_your_account %}
@@ -109,7 +109,7 @@ You can change the email address associated with commits you make in a single re
```shell
$ git config user.email
<span class="output">email@example.com</span>
email@example.com
```
1. {% data reusables.user-settings.link_email_with_your_account %}

View File

@@ -89,4 +89,4 @@ Collaborators can also perform the following actions.
## Further reading
- "[AUTOTITLE](/organizations/managing-user-access-to-your-organizations-repositories/repository-roles-for-an-organization)"
- "[AUTOTITLE](/organizations/managing-user-access-to-your-organizations-repositories/managing-repository-roles/repository-roles-for-an-organization)"

View File

@@ -53,7 +53,7 @@ You can also view whether an enterprise owner has a specific role in the organiz
| Enterprise owner | Organization owner | Able to configure organization settings and manage access to the organization's resources through teams, etc. |
| Enterprise owner | Organization member | Able to access organization resources and content, such as repositories, without access to the organization's settings. |
To review all roles in an organization, see "[AUTOTITLE](/organizations/managing-peoples-access-to-your-organization-with-roles/roles-in-an-organization)." {% ifversion custom-repository-roles %} An organization member can also have a custom role for a specific repository. For more information, see "[AUTOTITLE](/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)."{% endif %}
To review all roles in an organization, see "[AUTOTITLE](/organizations/managing-peoples-access-to-your-organization-with-roles/roles-in-an-organization)." {% ifversion custom-repository-roles %} An organization member can also have a custom role for a specific repository. For more information, see "[AUTOTITLE](/organizations/managing-user-access-to-your-organizations-repositories/managing-repository-roles/managing-custom-repository-roles-for-an-organization)."{% endif %}
For more information about the enterprise owner role, see "[AUTOTITLE](/admin/user-management/managing-users-in-your-enterprise/roles-in-an-enterprise#enterprise-owner)."

View File

@@ -115,5 +115,5 @@ To use a different SSH key for different repositories that you clone to your wor
For example, the following command sets the `GIT_SSH_COMMAND` environment variable to specify an SSH command that uses the private key file at **_PATH/TO/KEY/FILE_** for authentication to clone the repository named OWNER/REPOSITORY on {% data variables.location.product_location %}.
<pre>
GIT_SSH_COMMAND='ssh -i <em>PATH/TO/KEY/FILE</em> -o IdentitiesOnly=yes' git clone git@github.com:<em>OWNER</em>/<em>REPOSITORY</em>
GIT_SSH_COMMAND='ssh -i PATH/TO/KEY/FILE -o IdentitiesOnly=yes' git clone git@github.com:OWNER/REPOSITORY
</pre>

View File

@@ -88,7 +88,7 @@ For more information, see "[AUTOTITLE](/actions/hosting-your-own-runners/managin
## Adding a self-hosted runner to an enterprise
{% ifversion fpt %}If you use {% data variables.product.prodname_ghe_cloud %}, you{% elsif ghec or ghes or ghae %}You{% endif %} can add self-hosted runners to an enterprise, where they can be assigned to multiple organizations. The organization admins are then able to control which repositories can use it. {% ifversion fpt %}For more information, see the [{% data variables.product.prodname_ghe_cloud %} documentation](/enterprise-cloud@latest/actions/hosting-your-own-runners/managing-self-hosted-runners/adding-self-hosted-runners#adding-a-self-hosted-runner-to-an-enterprise).{% endif %}
{% ifversion fpt %}If you use {% data variables.product.prodname_ghe_cloud %}, you{% elsif ghec or ghes or ghae %}You{% endif %} can add self-hosted runners to an enterprise, where they can be assigned to multiple organizations. The organization owner can control which repositories can use it. {% ifversion fpt %}For more information, see the [{% data variables.product.prodname_ghe_cloud %} documentation](/enterprise-cloud@latest/actions/hosting-your-own-runners/managing-self-hosted-runners/adding-self-hosted-runners#adding-a-self-hosted-runner-to-an-enterprise).{% endif %}
{% ifversion ghec or ghes or ghae %}
New runners are assigned to the default group. You can modify the runner's group after you've registered the runner. For more information, see "[AUTOTITLE](/actions/hosting-your-own-runners/managing-self-hosted-runners/managing-access-to-self-hosted-runners-using-groups#moving-a-self-hosted-runner-to-a-group)."

View File

@@ -33,7 +33,9 @@ If you need a self-hosted runner to communicate via a proxy server, the self-hos
The proxy environment variables are read when the self-hosted runner application starts, so you must set the environment variables before configuring or starting the self-hosted runner application. If your proxy configuration changes, you must restart the self-hosted runner application.
On Windows machines, the proxy environment variable names are not case-sensitive. On Linux and macOS machines, we recommend that you use all lowercase environment variables. If you have an environment variable in both lowercase and uppercase on Linux or macOS, for example `https_proxy` and `HTTPS_PROXY`, the self-hosted runner application uses the lowercase environment variable.
{% data reusables.actions.environment-variables-as-case-sensitive %}
On Windows machines, the proxy environment variable names are case insensitive. On Linux and macOS machines, we recommend that you use all lowercase environment variables. If you have an environment variable in both lowercase and uppercase on Linux or macOS, for example `https_proxy` and `HTTPS_PROXY`, the self-hosted runner application uses the lowercase environment variable.
{% data reusables.actions.self-hosted-runner-ports-protocols %}

View File

@@ -15,6 +15,7 @@ versions:
type: overview
topics:
- Fundamentals
layout: inline
---
{% data reusables.actions.enterprise-github-hosted-runners %}

View File

@@ -43,7 +43,7 @@ Certain Azure DevOps constructs must be migrated manually from Azure DevOps into
- Environments
- Pre-deployment approvals
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-azure-pipelines-to-github-actions)."
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-azure-pipelines-to-github-actions)."
#### Unsupported tasks
@@ -62,7 +62,7 @@ For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-t
The `configure` CLI command is used to set required credentials and options for {% data variables.product.prodname_actions_importer %} when working with Azure DevOps and {% data variables.product.prodname_dotcom %}.
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-personal-access-token-classic)."
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)."
Your token must have the `workflow` scope.

View File

@@ -40,7 +40,7 @@ There are some limitations when migrating from Bamboo to {% data variables.produ
- {% data variables.product.prodname_actions_importer %} relies on the YAML specification generated by the Bamboo Server to perform migrations. When Bamboo does not support exporting something to YAML, the missing information is not migrated.
- Trigger conditions are unsupported. When {% data variables.product.prodname_actions_importer %} encounters a trigger with a condition, the condition is surfaced as a comment and the trigger is transformed without it.
- Bamboo Plans with customized settings for storing artifacts are not transformed. Instead, artifacts are stored and retrieved using the [`upload-artifact`](https://github.com/actions/upload-artifact) and [`download-artifact`](https://github.com/actions/download-artifact) actions.
- Disabled plans must be disabled manually in the GitHub UI. For more information, see "[AUTOTITLE](/actions/managing-workflow-runs/disabling-and-enabling-a-workflow)."
- Disabled plans must be disabled manually in the GitHub UI. For more information, see "[AUTOTITLE](/actions/using-workflows/disabling-and-enabling-a-workflow)."
- Disabled jobs are transformed with a `if: false` condition which prevents it from running. You must remove this to re-enable the job.
- Disabled tasks are not transformed because they are not included in the exported plan when using the Bamboo API.
- Bamboo provides options to clean up build workspaces after a build is complete. These are not transformed because it is assumed GitHub-hosted runners or ephemeral self-hosted runners will automatically handle this.
@@ -65,7 +65,7 @@ Certain Bamboo constructs must be migrated manually. These include:
The `configure` CLI command is used to set required credentials and options for {% data variables.product.prodname_actions_importer %} when working with Bamboo and {% data variables.product.prodname_dotcom %}.
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-personal-access-token-classic)."
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)."
Your token must have the `workflow` scope.

View File

@@ -50,7 +50,7 @@ Certain CircleCI constructs must be migrated manually. These include:
The `configure` CLI command is used to set required credentials and options for {% data variables.product.prodname_actions_importer %} when working with CircleCI and {% data variables.product.prodname_dotcom %}.
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-personal-access-token-classic)."
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)."
Your token must have the `workflow` scope.

View File

@@ -40,7 +40,7 @@ Certain GitLab constructs must be migrated manually. These include:
- Masked project or group variable values
- Artifact reports
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-gitlab-cicd-to-github-actions)."
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-gitlab-cicd-to-github-actions)."
## Installing the {% data variables.product.prodname_actions_importer %} CLI extension
@@ -50,7 +50,7 @@ For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-t
The `configure` CLI command is used to set required credentials and options for {% data variables.product.prodname_actions_importer %} when working with GitLab and {% data variables.product.prodname_dotcom %}.
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-personal-access-token-classic)."
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)."
Your token must have the `workflow` scope.
@@ -312,7 +312,7 @@ gh actions-importer migrate gitlab --project my-project-name --output-dir output
### Supported syntax for GitLab pipelines
The following table shows the type of properties {% data variables.product.prodname_actions_importer %} is currently able to convert. For more details about how GitLab pipeline syntax aligns with {% data variables.product.prodname_actions %}, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-gitlab-cicd-to-github-actions)".
The following table shows the type of properties {% data variables.product.prodname_actions_importer %} is currently able to convert. For more details about how GitLab pipeline syntax aligns with {% data variables.product.prodname_actions %}, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-gitlab-cicd-to-github-actions)".
| GitLab Pipelines | GitHub Actions | Status |
| :-------------------------------------- | :------------------------------ | :-------------------------- |

View File

@@ -36,7 +36,7 @@ There are some limitations when migrating from Jenkins to {% data variables.prod
- Self-hosted runners
- Unknown plugins
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-jenkins-to-github-actions)."
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-jenkins-to-github-actions)."
## Installing the {% data variables.product.prodname_actions_importer %} CLI extension
@@ -46,7 +46,7 @@ For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-t
The `configure` CLI command is used to set required credentials and options for {% data variables.product.prodname_actions_importer %} when working with Jenkins and {% data variables.product.prodname_dotcom %}.
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-personal-access-token-classic)."
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)."
Your token must have the `workflow` scope.
@@ -275,7 +275,7 @@ source_files:
### Supported syntax for Jenkins pipelines
The following tables show the type of properties {% data variables.product.prodname_actions_importer %} is currently able to convert. For more details about how Jenkins pipeline syntax aligns with {% data variables.product.prodname_actions %}, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-jenkins-to-github-actions)".
The following tables show the type of properties {% data variables.product.prodname_actions_importer %} is currently able to convert. For more details about how Jenkins pipeline syntax aligns with {% data variables.product.prodname_actions %}, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-jenkins-to-github-actions)".
For information about supported Jenkins plugins, see the [`github/gh-actions-importer` repository](https://github.com/github/gh-actions-importer/blob/main/docs/jenkins/index.md).

View File

@@ -37,7 +37,7 @@ Certain Travis CI constructs must be migrated manually. These include:
- Secrets
- Unknown job properties
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-travis-ci-to-github-actions)."
For more information on manual migrations, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-travis-ci-to-github-actions)."
#### Travis CI project languages
@@ -53,7 +53,7 @@ For a list of the project languages supported by {% data variables.product.prodn
The `configure` CLI command is used to set required credentials and options for {% data variables.product.prodname_actions_importer %} when working with Travis CI and {% data variables.product.prodname_dotcom %}.
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-personal-access-token-classic)."
1. Create a {% data variables.product.prodname_dotcom %} {% data variables.product.pat_v1 %}. For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)."
Your token must have the `workflow` scope.
@@ -310,7 +310,7 @@ gh actions-importer dry-run travis-ci --travis-ci-repository travis-org-name/tra
### Supported syntax for Travis CI pipelines
The following table shows the type of properties {% data variables.product.prodname_actions_importer %} is currently able to convert. For more details about how Travis CI pipeline syntax aligns with {% data variables.product.prodname_actions %}, see "[AUTOTITLE](/actions/migrating-to-github-actions/manual-migrations/migrating-from-travis-ci-to-github-actions)".
The following table shows the type of properties {% data variables.product.prodname_actions_importer %} is currently able to convert. For more details about how Travis CI pipeline syntax aligns with {% data variables.product.prodname_actions %}, see "[AUTOTITLE](/actions/migrating-to-github-actions/manually-migrating-to-github-actions/migrating-from-travis-ci-to-github-actions)".
| Travis CI | GitHub Actions | Status |
| :------------------ | :--------------------------------- | ------------------: |

View File

@@ -10,7 +10,7 @@ versions:
{% ifversion ghec %}
## Adding a {% data variables.actions.hosted_runner %} to an enterprise
Enterprise owners can add {% data variables.actions.hosted_runner %}s to an enterprise and assign them to organizations. By default, when a {% data variables.actions.hosted_runner %} is created for an enterprise, it is added to a default runner group that all organizations in the enterprise have access to. While all organizations are granted access to the runner, the repositories in each organization **are not** granted access. For each organization, an organization administrator must configure the runner group to specify which repositories have access to the enterprise runner. For more information, see "[Allowing repositories to access a runner group](#allowing-repositories-to-access-a-runner-group)."
Enterprise owners can add {% data variables.actions.hosted_runner %}s to an enterprise and assign them to organizations. By default, when a {% data variables.actions.hosted_runner %} is created for an enterprise, it is added to a default runner group that all organizations in the enterprise have access to. While all organizations are granted access to the runner, the repositories in each organization **are not** granted access. For each organization, an organization owner must configure the runner group to specify which repositories have access to the enterprise runner. For more information, see "[Allowing repositories to access a runner group](#allowing-repositories-to-access-a-runner-group)."
{% data reusables.actions.add-hosted-runner-overview %}
@@ -25,7 +25,7 @@ Enterprise owners can add {% data variables.actions.hosted_runner %}s to an ente
## Adding a {% data variables.actions.hosted_runner %} to an organization
You can add a {% data variables.actions.hosted_runner %} to an organization, where organization administrators can control which repositories can use it. When you create a new runner for an organization, by default, all repositories in the organization have access to the runner. To limit which repositories can use the runner, assign it to a runner group with access to specific repositories. For more information, see "[Allowing repositories to access a runner group](#allowing-repositories-to-access-a-runner-group)."
You can add a {% data variables.actions.hosted_runner %} to an organization, where organization owners can control which repositories can use it. When you create a new runner for an organization, by default, all repositories in the organization have access to the runner. To limit which repositories can use the runner, assign it to a runner group with access to specific repositories. For more information, see "[Allowing repositories to access a runner group](#allowing-repositories-to-access-a-runner-group)."
{% data reusables.actions.add-hosted-runner-overview %}
@@ -37,7 +37,7 @@ You can add a {% data variables.actions.hosted_runner %} to an organization, whe
## Allowing repositories to access {% data variables.actions.hosted_runner %}s
Repositories are granted access to {% data variables.actions.hosted_runner %}s through runner groups. Enterprise administrators can choose which organizations are granted access to enterprise-level runner groups, and organization administrators control repository-level access to all {% data variables.actions.hosted_runner %}s. Organization administrators can use and configure enterprise-level runner groups for the repositories in their organization, or they can create organization-level runner groups to control access.
Repositories are granted access to {% data variables.actions.hosted_runner %}s through runner groups. Enterprise administrators can choose which organizations are granted access to enterprise-level runner groups, and organization owners control repository-level access to all {% data variables.actions.hosted_runner %}s. Organization owners can use and configure enterprise-level runner groups for the repositories in their organization, or they can create organization-level runner groups to control access.
- **For enterprise-level runner groups**: {% data reusables.actions.about-enterprise-level-runner-groups %}
- **For organization-level runner groups**: {% data reusables.actions.about-organization-level-runner-groups %}

View File

@@ -17,7 +17,7 @@ Once your runner type has been defined, you can update your workflow YAML files
{% endnote %}
Only owner or administrator accounts can see the runner settings. Non-administrative users can contact the organization administrator to find out which runners are enabled. Your organization administrator can create new runners and runner groups, as well as configure permissions to specify which repositories can access a runner group. For more information, see "[AUTOTITLE](/actions/using-github-hosted-runners/managing-larger-runners#allowing-repositories-to-access-a-runner-group)."
Only owner or administrator accounts can see the runner settings. Non-administrative users can contact the organization owner to find out which runners are enabled. Your organization owner can create new runners and runner groups, as well as configure permissions to specify which repositories can access a runner group. For more information, see "[AUTOTITLE](/actions/using-github-hosted-runners/managing-larger-runners#allowing-repositories-to-access-a-runner-group)."
## Using groups to control where jobs are run
@@ -54,7 +54,7 @@ jobs:
If you notice the jobs that target your {% data variables.actions.hosted_runner %}s are delayed or not running, there are several factors that may be causing this.
- **Concurrency settings**: You may have reached your maximum concurrency limit. If you would like to enable more jobs to run in parallel, you can update your autoscaling settings to a larger number. For more information, see "[AUTOTITLE](/actions/using-github-hosted-runners/managing-larger-runners#configuring-autoscaling-for-larger-runners)."
- **Repository permissions**: Ensure you have the appropriate repository permissions enabled for your {% data variables.actions.hosted_runner %}s. By default, enterprise runners are not available at the repository level and must be manually enabled by an organization administrator. For more information, see "[AUTOTITLE](/actions/using-github-hosted-runners/managing-larger-runners#allowing-repositories-to-access-larger-runners)."
- **Repository permissions**: Ensure you have the appropriate repository permissions enabled for your {% data variables.actions.hosted_runner %}s. By default, enterprise runners are not available at the repository level and must be manually enabled by an organization owner. For more information, see "[AUTOTITLE](/actions/using-github-hosted-runners/managing-larger-runners#allowing-repositories-to-access-larger-runners)."
- **Billing information**: You must have a valid credit card on file in order to use {% data variables.actions.hosted_runner %}s. After adding a credit card to your account, it can take up to 10 minutes to enable the use of your {% data variables.actions.hosted_runner %}s. For more information, see "[AUTOTITLE](/billing/managing-your-github-billing-settings/adding-or-editing-a-payment-method)."
- **Spending limit**: Your {% data variables.product.prodname_actions %} spending limit must be set to a value greater than zero. For more information, see "[AUTOTITLE](/billing/managing-billing-for-github-actions/managing-your-spending-limit-for-github-actions)."
- **Fair use policy**: {% data variables.product.company_short %} has a fair use policy that begins to throttle jobs based on several factors, such as how many jobs you are running or how many jobs are running across the entirety of {% data variables.product.prodname_actions %}.

View File

@@ -13,11 +13,7 @@ versions:
## Overview
{% note %}
**Note:** A job that is skipped will report its status as "Success". It will not prevent a pull request from merging, even if it is a required check.
{% endnote %}
{% data reusables.actions.workflows.skipped-job-status-checks-passing %}
{% data reusables.actions.jobs.section-using-conditions-to-control-job-execution %}

View File

@@ -13,6 +13,7 @@ redirect_from:
- /actions/using-workflows/advanced-workflow-features
topics:
- Workflows
layout: inline
---
## About workflows

View File

@@ -942,7 +942,7 @@ jobs:
{% endnote %}
Runs your workflow when you push a commit or tag.
Runs your workflow when you push a commit or tag, or when you clone a repository.
For example, you can run a workflow when the `push` event occurs.

Some files were not shown because too many files have changed in this diff Show More