
Merge branch 'main' into patch-5

This commit is contained in:
Lucas Costi
2022-08-31 17:04:07 +10:00
committed by GitHub
1554 changed files with 64576 additions and 59111 deletions

View File

@@ -17,6 +17,6 @@ jobs:
runs-on: ubuntu-latest
steps:
# See labeling configuration in the `.github/labeler.yml` file
- uses: actions/labeler@5f867a63be70efff62b767459b009290364495eb
- uses: actions/labeler@e54e5b338fbd6e6cdb5d60f51c22335fc57c401e
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'

View File

@@ -53,6 +53,14 @@ jobs:
# Don't care about CDN caching image URLs
DISABLE_REWRITE_ASSET_URLS: true
run: |
# Note as of Aug 2022, we *don't* check external links
# on the pages you touched in the PR. We could enable that
# but it has the added risk of false positives blocking CI.
# We are using this script for the daily/nightly checker that
# checks external links too. Once we're confident it really works
# well, we can consider enabling it here on every content PR too.
./script/rendered-content-link-checker.js \
--language en \
--max 100 \

25 binary image files changed (screenshot assets added, updated, or removed); contents not shown.

View File

@@ -5,7 +5,7 @@ import { sendEvent, EventType } from 'components/lib/events'
import { useRouter } from 'next/router'
import { useArticleContext } from 'components/context/ArticleContext'
import parseUserAgent from 'components/lib/user-agent'
import { parseUserAgent } from 'components/lib/user-agent'
const platforms = [
{ id: 'mac', label: 'Mac' },

View File

@@ -81,7 +81,8 @@ export function getCssTheme(cookieValue = ''): CssColorTheme {
darkTheme: filterTheme(dark_theme) || defaultCSSTheme.darkTheme,
}
} catch (err) {
console.warn("Unable to parse 'color_mode' cookie", err)
if (process.env.NODE_ENV === 'development')
console.warn("Unable to parse 'color_mode' cookie", err)
return defaultCSSTheme
}
}
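For context, a minimal sketch of the kind of `color_mode` cookie value this parser handles; the payload shape is assumed from the `dark_theme` destructuring above, not confirmed by this diff:

```ts
// Hypothetical cookie payload (field names inferred from the code above;
// exact shape assumed). getCssTheme falls back to defaultCSSTheme whenever
// parsing throws, and the warning now only fires in development.
const cookieValue = JSON.stringify({
  light_theme: { name: 'light' },
  dark_theme: { name: 'dark_dimmed' },
})
const theme = getCssTheme(cookieValue)
```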

View File

@@ -1,7 +1,7 @@
/* eslint-disable camelcase */
import { v4 as uuidv4 } from 'uuid'
import Cookies from 'js-cookie'
import parseUserAgent from './user-agent'
import { parseUserAgent } from './user-agent'
const COOKIE_NAME = '_docs-events'

View File

@@ -45,7 +45,7 @@ export function getShellExample(operation: Operation, codeSample: CodeSample) {
const args = [
operation.verb !== 'get' && `-X ${operation.verb.toUpperCase()}`,
`-H "Accept: ${defaultAcceptHeader}" \\ \n -H "Authorization: token <TOKEN>"`,
`-H "Accept: ${defaultAcceptHeader}" \\ \n -H "Authorization: Bearer <YOUR-TOKEN>"`,
`${operation.serverUrl}${requestPath}`,
requestBodyParams,
].filter(Boolean)
@@ -86,12 +86,12 @@ export function getGHExample(operation: Operation, codeSample: CodeSample) {
requestBodyParams = Object.keys(codeSample.request.bodyParameters)
.map((key) => {
if (typeof codeSample.request.bodyParameters[key] === 'string') {
return `-f ${key}='${codeSample.request.bodyParameters[key]}'\n`
return `-f ${key}='${codeSample.request.bodyParameters[key]}' `
} else {
return `-F ${key}=${codeSample.request.bodyParameters[key]}\n`
return `-F ${key}=${codeSample.request.bodyParameters[key]} `
}
})
.join(' ')
.join('\\\n ')
}
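With the switch from `.join(' ')` to `.join('\\\n  ')`, the generated body-parameter fragment puts one flag per continued line. A sketch of the resulting string (the parameter names are hypothetical):

```ts
// -f is used for string values, -F otherwise, per the branch above.
const fragments = [`-f title='Bug report' `, `-F draft=true `]
const requestBodyParams = fragments.join('\\\n  ')
// Produces:
// -f title='Bug report' \
//   -F draft=true
```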
const args = [
operation.verb !== 'get' && `--method ${operation.verb.toUpperCase()}`,
@@ -141,11 +141,7 @@ export function getJSExample(operation: Operation, codeSample: CodeSample) {
}
}
const comment = `// Octokit.js\n// https://github.com/octokit/core.js#readme\n`
const require = `const octokit = new Octokit(${stringify(
{ auth: 'personal-access-token123' },
null,
2
)})\n\n`
const require = `const octokit = new Octokit(${stringify({ auth: 'YOUR-TOKEN' }, null, 2)})\n\n`
return `${comment}${require}await octokit.request('${operation.verb.toUpperCase()} ${
operation.requestPath
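Rendered, the updated template produces an Octokit snippet along these lines; the endpoint and request body here are hypothetical, and only the comment, constructor, and `octokit.request` call come from the template above:

```ts
import { Octokit } from '@octokit/core'

// Octokit.js
// https://github.com/octokit/core.js#readme
const octokit = new Octokit({
  "auth": "YOUR-TOKEN"
})

await octokit.request('POST /repos/{owner}/{repo}/issues', {
  owner: 'octocat',
  repo: 'hello-world',
  title: 'Bug report',
})
```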

View File

@@ -19,7 +19,7 @@ const BROWSER_REGEXPS = [
/ms(ie)\/([^\s)]+)/i,
]
export default function parseUserAgent(ua = navigator.userAgent) {
export function parseUserAgent(ua = navigator.userAgent) {
ua = ua.toLowerCase()
const osRe = OS_REGEXPS.find((re) => re.test(ua))
let [, os = 'other', os_version = '0'] = (osRe && ua.match(osRe)) || []
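After the change to a named export, call sites (see the import updates earlier in this commit) consume it like this minimal sketch; the sample UA string is illustrative, and the exact return shape is assumed from the locals above:

```ts
import { parseUserAgent } from 'components/lib/user-agent'

const { os, os_version } = parseUserAgent(
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36'
)
console.log(os, os_version) // e.g. "mac", "10_15_7" (values assumed)
```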

View File

@@ -5,6 +5,7 @@ import ReactMarkdown from 'react-markdown'
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'
import { vs, vscDarkPlus } from 'react-syntax-highlighter/dist/cjs/styles/prism'
import gfm from 'remark-gfm'
import rehypeRaw from 'rehype-raw'
import { MarkdownContent } from 'components/ui/MarkdownContent'
@@ -22,6 +23,9 @@ export const ArticleMarkdown = ({ className, children }: Props) => {
<ReactMarkdown
className={cx(styles.articleMarkdown, className)}
remarkPlugins={[gfm as any]}
// This makes it so that HTML inside that `children`, as a string,
// is preserved and left alone.
rehypePlugins={[rehypeRaw]}
components={{
// eslint-disable-next-line @typescript-eslint/no-unused-vars
code: ({ node, inline, className, children, ...props }) => {

View File

@@ -41,6 +41,7 @@ export const PlaygroundContentBlock = ({ sectionIndex, contentBlock }: Props) =>
const isActive = sectionIndex === activeSectionIndex
const anchorLink = getAnchorLink(contentBlock.title || '')
const showDivider = !isActive && activeSectionIndex - 1 !== sectionIndex
return (
<div
className={cx(

View File

@@ -11,8 +11,8 @@ const article: PlaygroundArticleT = {
'/codespaces/setting-up-your-project-for-codespaces/setting-up-your-dotnet-project-for-codespaces',
codeLanguageId: 'dotnet',
intro: dedent`
This guide shows you how to add a dev container configuration to your repository to define the GitHub Codespaces development environment for your **C# (.NET)** codebase. For more information, see "[Introduction to dev containers](/codespaces/setting-up-your-project-for-codespaces/introduction-to-dev-containers)."
If you want to add a dev container configuration for another programming language, click the language button to the right.
`,
prerequisites: dedent`
@@ -36,9 +36,9 @@ const article: PlaygroundArticleT = {
When you create a codespace, your project is created on a remote VM that is dedicated to you. By default, the container for your codespace has many languages and runtimes including .NET. It also includes a common set of tools like git, wget, rsync, openssh, and nano.
You can customize your codespace by adjusting the amount of vCPUs and RAM, [adding dotfiles to personalize your environment](/codespaces/setting-up-your-codespace/personalizing-codespaces-for-your-account), or by modifying the tools and scripts installed.
GitHub Codespaces uses a file called \`devcontainer.json\` to configure the development container that you use when you work in a codespace. Each repository can contain one or more \`devcontainer.json\` files, to give you exactly the development environment you need to work on your code in a codespace.
On launch, GitHub Codespaces uses a \`devcontainer.json\` file, and any dependent files that make up the dev container configuration, to install tools and runtimes, and perform other setup tasks that the project requires. For more information, see "[Introduction to dev containers](/codespaces/setting-up-your-codespace/configuring-codespaces-for-your-project)."
`,
},
@@ -53,17 +53,17 @@ const article: PlaygroundArticleT = {
To set up your repository to use a custom dev container, you will need to create one or more \`devcontainer.json\` files. You can add these either from a template, in Visual Studio Code, or you can write your own. For more information on dev container configurations, see "[Introduction to dev containers](/codespaces/setting-up-your-codespace/configuring-codespaces-for-your-project)".
1. Access the Command Palette (\`Shift + Command + P\` / \`Ctrl + Shift + P\`), then start typing "dev container". Select **Codespaces: Add Development Container Configuration Files...**.
1. Access the Command Palette (<kbd>Shift</kbd> + <kbd>Command</kbd> + <kbd>P</kbd> / <kbd>Ctrl</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>), then start typing "dev container". Select **Codespaces: Add Development Container Configuration Files...**.
![Codespaces: Add Development Container Configuration Files... in the command palette](/assets/images/help/codespaces/add-prebuilt-container-command.png)
2. For this example, click **C# (.NET)**. If you need additional features you can select any container that's specific to C# (.NET) or a combination of tools such as C# (.NET) and MS SQL.
![Select C# (.NET) option from the list](/assets/images/help/codespaces/add-dotnet-prebuilt-container.png)
3. Click the recommended version of .NET.
![.NET version selection](/assets/images/help/codespaces/add-dotnet-version.png)
4. Accept the default option to add Node.js to your customization.
![Add Node.js selection](/assets/images/help/codespaces/dotnet-options.png)
5. Select any additional features to install and click **OK**.
6. Access the command palette (\`Shift + Command + P\`/ \`Ctrl + Shift + P\`), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
6. Access the command palette (<kbd>Shift</kbd> + <kbd>Command</kbd> + <kbd>P</kbd> / <kbd>Ctrl</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
![Rebuild container option](/assets/images/help/codespaces/codespaces-rebuild.png)
`,
},
@@ -189,15 +189,15 @@ const article: PlaygroundArticleT = {
"streetsidesoftware.code-spell-checker"
],
\`\`\`
3. Uncomment the \`postCreateCommand\` to restore dependencies as part of the codespace setup process.
\`\`\`json{:copy}
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "dotnet restore",
\`\`\`
4. Access the command palette (\`Shift + Command + P\`/ \`Ctrl + Shift + P\`), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
![Rebuild container option](/assets/images/help/codespaces/codespaces-rebuild.png)
Rebuilding inside your codespace ensures your changes work as expected before you commit the changes to the repository. If something does result in a failure, you'll be placed in a codespace with a recovery container that you can rebuild from to keep adjusting your container.
@@ -253,26 +253,26 @@ const article: PlaygroundArticleT = {
"INSTALL_AZURE_CLI": "false"
}
},
// Set *default* container specific settings.json values on container create.
"settings": {
"terminal.integrated.shell.linux": "/bin/bash"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-dotnettools.csharp"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [5000, 5001],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "dotnet restore",
// Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode"
}
}
`,
},
'1': {
@@ -282,22 +282,22 @@ const article: PlaygroundArticleT = {
# [Choice] .NET version: 5.0, 3.1, 2.1
ARG VARIANT="5.0"
FROM mcr.microsoft.com/vscode/devcontainers/dotnetcore:0-\${VARIANT}
# [Option] Install Node.js
ARG INSTALL_NODE="true"
ARG NODE_VERSION="lts/*"
RUN if [ "\${INSTALL_NODE}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install \${NODE_VERSION} 2>&1"; fi
# [Option] Install Azure CLI
ARG INSTALL_AZURE_CLI="false"
COPY library-scripts/azcli-debian.sh /tmp/library-scripts/
RUN if [ "$INSTALL_AZURE_CLI" = "true" ]; then bash /tmp/library-scripts/azcli-debian.sh; fi \
&& apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
`,

View File

@@ -53,7 +53,7 @@ const article: PlaygroundArticleT = {
To set up your repository to use a custom dev container, you will need to create one or more \`devcontainer.json\` files. You can add these either from a template, in Visual Studio Code, or you can write your own. For more information on dev container configurations, see "[Introduction to dev containers](/codespaces/setting-up-your-codespace/configuring-codespaces-for-your-project)".
1. Access the Command Palette (\`<kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>P</kbd>\` / \`<kbd>Ctrl</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>\`), then start typing "dev container". Select **Codespaces: Add Development Container Configuration Files...**.
1. Access the command palette (\`Shift + Command + P\` / \`Ctrl + Shift + P\`), then start typing "dev container". Select **Codespaces: Add Development Container Configuration Files...**.
!["Codespaces: Add Development Container Configuration Files..." in the command palette](/assets/images/help/codespaces/add-prebuilt-container-command.png)
2. For this example, click **Python 3**. If you need additional features you can select any container that's specific to Python or a combination of tools such as Python 3 and PostgreSQL.
![Select Python option from the list](/assets/images/help/codespaces/add-python-prebuilt-container.png)
@@ -62,7 +62,7 @@ const article: PlaygroundArticleT = {
4. Accept the default option to add Node.js to your customization.
![Add Node.js selection](/assets/images/help/codespaces/add-nodejs-selection.png)
5. Select any additional features to install and click **OK**.
6. Access the command palette (\`<kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>P</kbd>\`/ \`<kbd>Ctrl</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>\`), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
6. Access the command palette (\`Shift + Command + P\` / \`Ctrl + Shift + P\`), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
![Rebuild container option](/assets/images/help/codespaces/codespaces-rebuild.png)
`,
},
@@ -197,7 +197,7 @@ const article: PlaygroundArticleT = {
"postCreateCommand": "pip3 install --user -r requirements.txt",
\`\`\`
4. Access the command palette (\`<kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>P</kbd>\`/ \`<kbd>Ctrl</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>\`), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
4. Access the command palette (\`Shift + Command + P\` / \`Ctrl + Shift + P\`), then start typing "rebuild". Select **Codespaces: Rebuild Container**.
![Rebuild container option](/assets/images/help/codespaces/codespaces-rebuild.png)

View File

@@ -102,7 +102,7 @@ export const Editor: React.FC<Props> = ({ article }) => {
className = 'color-bg-accent'
}
}
return { style: { display: 'block' }, class: className }
return { style: { display: 'block' }, className }
}}
lineNumberStyle={{ minWidth: '3.25em' }}
>

View File

@@ -1,58 +1,57 @@
import { useTranslation } from 'components/hooks/useTranslation'
import { ParameterRow } from './ParameterRow'
import type { ChildParamsGroup } from './types'
import type { ChildParameter } from './types'
type Props = {
slug: string
childParamsGroups?: ChildParamsGroup[]
childParamsGroups: ChildParameter[]
parentName: string
parentType: string
}
export function ChildBodyParametersRows({ slug, childParamsGroups }: Props) {
export function ChildBodyParametersRows({
slug,
parentName,
parentType,
childParamsGroups,
}: Props) {
const { t } = useTranslation('products')
return (
<tr className="border-none">
<tr className="border-top-0">
<td colSpan={4} className="has-nested-table">
{childParamsGroups?.map((childParamGroup) => (
<details key={childParamGroup.id}>
<summary role="button" aria-expanded="false" className="keyboard-focus color-fg-muted">
<span className="d-inline-block mb-3" id={`${slug}-${childParamGroup.id}`}>
Properties of the
<code>{childParamGroup.parentName}</code>
{childParamGroup.parentType}
</span>
</summary>
<table
id={`${childParamGroup.parentName}-object`}
className="ml-4 mb-4 mt-2 color-bg-subtle"
>
<thead className="visually-hidden">
<tr>
<th>
{`${t('rest.reference.name')}, ${t('rest.reference.type')}, ${t(
'rest.reference.description'
)}`}
</th>
</tr>
</thead>
<tbody>
{childParamGroup.params.map((childParam, index) => (
<details className="ml-1">
<summary role="button" aria-expanded="false" className="keyboard-focus color-fg-muted">
<span className="d-inline-block mb-3" id={`${slug}-${parentName}-${parentType}`}>
Properties of the
<code>{parentName}</code>
{parentType}
</span>
</summary>
<table id={`${parentName}-object`} className="mb-4 mt-2 color-bg-subtle">
<thead className="visually-hidden">
<tr>
<th>
{`${t('rest.reference.name')}, ${t('rest.reference.type')}, ${t(
'rest.reference.description'
)}`}
</th>
</tr>
</thead>
<tbody>
{childParamsGroups.map((childParam) => {
return (
<ParameterRow
name={childParam.name}
description={childParam.description}
type={childParam.type}
isRequired={childParam.isRequired}
defaultValue={childParam.default}
enumValues={childParam.enum}
rowParams={childParam}
slug={slug}
isChild={true}
key={`${index}-${childParam}`}
key={childParam.name}
/>
))}
</tbody>
</table>
</details>
))}
)
})}
</tbody>
</table>
</details>
</td>
</tr>
)

View File

@@ -1,44 +1,24 @@
import { useTranslation } from 'components/hooks/useTranslation'
import { ChildBodyParametersRows } from './ChildBodyParametersRows'
import type { ChildParamsGroup } from './types'
import type { ChildParameter } from './types'
type Props = {
name: string
type: string | string[]
description: string
isRequired?: boolean
defaultValue?: string
enumValues?: string[]
rowParams: ChildParameter
slug: string
childParamsGroups?: ChildParamsGroup[] | null
numPreviews?: number
isChild?: boolean
}
export function ParameterRow({
name,
type,
description,
isRequired,
defaultValue,
enumValues,
slug,
childParamsGroups = null,
numPreviews = 0,
isChild = false,
}: Props) {
export function ParameterRow({ rowParams, slug, numPreviews = 0, isChild = false }: Props) {
const { t } = useTranslation('products')
return (
<>
<tr className={`${isChild ? 'color-bg-subtle' : ''}`}>
<td className={`${isChild ? 'pl-2' : ''}`}>
<td className={`${isChild ? 'px-3' : ''}`}>
<div>
<code className={`text-bold ${isChild ? 'f6' : 'f5'}`}>{name}</code>
<span className="color-fg-muted pl-2 f5">
{Array.isArray(type) ? type.join(' or ') : type}
</span>
{isRequired ? (
<code className={`text-bold ${isChild ? 'f6' : 'f5'}`}>{rowParams.name}</code>
<span className="color-fg-muted pl-2 f5">{rowParams.type}</span>
{rowParams.isRequired ? (
<span className={`color-fg-attention f5 ${isChild ? 'pl-3' : 'float-right'}`}>
{t('rest.reference.required')}
</span>
@@ -46,7 +26,7 @@ export function ParameterRow({
</div>
<div className="pl-1 pt-2 color-fg-muted f5">
<div dangerouslySetInnerHTML={{ __html: description }} />
<div dangerouslySetInnerHTML={{ __html: rowParams.description }} />
{numPreviews > 0 && (
<a href={`#${slug}-preview-notices`} className="d-inline">
{numPreviews > 1
@@ -55,18 +35,18 @@ export function ParameterRow({
</a>
)}
<div className="pt-2">
{defaultValue !== undefined && (
{rowParams.default && (
<p>
<span>{t('rest.reference.default')}: </span>
<code>{defaultValue.toString()}</code>
<code>{rowParams.default}</code>
</p>
)}
{enumValues && (
{rowParams.enum && rowParams.enum.length && (
<p>
<span>{t('rest.reference.enum_description_title')}: </span>
{enumValues.map((item, index) => {
return index !== enumValues.length - 1 ? (
{rowParams.enum.map((item, index, array) => {
return index !== array.length - 1 ? (
<span key={item + index}>
<code>{item}</code>,{' '}
</span>
@@ -82,8 +62,13 @@ export function ParameterRow({
</div>
</td>
</tr>
{childParamsGroups && childParamsGroups.length > 0 && (
<ChildBodyParametersRows slug={slug} childParamsGroups={childParamsGroups} />
{rowParams.childParamsGroups && rowParams.childParamsGroups.length > 0 && (
<ChildBodyParametersRows
slug={slug}
parentName={rowParams.name}
parentType={rowParams.type}
childParamsGroups={rowParams.childParamsGroups}
/>
)}
</>
)

View File

@@ -13,6 +13,6 @@
table-layout: fixed !important;
}
.codeBlock code {
.codeBlock code:not(td *) {
word-break: break-all;
}

View File

@@ -42,10 +42,12 @@ export function RestParameterTable({ slug, numPreviews, parameters, bodyParamete
<tbody>
<ParameterRow
name={'accept'}
type={'string'}
description={`<p>Setting to <code>application/vnd.github+json</code> is recommended.</p>`}
isRequired={false}
rowParams={{
name: 'accept',
type: 'string',
description: `<p>Setting to <code>application/vnd.github+json</code> is recommended.</p>`,
isRequired: false,
}}
slug={slug}
numPreviews={numPreviews}
/>
@@ -65,12 +67,14 @@ export function RestParameterTable({ slug, numPreviews, parameters, bodyParamete
</tr>
{pathParams.map((param, index) => (
<ParameterRow
name={param.name}
type={param.schema.type}
description={param.description}
isRequired={param.required}
defaultValue={param.schema.default}
enumValues={param.schema.enum}
rowParams={{
name: param.name,
type: param.schema.type,
description: param.description,
isRequired: param.required,
default: param.schema.default,
enum: param.schema.enum,
}}
slug={slug}
key={`${index}-${param}`}
/>
@@ -95,12 +99,14 @@ export function RestParameterTable({ slug, numPreviews, parameters, bodyParamete
{queryParams.map((param, index) => (
<ParameterRow
name={param.name}
type={param.schema.type}
description={param.description}
isRequired={param.required}
defaultValue={param.schema.default}
enumValues={param.schema.enum}
rowParams={{
name: param.name,
type: param.schema.type,
description: param.description,
isRequired: param.required,
default: param.schema.default,
enum: param.schema.enum,
}}
slug={slug}
key={`${index}-${param}`}
/>
@@ -124,17 +130,7 @@ export function RestParameterTable({ slug, numPreviews, parameters, bodyParamete
</tr>
{bodyParameters.map((param, index) => (
<ParameterRow
name={param.name}
type={param.type}
description={param.description}
isRequired={param.isRequired}
defaultValue={param.default}
enumValues={param.enum}
slug={slug}
childParamsGroups={param.childParamsGroups}
key={`${index}-${param}`}
/>
<ParameterRow rowParams={param} slug={slug} key={`${index}-${param}`} />
))}
</>
)}

View File

@@ -54,26 +54,20 @@ export interface BodyParameter {
name: string
description: string
type: string
isRequired: boolean
isRequired?: boolean
default?: string
enum?: Array<string>
childParamsGroups?: Array<ChildParamsGroup>
}
export interface ChildParamsGroup {
id: string
params: Array<ChildParameter>
parentName: string
parentType: string
childParamsGroups?: Array<ChildParameter>
}
export interface ChildParameter {
name: string
description: string
type: string
isRequired: boolean
isRequired?: boolean
enum?: Array<string>
default?: string
childParamsGroups?: ChildParameter[]
}
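As an illustration of the flattened shape, a hypothetical body parameter (not one from the real schema): nested properties now sit directly on `childParamsGroups` as `ChildParameter[]`, with `parentName`/`parentType` passed to `ChildBodyParametersRows` separately instead of being stored per group:

```ts
const param: BodyParameter = {
  name: 'permissions',
  type: 'object',
  description: '<p>The permissions granted to the token.</p>',
  isRequired: false,
  childParamsGroups: [
    {
      name: 'contents',
      type: 'string',
      description: '<p>The level of contents access.</p>',
      enum: ['read', 'write'],
    },
  ],
}
```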
export type ExampleT = {

View File

@@ -13,7 +13,6 @@ export function SearchError({ error }: Props) {
return (
<div>
{' '}
<Flash variant="danger" sx={{ margin: '3rem' }}>
{t('search_error')}
<br />

View File

@@ -45,7 +45,8 @@ export const RestCollapsibleSection = (props: SectionProps) => {
router.query.productId === 'rest' ||
// These pages need the Article Page mini tocs instead of the Rest Pages
router.asPath.includes('/rest/guides') ||
router.asPath.includes('/rest/overview')
router.asPath.includes('/rest/overview') ||
router.asPath.includes('/rest/quickstart')
? []
: useAutomatedPageContext().miniTocItems
@@ -66,7 +67,11 @@ export const RestCollapsibleSection = (props: SectionProps) => {
}, [])
useEffect(() => {
if (!router.asPath.includes('guides') && !router.asPath.includes('overview')) {
if (
!router.asPath.includes('guides') &&
!router.asPath.includes('overview') &&
!router.asPath.includes('quickstart')
) {
const observer = new IntersectionObserver(
(entries) => {
entries.forEach((entry) => {
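The observer callback is truncated above. As a rough sketch of the pattern (the selectors, class name, and margins here are assumptions, not this file's exact code):

```ts
// Highlight the mini-toc entry for whichever heading is currently visible.
const observer = new IntersectionObserver(
  (entries) => {
    for (const entry of entries) {
      if (entry.isIntersecting) {
        document
          .querySelector(`nav a[href="#${entry.target.id}"]`)
          ?.classList.add('color-bg-inset')
      }
    }
  },
  { rootMargin: '0px 0px -80% 0px' } // margin value assumed
)
document.querySelectorAll('h2[id], h3[id]').forEach((el) => observer.observe(el))
```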

View File

@@ -80,10 +80,16 @@ export const SidebarProduct = () => {
const restSection = () => {
const conceptualPages = currentProductTree.childPages.filter(
(page) => page.href.includes('guides') || page.href.includes('overview')
(page) =>
page.href.includes('guides') ||
page.href.includes('overview') ||
page.href.includes('quickstart')
)
const restPages = currentProductTree.childPages.filter(
(page) => !page.href.includes('guides') && !page.href.includes('overview')
(page) =>
!page.href.includes('guides') &&
!page.href.includes('overview') &&
!page.href.includes('quickstart')
)
return (
<>
@@ -95,6 +101,7 @@ export const SidebarProduct = () => {
const isActive =
routePath.includes(childPage.href + '/') || routePath === childPage.href
const defaultOpen = hasExactCategory ? isActive : false
return (
<li
key={childPage.href + i}
@@ -102,12 +109,23 @@ export const SidebarProduct = () => {
data-is-current-page={isActive && isStandaloneCategory}
className={cx('py-1', isActive && 'color-bg-inset')}
>
<ProductCollapsibleSection
defaultOpen={defaultOpen}
routePath={routePath}
title={childTitle}
page={childPage}
/>
{childPage.href.includes('quickstart') ? (
<Link
href={childPage.href}
className={cx(
'd-block pl-4 pr-5 py-1 color-fg-default text-bold no-underline width-full'
)}
>
{childTitle}
</Link>
) : (
<ProductCollapsibleSection
defaultOpen={defaultOpen}
routePath={routePath}
title={childTitle}
page={childPage}
/>
)}
</li>
)
})}

View File

@@ -1,43 +1,42 @@
import { Heading, NavList } from '@primer/react'
import cx from 'classnames'
import { ActionList, Heading } from '@primer/react'
import type { MiniTocItem } from 'components/context/ArticleContext'
import { MiniTocItem } from 'components/context/ArticleContext'
import { Link } from 'components/Link'
import { useTranslation } from 'components/hooks/useTranslation'
import styles from './Minitocs.module.scss'
export type MiniTocsPropsT = {
pageTitle: string
miniTocItems: MiniTocItem[]
}
const renderTocItem = (item: MiniTocItem) => {
function RenderTocItem(item: MiniTocItem) {
return (
<ActionList.Item
as="li"
key={item.contents.href}
className={item.platform}
sx={{
listStyle: 'none',
padding: '2px',
':hover': {
bg: 'var(--color-canvas-inset) !important',
},
'ul > li': {
':hover': {
bg: 'var(--color-neutral-subtle) !important',
},
},
}}
>
<div className={cx('lh-condensed d-block width-full')}>
<a className="d-block width-auto" href={item.contents.href}>
{item.contents.title}
</a>
{item.items && item.items.length > 0 ? (
<ul className="ml-3">{item.items.map(renderTocItem)}</ul>
) : null}
</div>
</ActionList.Item>
<div className={cx(styles.nested, item.platform)}>
<NavList.Item
href={item.contents.href}
sx={{
padding: '4px 0 4px 0',
marginLeft: '7px',
}}
>
{item.contents.title}
</NavList.Item>
{item.items && item.items.length > 0 && (
<ul className={cx(styles.indentNested)}>
{item.items.map((toc) => (
<RenderTocItem
key={toc.contents.href}
contents={toc.contents}
items={toc.items}
platform={toc.platform}
/>
))}
</ul>
)}
</div>
)
}
@@ -46,17 +45,22 @@ export function MiniTocs({ pageTitle, miniTocItems }: MiniTocsPropsT) {
return (
<>
<Heading as="h2" id="in-this-article" className="mb-1" sx={{ fontSize: 1 }}>
<Heading as="h2" id="in-this-article" className="mb-1 ml-3" sx={{ fontSize: 1 }}>
<Link href="#in-this-article">{t('miniToc')}</Link>
</Heading>
<ActionList variant="full" className="my-2" key={pageTitle} as="div">
<div>
{miniTocItems.map((items, i) => {
return <ul key={pageTitle + i}>{renderTocItem(items)}</ul>
})}
</div>
</ActionList>
<NavList className="my-2" key={pageTitle}>
{miniTocItems.map((items, i) => {
return (
<RenderTocItem
key={items.contents.href + i}
contents={items.contents}
items={items.items}
platform={items.platform}
/>
)
})}
</NavList>
</>
)
}

View File

@@ -0,0 +1,12 @@
.indentNested {
padding-inline-start: 0;
}
.nested {
div ul div li {
padding-left: 4em;
}
div li {
padding-left: 2em;
}
}

View File

@@ -227,7 +227,7 @@ defaultPlatform: linux
### `defaultTool`
- Purpose: Override the initial tool selection for a page, where tool refers to the application the reader is using to work with GitHub (such as GitHub.com's web UI, the GitHub CLI, or GitHub Desktop) or the GitHub APIs (such as cURL or the GitHub CLI). For more information about the tool selector, see [Markup reference for GitHub Docs](../contributing/content-markup-reference.md#tool-tags). If this frontmatter is omitted, then the tool-specific content matching the GitHub web UI is shown by default. If a user has indicated a tool preference (by clicking on a tool tab), then the user's preference will be applied instead of the default value.
- Purpose: Override the initial tool selection for a page, where the tool refers to the application the reader is using to work with GitHub (such as GitHub.com's web UI, the GitHub CLI, or GitHub Desktop) or the GitHub APIs (such as cURL or the GitHub CLI). For more information about the tool selector, see [Markup reference for GitHub Docs](../contributing/content-markup-reference.md#tool-tags). If this frontmatter is omitted, then the tool-specific content matching the GitHub web UI is shown by default. If a user has indicated a tool preference (by clicking on a tool tab), then the user's preference will be applied instead of the default value.
- Type: `String`, one of: `webui`, `cli`, `desktop`, `curl`, `codespaces`, `vscode`, `importer_cli`, `graphql`, `powershell`, `bash`, `javascript`.
- Optional.
@@ -263,9 +263,9 @@ includeGuides:
- Optional.
### `topics`
- Purpose: Indicate the topics covered by the article. The topics are used to filter guides on some landing pages. For example, the guides at the bottom of [this page](https://docs.github.com/en/actions/guides) can be filtered by topics and the topics are listed under the guide intro. Topics are also added to all search records that get created for each page. The search records contain a `topics` property that is used to filter search results by topics. For more information, see the [Search](/contributing/search.md) contributing guide. Refer to the content models for more details around adding topics. A full list of existing topics is located in the [allowed topics file](/data/allowed-topics.js). If topics in article frontmatter and the allow-topics list become out of sync, the [topics CI test](/tests/unit/search/topics.js) will fail.
- Purpose: Indicate the topics covered by the article. The topics are used to filter guides on some landing pages. For example, the guides at the bottom of [this page](https://docs.github.com/en/actions/guides) can be filtered by topics, and the topics are listed under the guide intro. Topics are also added to all search records that get created for each page. The search records contain a `topics` property that is used to filter search results by topics. For more information, see the [Search](/contributing/search.md) contributing guide. Refer to the content models for more details about adding topics. A full list of existing topics is located in the [allowed topics file](/data/allowed-topics.js). If topics in article frontmatter and the allow-topics list become out of sync, the [topics CI test](/tests/unit/search/topics.js) will fail.
- Type: Array of `String`s
- Optional: Topics are preferred for each article, but, there may be cases where existing articles don't yet have topics or adding a topic to a new article may not add value.
- Optional: Topics are preferred for each article, but there may be cases where existing articles don't yet have topics, or adding a topic to a new article may not add value.
### `contributor`
- Purpose: Indicate an article is contributed and maintained by a third-party organization, typically a GitHub Technology Partner.
@@ -294,7 +294,7 @@ contributor:
If you see two single quotes in a row (`''`) in YML frontmatter where you might expect to see one (`'`), this is the YML-preferred way to escape a single quote. From [the YAML spec](https://yaml.org/spec/history/2001-12-10.html):
> In single quoted leaves, a single quote character needs to be escaped. This is done by repeating the character.
> In single-quoted leaves, a single quote character needs to be escaped. This is done by repeating the character.
As an alternative, you can change the single quotes surrounding the frontmatter field to double quotes and leave interior single quotes unescaped.
@@ -314,7 +314,7 @@ Make sure not to add hardcoded "In this article" sections in the Markdown source
A content file can have **two** types of versioning:
* [`versions`](#versions) frontmatter (**required**)
* Determines in which the versions the page is available. See [contributing/permalinks](../contributing/permalinks.md) for more info.
* Determines in which versions the page is available. See [contributing/permalinks](../contributing/permalinks.md) for more info.
* Liquid statements in content (**optional**)
* Conditionally render content depending on the current version being viewed. See [contributing/liquid-helpers](../contributing/liquid-helpers.md) for more info. Note Liquid conditionals can also appear in `data` and `include` files.
@@ -358,7 +358,7 @@ and when viewed on GitHub Enterprise Server docs, the version is included as wel
### Preventing transformations
Sometimes you want to link to a Dotcom-only article in Enterprise content and you don't want the link to be Enterprise-ified. To prevent the transformation, include the preferred version in the path.
Sometimes you want to link to a Dotcom-only article in Enterprise content and you don't want the link to be Enterprise-ified. To prevent the transformation, you should include the preferred version in the path.
```markdown
"[GitHub's Terms of Service](/free-pro-team@latest/github/site-policy/github-terms-of-service)"
@@ -388,8 +388,8 @@ The homepage is the main Table of Contents file for the docs site. The homepage
To create a product guides page (e.g. [Actions' Guide page](https://docs.github.com/en/actions/guides)), create or modify an existing markdown file with these specific frontmatter values:
1. Use the product guides page template by referencing it `layout: product-guides`
2. (optional) Include the learning tracks in [`learningTracks`](#learningTracks)
1. Use the product guides page template by referencing `layout: product-guides`.
2. (optional) Include the learning tracks in [`learningTracks`](#learningTracks).
3. (optional) Define which articles to include with [`includeGuides`](#includeGuides).
If using learning tracks, they need to be defined in [`data/learning-tracks/*.yml`](../data/learning-tracks/README.md).

View File

@@ -1,6 +1,6 @@
---
title: Setting up and managing your personal account on GitHub
intro: 'You can manage settings for your personal account on {% data variables.product.prodname_dotcom %}, including email preferences, collaborator access for personal repositories, and organization memberships.'
intro: You can manage settings for your personal account on {% ifversion fpt or ghec or ghes %}{% data variables.product.product_location %}{% elsif ghae %}{% data variables.product.product_name %}{% endif %}, including email preferences, access to personal repositories, and organization memberships. You can also manage the account itself.
shortTitle: Personal accounts
redirect_from:
- /categories/setting-up-and-managing-your-github-user-account
@@ -15,6 +15,7 @@ topics:
- Accounts
children:
- /managing-personal-account-settings
- /managing-your-personal-account
- /managing-email-preferences
- /managing-access-to-your-personal-repositories
- /managing-your-membership-in-organizations

View File

@@ -1,6 +1,6 @@
---
title: Managing user account settings
intro: 'You can change several settings for your personal account, including changing your username and deleting your account.'
intro: 'You can manage settings for your personal account, including your theme, username, default branch, accessibility, and security settings.'
redirect_from:
- /categories/29/articles
- /categories/user-accounts
@@ -19,9 +19,6 @@ children:
- /managing-your-theme-settings
- /managing-your-tab-size-rendering-preference
- /changing-your-github-username
- /merging-multiple-personal-accounts
- /converting-a-user-into-an-organization
- /deleting-your-personal-account
- /permission-levels-for-a-personal-account-repository
- /permission-levels-for-a-project-board-owned-by-a-personal-account
- /managing-accessibility-settings
@@ -29,7 +26,6 @@ children:
- /managing-security-and-analysis-settings-for-your-personal-account
- /managing-access-to-your-personal-accounts-project-boards
- /integrating-jira-with-your-personal-projects
- /best-practices-for-leaving-your-company
- /what-does-the-available-for-hire-checkbox-do
shortTitle: Personal account settings
---

View File

@@ -6,6 +6,7 @@ redirect_from:
- /github/setting-up-and-managing-your-github-user-account/best-practices-for-leaving-your-company
- /github/setting-up-and-managing-your-github-user-account/managing-user-account-settings/best-practices-for-leaving-your-company
- /account-and-profile/setting-up-and-managing-your-github-user-account/managing-user-account-settings/best-practices-for-leaving-your-company
- /account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-personal-account-settings/best-practices-for-leaving-your-company
versions:
fpt: '*'
ghec: '*'

View File

@@ -7,6 +7,7 @@ redirect_from:
- /github/setting-up-and-managing-your-github-user-account/converting-a-user-into-an-organization
- /github/setting-up-and-managing-your-github-user-account/managing-user-account-settings/converting-a-user-into-an-organization
- /account-and-profile/setting-up-and-managing-your-github-user-account/managing-user-account-settings/converting-a-user-into-an-organization
- /account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-personal-account-settings/converting-a-user-into-an-organization
intro: You can convert your personal account into an organization. This allows more granular permissions for repositories that belong to the organization.
versions:
fpt: '*'

View File

@@ -1,38 +1,53 @@
---
title: Deleting your personal account
intro: 'You can delete your personal account on {% data variables.product.product_name %} at any time.'
intro: 'You can delete your personal account on {% data variables.product.product_location %} at any time.'
redirect_from:
- /articles/deleting-a-user-account
- /articles/deleting-your-user-account
- /github/setting-up-and-managing-your-github-user-account/deleting-your-user-account
- /github/setting-up-and-managing-your-github-user-account/managing-user-account-settings/deleting-your-user-account
- /account-and-profile/setting-up-and-managing-your-github-user-account/managing-user-account-settings/deleting-your-user-account
- /account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-personal-account-settings/deleting-your-personal-account
versions:
fpt: '*'
ghes: '*'
ghec: '*'
topics:
- Accounts
shortTitle: Delete your personal account
shortTitle: Delete your account
---
Deleting your personal account removes all repositories, forks of private repositories, wikis, issues, pull requests, and pages owned by your account. {% ifversion fpt or ghec %} Issues and pull requests you've created and comments you've made in repositories owned by other users will not be deleted - instead, they'll be associated with our [Ghost user](https://github.com/ghost).{% else %}Issues and pull requests you've created and comments you've made in repositories owned by other users will not be deleted.{% endif %}
{% ifversion fpt or ghec %} When you delete your account we stop billing you. The email address associated with the account becomes available for use with a different account on {% data variables.product.product_location %}. After 90 days, the account name also becomes available to anyone else to use on a new account. {% endif %}
## About deletion of your personal account
If you're the only owner of an organization, you must transfer ownership to another person or delete the organization before you can delete your personal account. If there are other owners in the organization, you must remove yourself from the organization before you can delete your personal account.
Deleting your personal account removes all repositories, forks of private repositories, wikis, issues, pull requests, and pages owned by your account. {% ifversion fpt or ghec %}Issues and pull requests you've created and comments you've made in repositories owned by other users will not be deleted. Your resources and comments will become associated with the [ghost user](https://github.com/ghost).{% else %}Issues and pull requests you've created and comments you've made in repositories owned by other users will not be deleted.{% endif %}
{% ifversion ghec %}
{% note %}
**Note**: If your enterprise manages your account and you sign into {% data variables.product.product_location %} through your company's identity provider (IdP), you cannot delete your account. For more information, see "[About {% data variables.product.prodname_emus %}](/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users)."
{% endnote %}
{% endif %}
{% ifversion fpt or ghec %}When you delete your account we stop billing you. The email address associated with the account becomes available for use with a different account on {% data variables.product.product_location %}. After 90 days, the account name also becomes available to anyone else to use on a new account. {% endif %}
If you're the only owner of an organization, you must transfer ownership to another person or delete the organization before you can delete your personal account. If there are other owners in the organization, you must remove yourself from the organization before you can delete your personal account.
For more information, see the following articles.
For more information, see:
- "[Transferring organization ownership](/articles/transferring-organization-ownership)"
- "[Deleting an organization account](/articles/deleting-an-organization-account)"
- "[Removing yourself from an organization](/articles/removing-yourself-from-an-organization/)"
## Back up your account data
Before you delete your personal account, make a copy of all repositories, private forks, wikis, issues, and pull requests owned by your account.
Before you delete your personal account, make a copy of all repositories, private forks, wikis, issues, and pull requests owned by your account. For more information, see "[Backing up a repository](/repositories/archiving-a-github-repository/backing-up-a-repository)."
{% warning %}
**Warning:** Once your personal account has been deleted, GitHub cannot restore your content.
**Warning:** Once your personal account has been deleted, {% ifversion fpt or ghec %}{% data variables.product.company_short %}{% elsif ghes or ghae %}an enterprise owner{% endif %} cannot restore your content.
{% endwarning %}

View File

@@ -0,0 +1,19 @@
---
title: Managing your personal account
intro: 'You can manage your personal account on {% ifversion fpt or ghec or ghes %}{% data variables.product.product_location %}{% elsif ghae %}{% data variables.product.product_name %}{% endif %}. For example, you can {% ifversion fpt or ghec %}manage multiple accounts, {% endif %}convert an account to an organization{% ifversion fpt or ghec or ghes %}, or delete an account{% endif %}.'
shortTitle: Manage personal account
versions:
fpt: '*'
ghes: '*'
ghae: '*'
ghec: '*'
topics:
- Accounts
children:
- /managing-multiple-accounts
- /merging-multiple-personal-accounts
- /converting-a-user-into-an-organization
- /best-practices-for-leaving-your-company
- /deleting-your-personal-account
---

View File

@@ -0,0 +1,105 @@
---
title: Managing multiple accounts
intro: 'If you use one workstation to contribute to projects for more than one account on {% data variables.product.product_location %}, you can modify your Git configuration to simplify the contribution process.'
versions:
feature: multiple-accounts-one-workstation
topics:
- Accounts
- Git
- GitHub
shortTitle: Manage multiple accounts
---
## About management of multiple accounts
In some cases, you may need to use multiple accounts on {% data variables.product.product_location %}. For example, you may have a personal account for open source contributions, and your employer may also create and manage a user account for you within an enterprise.
You cannot use your {% data variables.product.prodname_managed_user %} to contribute to public projects on {% data variables.product.product_location %}, so you must contribute to those resources using your personal account. For more information, see "[About {% data variables.product.prodname_emus %}]({% ifversion fpt %}/enterprise-cloud@latest{% endif %}/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users#abilities-and-restrictions-of-managed-user-accounts){% ifversion fpt %}" in the {% data variables.product.prodname_ghe_cloud %} documentation.{% elsif ghec %}."{% endif %}
If you want to use one workstation to contribute from both accounts, you can simplify contribution with Git by using a mixture of protocols to access repository data, or by using credentials on a per-repository basis.
{% warning %}
**Warning**: Be mindful when you use one workstation to contribute to two separate accounts. Management of two or more accounts can increase the chance of mistakenly leaking internal code to the public.
{% endwarning %}
If you aren't required to use a {% data variables.product.prodname_managed_user %}, {% data variables.product.company_short %} recommends that you use one personal account for all your work on {% data variables.product.product_location %}. With a single personal account, you can contribute to a combination of personal, open source, or professional projects using one identity. Other people can invite the account to contribute to both individual repositories and repositories owned by an organization, and the account can be a member of multiple organizations or enterprises.
## Contributing to two accounts using HTTPS and SSH
If you contribute with two accounts from one workstation, you can access repositories by using a different protocol and credentials for each account.
Git can use either the HTTPS or SSH protocol to access and update data in repositories on {% data variables.product.product_location %}. The protocol you use to clone a repository determines which credentials your workstation will use to authenticate when you access the repository. With this approach to account management, you store the credentials for one account to use for HTTPS connections and upload an SSH key to the other account to use for SSH connections.
You can find both the HTTPS and SSH URLs for cloning a repository on {% data variables.product.product_name %}. For more information, see "[Cloning a repository](/repositories/creating-and-managing-repositories/cloning-a-repository)."
For more information about the use of SSH to access repositories on {% data variables.product.product_name %}, see "[Connecting to {% data variables.product.prodname_dotcom %} with SSH](/authentication/connecting-to-github-with-ssh)."
## Contributing to multiple accounts using HTTPS and PATs
Alternatively, if you want to use the HTTPS protocol for both accounts, you can use different personal access tokens (PATs) for each account by configuring Git to store different credentials for each repository.
{% mac %}
{% data reusables.git.open-terminal %}
{% data reusables.git.confirm-credential-manager %}
{% data reusables.git.clear-the-stored-credentials %}
{% data reusables.git.no-credential-manager %}
- If the output is `osxkeychain`, you're using the macOS keychain. To clear the credentials, enter the following command.
```shell{:copy}
git credential-osxkeychain erase https://github.com
```
{% data reusables.git.clear-stored-gcm-credentials %}
{% data reusables.git.cache-on-repository-path %}
{% data reusables.accounts.create-personal-access-tokens %}
{% data reusables.git.provide-credentials %}
{% endmac %}
{% windows %}
1. Open Git Bash.
{% data reusables.git.confirm-credential-manager %}
{% data reusables.git.clear-the-stored-credentials %}
{% data reusables.git.no-credential-manager %}
{% data reusables.git.clear-stored-gcm-credentials %}
- If the output is `wincred`, you're using the Windows Credential Manager. To clear the credentials, enter the following command.
```shell{:copy}
cmdkey /delete:LegacyGeneric:target=git:https://github.com
```
{% data reusables.git.cache-on-repository-path %}
{% data reusables.accounts.create-personal-access-tokens %}
{% data reusables.git.provide-credentials %}
{% endwindows %}
{% linux %}
{% data reusables.git.open-terminal %}
{% data reusables.git.confirm-credential-manager %}
{% data reusables.git.clear-the-stored-credentials %}
{% data reusables.git.no-credential-manager %}
{% data reusables.git.clear-stored-gcm-credentials %}
{% data reusables.git.cache-on-repository-path %}
{% data reusables.accounts.create-personal-access-tokens %}
{% data reusables.git.provide-credentials %}
{% endlinux %}
## Contributing to multiple accounts using SSH and `GIT_SSH_COMMAND`
If you want to use the SSH protocol for both accounts, you can use different SSH keys for each account. For more information about using SSH, see "[Connecting to {% data variables.product.prodname_dotcom %} with SSH](/authentication/connecting-to-github-with-ssh)."
To use a different SSH key for different repositories that you clone to your workstation, you must write a shell wrapper function for Git operations. The function should perform the following steps.
1. Determine the repository's full name with owner, using a command such as `git config --get remote.origin.url`.
2. Choose the correct SSH key for authentication.
3. Modify `GIT_SSH_COMMAND` accordingly. For more information about `GIT_SSH_COMMAND`, see [Environment Variables](https://git-scm.com/docs/git#Documentation/git.txt-codeGITSSHCOMMANDcode) in the Git documentation.
For example, the following command sets the `GIT_SSH_COMMAND` environment variable to specify an SSH command that uses the private key file at **_PATH/TO/KEY/FILE_** for authentication to clone the repository named **_OWNER_**/**_REPOSITORY_** on {% data variables.product.product_location %}.
<pre>
GIT_SSH_COMMAND='ssh -i <em>PATH/TO/KEY/FILE</em> -o IdentitiesOnly=yes' git clone git@github.com:<em>OWNER</em>/<em>REPOSITORY</em>
</pre>

View File

@@ -8,12 +8,13 @@ redirect_from:
- /github/setting-up-and-managing-your-github-user-account/merging-multiple-user-accounts
- /github/setting-up-and-managing-your-github-user-account/managing-user-account-settings/merging-multiple-user-accounts
- /account-and-profile/setting-up-and-managing-your-github-user-account/managing-user-account-settings/merging-multiple-user-accounts
- /account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-personal-account-settings/merging-multiple-personal-accounts
versions:
fpt: '*'
ghec: '*'
topics:
- Accounts
shortTitle: Merge multiple personal accounts
shortTitle: Merge multiple accounts
---
{% tip %}

View File

@@ -354,7 +354,7 @@ runs:
#### `runs.steps[*].with`
**Optional** A `map` of the input parameters defined by the action. Each input parameter is a key/value pair. Input parameters are set as environment variables. The variable is prefixed with INPUT_ and converted to upper case.
**Optional** A `map` of the input parameters defined by the action. Each input parameter is a key/value pair. For more information, see [Example: Specifying inputs](#example-specifying-inputs).
```yaml
runs:

View File

@@ -38,9 +38,10 @@ You can add the action you've created to {% data variables.product.prodname_mark
To draft a new release and publish the action to {% data variables.product.prodname_marketplace %}, follow these instructions:
{% data reusables.repositories.navigate-to-repo %}
1. When a repository contains an action metadata file (`action.yml` or `action.yaml`), you'll see a banner to publish the action to {% data variables.product.prodname_marketplace %}. Click **Draft a release**.
![Publish this action to markeplace button](/assets/images/help/repository/publish-github-action-to-markeplace-button.png)
1. Select **Publish this action to the {% data variables.product.prodname_marketplace %}**. If you can't select the **Publish this action to the {% data variables.product.prodname_marketplace %}** checkbox, you'll need to read and accept the {% data variables.product.prodname_marketplace %} agreement first.
1. Navigate to the action metadata file in your repository (`action.yml` or `action.yaml`), and you'll see a banner to publish the action to {% data variables.product.prodname_marketplace %}. Click **Draft a release**.
![Publish this action to marketplace button](/assets/images/help/repository/publish-github-action-to-marketplace-button.png)
1. Under "Release Action", select the checkbox to publish the action to the {% data variables.product.prodname_marketplace %}. If you can't select the checkbox, you must first click the link to read and accept the {% data variables.product.prodname_marketplace %} Developer Agreement.
![Select publish to Marketplace](/assets/images/help/repository/marketplace_actions_publish.png)
1. If the labels in your metadata file contain any problems, you will see an error message.
![See notification](/assets/images/help/repository/marketplace_actions_fixerrors.png)

View File

@@ -73,6 +73,7 @@ The following example OIDC token uses a subject (`sub`) that references a job en
"repository": "octo-org/octo-repo",
"repository_owner": "octo-org",
"actor_id": "12",
"repo_visibility": private,
"repository_id": "74",
"repository_owner_id": "65",
"run_id": "example-run-id",
@@ -128,6 +129,7 @@ The token also includes custom claims provided by {% data variables.product.prod
| `job_workflow_ref`| This is the ref path to the reusable workflow used by this job. For more information, see "["Using OpenID Connect with reusable workflows"](/actions/deployment/security-hardening-your-deployments/using-openid-connect-with-reusable-workflows)." |
| `ref`| _(Reference)_ The git ref that triggered the workflow run. |
| `ref_type`| The type of `ref`, for example: "branch". |
| `repo_visibility` | The visibility of the repository where the workflow is running. Accepts the following values: `internal`, `private`, or `public`. |
| `repository`| The repository from where the workflow is running. |
| `repository_id`| The ID of the repository from where the workflow is running. |
| `repository_owner`| The name of the organization in which the `repository` is stored. |
@@ -143,11 +145,11 @@ With OIDC, a {% data variables.product.prodname_actions %} workflow requires a t
Audience and Subject claims are typically used in combination while setting conditions on the cloud role/resources to scope its access to the GitHub workflows.
- **Audience**: By default, this value uses the URL of the organization or repository owner. This can be used to set a condition that only the workflows in the specific organization can access the cloud role.
- **Subject**: Has a predefined format and is a concatenation of some of the key metadata about the workflow, such as the {% data variables.product.prodname_dotcom %} organization, repository, branch, or associated [`job`](/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idenvironment) environment. See "[Example subject claims](#example-subject-claims)" to see how the subject claim is assembled from concatenated metadata.
- **Subject**: By default, has a predefined format and is a concatenation of some of the key metadata about the workflow, such as the {% data variables.product.prodname_dotcom %} organization, repository, branch, or associated [`job`](/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idenvironment) environment. See "[Example subject claims](#example-subject-claims)" to see how the subject claim is assembled from concatenated metadata.
There are also many additional claims supported in the OIDC token that can also be used for setting these conditions.
If you need more granular trust conditions, you can customize the issuer (`iss`) and subject (`sub`) claims that are included with the JWT. For more information, see "[Customizing the token claims](#customizing-the-token-claims)".
In addition, your cloud provider could allow you to assign a role to the access tokens, letting you specify even more granular permissions.
There are also many additional claims supported in the OIDC token that can be used for setting these conditions. In addition, your cloud provider could allow you to assign a role to the access tokens, letting you specify even more granular permissions.
{% note %}
@@ -238,6 +240,198 @@ curl -H "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" "$ACTIONS_ID_TOK
{% data reusables.actions.oidc-permissions-token %}
{% ifversion actions-oidc-hardening-config %}
## Customizing the token claims
You can security harden your OIDC configuration by customizing the claims that are included with the JWT. These customizations allow you to define more granular trust conditions on your cloud roles when allowing your workflows to access resources hosted in the cloud:
{% ifversion ghec %} - For an additional layer of security, you can append your enterprise slug to the `issuer` URL. This lets you set conditions on the issuer (`iss`) claim, configuring it to only accept JWT tokens from a unique `issuer` URL that must include your enterprise slug.{% endif %}
- You can standardize your OIDC configuration by setting conditions on the subject (`sub`) claim that require JWT tokens to originate from a specific repository, reusable workflow, or other source.
- You can define granular OIDC policies by using additional OIDC token claims, such as `repository_id` and `repo_visibility`. For more information, see "[Understanding the OIDC token](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#understanding-the-oidc-token)".
To customize these claim formats, organization and repository admins can use the REST API endpoints described in the following sections.
{% ifversion ghec %}
### Switching to a unique token URL
By default, the JWT is issued by {% data variables.product.prodname_dotcom %}'s OIDC provider at `https://token.actions.githubusercontent.com`. This path is presented to your cloud provider using the `iss` value in the JWT.
Enterprise admins can security harden their OIDC configuration by configuring their enterprise to receive tokens from a unique URL at `https://api.github.com/enterprises/<enterpriseSlug>/actions/oidc/customization/issuer`. Replace `<enterpriseSlug>` with the slug value of your enterprise.
This configuration means that your enterprise will receive the OIDC token from a unique URL, and you can then configure your cloud provider to only accept tokens from that URL. This helps ensure that only the enterprise's repositories can access your cloud resources using OIDC.
To activate this setting for your enterprise, an enterprise admin must use the `/enterprises/{enterprise}/actions/oidc/customization/issuer` endpoint and specify `"include_enterprise_slug": true` in the request body. For more information, see "[Set the {% data variables.product.prodname_actions %} OIDC custom issuer policy for an enterprise](/rest/actions/oidc#set-the-github-actions-oidc-custom-issuer-policy-for-an-enterprise)."
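As a hedged sketch, that API call might look like the following (the enterprise slug and token are placeholders, and the request requires appropriate enterprise admin permissions):
```sh
# Enable the unique, enterprise-specific issuer URL.
curl -X PUT \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer <YOUR-TOKEN>" \
  https://api.github.com/enterprises/octocat-inc/actions/oidc/customization/issuer \
  -d '{"include_enterprise_slug": true}'
```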
After this setting is applied, the JWT will contain the updated `iss` value. In the following example, the `iss` key uses `octocat-inc` as its `enterpriseSlug` value:
```json
{
"jti": "6f4762ed-0758-4ccb-808d-ee3af5d723a8"
"sub": "repo:octocat-inc/private-server:ref:refs/heads/main"
"aud": "http://octocat-inc.example/octocat-inc"
"enterprise": "octocat-inc"
"iss": "https://api.github.com/enterprises/octocat-inc/actions/oidc/customization/issuer",
"bf": 1755350653,
"exp": 1755351553,
"iat": 1755351253
}
```
{% endif %}
### Customizing the subject claims for an organization
To configure organization-wide security, compliance, and standardization, you can customize the standard claims to suit your required access conditions. If your cloud provider supports conditions on subject claims, you can create a condition that checks whether the `sub` value matches the path of the reusable workflow, such as `"job_workflow_ref: "octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main""`. The exact format will vary depending on your cloud provider's OIDC configuration. To configure the matching condition on {% data variables.product.prodname_dotcom %}, you can use the REST API to require that the `sub` claim must always include a specific custom claim, such as `job_workflow_ref`. For more information, see "[Set the customization template for an OIDC subject claim for an organization](/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization)."
Customizing the claims results in a new format for the entire `sub` claim, which replaces the default predefined `sub` format in the token described in "[Example subject claims](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims)."
The following example templates demonstrate various ways to customize the subject claim. To configure these settings on {% data variables.product.prodname_dotcom %}, organization admins use the REST API to specify a list of claims that must be included in the subject (`sub`) claim. {% data reusables.actions.use-request-body-api %}
To customize your subject claims, you should first create a matching condition in your cloud provider's OIDC configuration, before customizing the configuration using the REST API. Once the configuration is completed, each time a new job runs, the OIDC token generated during that job will follow the new customization template. If the matching condition doesn't exist in the cloud provider's OIDC configuration before the job runs, the generated token might not be accepted by the cloud provider, since the cloud conditions may not be synchronized.
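Once the matching condition exists on the cloud side, applying a template to an organization might look like the following hedged sketch (the organization name, token, and claim keys are illustrative):
```sh
# Require the listed claim keys in the sub claim for workflows in octo-org.
curl -X PUT \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer <YOUR-TOKEN>" \
  https://api.github.com/orgs/octo-org/actions/oidc/customization/sub \
  -d '{"include_claim_keys": ["repository_owner", "repository_visibility"]}'
```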
{% note %}
**Note**: When the organization template is applied, it will not affect any existing repositories that already use OIDC. For existing repositories, as well as any new repositories that are created after the template has been applied, the repository owner will need to opt-in to receive this configuration. For more information, see "[Set the opt-in flag of an OIDC subject claim customization for a repository](/rest/actions/oidc#set-the-opt-in-flag-of-an-oidc-subject-claim-customization-for-a-repository)."
{% endnote %}
#### Example: Allowing repository based on visibility and owner
This example template allows the `sub` claim to have a new format, using `repository_owner` and `repository_visibility`:
```json
{
"include_claim_keys": [
"repository_owner",
"repository_visibility"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include specific values for `repository_owner` and `repository_visibility`. For example: `"repository_owner: "monalisa":repository_visibility:private"`. This approach lets you restrict cloud role access to only private repositories within an organization or enterprise.
#### Example: Allowing access to all repositories with a specific owner
This example template enables the `sub` claim to have a new format with only the value of `repository_owner`. {% data reusables.actions.use-request-body-api %}
```json
{
"include_claim_keys": [
"repository_owner"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include a specific value for `repository_owner`. For example: `"repository_owner: "monalisa""`
#### Example: Requiring a reusable workflow
This example template allows the `sub` claim to have a new format that contains the value of the `job_workflow_ref` claim. This enables an enterprise to use [reusable workflows](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims) to enforce consistent deployments across its organizations and repositories.
{% data reusables.actions.use-request-body-api %}
```json
{
"include_claim_keys": [
"job_workflow_ref"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include a specific value for `job_workflow_ref`. For example: `"job_workflow_ref: "octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main""`.
#### Example: Requiring a reusable workflow and other claims
The following example template combines the requirement of a specific reusable workflow with additional claims. {% data reusables.actions.use-request-body-api %}
This example also demonstrates how to use `"context"` to define your conditions. This is the part that follows the repository in the [default `sub` format](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims). For example, when the job references an environment, the context contains: `environment:<environmentName>`.
```json
{
"include_claim_keys": [
"repo",
"context",
"job_workflow_ref"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include specific values for `repo`, `context`, and `job_workflow_ref`.
This customization template requires that the `sub` uses the following format: `repo:<orgName/repoName>:environment:<environmentName>:job_workflow_ref:<reusableWorkflowPath>`.
For example: `"sub": "repo:octo-org/octo-repo:environment:prod:job_workflow_ref:octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main"`
#### Example: Granting access to a specific repository
This example template lets you grant cloud access to all the workflows in a specific repository, across all branches/tags and environments. To help improve security, combine this template with the custom issuer URL described in "[Customizing the token URL for an enterprise](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-url-for-an-enterprise)."
{% data reusables.actions.use-request-body-api %}
```json
{
"include_claim_keys": [
"repo"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require a `repo` claim that matches the required value.
#### Example: Using system-generated GUIDs
This example template enables predictable OIDC claims with system-generated GUIDs that do not change between renames of entities (such as renaming a repository). {% data reusables.actions.use-request-body-api %}
```json
{
"include_claim_keys": [
"repository_id"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require a `repository_id` claim that matches the required value.
or:
```json
{
"include_claim_keys": [
"repository_owner_id"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require a `repository_owner_id` claim that matches the required value.
#### Resetting your customizations
This example template resets the subject claims to the default format. {% data reusables.actions.use-request-body-api %} This template effectively opts out of any organization-level customization policy.
```json
{
"include_claim_keys": [
"repo",
"context"
]
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include specific values for `repo` and `context`.
#### Using the default subject claims
For repositories that can receive a subject claim policy from their organization, the repository owner can later choose to opt-out and instead use the default `sub` claim format. To configure this, the repository admin must use the REST API endpoint at "[Set the opt-out flag of an OIDC subject claim customization for a repository](/rest/actions/oidc#set-the-opt-out-flag-of-an-oidc-subject-claim-customization-for-a-repository)" with the following request body:
```json
{
"use_default": true
}
```
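For example, a repository admin might send this request as follows (a hedged sketch; the owner, repository, and token are placeholders):
```sh
# Opt the repository back in to the default subject claim format.
curl -X PUT \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer <YOUR-TOKEN>" \
  https://api.github.com/repos/octo-org/octo-repo/actions/oidc/customization/sub \
  -d '{"use_default": true}'
```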
{% endif %}
## Updating your workflows for OIDC
You can now update your YAML workflows to use OIDC access tokens instead of secrets. Popular cloud providers have published their official login actions that make it easy for you to get started with OIDC. For more information about updating your workflows, see the cloud-specific guides listed below in "[Enabling OpenID Connect for your cloud provider](#enabling-openid-connect-for-your-cloud-provider)."

View File

@@ -32,10 +32,53 @@ This guide gives an overview of how to configure HashiCorp Vault to trust {% dat
To use OIDC with HashiCorp Vault, you will need to add a trust configuration for the {% data variables.product.prodname_dotcom %} OIDC provider. For more information, see the HashiCorp Vault [documentation](https://www.vaultproject.io/docs/auth/jwt).
Configure the vault to accept JSON Web Tokens (JWT) for authentication:
- For the `oidc_discovery_url`, use {% ifversion ghes %}`https://HOSTNAME/_services/token`{% else %}`https://token.actions.githubusercontent.com`{% endif %}
- For `bound_issuer`, use {% ifversion ghes %}`https://HOSTNAME/_services/token`{% else %}`https://token.actions.githubusercontent.com`{% endif %}
- Ensure that `bound_subject` is correctly defined for your security requirements. For more information, see ["Configuring the OIDC trust with the cloud"](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#configuring-the-oidc-trust-with-the-cloud) and [`hashicorp/vault-action`](https://github.com/hashicorp/vault-action).
To configure your Vault server to accept JSON Web Tokens (JWT) for authentication:
1. Enable the JWT `auth` method, and use `write` to apply the configuration to your Vault.
For the `oidc_discovery_url` and `bound_issuer` parameters, use {% ifversion ghes %}`https://HOSTNAME/_services/token`{% else %}`https://token.actions.githubusercontent.com`{% endif %}. These parameters allow the Vault server to verify the received JSON Web Tokens (JWT) during the authentication process.
```sh{:copy}
vault auth enable jwt
```
```sh{:copy}
vault write auth/jwt/config \
bound_issuer="{% ifversion ghes %}https://HOSTNAME/_services/token{% else %}https://token.actions.githubusercontent.com{% endif %}" \
oidc_discovery_url="{% ifversion ghes %}https://HOSTNAME/_services/token{% else %}https://token.actions.githubusercontent.com{% endif %}"
```
2. Configure a policy that only grants access to the specific paths your workflows will use to retrieve secrets. For more advanced policies, see the HashiCorp Vault [Policies documentation](https://www.vaultproject.io/docs/concepts/policies).
```sh{:copy}
vault policy write myproject-production - <<EOF
# Read-only permission on 'secret/data/production/*' path
path "secret/data/production/*" {
capabilities = [ "read" ]
}
EOF
```
3. Configure roles to group different policies together. If the authentication is successful, these policies are attached to the resulting Vault access token.
```sh{:copy}
vault write auth/jwt/role/myproject-production -<<EOF
{
"role_type": "jwt",
"user_claim": "actor",
"bound_claims": {
"repository": "user-or-org-name/repo-name"
},
"policies": ["myproject-production"],
"ttl": "10m"
}
EOF
```
- `ttl` defines the validity of the resulting access token.
- Ensure that the `bound_claims` parameter is defined for your security requirements, and has at least one condition. Optionally, you can also set the `bound_subject` and `bound_audiences` parameters.
- To check arbitrary claims in the received JWT payload, the `bound_claims` parameter contains a set of claims and their required values. In the above example, the role will accept any incoming authentication requests from the `repo-name` repository owned by the `user-or-org-name` account.
- To see all the available claims supported by {% data variables.product.prodname_dotcom %}'s OIDC provider, see ["Configuring the OIDC trust with the cloud"](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#configuring-the-oidc-trust-with-the-cloud).
For more information, see the HashiCorp Vault [documentation](https://www.vaultproject.io/docs/auth/jwt).
## Updating your {% data variables.product.prodname_actions %} workflow
@@ -49,39 +92,96 @@ To add OIDC integration to your workflows that allow them to access secrets in V
- Grant permission to fetch the token from the {% data variables.product.prodname_dotcom %} OIDC provider:
- The workflow needs `permissions:` settings with the `id-token` value set to `write`. This lets you fetch the OIDC token from every job in the workflow.
- Request the JWT from the {% data variables.product.prodname_dotcom %} OIDC provider, and present it to HashiCorp Vault to receive an access token:
- You could use the [Actions toolkit](https://github.com/actions/toolkit/) to fetch the tokens for your job, or you can use the [`hashicorp/vault-action`](https://github.com/hashicorp/vault-action) action to fetch the JWT and receive the access token from the Vault.
- You can use the [`hashicorp/vault-action`](https://github.com/hashicorp/vault-action) action to fetch the JWT and receive the access token from Vault, or you could use the [Actions toolkit](https://github.com/actions/toolkit/) to fetch the tokens for your job.
This example demonstrates how to use OIDC with the official action to request a secret from HashiCorp Vault.
### Adding permissions settings
 {% data reusables.actions.oidc-permissions-token %}
{% data reusables.actions.oidc-permissions-token %}
{% note %}
**Note**:
When the `permissions` key is used, all unspecified permissions are set to _no access_, with the exception of the metadata scope, which always gets _read_ access. As a result, you may need to add other permissions, such as `contents: read`. See [Automatic token authentication](/actions/security-guides/automatic-token-authentication) for more information.
{% endnote %}
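As a minimal sketch, a job that requests the OIDC token and also reads repository contents would declare the following (the exact permissions you need depend on your workflow):
```yaml
permissions:
  id-token: write # Required to request the OIDC JWT
  contents: read  # Required by actions that read the repository, such as checkout
```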
### Requesting the access token
The `hashicorp/vault-action` action receives a JWT from the {% data variables.product.prodname_dotcom %} OIDC provider, and then requests an access token from your HashiCorp Vault instance to retrieve secrets. For more information, see the HashiCorp Vault [documentation](https://github.com/hashicorp/vault-action).
The `hashicorp/vault-action` action receives a JWT from the {% data variables.product.prodname_dotcom %} OIDC provider, and then requests an access token from your HashiCorp Vault instance to retrieve secrets. For more information, see the HashiCorp Vault GitHub Action [documentation](https://github.com/hashicorp/vault-action).
This example demonstrates how to create a job that requests a secret from HashiCorp Vault.
- `<Vault URL>`: Replace this with the URL of your HashiCorp Vault.
- `<Vault Namespace>`: Replace this with the Namespace you've set in HashiCorp Vault. For example: `admin`.
- `<Role name>`: Replace this with the role you've set in the HashiCorp Vault trust relationship.
- `<Audience>`: Replace this with the audience you've defined in the HashiCorp Vault trust relationship.
- `<Secret-Path>`: Replace this with the path to the secret you're retrieving from HashiCorp Vault. For example: `secret/data/ci npmToken`.
- `<Secret-Path>`: Replace this with the path to the secret you're retrieving from HashiCorp Vault. For example: `secret/data/production/ci npmToken`.
```yaml{:copy}
jobs:
retrieve-secret:
steps:
- name: Retrieve secret from Vault
uses: hashicorp/vault-action@v2.4.0
with:
url: <Vault URL>
role: <Role name>
method: jwt
jwtGithubAudience: <Audience>
secrets: <Secret-Path>
retrieve-secret:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
steps:
- name: Retrieve secret from Vault
uses: hashicorp/vault-action@v2.4.0
with:
method: jwt
url: <Vault URL>
namespace: <Vault Namespace - HCP Vault and Vault Enterprise only>
role: <Role name>
secrets: <Secret-Path>
- name: Use secret from Vault
run: |
# This step has access to the secret retrieved above; see hashicorp/vault-action for more details.
- name: Use secret from Vault
run: |
# This step has access to the secret retrieved above; see hashicorp/vault-action for more details.
```
{% note %}
**Note**:
- If your Vault server is not accessible from the public network, consider using a self-hosted runner with other available Vault [auth methods](https://www.vaultproject.io/docs/auth). For more information, see "[About self-hosted runners](/actions/hosting-your-own-runners/about-self-hosted-runners)."
- `<Vault Namespace>` must be set for a Vault Enterprise (including HCP Vault) deployment. For more information, see [Vault namespace](https://www.vaultproject.io/docs/enterprise/namespaces).
{% endnote %}
### Revoking the access token
By default, the Vault server will automatically revoke access tokens when their TTL expires, so you don't have to revoke them manually. However, if you want to revoke access tokens immediately after your job has completed or failed, you can manually revoke the issued token using the [Vault API](https://www.vaultproject.io/api/auth/token#revoke-a-token-self).
1. Set the `exportToken` option to `true` (default: `false`). This exports the issued Vault access token as an environment variable: `VAULT_TOKEN`.
2. Add a step to call the [Revoke a Token (Self)](https://www.vaultproject.io/api/auth/token#revoke-a-token-self) Vault API to revoke the access token.
```yaml{:copy}
jobs:
retrieve-secret:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
steps:
- name: Retrieve secret from Vault
uses: hashicorp/vault-action@v2.4.0
with:
exportToken: true
method: jwt
url: <Vault URL>
role: <Role name>
secrets: <Secret-Path>
- name: Use secret from Vault
run: |
# This step has access to the secret retrieved above; see hashicorp/vault-action for more details.
- name: Revoke token
# This step always runs at the end, regardless of the result of the previous steps
if: always()
run: |
curl -X POST -sv -H "X-Vault-Token: {% raw %}${{ env.VAULT_TOKEN }}{% endraw %}" \
<Vault URL>/v1/auth/token/revoke-self
```

View File

@@ -23,11 +23,18 @@ topics:
Rather than copying and pasting deployment jobs from one workflow to another, you can create a reusable workflow that performs the deployment steps. A reusable workflow can be used by another workflow if it meets one of the access requirements described in "[Reusing workflows](/actions/learn-github-actions/reusing-workflows#access-to-reusable-workflows)."
When combined with OpenID Connect (OIDC), reusable workflows let you enforce consistent deployments across your repository, organization, or enterprise. You can do this by defining trust conditions on cloud roles based on reusable workflows.
You should be familiar with the concepts described in "[Reusing workflows](/actions/learn-github-actions/reusing-workflows)" and "[About security hardening with OpenID Connect](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect)."
In order to create trust conditions based on reusable workflows, your cloud provider must support custom claims for `job_workflow_ref`. This allows your cloud provider to identify which repository the job originally came from. If your cloud provider only supports the standard claims (_audience_ and _subject_), it will not be able to determine that the job originated from the reusable workflow repository. Cloud providers that support `job_workflow_ref` include Google Cloud Platform and HashiCorp Vault.
## Defining the trust conditions
Before proceeding, you should be familiar with the concepts of [reusable workflows](/actions/learn-github-actions/reusing-workflows) and [OpenID Connect](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect).
When combined with OpenID Connect (OIDC), reusable workflows let you enforce consistent deployments across your repository, organization, or enterprise. You can do this by defining trust conditions on cloud roles based on reusable workflows. The available options will vary depending on your cloud provider:
- **Using `job_workflow_ref`**:
- To create trust conditions based on reusable workflows, your cloud provider must support custom claims for `job_workflow_ref`. This allows your cloud provider to identify which repository the job originally came from.
  - For clouds that only support the standard claims (audience (`aud`) and subject (`sub`)), you can use the API to customize the `sub` claim to include `job_workflow_ref`. For more information, see "[Customizing the token claims](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-claims)". Support for custom claims is currently available for Google Cloud Platform and HashiCorp Vault. A hedged Vault example follows this list.
- **Customizing the token claims**:
- You can configure more granular trust conditions by customizing the issuer (`iss`) and subject (`sub`) claims included with the JWT. For more information, see "[Customizing the token claims](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-claims)".
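As an illustration of the `job_workflow_ref` option, a HashiCorp Vault role might bind that claim as follows (a hedged sketch: the role name, repository path, policy, and TTL are illustrative, not prescribed by this article):
```sh
# Only JWTs whose job_workflow_ref matches this reusable workflow are accepted.
vault write auth/jwt/role/enforced-deployment -<<EOF
{
  "role_type": "jwt",
  "user_claim": "actor",
  "bound_claims": {
    "job_workflow_ref": "octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main"
  },
  "policies": ["example-policy"],
  "ttl": "10m"
}
EOF
```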
## How the token works with reusable workflows

View File

@@ -18,15 +18,24 @@ shortTitle: Run runner app on startup
{% capture service_first_step %}1. Stop the self-hosted runner application if it is currently running.{% endcapture %}
{% capture service_non_windows_intro_shell %}On the runner machine, open a shell in the directory where you installed the self-hosted runner application. Use the commands below to install and manage the self-hosted runner service.{% endcapture %}
{% capture service_nonwindows_intro %}You must add a runner to {% data variables.product.product_name %} before you can configure the self-hosted runner application as a service. For more information, see "[Adding self-hosted runners](/github/automating-your-workflow-with-github-actions/adding-self-hosted-runners)."{% endcapture %}
{% capture service_win_name %}actions.runner.*{% endcapture %}
{% capture service_nonwindows_intro %}
{% note %}
**Note:** You must add a runner to {% data variables.product.product_name %} before you can configure the self-hosted runner application as a service.
For more information, see "[Adding self-hosted runners](/github/automating-your-workflow-with-github-actions/adding-self-hosted-runners)."
{% endnote %}
{% endcapture %}
{% capture service_win_name %}actions.runner.*{% endcapture %}
{% linux %}
{{ service_nonwindows_intro }}
For Linux systems that use `systemd`, you can use the `svc.sh` script distributed with the self-hosted runner application to install and manage using the application as a service.
For Linux systems that use `systemd`, you can use the `svc.sh` script that is created after successfully adding the runner to install and manage the application as a service.
{{ service_non_windows_intro_shell }}
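A hedged sketch of typical `svc.sh` usage on Linux (run from the directory where the runner is installed):
```sh
sudo ./svc.sh install   # Install the runner as a systemd service
sudo ./svc.sh start     # Start the service
sudo ./svc.sh status    # Check the service status
```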

View File

@@ -38,6 +38,8 @@ You can access contexts using the expression syntax. For more information, see "
| `github` | `object` | Information about the workflow run. For more information, see [`github` context](#github-context). |
| `env` | `object` | Contains environment variables set in a workflow, job, or step. For more information, see [`env` context](#env-context). |
| `job` | `object` | Information about the currently running job. For more information, see [`job` context](#job-context). |
{%- ifversion fpt or ghes > 3.3 or ghae-issue-4757 or ghec %}
| `jobs` | `object` | For reusable workflows only, contains outputs of jobs from the reusable workflow. For more information, see [`jobs` context](#jobs-context). |{% endif %}
| `steps` | `object` | Information about the steps that have been run in the current job. For more information, see [`steps` context](#steps-context). |
| `runner` | `object` | Information about the runner that is running the current job. For more information, see [`runner` context](#runner-context). |
| `secrets` | `object` | Contains the names and values of secrets that are available to a workflow run. For more information, see [`secrets` context](#secrets-context). |
@@ -87,7 +89,7 @@ The following table indicates where each context and special function can be use
| <code>jobs.&lt;job_id&gt;.name</code> | <code>github, needs, strategy, matrix, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.outputs.&lt;output_id&gt;</code> | <code>github, needs, strategy, matrix, job, runner, env, secrets, steps, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.runs-on</code> | <code>github, needs, strategy, matrix, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.secrets.&lt;secrets_id&gt;</code> | <code>github, needs, secrets{% ifversion actions-unified-inputs %}, inputs{% endif %}</code> | |
| <code>jobs.&lt;job_id&gt;.secrets.&lt;secrets_id&gt;</code> | <code>github, needs,{% ifversion actions-reusable-workflow-matrix %} strategy, matrix,{% endif %} secrets{% ifversion actions-unified-inputs %}, inputs{% endif %}</code> | |
| <code>jobs.&lt;job_id&gt;.services</code> | <code>github, needs, strategy, matrix, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.services.&lt;service_id&gt;.credentials</code> | <code>github, needs, strategy, matrix, env, secrets, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.services.&lt;service_id&gt;.env.&lt;env_id&gt;</code> | <code>github, needs, strategy, matrix, job, runner, env, secrets, inputs</code> | |
@@ -101,7 +103,7 @@ The following table indicates where each context and special function can be use
| <code>jobs.&lt;job_id&gt;.steps.working-directory</code> | <code>github, needs, strategy, matrix, job, runner, env, secrets, steps, inputs</code> | <code>hashFiles</code> |
| <code>jobs.&lt;job_id&gt;.strategy</code> | <code>github, needs, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.timeout-minutes</code> | <code>github, needs, strategy, matrix, inputs</code> | |
| <code>jobs.&lt;job_id&gt;.with.&lt;with_id&gt;</code> | <code>github, needs{% ifversion actions-unified-inputs %}, inputs{% endif %}</code> | |
| <code>jobs.&lt;job_id&gt;.with.&lt;with_id&gt;</code> | <code>github, needs{% ifversion actions-reusable-workflow-matrix %}, strategy, matrix{% endif %}{% ifversion actions-unified-inputs %}, inputs{% endif %}</code> | |
| <code>on.workflow_call.inputs.&lt;inputs_id&gt;.default</code> | <code>github{% ifversion actions-unified-inputs %}, inputs{% endif %}</code> | |
| <code>on.workflow_call.outputs.&lt;output_id&gt;.value</code> | <code>github, jobs, inputs</code> | |
{% else %}
@@ -404,6 +406,72 @@ jobs:
- run: ./run-tests
```
{% ifversion fpt or ghes > 3.3 or ghae-issue-4757 or ghec %}
## `jobs` context
The `jobs` context is only available in reusable workflows, and can only be used to set outputs for a reusable workflow. For more information, see "[Reusing workflows](/actions/using-workflows/reusing-workflows#using-outputs-from-a-reusable-workflow)."
| Property name | Type | Description |
|---------------|------|-------------|
| `jobs` | `object` | This is only available in reusable workflows, and can only be used to set outputs for a reusable workflow. This object contains all the properties listed below. |
| `jobs.<job_id>.result` | `string` | The result of a job in the reusable workflow. Possible values are `success`, `failure`, `cancelled`, or `skipped`. |
| `jobs.<job_id>.outputs` | `object` | The set of outputs of a job in a reusable workflow. |
| `jobs.<job_id>.outputs.<output_name>` | `string` | The value of a specific output for a job in a reusable workflow. |
### Example contents of the `jobs` context
This example `jobs` context contains the result and outputs of a job from a reusable workflow run.
```json
{
"example_job": {
"result": "success",
"outputs": {
"output1": "hello",
"output2": "world"
}
}
}
```
### Example usage of the `jobs` context
This example reusable workflow uses the `jobs` context to set outputs for the reusable workflow. Note how the outputs flow up from the steps, to the job, then to the `workflow_call` trigger. For more information, see "[Reusing workflows](/actions/using-workflows/reusing-workflows#using-outputs-from-a-reusable-workflow)."
{% raw %}
```yaml{:copy}
name: Reusable workflow
on:
workflow_call:
# Map the workflow outputs to job outputs
outputs:
firstword:
description: "The first output string"
value: ${{ jobs.example_job.outputs.output1 }}
secondword:
description: "The second output string"
value: ${{ jobs.example_job.outputs.output2 }}
jobs:
example_job:
name: Generate output
runs-on: ubuntu-latest
# Map the job outputs to step outputs
outputs:
output1: ${{ steps.step1.outputs.firstword }}
output2: ${{ steps.step2.outputs.secondword }}
steps:
- id: step1
run: echo "::set-output name=firstword::hello"
- id: step2
run: echo "::set-output name=secondword::world"
```
{% endraw %}
{% endif %}
## `steps` context
The `steps` context contains information about the steps in the current job that have an [`id`](/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepsid) specified and have already run.

View File

@@ -93,7 +93,7 @@ For more information, see "[Creating actions](/actions/creating-actions)."
- To continue learning about {% data variables.product.prodname_actions %}, see "[Finding and customizing actions](/actions/learn-github-actions/finding-and-customizing-actions)."
{% ifversion fpt or ghec or ghes %}
- To understand how billing works for {% data variables.product.prodname_actions %}, see "[About billing for {% data variables.product.prodname_actions %}](/actions/reference/usage-limits-billing-and-administration#about-billing-for-github-actions)".
- To understand how billing works for {% data variables.product.prodname_actions %}, see "[About billing for {% data variables.product.prodname_actions %}](/actions/reference/usage-limits-billing-and-administration#about-billing-for-github-actions)."
{% endif %}
## Contacting support

View File

@@ -98,9 +98,9 @@ Each time you create a new release, you can trigger a workflow to publish your p
### Configuring the destination repository
If you don't provide the `repository` key in your *package.json* file, then {% data variables.product.prodname_registry %} publishes a package in the {% data variables.product.prodname_dotcom %} repository you specify in the `name` field of the *package.json* file. For example, a package named `@my-org/test` is published to the `my-org/test` {% data variables.product.prodname_dotcom %} repository.
Linking your package to {% data variables.product.prodname_registry %} using the `repository` key is optional. If you choose not to provide the `repository` key in your *package.json* file, then {% data variables.product.prodname_registry %} publishes a package in the {% data variables.product.prodname_dotcom %} repository you specify in the `name` field of the *package.json* file. For example, a package named `@my-org/test` is published to the `my-org/test` {% data variables.product.prodname_dotcom %} repository. If the `url` specified in the `repository` key is invalid, your package may still be published; however, it won't be linked to the repository source as intended.
However, if you do provide the `repository` key, then the repository in that key is used as the destination npm registry for {% data variables.product.prodname_registry %}. For example, publishing the below *package.json* results in a package named `my-amazing-package` published to the `octocat/my-other-repo` {% data variables.product.prodname_dotcom %} repository.
If you do provide the `repository` key in your *package.json* file, then the repository in that key is used as the destination npm registry for {% data variables.product.prodname_registry %}. For example, publishing the below *package.json* results in a package named `my-amazing-package` published to the `octocat/my-other-repo` {% data variables.product.prodname_dotcom %} repository. Once published, only the repository source is updated, and the package doesn't inherit any permissions from the destination repository.
```json
{
  "name": "@octocat/my-amazing-package",
  "repository": {
    "type": "git",
    "url": "https://github.com/octocat/my-other-repo.git"
  }
}
```
View File

@@ -51,7 +51,7 @@ For more information on workflow run artifacts, see "[Persisting workflow data u
A workflow can access and restore a cache created in the current branch, the base branch (including base branches of forked repositories), or the default branch (usually `main`). For example, a cache created on the default branch would be accessible from any pull request. Also, if the branch `feature-b` has the base branch `feature-a`, a workflow triggered on `feature-b` would have access to caches created in the default branch (`main`), `feature-a`, and `feature-b`.
Access restrictions provide cache isolation and security by creating a logical boundary between different branches. For example, a cache created for the branch `feature-a` (with the base `main`) would not be accessible to a pull request for the branch `feature-c` (with the base `main`).
Access restrictions provide cache isolation and security by creating a logical boundary between different branches or tags. For example, a cache created for the branch `feature-a` (with the base `main`) would not be accessible to a pull request for the branch `feature-c` (with the base `main`). Similarly, a cache created for the tag `release-a` (from the base `main`) would not be accessible to a workflow triggered for the tag `release-b` (with the base `main`).
Multiple workflows within a repository share cache entries. A cache created for a branch within a workflow can be accessed and restored from another workflow for the same repository and branch.
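As a brief, hedged sketch of a cache step that follows these scoping rules (the path and key are assumptions for a Node.js project, not taken from this article):
```yaml
steps:
  - uses: actions/cache@v3
    with:
      path: ~/.npm
      # The key is evaluated per run; restores follow the branch access rules above.
      key: {% raw %}${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}{% endraw %}
```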

View File

@@ -812,6 +812,8 @@ Runs your workflow when activity on a pull request in the workflow's repository
This event runs in the context of the base of the pull request, rather than in the context of the merge commit, as the `pull_request` event does. This prevents execution of unsafe code from the head of the pull request that could alter your repository or steal any secrets you use in your workflow. This event allows your workflow to do things like label or comment on pull requests from forks. Avoid using this event if you need to build or run code from the pull request.
To ensure repository security, branches with names that match certain patterns (such as those which look similar to SHAs) may not trigger workflows with the `pull_request_target` event.
{% warning %}
**Warning:** For workflows that are triggered by the `pull_request_target` event, the `GITHUB_TOKEN` is granted read/write repository permission unless the `permissions` key is specified, and the workflow can access secrets even when it is triggered from a fork. Although the workflow runs in the context of the base of the pull request, you should make sure that you do not check out, build, or run untrusted code from the pull request with this event. Additionally, any caches share the same scope as the base branch. To help prevent cache poisoning, you should not save the cache if there is a possibility that the cache contents were altered. For more information, see "[Keeping your GitHub Actions and workflows secure: Preventing pwn requests](https://securitylab.github.com/research/github-actions-preventing-pwn-requests)" on the GitHub Security Lab website.
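A hedged mitigation sketch (the event configuration and job body are illustrative): explicitly restrict the token's permissions when using this event, and keep the job from executing code from the pull request head.
```yaml
on: pull_request_target

permissions:
  contents: read
  pull-requests: write # For example, to label or comment on the pull request

jobs:
  triage:
    runs-on: ubuntu-latest
    steps:
      - run: echo "Label or comment here; do not check out or run PR head code."
```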

View File

@@ -39,7 +39,7 @@ You can view the reused workflows referenced in your {% data variables.product.p
### Reusable workflows and starter workflows
Starter workflows allow everyone in your organization who has permission to create workflows to do so more quickly and easily. When people create a new workflow, they can choose a starter workflow and some or all of the work of writing the workflow will be done for them. Within a starter workflow, you can also reference reusable workflows to make it easy for people to benefit from reusing centrally managed workflow code. If you use a tag or branch name when referencing the reusable workflow, you can ensure that everyone who reuses that workflow will always be using the same YAML code. However, if you reference a reusable workflow by a tag or branch, be sure that you can trust that version of the workflow. For more information, see "[Security hardening for {% data variables.product.prodname_actions %}](/actions/security-guides/security-hardening-for-github-actions#reusing-third-party-workflows)."
Starter workflows allow everyone in your organization who has permission to create workflows to do so more quickly and easily. When people create a new workflow, they can choose a starter workflow and some or all of the work of writing the workflow will be done for them. Within a starter workflow, you can also reference reusable workflows to make it easy for people to benefit from reusing centrally managed workflow code. If you use a commit SHA when referencing the reusable workflow, you can ensure that everyone who reuses that workflow will always be using the same YAML code. However, if you reference a reusable workflow by a tag or branch, be sure that you can trust that version of the workflow. For more information, see "[Security hardening for {% data variables.product.prodname_actions %}](/actions/security-guides/security-hardening-for-github-actions#reusing-third-party-workflows)."
For more information, see "[Creating starter workflows for your organization](/actions/learn-github-actions/creating-starter-workflows-for-your-organization)."
@@ -69,10 +69,14 @@ Called workflows that are owned by the same user or organization{% ifversion ghe
## Limitations
{% ifversion nested-reusable-workflow %}
* You can connect up to four levels of workflows. For more information, see "[Calling a nested reusable workflow](#calling-a-nested-reusable-workflow)."
{% else %}
* Reusable workflows can't call other reusable workflows.
{% endif %}
* Reusable workflows stored within a private repository can only be used by workflows within the same repository.
* Any environment variables set in an `env` context defined at the workflow level in the caller workflow are not propagated to the called workflow. For more information about the `env` context, see "[Context and expression syntax for GitHub Actions](/actions/reference/context-and-expression-syntax-for-github-actions#env-context)."
* The `strategy` property is not supported in any job that calls a reusable workflow.
* Any environment variables set in an `env` context defined at the workflow level in the caller workflow are not propagated to the called workflow. For more information about the `env` context, see "[Context and expression syntax for GitHub Actions](/actions/reference/context-and-expression-syntax-for-github-actions#env-context)."{% ifversion actions-reusable-workflow-matrix %}{% else %}
* The `strategy` property is not supported in any job that calls a reusable workflow.{% endif %}
## Creating a reusable workflow
@@ -105,7 +109,13 @@ You can define inputs and secrets, which can be passed from the caller workflow
{% endraw %}
For details of the syntax for defining inputs and secrets, see [`on.workflow_call.inputs`](/actions/reference/workflow-syntax-for-github-actions#onworkflow_callinputs) and [`on.workflow_call.secrets`](/actions/reference/workflow-syntax-for-github-actions#onworkflow_callsecrets).
{% ifversion actions-inherit-secrets-reusable-workflows %}
1. In the reusable workflow, reference the input or secret that you defined in the `on` key in the previous step. If the secrets are inherited using `secrets: inherit`, you can reference them even if they are not defined in the `on` key.
1. In the reusable workflow, reference the input or secret that you defined in the `on` key in the previous step.
{% note %}
**Note**: If the secrets are inherited by using `secrets: inherit` in the calling workflow, you can reference them even if they are not explicitly defined in the `on` key. For more information, see "[Workflow syntax for GitHub Actions](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idsecretsinherit)."
{% endnote %}
{%- else %}
1. In the reusable workflow, reference the input or secret that you defined in the `on` key in the previous step.
{%- endif %}
@@ -164,7 +174,36 @@ jobs:
token: ${{ secrets.token }}
```
{% endraw %}
{% ifversion actions-reusable-workflow-matrix %}
## Using a matrix strategy with a reusable workflow
Jobs using the matrix strategy can call a reusable workflow.
A matrix strategy lets you use variables in a single job definition to automatically create multiple job runs that are based on the combinations of the variables. For example, you can use a matrix strategy to pass different inputs to a reusable workflow. For more information about matrices, see "[Using a matrix for your jobs](/actions/using-jobs/using-a-matrix-for-your-jobs)."
### Example matrix strategy with a reusable workflow
This workflow file references the matrix context by defining the variable `target` with the values `[dev, stage, prod]`. The workflow will run three jobs, one for each value in the variable. The workflow file also calls a reusable workflow by using the `uses` keyword.
{% raw %}
```yaml{:copy}
name: Reusable workflow with matrix strategy
on:
push:
jobs:
ReuseableMatrixJobForDeployment:
strategy:
matrix:
target: [dev, stage, prod]
uses: octocat/octo-repo/.github/workflows/deployment.yml@main
with:
target: ${{ matrix.target }}
```
{% endraw %}
{% endif %}
## Calling a reusable workflow
You call a reusable workflow by using the `uses` keyword. Unlike when you are using actions within a workflow, you call reusable workflows directly within a job, and not from within job steps.
@@ -234,9 +273,62 @@ jobs:
```
{% endraw %}
{% ifversion nested-reusable-workflow %}
## Nesting reusable workflows
You can connect a maximum of four levels of workflows - that is, the top-level caller workflow and up to three levels of reusable workflows. For example: _caller-workflow.yml_ → _called-workflow-1.yml_ → _called-workflow-2.yml_ → _called-workflow-3.yml_. Loops in the workflow tree are not permitted.
From within a reusable workflow you can call another reusable workflow.
{% raw %}
```yaml{:copy}
name: Reusable workflow
on:
workflow_call:
jobs:
call-another-reusable:
uses: octo-org/example-repo/.github/workflows/another-reusable.yml@v1
```
{% endraw %}
### Passing secrets to nested workflows
You can use `jobs.<job_id>.secrets` in a calling workflow to pass named secrets to a directly called workflow. Alternatively, you can use `jobs.<job_id>.secrets.inherit` to pass all of the calling workflow's secrets to a directly called workflow. For more information, see the section "[Passing inputs and secrets to a reusable workflow](/actions/using-workflows/reusing-workflows#passing-inputs-and-secrets-to-a-reusable-workflow)" above, and the reference article "[Workflow syntax for GitHub Actions](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idsecretsinherit)." Secrets are only passed to the directly called workflow, so in the workflow chain A > B > C, workflow C will only receive secrets from A if they have been passed from A to B, and then from B to C.
In the following example, workflow A passes all of its secrets to workflow B by using the `inherit` keyword, but workflow B only passes one secret to workflow C. None of the other secrets passed to workflow B are available to workflow C.
{% raw %}
```yaml
jobs:
workflowA-calls-workflowB:
uses: octo-org/example-repo/.github/workflows/B.yml@main
secrets: inherit # pass all secrets
```
```yaml
jobs:
workflowB-calls-workflowC:
uses: different-org/example-repo/.github/workflows/C.yml@main
secrets:
envPAT: ${{ secrets.envPAT }} # pass just this secret
```
{% endraw %}
### Access and permissions
A workflow that contains nested reusable workflows will fail if any of the nested workflows is inaccessible to the initial caller workflow. For more information, see "[Access to reusable workflows](/actions/using-workflows/reusing-workflows#access-to-reusable-workflows)."
`GITHUB_TOKEN` permissions can only be the same or more restrictive in nested workflows. For example, in the workflow chain A > B > C, if workflow A has `packages: read` token permission, then B and C cannot have `packages: write` permission. For more information, see "[Automatic token authentication](/actions/security-guides/automatic-token-authentication)."
{% endif %}
## Using outputs from a reusable workflow
A reusable workflow may generate data that you want to use in the caller workflow. To use these outputs, you must specify them as the outputs of the reusable workflow.
A reusable workflow may generate data that you want to use in the caller workflow. To use these outputs, you must specify them as the outputs of the reusable workflow.{% ifversion actions-reusable-workflow-matrix %}
If a reusable workflow that sets an output is executed with a matrix strategy, the output will be the one set by the last successfully completed reusable workflow of the matrix that actually sets a value.
That means if the last successfully completed reusable workflow sets an empty string for its output, and the second-to-last sets an actual value, the output will contain the value of the second-to-last reusable workflow.{% endif %}
The following reusable workflow has a single job containing two steps. In each of these steps we set a single word as the output: "hello" and "world." In the `outputs` section of the job, we map these step outputs to job outputs called: `output1` and `output2`. In the `on.workflow_call.outputs` section we then define two outputs for the workflow itself, one called `firstword` which we map to `output1`, and one called `secondword` which we map to `output2`.

View File

@@ -365,9 +365,9 @@ A name for your step to display on {% data variables.product.prodname_dotcom %}.
Selects an action to run as part of a step in your job. An action is a reusable unit of code. You can use an action defined in the same repository as the workflow, a public repository, or in a [published Docker container image](https://hub.docker.com/).
We strongly recommend that you include the version of the action you are using by specifying a Git ref, SHA, or Docker tag number. If you don't specify a version, it could break your workflows or cause unexpected behavior when the action owner publishes an update.
We strongly recommend that you include the version of the action you are using by specifying a Git ref, SHA, or Docker tag. If you don't specify a version, it could break your workflows or cause unexpected behavior when the action owner publishes an update. The sketch after the following list illustrates these options.
- Using the commit SHA of a released action version is the safest for stability and security.
- Using the specific major action version allows you to receive critical fixes and security patches while still maintaining compatibility. It also assures that your workflow should still work.
- If the action publishes major version tags, you should expect to receive critical fixes and security patches while still retaining compatibility. Note that this behavior is at the discretion of the action's author.
- Using the default branch of an action may be convenient, but if someone releases a new major version with a breaking change, your workflow could break.
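A hedged sketch of the pinning styles listed above (the action and SHA are illustrative; any real SHA should come from the action's release history):
```yaml
steps:
  # Pin to a full commit SHA: safest for stability and security
  - uses: actions/checkout@172239021f7ba04fe7327647b213799853a9eb89
  # Pin to a major version tag: receives compatible fixes and security patches
  - uses: actions/checkout@v3
  # Pin to the default branch: convenient, but may break without warning
  - uses: actions/checkout@main
```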
Some actions require inputs that you must set using the [`with`](#jobsjob_idstepswith) keyword. Review the action's README file to determine the inputs required.
@@ -1037,15 +1037,23 @@ You can use special characters in path, branch, and tag filters.
- `[]` Matches one character listed in the brackets or included in ranges. Ranges can only include `a-z`, `A-Z`, and `0-9`. For example, the range `[0-9a-z]` matches any digit or lowercase letter, `[CB]at` matches `Cat` or `Bat`, and `[1-2]00` matches `100` and `200`.
- `!`: At the start of a pattern, this character negates previous positive patterns. It has no special meaning if it is not the first character.
The characters `*`, `[`, and `!` are special characters in YAML. If you start a pattern with `*`, `[`, or `!`, you must enclose the pattern in quotes.
The characters `*`, `[`, and `!` are special characters in YAML. If you start a pattern with `*`, `[`, or `!`, you must enclose the pattern in quotes. Also, if you use a [flow sequence](https://yaml.org/spec/1.2.2/#flow-sequences) with a pattern containing `[` and/or `]`, the pattern must be enclosed in quotes.
```yaml
# Valid
- '**/README.md'
branches:
- '**/README.md'
# Invalid - creates a parse error that
# prevents your workflow from running.
- **/README.md
branches:
- **/README.md
# Valid
branches: [ main, 'release/v[0-9].[0-9]' ]
# Invalid - creates a parse error
branches: [ main, release/v[0-9].[0-9] ]
```
For more information about branch, tag, and path filter syntax, see "[`on.<push>.<branches|tags>`](#onpushbranchestagsbranches-ignoretags-ignore)", "[`on.<pull_request>.<branches|tags>`](#onpull_requestpull_request_targetbranchesbranches-ignore)", and "[`on.<push|pull_request>.paths`](#onpushpull_requestpull_request_targetpathspaths-ignore)."

View File

@@ -8,6 +8,7 @@ type: overview
topics:
- Enterprise
- GitHub Connect
miniTocMaxHeadingLevel: 3
---
## About {% data variables.product.prodname_github_connect %}
@@ -37,6 +38,16 @@ Unified contributions | Allow users to include anonymized contribution counts fo
## Data transmission for {% data variables.product.prodname_github_connect %}
When {% data variables.product.prodname_github_connect %} is enabled, a record on {% data variables.product.prodname_ghe_cloud %} stores information about the connection. If you enable individual features of {% data variables.product.prodname_github_connect %}, additional data is transmitted.
{% note %}
**Note:** No repositories, issues, or pull requests are ever transmitted from {% data variables.product.product_name %} to {% data variables.product.prodname_dotcom_the_website %} by {% data variables.product.prodname_github_connect %}.
{% endnote %}
### Data transmitted when {% data variables.product.prodname_github_connect %} is enabled
When you enable {% data variables.product.prodname_github_connect %} or specific {% data variables.product.prodname_github_connect %} features, a record on {% data variables.product.prodname_ghe_cloud %} stores the following information about the connection.
{% ifversion ghes %}
- The public key portion of your {% data variables.product.prodname_ghe_server %} license
@@ -54,11 +65,7 @@ When you enable {% data variables.product.prodname_github_connect %} or specific
{% data variables.product.prodname_github_connect %} syncs the above connection data between {% data variables.product.product_location %} and {% data variables.product.prodname_ghe_cloud %} weekly, from the day and approximate time that {% data variables.product.prodname_github_connect %} was enabled.
{% note %}
**Note:** No repositories, issues, or pull requests are ever transmitted from {% data variables.product.product_name %} to {% data variables.product.prodname_dotcom_the_website %} by {% data variables.product.prodname_github_connect %}.
{% endnote %}
### Data transmitted by individual features of {% data variables.product.prodname_github_connect %}
Additional data is transmitted if you enable individual features of {% data variables.product.prodname_github_connect %}.
@@ -68,7 +75,7 @@ Automatic user license sync | Each {% data variables.product.product_name %} use
{% data variables.product.prodname_dependabot_alerts %} | Vulnerability alerts | From {% data variables.product.prodname_dotcom_the_website %} to {% data variables.product.product_name %} | {% data variables.product.product_name %} |{% endif %}{% ifversion dependabot-updates-github-connect %}
{% data variables.product.prodname_dependabot_updates %} | Dependencies and the metadata for each dependency's repository<br><br>If a dependency is stored in a private repository on {% data variables.product.prodname_dotcom_the_website %}, data will only be transmitted if {% data variables.product.prodname_dependabot %} is configured and authorized to access that repository. | From {% data variables.product.prodname_dotcom_the_website %} to {% data variables.product.product_name %} | {% data variables.product.product_name %} {% endif %}
{% data variables.product.prodname_dotcom_the_website %} actions | Name of action, action (YAML file from {% data variables.product.prodname_marketplace %}) | From {% data variables.product.prodname_dotcom_the_website %} to {% data variables.product.product_name %}<br><br>From {% data variables.product.product_name %} to {% data variables.product.prodname_dotcom_the_website %} | {% data variables.product.product_name %}{% ifversion server-statistics %}
{% data variables.product.prodname_server_statistics %} | Aggregate metrics about your usage of {% data variables.product.prodname_ghe_server %}. For the complete list of metrics, see "[About {% data variables.product.prodname_server_statistics %}](/admin/monitoring-activity-in-your-enterprise/analyzing-how-your-team-works-with-server-statistics/about-server-statistics#server-statistics-data-collected)." | From {% data variables.product.product_name %} to {% data variables.product.prodname_ghe_cloud %} | {% data variables.product.prodname_ghe_cloud %}{% endif %}
Unified search | Search terms, search results | From {% data variables.product.prodname_dotcom_the_website %} to {% data variables.product.product_name %}<br><br>From {% data variables.product.product_name %} to {% data variables.product.prodname_dotcom_the_website %} | {% data variables.product.product_name %} |
Unified contributions | Contribution counts | From {% data variables.product.product_name %} to {% data variables.product.prodname_dotcom_the_website %} | {% data variables.product.prodname_dotcom_the_website %} |

View File

@@ -672,7 +672,7 @@ You can add the optional `--prune` argument to remove unreachable Git objects th
{% warning %}
**Warning**: Before using the `--prune` argument to remove unreachable Git objects, put {% data variables.product.product_location %} into maintenance mode, or ensure all repositories within the same repository network are locked. For more information, see "[Enabling and scheduling maintenance mode](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode)."
{% endwarning %}
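As a rough sketch of that workflow, assuming administrative SSH access to the instance (the hostname is a placeholder), you might wrap the pruning run in maintenance mode like this:

```shell
# Enable maintenance mode before pruning unreachable objects (HOSTNAME is a placeholder)
ssh -p 122 admin@HOSTNAME -- 'ghe-maintenance -s'

# ...run the repair command with --prune here...

# Disable maintenance mode once the prune completes
ssh -p 122 admin@HOSTNAME -- 'ghe-maintenance -u'
```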

View File

@@ -30,12 +30,15 @@ For instance-level restrictions using Azure NSGs, contact {% data variables.cont
## Adding an allowed IP address
{% data reusables.identity-and-permissions.about-adding-ip-allow-list-entries %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
{% data reusables.identity-and-permissions.ip-allow-lists-add-ip %}
{% data reusables.identity-and-permissions.ip-allow-lists-add-description %}
{% data reusables.identity-and-permissions.ip-allow-lists-add-entry %}
{% data reusables.identity-and-permissions.check-ip-address %}
## Allowing access by {% data variables.product.prodname_github_apps %}
@@ -43,6 +46,8 @@ For instance-level restrictions using Azure NSGs, contact {% data variables.cont
## Enabling allowed IP addresses
{% data reusables.identity-and-permissions.about-enabling-allowed-ip-addresses %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
@@ -52,6 +57,8 @@ For instance-level restrictions using Azure NSGs, contact {% data variables.cont
## Editing an allowed IP address
{% data reusables.identity-and-permissions.about-editing-ip-allow-list-entries %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
@@ -59,6 +66,18 @@ For instance-level restrictions using Azure NSGs, contact {% data variables.cont
{% data reusables.identity-and-permissions.ip-allow-lists-edit-ip %}
{% data reusables.identity-and-permissions.ip-allow-lists-edit-description %}
8. Click **Update**.
{% data reusables.identity-and-permissions.check-ip-address %}
{% ifversion ip-allow-list-address-check %}
## Checking if an IP address is permitted
{% data reusables.identity-and-permissions.about-checking-ip-address %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
{% data reusables.identity-and-permissions.check-ip-address-step %}
{% endif %}
## Deleting an allowed IP address

View File

@@ -19,7 +19,7 @@ shortTitle: Create HA replica
## Creating a high availability replica
1. Set up a new {% data variables.product.prodname_ghe_server %} appliance on your desired platform. The replica appliance should mirror the primary appliance's CPU, RAM, and storage settings. We recommend that you install the replica appliance in an independent environment. The underlying hardware, software, and network components should be isolated from those of the primary appliance. If you are using a cloud provider, use a separate region or zone. For more information, see ["Setting up a {% data variables.product.prodname_ghe_server %} instance"](/enterprise/admin/guides/installation/setting-up-a-github-enterprise-server-instance).
1. Ensure that the new appliance can communicate with all other appliances in this high availability environment over ports 122/TCP and 1194/UDP. For more information, see "[Network ports](/admin/configuration/configuring-network-settings/network-ports#administrative-ports)."
1. In a browser, navigate to the new replica appliance's IP address and upload your {% data variables.product.prodname_enterprise %} license.
{% data reusables.enterprise_installation.replica-steps %}
1. Connect to the replica appliance's IP address using SSH.

View File

@@ -82,7 +82,11 @@ These restrictions are unacceptable for some enterprises. To determine whether {
### Do your developers rely on collaboration outside of your enterprise?
{% data variables.product.prodname_managed_users_caps %} can only contribute to repositories within your enterprise. If your developers must contribute to both repositories within and outside of your enterprise, including private repositories, {% data variables.product.prodname_emus %} may not be right for your enterprise. SAML SSO may be a better solution.
Some companies maintain repositories within an existing enterprise using SAML SSO on {% data variables.product.product_location %}, and also create an {% data variables.product.prodname_emu_enterprise %}. Developers who contribute to repositories owned by both enterprises from a single workstation must switch between the accounts on {% data variables.product.product_location %} within a single browser, or use a different browser for each account. The developer may also need to customize the workstation's Git configuration to accommodate the two accounts. The complexity of this workflow can increase the risk of mistakenly leaking internal code to the public.
If you decide to create an {% data variables.product.prodname_emu_enterprise %} but require that developers contribute to resources outside of the enterprise from a single workstation, you can provide support for switching between the accounts in a developer's local Git configuration. For more information, see "[About {% data variables.product.prodname_emus %}](/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users#supporting-developers-with-multiple-user-accounts-on-githubcom)."
### Does your enterprise rely on outside collaborators?

View File

@@ -36,8 +36,6 @@ You can grant {% data variables.product.prodname_managed_users %} access to and
The usernames of your enterprise's {% data variables.product.prodname_managed_users %} and their profile information, such as display names and email addresses, are set through your IdP and cannot be changed by the users themselves. For more information, see "[Usernames and profile information](#usernames-and-profile-information)."
Enterprise owners can audit all of the {% data variables.product.prodname_managed_users %}' actions on {% data variables.product.prodname_dotcom %}. For more information, see "[Audit log events for your enterprise](/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/audit-log-events-for-your-enterprise#about-audit-log-events-for-your-enterprise)."
To use {% data variables.product.prodname_emus %}, you need a separate type of enterprise account with {% data variables.product.prodname_emus %} enabled. For more information about creating this account, see "[About enterprises with managed users](#about-enterprises-with-managed-users)."
@@ -75,7 +73,8 @@ To use {% data variables.product.prodname_emus %}, you need a separate type of e
* {% data variables.product.prodname_managed_users_caps %} cannot create gists or comment on gists.
* {% data variables.product.prodname_managed_users_caps %} cannot install {% data variables.product.prodname_github_apps %} on their user accounts.
* Other {% data variables.product.prodname_dotcom %} users cannot see, mention, or invite a {% data variables.product.prodname_managed_user %} to collaborate.
* You can choose whether {% data variables.product.prodname_managed_users %} are able to create repositories owned by their user accounts. For more information, see "[Enforcing repository management policies in your enterprise](/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-repository-creation)."
* If you allow {% data variables.product.prodname_managed_users %} to create repositories owned by their user accounts, they can only own private repositories and can only invite other enterprise members to collaborate on their user-owned repositories.
* {% data reusables.enterprise-accounts.emu-forks %}
* Only private and internal repositories can be created in organizations owned by an {% data variables.product.prodname_emu_enterprise %}, depending on organization and enterprise repository visibility settings.
* {% data variables.product.prodname_managed_users_caps %} are limited in their use of {% data variables.product.prodname_pages %}. For more information, see "[About {% data variables.product.prodname_pages %}](/pages/getting-started-with-github-pages/about-github-pages#limitations-for-enterprise-managed-users)."
@@ -113,6 +112,8 @@ Before your developers can use {% data variables.product.prodname_ghe_cloud %} w
5. Once authentication and provisioning are configured, you can start provisioning members and managing teams. For more information, see "[Managing team memberships with identity provider groups](/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/managing-team-memberships-with-identity-provider-groups)."
If members of your enterprise must use one workstation to contribute to repositories on {% data variables.product.product_location %} from both a {% data variables.product.prodname_managed_user %} and a personal account, you can provide support. For more information, see "[Supporting developers with multiple user accounts on {% data variables.product.prodname_dotcom_the_website %}](#supporting-developers-with-multiple-user-accounts-on-githubcom)."
## Authenticating as a {% data variables.product.prodname_managed_user %}
{% data variables.product.prodname_managed_users_caps %} must authenticate through their identity provider. To authenticate, a {% data variables.product.prodname_managed_user %} can visit their IdP application portal or use the login page on {% data variables.product.prodname_dotcom_the_website %}.
@@ -135,3 +136,9 @@ Before your developers can use {% data variables.product.prodname_ghe_cloud %} w
A conflict may occur when provisioning users if the unique parts of the identifier provided by your IdP are removed during normalization. If you're unable to provision a user due to a username conflict, you should modify the username provided by your IdP. For more information, see "[Resolving username conflicts](/admin/identity-and-access-management/managing-iam-for-your-enterprise/username-considerations-for-external-authentication#resolving-username-conflicts)."
The profile name and email address of a {% data variables.product.prodname_managed_user %} is also provided by the IdP. {% data variables.product.prodname_managed_users_caps %} cannot change their profile name or email address on {% data variables.product.prodname_dotcom %}, and the IdP can only provide a single email address.
## Supporting developers with multiple user accounts on {% data variables.product.product_location %}
People on your team may need to contribute to resources on {% data variables.product.product_location %} that are outside of your {% data variables.product.prodname_emu_enterprise %}. For example, you may wish to maintain a separate enterprise for your company's open source projects. Because a {% data variables.product.prodname_managed_user %} cannot contribute to public resources, users will need to maintain a separate, personal account for this work.
People who must contribute from two user accounts on {% data variables.product.product_location %} using one workstation can configure Git to simplify the process. For more information, see "[Managing multiple accounts](/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-personal-account/managing-multiple-accounts)."
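As a minimal sketch of such a local Git configuration, assuming the repositories for one account live under a dedicated directory such as `~/work/` (the paths and identity below are placeholders):

```shell
# Route repositories under ~/work/ to a separate identity file
git config --global includeIf."gitdir:~/work/".path ~/.gitconfig-work

# Define the second identity in the included file
git config --file ~/.gitconfig-work user.name "Mona Lisa"
git config --file ~/.gitconfig-work user.email "mona_lisa@example.com"
```

Git then applies the second identity only inside `~/work/`, so commits made elsewhere fall back to your global configuration.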

View File

@@ -22,9 +22,11 @@ CAP support is enabled automatically for any {% data variables.product.prodname_
For more information about using OIDC with {% data variables.product.prodname_emus %}, see "[Configuring OIDC for Enterprise Managed Users](/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/configuring-oidc-for-enterprise-managed-users)" and "[Migrating from SAML to OIDC](/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/migrating-from-saml-to-oidc)."
## About using CAP with IP allow lists
{% note %}
**Note:** If you use Conditional Access (CA) network location policies in your Azure AD tenant, do not use the IP allow list feature on {% data variables.product.prodname_dotcom_the_website %}, with your enterprise account or with any of the organizations owned by the enterprise. Using both is unsupported and can result in the wrong policy applying. For more information about IP allow lists, see "[Enforcing security settings in your enterprise](/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-security-settings-in-your-enterprise#managing-allowed-ip-addresses-for-organizations-in-your-enterprise)" and "[Managing allowed IP addresses for your organization](/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/managing-allowed-ip-addresses-for-your-organization)."
{% endnote %}
## Considerations for integrations and automations

View File

@@ -31,6 +31,7 @@ If you're new to {% data variables.product.prodname_emus %} and haven't yet conf
{% endnote %}
1. Before you begin the migration, sign in to Azure and disable provisioning in the existing {% data variables.product.prodname_emu_idp_application %} application.
1. If you use [Conditional Access (CA) network location policies](https://docs.microsoft.com/en-us/azure/active-directory/conditional-access/location-condition) in Azure AD, and you're currently using an IP allow list with your enterprise account or any of the organizations owned by the enterprise account on {% data variables.product.prodname_dotcom_the_website %}, disable the IP allow lists. For more information, see "[Enforcing security settings in your enterprise](/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-security-settings-in-your-enterprise#managing-allowed-ip-addresses-for-organizations-in-your-enterprise)" and "[Managing allowed IP addresses for your organization](/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/managing-allowed-ip-addresses-for-your-organization)."
1. Sign in to {% data variables.product.prodname_dotcom_the_website %} as the setup user for your enterprise with the username **@<em>SHORT-CODE</em>_admin**.
1. When prompted to continue to your identity provider, click **Use a recovery code** and sign in using one of your enterprise's recovery codes.
{% data reusables.enterprise-accounts.access-enterprise %}

View File

@@ -77,7 +77,7 @@ The following SAML attributes are available for {% data variables.product.produc
| Name | Required? | Description |
| :- | :- | :- |
| `NameID` | Yes | A persistent user identifier. Any persistent name identifier format may be used. {% ifversion ghec %}If you use an enterprise with {% data variables.product.prodname_emus %}, {% endif %}{% data variables.product.product_name %} will normalize the `NameID` element to use as a username unless one of the alternative assertions is provided. For more information, see "[Username considerations for external authentication](/admin/identity-and-access-management/managing-iam-for-your-enterprise/username-considerations-for-external-authentication)."<br><br>{% note %}**Note:** It's important to use a human-readable, persistent identifier. Using a transient identifier format like `urn:oasis:names:tc:SAML:2.0:nameid-format:transient` will result in re-linking of accounts on every sign-in, which can be detrimental to authorization management.{% endnote %} |
| `SessionNotOnOrAfter` | No | The date that {% data variables.product.product_name %} invalidates the associated session. After invalidation, the person must authenticate once again to access {% ifversion ghec or ghae %}your enterprise's resources{% elsif ghes %}{% data variables.product.product_location %}{% endif %}. For more information, see "[Session duration and timeout](#session-duration-and-timeout)." |
{%- ifversion ghes or ghae %}
| `administrator` | No | When the value is `true`, {% data variables.product.product_name %} will automatically promote the user to be a {% ifversion ghes %}site administrator{% elsif ghae %}enterprise owner{% endif %}. Setting this attribute to anything but `true` will result in demotion, as long as the value is not blank. Omitting this attribute or leaving the value blank will not change the role of the user. |

View File

@@ -44,7 +44,8 @@ You set up the audit log stream on {% data variables.product.product_name %} by
- [Amazon S3](#setting-up-streaming-to-amazon-s3)
- [Azure Blob Storage](#setting-up-streaming-to-azure-blob-storage)
- [Azure Event Hubs](#setting-up-streaming-to-azure-event-hubs){% ifversion streaming-datadog %}
- [Datadog](#setting-up-streaming-to-datadog){% endif %}
- [Google Cloud Storage](#setting-up-streaming-to-google-cloud-storage)
- [Splunk](#setting-up-streaming-to-splunk)
@@ -60,7 +61,7 @@ You can set up streaming to S3 with access keys or, to avoid storing long-lived
#### Setting up streaming to S3 with access keys
{% endif %}
To stream audit logs to Amazon's S3 endpoint, you must have a bucket and access keys. For more information, see [Creating, configuring, and working with Amazon S3 buckets](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-buckets-s3.html) in the AWS documentation. Make sure to block public access to the bucket to protect your audit log information.
To set up audit log streaming from {% data variables.product.prodname_dotcom %} you will need:
* The name of your Amazon S3 bucket
@@ -231,6 +232,32 @@ You need two pieces of information about your event hub: its instance name and t
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
{% ifversion streaming-datadog %}
### Setting up streaming to Datadog
To set up streaming to Datadog, you must create a client token or an API key in Datadog, then configure audit log streaming in {% data variables.product.product_name %} using the token for authentication. You do not need to create a bucket or other storage container in Datadog.
After you set up streaming to Datadog, you can see your audit log data by filtering by "github.audit.streaming." For more information, see [Log Management](https://docs.datadoghq.com/logs/).
1. If you don't already have a Datadog account, create one.
1. In Datadog, generate a client token or an API key, then click **Copy key**. For more information, see [API and Application Keys](https://docs.datadoghq.com/account_management/api-app-keys/) in Datadog Docs.
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Select the **Configure stream** dropdown menu and click **Datadog**.
![Screenshot of the "Configure stream" dropdown menu with "Datadog" highlighted](/assets/images/help/enterprises/audit-stream-choice-datadog.png)
1. Under "Token", paste the token you copied earlier.
![Screenshot of the "Token" field](/assets/images/help/enterprises/audit-stream-datadog-token.png)
1. Select the "Site" dropdown menu and click your Datadog site. To determine your Datadog site, compare your Datadog URL to the table in [Datadog sites](https://docs.datadoghq.com/getting_started/site/) in Datadog Docs.
![Screenshot of the "Site" dropdown menu](/assets/images/help/enterprises/audit-stream-datadog-site.png)
1. To verify that {% data variables.product.prodname_dotcom %} can connect and write to the Datadog endpoint, click **Check endpoint**.
![Check the endpoint](/assets/images/help/enterprises/audit-stream-check.png)
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
1. After a few minutes, confirm that audit log data is appearing on the **Logs** tab in Datadog. If audit log data is not appearing, confirm that your token and site are correct in {% data variables.product.prodname_dotcom %}.
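If you prefer to check from the command line rather than the Datadog UI, a sketch using Datadog's Logs Search API might look like the following (the environment variables and the 15-minute window are assumptions, and the domain should match your Datadog site):

```shell
# Query Datadog for recently streamed audit log events (keys are placeholders)
curl -s -X POST "https://api.datadoghq.com/api/v2/logs/events/search" \
  -H "DD-API-KEY: $DD_API_KEY" \
  -H "DD-APPLICATION-KEY: $DD_APP_KEY" \
  -H "Content-Type: application/json" \
  -d '{"filter": {"query": "source:github.audit.streaming", "from": "now-15m"}}'
```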
{% endif %}
### Setting up streaming to Google Cloud Storage
To set up streaming to Google Cloud Storage, you must create a service account in Google Cloud with the appropriate credentials and permissions, then configure audit log streaming in {% data variables.product.product_name %} using the service account's credentials for authentication.
@@ -292,6 +319,10 @@ To stream audit logs to Splunk's HTTP Event Collector (HEC) endpoint you must ma
Pausing the stream allows you to perform maintenance on the receiving application without losing audit data. Audit logs are stored for up to seven days on {% data variables.product.product_location %} and are then exported when you unpause the stream.
{% ifversion streaming-datadog %}
Datadog only accepts logs from up to 18 hours in the past. If you pause a stream to a Datadog endpoint for more than 18 hours, you risk losing logs that Datadog won't accept after you resume streaming.
{% endif %}
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Pause stream**.

View File

@@ -79,12 +79,17 @@ You can also configure allowed IP addresses for an individual organization. For
### Adding an allowed IP address
{% data reusables.identity-and-permissions.about-adding-ip-allow-list-entries %}
{% data reusables.identity-and-permissions.ipv6-allow-lists %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
{% data reusables.identity-and-permissions.ip-allow-lists-add-ip %}
{% data reusables.identity-and-permissions.ip-allow-lists-add-description %}
{% data reusables.identity-and-permissions.ip-allow-lists-add-entry %}
{% data reusables.identity-and-permissions.check-ip-address %}
### Allowing access by {% data variables.product.prodname_github_apps %}
@@ -92,6 +97,8 @@ You can also configure allowed IP addresses for an individual organization. For
### Enabling allowed IP addresses
{% data reusables.identity-and-permissions.about-enabling-allowed-ip-addresses %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
@@ -101,6 +108,8 @@ You can also configure allowed IP addresses for an individual organization. For
### Editing an allowed IP address
{% data reusables.identity-and-permissions.about-editing-ip-allow-list-entries %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
@@ -108,6 +117,18 @@ You can also configure allowed IP addresses for an individual organization. For
{% data reusables.identity-and-permissions.ip-allow-lists-edit-ip %}
{% data reusables.identity-and-permissions.ip-allow-lists-edit-description %}
8. Click **Update**.
{% data reusables.identity-and-permissions.check-ip-address %}
{% ifversion ip-allow-list-address-check %}
### Checking if an IP address is permitted
{% data reusables.identity-and-permissions.about-checking-ip-address %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}
{% data reusables.identity-and-permissions.check-ip-address-step %}
{% endif %}
### Deleting an allowed IP address

View File

@@ -72,24 +72,29 @@ If an enterprise owner disallows members from creating certain types of reposito
{% endif %}
## Enforcing a policy for base repository permissions
Across all organizations owned by your enterprise, you can set a base repository permission level (none, read, write, or admin) for organization members, or allow owners to administer the setting on the organization level.
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.policies-tab %}
{% data reusables.enterprise-accounts.repositories-tab %}
4. Under "{% ifversion ghec or ghes or ghae %}Base{% else %}Default{% endif %} permissions", review the information about changing the setting. {% data reusables.enterprise-accounts.view-current-policy-config-orgs %}
5. Under "{% ifversion ghec or ghes or ghae %}Base{% else %}Default{% endif %} permissions", use the drop-down menu and choose a policy.
{% ifversion ghec or ghes or ghae %}
4. Under "Base permissions", review the information about changing the setting. {% data reusables.enterprise-accounts.view-current-policy-config-orgs %}
5. Under "Base permissions", use the drop-down menu and choose a policy.
![Drop-down menu with repository permissions policy options](/assets/images/help/business-accounts/repository-permissions-policy-drop-down.png)
## Enforcing a policy for repository creation
Across all organizations owned by your enterprise, you can allow members to create repositories, restrict repository creation to organization owners, or allow owners to administer the setting on the organization level.
If you allow members to create repositories in your organizations, you can choose which types of repositories (public, private, and internal) members can create.
{% ifversion enterprise-namespace-repo-setting %}
{% ifversion ghec %}If your enterprise uses {% data variables.product.prodname_emus %}, you{% else %}You{% endif %} can also prevent users from creating repositories owned by their user accounts.
{% endif %}
{% data reusables.repositories.internal-repo-default %} For more information about internal repositories, see "[Creating an internal repository](/articles/creating-an-internal-repository)."
{% data reusables.organizations.repo-creation-constants %}
@@ -97,33 +102,33 @@ Across all organizations owned by your enterprise, you can allow members to crea
{% data reusables.enterprise-accounts.policies-tab %}
{% data reusables.enterprise-accounts.repositories-tab %}
5. Under "Repository creation", review the information about changing the setting. {% data reusables.enterprise-accounts.view-current-policy-config-orgs %}
{% data reusables.enterprise-accounts.repo-creation-policy %}
{% data reusables.enterprise-accounts.repo-creation-types %}{% ifversion enterprise-namespace-repo-setting %}
1. Optionally, {% ifversion ghec %}if your enterprise uses {% data variables.product.prodname_emus %} and you want {% endif %}to prevent enterprise members from creating repositories owned by their user accounts, select **Block the creation of user namespace repositories**.
![Screenshot showing the list of disabled options from forking policy](/assets/images/help/business-accounts/restrict-personal-namespace-enabled-setting.png){% endif %}
## Enforcing a policy for forking private or internal repositories
Across all organizations owned by your enterprise, you can allow people with access to a private or internal repository to fork the repository, never allow forking of private or internal repositories, or allow owners to administer the setting on the organization level.
{% ifversion enterprise-namespace-repo-setting %}
{% note %}
**Note:** If {% ifversion ghec %}your enterprise uses {% data variables.product.prodname_emus %} and {% endif %}your "Repository creation" policy prevents enterprise members from creating repositories owned by their user accounts, members will not be allowed to fork a repository in their user accounts, regardless of your "Repository forking" policy.
{% endnote %}
{% endif %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.policies-tab %}
{% data reusables.enterprise-accounts.repositories-tab %}
3. Under "Repository forking", review the information about changing the setting. {% data reusables.enterprise-accounts.view-current-policy-config-orgs %}
4. Under "Repository forking", use the drop-down menu and choose a policy.
![Drop-down menu with repository forking policy options](/assets/images/help/business-accounts/repository-forking-policy-drop-down.png){% ifversion innersource-fork-policies %}
5. If forking is enabled, you can specify where users are allowed to fork repositories. Review the information about changing the setting and choose a policy.
![Screenshot showing the list of repository forking policy options](/assets/images/help/business-accounts/repository-forking-policy-settings.png){% endif %}
## Enforcing a policy for inviting{% ifversion ghec %} outside{% endif %} collaborators to repositories
@@ -140,8 +145,6 @@ Across all organizations owned by your enterprise, you can allow members to invi
{% elsif ghes or ghae %}
![Drop-down menu with invitation policy options](/assets/images/enterprise/business-accounts/repository-invitation-policy-drop-down.png)
{% endif %}
## Enforcing a policy for the default branch name
@@ -156,8 +159,6 @@ Across all organizations owned by your enterprise, you can set the default branc
5. Click **Update**.
![Update button](/assets/images/help/business-accounts/default-branch-name-update.png)
## Enforcing a policy for changes to repository visibility
Across all organizations owned by your enterprise, you can allow members with admin access to change a repository's visibility, restrict repository visibility changes to organization owners, or allow owners to administer the setting on the organization level. When you prevent members from changing repository visibility, only enterprise owners can change the visibility of a repository.
@@ -167,9 +168,9 @@ If an enterprise owner has restricted repository creation to organization owners
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.policies-tab %}
{% data reusables.enterprise-accounts.repositories-tab %}
5. Under "Repository visibility change", review the information about changing the setting. {% data reusables.enterprise-accounts.view-current-policy-config-orgs %}
{% data reusables.enterprise-accounts.repository-visibility-policy %}
1. Under "Repository visibility change", review the information about changing the setting. {% data reusables.enterprise-accounts.view-current-policy-config-orgs %}
1. Under "Repository visibility change", use the drop-down menu and choose a policy.
![Drop-down menu with repository visibility policy options](/assets/images/help/business-accounts/repository-visibility-policy-drop-down.png)
## Enforcing a policy for repository deletion and transfer

View File

@@ -1,6 +1,6 @@
---
title: About SSH
intro: 'Using the SSH protocol, you can connect and authenticate to remote servers and services. With SSH keys, you can connect to {% data variables.product.product_name %} without supplying your username and personal access token at each visit.{% ifversion ssh-commit-verification %} You can also use an SSH key to sign commits.{% endif %}'
redirect_from:
- /articles/about-ssh
- /github/authenticating-to-github/about-ssh
@@ -16,7 +16,7 @@ topics:
{% data reusables.ssh.about-ssh %} For more information about SSH, see [Secure Shell](https://en.wikipedia.org/wiki/Secure_Shell) on Wikipedia.
When you set up SSH, you will need to generate a new private SSH key and add it to the SSH agent. You must also add the public SSH key to your account on {% data variables.product.product_name %} before you use the key to authenticate{% ifversion ssh-commit-verification %} or sign commits{% endif %}. For more information, see "[Generating a new SSH key and adding it to the ssh-agent](/github/authenticating-to-github/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent)"{% ifversion ssh-commit-verification %}, {% else %} and{% endif %} "[Adding a new SSH key to your {% data variables.product.prodname_dotcom %} account](/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account){% ifversion ssh-commit-verification %}" and "[About commit signature verification](/articles/about-commit-signature-verification){% endif %}."
You can further secure your SSH key by using a hardware security key, which requires the physical hardware security key to be attached to your computer when the key pair is used to authenticate with SSH. You can also secure your SSH key by adding your key to the ssh-agent and using a passphrase. For more information, see "[Working with SSH key passphrases](/github/authenticating-to-github/working-with-ssh-key-passphrases)."
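As a quick sketch of that setup (the email address is a placeholder):

```shell
# Generate a new Ed25519 key pair; you'll be prompted for a passphrase
ssh-keygen -t ed25519 -C "your_email@example.com"

# Start the ssh-agent and load the new private key into it
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/id_ed25519
```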
@@ -33,7 +33,6 @@ Organizations that use {% data variables.product.prodname_ghe_cloud %} can provi
{% else ghec or ghes or ghae %}
If you're a member of an organization that provides SSH certificates, you can use your certificate to access that organization's repositories without adding the certificate to your account on {% data variables.product.product_name %}. You cannot use your certificate to access forks of the organization's repositories, if the forks are owned by your personal account. For more information, see "[About SSH certificate authorities](/organizations/managing-git-access-to-your-organizations-repositories/about-ssh-certificate-authorities)."
{% endif %}
## Further reading
- "[Troubleshooting SSH](/articles/troubleshooting-ssh)"

View File

@@ -19,6 +19,8 @@ shortTitle: Add a new SSH key
{% data reusables.ssh.about-ssh %} For more information, see "[About SSH](/authentication/connecting-to-github-with-ssh/about-ssh)."
{% ifversion ssh-commit-verification %}You can also use SSH to sign commits and tags. For more information about commit signing, see "[About commit signature verification](/articles/about-commit-signature-verification)."{% endif %}
After you generate an SSH key pair, you must add the public key to {% ifversion fpt or ghec or ghes %}{% data variables.product.product_location %}{% elsif ghae %}{% data variables.product.product_name %}{% endif %} to enable SSH access for your account.
## Prerequisites
@@ -30,120 +32,46 @@ Before adding a new SSH key to your account on {% ifversion ghae %}{% data varia
## Adding a new SSH key to your account
After adding a new SSH authentication key to your account on {% ifversion ghae %}{% data variables.product.product_name %}{% else %}{% data variables.product.product_location %}{% endif %}, you can reconfigure any local repositories to use SSH. For more information, see "[Switching remote URLs from HTTPS to SSH](/github/getting-started-with-github/managing-remote-repositories/#switching-remote-urls-from-https-to-ssh)."
{% data reusables.ssh.key-type-support %}
{% webui %}
{% data reusables.gpg.copy-ssh-public-key %}
{% data reusables.user-settings.access_settings %}
{% data reusables.user-settings.ssh %}
4. Click **New SSH key** or **Add SSH key**.
{% ifversion ssh-commit-verification %}
   ![SSH Key button](/assets/images/help/settings/ssh-add-ssh-key-with-auth.png)
{% else %}
   ![SSH Key button](/assets/images/help/settings/ssh-add-ssh-key.png)
{% endif %}
5. In the "Title" field, add a descriptive label for the new key. For example, if you're using a personal laptop, you might call this key "Personal laptop".
{% ifversion ssh-commit-verification %}
6. Select the type of key, either authentication or signing. For more information about commit signing, see "[About commit signature verification](/articles/about-commit-signature-verification)."
{% endif %}
7. Paste your key into the "Key" field.
{% ifversion ssh-commit-verification %}
   ![The key field](/assets/images/help/settings/ssh-key-paste-with-type.png)
{% else %}
   ![The key field](/assets/images/help/settings/ssh-key-paste.png)
{% endif %}
8. Click **Add SSH key**.
   ![The Add key button](/assets/images/help/settings/ssh-add-key.png)
{% data reusables.user-settings.sudo-mode-popup %}
{% endwebui %}
{% cli %}
{% data reusables.cli.cli-learn-more %}
Before you can use the {% data variables.product.prodname_cli %} to add an SSH key to your account, you must authenticate to the {% data variables.product.prodname_cli %}. For more information, see [`gh auth login`](https://cli.github.com/manual/gh_auth_login) in the {% data variables.product.prodname_cli %} documentation.
{% ifversion ssh-commit-verification %}At present, you can only use {% data variables.product.prodname_cli %} to add SSH authentication keys; you cannot add SSH signing keys.{% endif %}
To add an SSH authentication key to your GitHub account, use the `ssh-key add` subcommand, specifying your public key.
```shell
gh ssh-key add KEY-FILE
```
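For example, assuming your public key lives at the default path, a call might look like this (the title is a placeholder):

```shell
gh ssh-key add ~/.ssh/id_ed25519.pub --title "personal laptop"
```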

View File

@@ -1,6 +1,6 @@
---
title: About commit signature verification
intro: 'Using GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or S/MIME, you can sign tags and commits locally. These tags or commits are marked as verified on {% data variables.product.product_name %} so other people can be confident that the changes come from a trusted source.'
redirect_from:
- /articles/about-gpg-commit-and-tag-signatures
- /articles/about-gpg
@@ -19,10 +19,18 @@ shortTitle: Commit signature verification
---
## About commit signature verification
You can sign commits and tags locally, to give other people confidence about the origin of a change you have made. If a commit or tag has a GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or S/MIME signature that is cryptographically verifiable, {% data variables.product.product_name %} marks the commit or tag {% ifversion fpt or ghec %}"Verified" or "Partially verified."{% else %}"Verified."{% endif %}
![Verified commit](/assets/images/help/commits/verified-commit.png)
{% ifversion ssh-commit-verification %}
For most individual users, GPG or SSH will be the best choice for signing commits. S/MIME signatures are usually required in the context of a larger organization. SSH signatures are the simplest to generate. You can even upload your existing authentication key to {% data variables.product.product_name %} to also use as a signing key. Generating a GPG signing key is more involved than generating an SSH key, but GPG has features that SSH does not. A GPG key can expire or be revoked when no longer used. {% data variables.product.product_name %} shows commits that were signed with such a key as "Verified" unless the key was marked as compromised. SSH keys don't have this capability.
{% endif %}
{% ifversion fpt or ghec %}
Commits and tags have the following verification statuses, depending on whether you have enabled vigilant mode. By default vigilant mode is not enabled. For information on how to enable vigilant mode, see "[Displaying verification statuses for all of your commits](/github/authenticating-to-github/displaying-verification-statuses-for-all-of-your-commits)."
@@ -47,10 +55,9 @@ For more information, see "[Rebasing and merging your commits](/repositories/con
{% data reusables.identity-and-permissions.vigilant-mode-verification-statuses %}
{% else %}
If a commit or tag has a signature that can't be verified, {% data variables.product.product_name %} marks the commit or tag "Unverified."
{% endif %}
Repository administrators can enforce required commit signing on a branch to block all commits that are not signed and verified. For more information, see "[About protected branches](/github/administering-a-repository/about-protected-branches#require-signed-commits)."
{% data reusables.identity-and-permissions.verification-status-check %}
@@ -59,7 +66,7 @@ Repository administrators can enforce required commit signing on a branch to blo
{% ifversion ghes %}If a site administrator has enabled web commit signing, {% data variables.product.product_name %} will automatically use GPG to sign commits you make using the web interface. Commits signed by {% data variables.product.product_name %} will have a verified status. You can verify the signature locally using the public key available at `https://HOSTNAME/web-flow.gpg`. For more information, see "[Configuring web commit signing](/admin/configuration/configuring-your-enterprise/configuring-web-commit-signing)."
{% else %}{% data variables.product.prodname_dotcom %} will automatically use GPG to sign commits you make using the web interface. Commits signed by {% data variables.product.prodname_dotcom %} will have a verified status. You can verify the signature locally using the public key available at https://github.com/web-flow.gpg. The full fingerprint of the key is `5DE3 E050 9C47 EA3C F04A 42D3 4AEE 18F8 3AFD EB23`.
You can optionally choose to have {% data variables.product.prodname_dotcom %} GPG sign commits you make in {% data variables.product.prodname_github_codespaces %}. For more information about enabling GPG verification for your codespaces, see "[Managing GPG verification for {% data variables.product.prodname_github_codespaces %}](/codespaces/managing-your-codespaces/managing-gpg-verification-for-github-codespaces)."{% endif %}
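As a sketch of that local verification on {% data variables.product.prodname_dotcom_the_website %}:

```shell
# Import GitHub's web-flow public key, then inspect the signature on the latest commit
curl https://github.com/web-flow.gpg | gpg --import
git log --show-signature -1
```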
{% endif %}
## GPG commit signature verification
@@ -77,6 +84,26 @@ To sign commits using GPG and have those commits verified on {% data variables.p
5. [Sign commits](/articles/signing-commits)
6. [Sign tags](/articles/signing-tags)
{% ifversion ssh-commit-verification %}
## SSH commit signature verification
You can use SSH to sign commits with an SSH public key that you generate yourself. If you already use an SSH key to authenticate with {% data variables.product.product_name %},
you can also upload that same key again for use as a signing key. There's no limit on the number of signing keys you can add to your account.
{% data variables.product.product_name %} uses [ssh_data](https://github.com/github/ssh_data), an open source Ruby library, to confirm that your locally signed commits and tags are cryptographically verifiable against a public key you have added to your account on {% ifversion ghae %}{% data variables.product.product_name %}{% else %}{% data variables.product.product_location %}{% endif %}.
{% data reusables.gpg.ssh-git-version %}
To sign commits using SSH and have those commits verified on {% data variables.product.product_name %}, follow these steps:
1. [Check for existing SSH keys](/articles/checking-for-existing-ssh-keys)
2. [Generate a new SSH key](/articles/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent)
3. [Add an SSH signing key to your GitHub account](/articles/adding-a-new-ssh-key-to-your-github-account)
4. [Tell Git about your signing key](/articles/telling-git-about-your-signing-key)
5. [Sign commits](/articles/signing-commits)
6. [Sign tags](/articles/signing-tags)
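Once your signing key is in place, a minimal local setup might look like this (assumes Git 2.34 or later and an existing `~/.ssh/id_ed25519` key pair):

```shell
# Switch Git's signing backend from GPG to SSH
git config --global gpg.format ssh
git config --global user.signingkey ~/.ssh/id_ed25519.pub

# Sign an individual commit; set commit.gpgsign to true to sign by default
git commit -S -m "Example signed commit"
```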
{% endif %}
## S/MIME commit signature verification
You can use S/MIME to sign commits with an X.509 key issued by your organization.

View File

@@ -18,7 +18,7 @@ redirect_from:
When you work locally on your computer, Git allows you to set the author of your changes and the identity of the committer. This, potentially, makes it difficult for other people to be confident that commits and tags you create were actually created by you. To help solve this problem you can sign your commits and tags. For more information, see "[Signing commits](/github/authenticating-to-github/signing-commits)" and "[Signing tags](/github/authenticating-to-github/signing-tags)." {% data variables.product.prodname_dotcom %} marks signed commits and tags with a verification status.
By default, commits and tags are marked "Verified" if they are signed with a GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or S/MIME key that was successfully verified. If a commit or tag has a signature that can't be verified by {% data variables.product.prodname_dotcom %}, we mark the commit or tag "Unverified." In all other cases no verification status is displayed.
However, you can give other users increased confidence in the identity attributed to your commits and tags by enabling vigilant mode in your {% data variables.product.prodname_dotcom %} settings. With vigilant mode enabled, all of your commits and tags are marked with one of three verification statuses.

View File

@@ -28,11 +28,11 @@ topics:
{% data reusables.command_line.open_the_multi_os_terminal %}
3. Generate a GPG key pair. Since there are multiple versions of GPG, you may need to consult the relevant [_man page_](https://en.wikipedia.org/wiki/Man_page) to find the appropriate key generation command. Your key must use RSA.
- If you are on version 2.1.17 or greater, paste the text below to generate a GPG key pair.
```shell{:copy}
$ gpg --full-generate-key
```
- If you are not on version 2.1.17 or greater, the `gpg --full-generate-key` command doesn't work. Paste the text below and skip to step 6.
```shell{:copy}
$ gpg --default-new-key-algo rsa4096 --gen-key
```
4. At the prompt, specify the kind of key you want, or press `Enter` to accept the default.
@@ -51,10 +51,10 @@ topics:
{% data reusables.gpg.list-keys-with-note %}
{% data reusables.gpg.copy-gpg-key-id %}
10. Paste the text below, substituting in the GPG key ID you'd like to use. In this example, the GPG key ID is `3AA5C34371567BD2`:
```shell{:copy}
$ gpg --armor --export 3AA5C34371567BD2
# Prints the GPG key ID, in ASCII armor format
```
11. Copy your GPG key, beginning with `-----BEGIN PGP PUBLIC KEY BLOCK-----` and ending with `-----END PGP PUBLIC KEY BLOCK-----`.
12. [Add the GPG key to your GitHub account](/articles/adding-a-gpg-key-to-your-github-account).

View File

@@ -1,6 +1,6 @@
---
title: Managing commit signature verification
intro: '{% data variables.product.product_name %} will verify GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or S/MIME signatures so other people will know that your commits come from a trusted source.{% ifversion fpt %} {% data variables.product.product_name %} will automatically sign commits you make using the {% data variables.product.product_name %} web interface.{% endif %}'
redirect_from:
- /articles/generating-a-gpg-key
- /articles/signing-commits-with-gpg

View File

@@ -1,6 +1,6 @@
---
title: Signing commits
intro: You can sign commits locally using GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or S/MIME.
redirect_from:
- /articles/signing-commits-and-tags-using-gpg
- /articles/signing-commits-using-gpg
@@ -52,9 +52,5 @@ If you have multiple keys or are attempting to sign commits or tags with a key t
## Further reading
* "[Checking for existing GPG keys](/articles/checking-for-existing-gpg-keys)"
* "[Generating a new GPG key](/articles/generating-a-new-gpg-key)"
* "[Adding a GPG key to your GitHub account](/articles/adding-a-gpg-key-to-your-github-account)"
* "[Telling Git about your signing key](/articles/telling-git-about-your-signing-key)"
* "[Associating an email with your GPG key](/articles/associating-an-email-with-your-gpg-key)"
* "[Signing tags](/articles/signing-tags)"

View File

@@ -1,6 +1,6 @@
---
title: Signing tags
intro: You can sign tags locally using GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or S/MIME.
redirect_from:
- /articles/signing-tags-using-gpg
- /articles/signing-tags
@@ -31,9 +31,6 @@ topics:
## Further reading
- "[Viewing your repository's tags](/articles/viewing-your-repositorys-tags)"
- "[Checking for existing GPG keys](/articles/checking-for-existing-gpg-keys)"
- "[Generating a new GPG key](/articles/generating-a-new-gpg-key)"
- "[Adding a GPG key to your GitHub account](/articles/adding-a-gpg-key-to-your-github-account)"
- "[Telling Git about your signing key](/articles/telling-git-about-your-signing-key)"
- "[Associating an email with your GPG key](/articles/associating-an-email-with-your-gpg-key)"
- "[Signing commits](/articles/signing-commits)"

View File

@@ -1,6 +1,6 @@
---
title: Telling Git about your signing key
intro: 'To sign commits locally, you need to inform Git that there''s a GPG{% ifversion ssh-commit-verification %}, SSH,{% endif %} or X.509 key you''d like to use.'
redirect_from:
- /articles/telling-git-about-your-gpg-key
- /articles/telling-git-about-your-signing-key
@@ -51,8 +51,6 @@ If you have multiple GPG keys, you need to tell Git which one to use.
$ killall gpg-agent
```
{% data reusables.gpg.x-509-key %}
{% endmac %}
{% windows %}
@@ -74,8 +72,6 @@ If you have multiple GPG keys, you need to tell Git which one to use.
{% data reusables.gpg.copy-gpg-key-id %}
{% data reusables.gpg.paste-gpg-key-id %}
{% data reusables.gpg.x-509-key %}
{% endwindows %}
{% linux %}
@@ -100,15 +96,25 @@ If you have multiple GPG keys, you need to tell Git which one to use.
```bash
$ [ -f ~/.bashrc ] && echo 'export GPG_TTY=$(tty)' >> ~/.bashrc
```
{% endlinux %}
{% ifversion ssh-commit-verification %}
## Telling Git about your SSH key
You can use an existing SSH key to sign commits and tags, or generate a new one specifically for signing. For more information, see "[Generating a new SSH key and adding it to the ssh-agent](/github/authenticating-to-github/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent)."
{% data reusables.gpg.ssh-git-version %}
{% data reusables.command_line.open_the_multi_os_terminal %}
{% data reusables.gpg.configure-ssh-signing %}
{% data reusables.gpg.copy-ssh-public-key %}
{% data reusables.gpg.paste-ssh-public-key %}
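In essence, the steps above set two `git config` values. The sketch below assumes Git 2.34 or later and an existing Ed25519 key; the allowed signers file is optional and only needed if you also want Git to verify SSH signatures locally:
```shell
$ git config --global gpg.format ssh
$ git config --global user.signingkey ~/.ssh/id_ed25519.pub
# Optional: lets `git log --show-signature` verify SSH signatures locally
$ git config --global gpg.ssh.allowedSignersFile ~/.ssh/allowed_signers
```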
{% endif %}
{% data reusables.gpg.x-509-key %}
## Further reading
- "[Checking for existing GPG keys](/articles/checking-for-existing-gpg-keys)"
- "[Generating a new GPG key](/articles/generating-a-new-gpg-key)"
- "[Using a verified email address in your GPG key](/articles/using-a-verified-email-address-in-your-gpg-key)"
- "[Adding a GPG key to your GitHub account](/articles/adding-a-gpg-key-to-your-github-account)"
- "[Associating an email with your GPG key](/articles/associating-an-email-with-your-gpg-key)"
- "[Adding a new SSH key to your GitHub account](/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account)."
- "[Signing commits](/articles/signing-commits)"
- "[Signing tags](/articles/signing-tags)"

View File

@@ -23,7 +23,12 @@ shortTitle: Check verification status
3. Next to your commit's abbreviated commit hash, there is a box that shows whether your commit signature is verified{% ifversion fpt or ghec %}, partially verified,{% endif %} or unverified.
![Signed commit](/assets/images/help/commits/gpg-signed-commit-verified-without-details.png)
4. To view more detailed information about the commit signature, click **Verified**{% ifversion fpt or ghec %}, **Partially verified**,{% endif %} or **Unverified**.
GPG signed commits will show the ID of the key that was used.
![Verified GPG signed commit](/assets/images/help/commits/gpg-signed-commit_verified_details.png)
{% ifversion ssh-commit-verification %}
SSH signed commits will show the signature of the public key that was used.
![Verified SSH signed commit](/assets/images/help/commits/ssh-signed-commit-verified-details.png)
{% endif %}
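You can also check signature status from the command line, independent of the web interface. This is a minimal sketch and assumes your local verification setup (GPG keyring or SSH allowed signers file) already trusts the key:
```shell
# Show the signature on the most recent commit
$ git log --show-signature -1
# Exits non-zero if the commit is unsigned or the signature is invalid
$ git verify-commit HEAD
```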
## Checking your tag signature verification status

View File

@@ -49,9 +49,9 @@ All data transferred out, when triggered by {% data variables.product.prodname_a
Storage usage is shared with build artifacts produced by {% data variables.product.prodname_actions %} for repositories owned by your account. For more information, see "[About billing for {% data variables.product.prodname_actions %}](/billing/managing-billing-for-github-actions/about-billing-for-github-actions)."
{% data variables.product.prodname_dotcom %} charges usage to the account that owns the repository where the package is published. If your account's usage surpasses these limits and you have set a spending limit above $0 USD, you will pay $0.008 USD per GB of storage per day and $0.50 USD per GB of data transfer.
For example, if your organization uses {% data variables.product.prodname_team %}, allows unlimited spending, uses 150GB of storage, and has 50GB of data transfer out during a month, the organization would have overages of 148GB for storage and 40GB for data transfer for that month. The storage overage would cost $0.008 USD per GB per day or approximately $37 USD for a 31-day month. The overage for data transfer would cost $0.50 USD per GB or $20 USD.
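As a quick sanity check on those figures, using `bc` (the $37 USD above is the first result rounded):
```shell
$ echo "148 * 0.008 * 31" | bc   # storage: 148 GB x $0.008/GB/day x 31 days
36.704
$ echo "40 * 0.50" | bc          # data transfer: 40 GB x $0.50/GB
20.00
```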
{% data reusables.dotcom_billing.pricing_calculator.pricing_cal_packages %}

View File

@@ -32,7 +32,7 @@ There are a few approaches for tackling newly committed credentials, but one exa
{% note %}
**Note:** You can automate this step. For large enterprises and organizations with hundreds of repositories, manually following up is unsustainable. You could incorporate automation into the webhook process defined in the first step. The webhook payload contains repository and organization information about the leaked secret. Using this information, you can contact the current maintainers of the repository and send an email or message to the responsible people, or open an issue.
{% endnote %}
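One hypothetical shape for that automation is to open the follow-up issue with the `gh` CLI. Here `OWNER`, `REPO`, and `SECRET_TYPE` are placeholder variables your webhook handler would populate from the payload, not values the CLI provides:
```shell
$ gh issue create --repo "$OWNER/$REPO" \
    --title "Leaked secret detected: $SECRET_TYPE" \
    --body "A secret was committed to this repository and has been revoked. Please rotate it and review recent commits."
```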
3. **Educate**: Create an internal training document assigned to the developer who committed the secret. Within this training document, you can explain the risks created by committing secrets and direct them to your best practice information about using secrets securely in development. If a developer doesn't learn from the experience and continues to commit secrets, you could create an escalation process, but education usually works well.

View File

@@ -43,8 +43,8 @@ By default, the code scanning alerts page is filtered to show alerts for the def
![Summary of alerts](/assets/images/help/repository/code-scanning-click-alert.png)
{% ifversion fpt or ghec or ghes > 3.4 or ghae-issue-6249 %}
{% data reusables.code-scanning.alert-default-branch %}
![The "Affected branches" section in an alert](/assets/images/help/repository/code-scanning-affected-branches.png){% endif %}
1. Optionally, if the alert highlights a problem with data flow, click **Show paths** to display the path from the data source to the sink where it's used.
{% ifversion fpt or ghec or ghes > 3.4 or ghae-issue-6249 %}
![The "Show paths" link on an alert](/assets/images/help/repository/code-scanning-show-paths.png)

View File

@@ -32,6 +32,12 @@ If you're setting up {% data variables.product.prodname_code_scanning %} for a c
You must run {% data variables.product.prodname_codeql %} inside the container in which you build your code. This applies whether you are using the {% data variables.product.prodname_codeql_cli %}{% ifversion codeql-runner-supported %}, the {% data variables.product.prodname_codeql_runner %},{% endif %} or {% data variables.product.prodname_actions %}. For the {% data variables.product.prodname_codeql_cli %} {% ifversion codeql-runner-supported %}or the {% data variables.product.prodname_codeql_runner %}{% endif %}, see "[Installing {% data variables.product.prodname_codeql_cli %} in your CI system](/code-security/secure-coding/using-codeql-code-scanning-with-your-existing-ci-system/installing-codeql-cli-in-your-ci-system)"{% ifversion codeql-runner-supported %} or "[Running {% data variables.product.prodname_codeql_runner %} in your CI system](/code-security/secure-coding/running-codeql-runner-in-your-ci-system)"{% endif %} for more information. If you're using {% data variables.product.prodname_actions %}, configure your workflow to run all the actions in the same container. For more information, see "[Example workflow](#example-workflow)."
{% note %}
**Note:** {% data reusables.code-scanning.non-glibc-linux-support %}
{% endnote %}
## Dependencies
You may have difficulty running {% data variables.product.prodname_code_scanning %} if the container you're using is missing certain dependencies (for example, Git must be installed and added to the PATH variable). If you encounter dependency issues, review the list of software typically included on {% data variables.product.prodname_dotcom %}'s runner images. For more information, see the version-specific `readme` files in these locations:
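For example, on a Debian-based container image, a minimal check and install for the Git dependency might look like this (adapt to your base image's package manager):
```shell
$ apt-get update && apt-get install -y git
$ git --version   # confirm Git is installed and on the PATH
```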

View File

@@ -151,25 +151,29 @@ The names of the {% data variables.product.prodname_code_scanning %} analysis ch
![{% data variables.product.prodname_code_scanning %} pull request checks](/assets/images/help/repository/code-scanning-pr-checks.png)
When the {% data variables.product.prodname_code_scanning %} jobs complete, {% data variables.product.prodname_dotcom %} works out whether any alerts were added by the pull request and adds the "{% data variables.product.prodname_code_scanning_capc %} results / TOOL NAME" entry to the list of checks. After {% data variables.product.prodname_code_scanning %} has been performed at least once, you can click **Details** to view the results of the analysis.
{% ifversion fpt or ghec or ghes > 3.4 or ghae-issue-7095 %}
<!--Troubleshooting section no longer relevant-->
{% elsif ghes < 3.5 or ghae %}
If you used a pull request to add {% data variables.product.prodname_code_scanning %} to the repository, you will initially see {% ifversion ghes > 3.2 or ghae %}an "Analysis not found"{% elsif ghes = 3.2 %}a "Missing analysis"{% endif %} message when you click **Details** on the "{% data variables.product.prodname_code_scanning_capc %} results / TOOL NAME" check.
{% ifversion ghes > 3.2 or ghae %}
![Analysis not found for commit message](/assets/images/enterprise/3.4/repository/code-scanning-analysis-not-found.png)
The table lists one or more categories. Each category relates to specific analyses, for the same tool and commit, performed on a different language or a different part of the code. For each category, the table shows the two analyses that {% data variables.product.prodname_code_scanning %} attempted to compare to determine which alerts were introduced or fixed in the pull request.
For example, in the screenshot above, {% data variables.product.prodname_code_scanning %} found an analysis for the merge commit of the pull request, but no analysis for the head of the main branch.
### Reasons for the "Analysis not found" message
{% elsif ghes = 3.2 %}
![Missing analysis for commit message](/assets/images/enterprise/3.2/repository/code-scanning-missing-analysis.png)
### Reasons for the "Missing analysis" message
{% endif %}
After {% data variables.product.prodname_code_scanning %} has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows {% data variables.product.prodname_code_scanning %} to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add {% data variables.product.prodname_code_scanning %} to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the {% ifversion ghes > 3.2 or ghae %}"Analysis not found"{% elsif ghes = 3.2 %}"Missing analysis for base commit SHA-HASH"{% endif %} message.
There are other situations where there may be no analysis for the latest commit to the base branch for a pull request. These include:
@@ -177,7 +181,7 @@ There are other situations where there may be no analysis for the latest commit
To check whether a branch has been scanned, go to the {% data variables.product.prodname_code_scanning_capc %} page, click the **Branch** drop-down and select the relevant branch.
![Choose a branch from the Branch drop-down menu](/assets/images/help/repository/code-scanning-branch-dropdown.png)
The solution in this situation is to add the name of the base branch to the `on:push` and `on:pull_request` specification in the {% data variables.product.prodname_code_scanning %} workflow on that branch and then make a change that updates the open pull request that you want to scan.
@@ -189,6 +193,8 @@ There are other situations where there may be no analysis for the latest commit
Merge a trivial change into the base branch to trigger {% data variables.product.prodname_code_scanning %} on this latest commit, then push a change to the pull request to retrigger {% data variables.product.prodname_code_scanning %}.
{% endif %}
## Next steps
After setting up {% data variables.product.prodname_code_scanning %}, and allowing its actions to complete, you can:

View File

@@ -45,9 +45,15 @@ If you upload a second SARIF file for a commit with the same category and from t
If you're new to SARIF and want to learn more, see Microsoft's [`SARIF tutorials`](https://github.com/microsoft/sarif-tutorials) repository.
## Providing data to track {% data variables.product.prodname_code_scanning %} alerts across runs
Each time the results of a new code scan are uploaded, the results are processed and alerts are added to the repository. To prevent duplicate alerts for the same problem, {% data variables.product.prodname_code_scanning %} uses fingerprints to match results across various runs so they only appear once in the latest run for the selected branch. This makes it possible to match alerts to the correct line of code when files are edited. The `ruleId` for a result must also be the same across analyses.
### Reporting consistent filepaths
The filepath has to be consistent across runs so that a stable fingerprint can be computed. If the filepaths differ for the same result, a new alert is created with each new analysis and the old alert is closed, producing multiple alerts for the same result over time.
### Including data for fingerprint generation
{% data variables.product.prodname_dotcom %} uses the `partialFingerprints` property in the OASIS standard to detect when two results are logically identical. For more information, see the "[partialFingerprints property](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Toc16012611)" entry in the OASIS documentation.
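To see whether your tool's output already carries fingerprints before you upload, you can inspect the SARIF file; a sketch using `jq`, where `results.sarif` is a placeholder filename:
```shell
$ jq '.runs[].results[] | {ruleId, partialFingerprints}' results.sarif
```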
@@ -77,6 +83,12 @@ You can check a SARIF file is compatible with {% data variables.product.prodname
If you use a code analysis engine other than {% data variables.product.prodname_codeql %}, you can review the supported SARIF properties to optimize how your analysis results will appear on {% data variables.product.prodname_dotcom %}.
{% note %}
**Note:** You must supply an explicit value for any property marked as "required". The empty string is not supported for required properties.
{% endnote %}
Any valid SARIF 2.1.0 output file can be uploaded; however, {% data variables.product.prodname_code_scanning %} will only use the following supported properties.
### `sarifLog` object
@@ -138,7 +150,7 @@ Each `result` object contains details for one alert in the codebase. Within the
| `level`| **Optional.** The severity of the result. This level overrides the default severity defined by the rule. {% data variables.product.prodname_code_scanning_capc %} uses the level to filter results by severity on {% data variables.product.prodname_dotcom %}.
| `message.text`| **Required.** A message that describes the result. {% data variables.product.prodname_code_scanning_capc %} displays the message text as the title of the result. Only the first sentence of the message will be displayed when visible space is limited.
| `locations[]`| **Required.** The set of locations where the result was detected up to a maximum of 10. Only one location should be included unless the problem can only be corrected by making a change at every specified location. **Note:** At least one location is required for {% data variables.product.prodname_code_scanning %} to display a result. {% data variables.product.prodname_code_scanning_capc %} will use this property to decide which file to annotate with the result. Only the first value of this array is used. All other values are ignored.
| `partialFingerprints`| **Required.** A set of strings used to track the unique identity of the result. {% data variables.product.prodname_code_scanning_capc %} uses `partialFingerprints` to accurately identify which results are the same across commits and branches. {% data variables.product.prodname_code_scanning_capc %} will attempt to use `partialFingerprints` if they exist. If you are uploading third-party SARIF files with the `upload-action`, the action will create `partialFingerprints` for you when they are not included in the SARIF file. For more information, see "[Providing data to track code scanning alerts across runs](#providing-data-to-track-code-scanning-alerts-across-runs)." **Note:** {% data variables.product.prodname_code_scanning_capc %} only uses the `primaryLocationLineHash`.
| `codeFlows[].threadFlows[].locations[]`| **Optional.** An array of `location` objects for a `threadFlow` object, which describes the progress of a program through a thread of execution. A `codeFlow` object describes a pattern of code execution used to detect a result. If code flows are provided, {% data variables.product.prodname_code_scanning %} will expand code flows on {% data variables.product.prodname_dotcom %} for the relevant result. For more information, see the [`location` object](#location-object).
| `relatedLocations[]`| A set of locations relevant to this result. {% data variables.product.prodname_code_scanning_capc %} will link to related locations when they are embedded in the result message. For more information, see the [`location` object](#location-object).
@@ -204,7 +216,7 @@ These example SARIF output files show supported properties and example values.
### Example with minimum required properties
This SARIF output file has example values to show the minimum required properties for {% data variables.product.prodname_code_scanning %} results to work as expected. If you remove any properties, omit values, or use an empty string, this data will not be displayed correctly or sync on {% data variables.product.prodname_dotcom %}.
```json
{

View File

@@ -58,7 +58,7 @@ For more information see the [`upload-sarif` action](https://github.com/github/c
The `upload-sarif` action can be configured to run when the `push` and `scheduled` event occur. For more information about {% data variables.product.prodname_actions %} events, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows)."
If your SARIF file doesn't include `partialFingerprints`, the `upload-sarif` action will calculate the `partialFingerprints` field for you and attempt to prevent duplicate alerts. {% data variables.product.prodname_dotcom %} can only create `partialFingerprints` when the repository contains both the SARIF file and the source code used in the static analysis. For more information about preventing duplicate alerts, see "[About SARIF support for code scanning](/code-security/secure-coding/sarif-support-for-code-scanning#providing-data-to-track-code-scanning-alerts-across-runs)."
{% data reusables.code-scanning.upload-sarif-alert-limit %}

View File

@@ -37,21 +37,40 @@ You can enable automatic security updates for any repository that uses {% data v
{% data variables.product.product_name %} generates {% data variables.product.prodname_dependabot_alerts %} when we detect that your codebase is using dependencies with known security risks. For repositories where {% data variables.product.prodname_dependabot_security_updates %} are enabled, when {% data variables.product.product_name %} detects a vulnerable dependency in the default branch, {% data variables.product.prodname_dependabot %} creates a pull request to fix it. The pull request will upgrade the dependency to the minimum possible secure version needed to avoid the vulnerability.
{% ifversion dependabot-most-important-sort-option %} By default, {% data variables.product.prodname_dependabot_alerts %} are displayed in the {% data variables.product.prodname_dependabot_alerts %} tab in order of importance, but you can sort alerts by other criteria. {% endif %}{% ifversion fpt or ghec or ghes > 3.4 or ghae-issue-5638 %}You can sort and filter {% data variables.product.prodname_dependabot_alerts %} with the dropdown menus in the {% data variables.product.prodname_dependabot_alerts %} tab or by typing filters as `key:value` pairs into the search bar. The available filters are repository (for example, `repo:my-repository`), package (for example, `package:django`), ecosystem (for example, `ecosystem:npm`), manifest (for example, `manifest:webwolf/pom.xml`), state (for example, `is:open`), and whether an advisory has a patch (for example, `has:patch`).{% ifversion dependabot-alerts-development-label %} You can also filter alerts with dependency scope data using `scope`, for example: `scope:development` or `scope:runtime`. With `scope:development`, the list of alerts will only show dependencies used during development, not production.{% endif %}
Each {% data variables.product.prodname_dependabot %} alert has a unique numeric identifier and the {% data variables.product.prodname_dependabot_alerts %} tab lists an alert for every detected vulnerability. Legacy {% data variables.product.prodname_dependabot_alerts %} grouped vulnerabilities by dependency and generated a single alert per dependency. If you navigate to a legacy {% data variables.product.prodname_dependabot %} alert, you will be redirected to a {% data variables.product.prodname_dependabot_alerts %} tab filtered for that package. {% endif %}
{% ifversion fpt or ghec or ghes > 3.4 or ghae-issue-5638 %}
You can filter and sort {% data variables.product.prodname_dependabot_alerts %} using a variety of filters and sort options available on the user interface. For more information, see "[Prioritizing {% data variables.product.prodname_dependabot_alerts %}](#prioritizing-across--data-variablesproductprodname_dependabot_alerts-)" below.
## Prioritizing {% data variables.product.prodname_dependabot_alerts %}
{% data variables.product.company_short %} helps you prioritize fixing {% data variables.product.prodname_dependabot_alerts %}. {% ifversion dependabot-most-important-sort-option %} By default, {% data variables.product.prodname_dependabot_alerts %} are sorted by importance. The "Most important" sort order helps you prioritize which {% data variables.product.prodname_dependabot_alerts %} to focus on first. Alerts are ranked based on their potential impact, actionability, and relevance. Our prioritization calculation is constantly being improved and includes factors like CVSS score, dependency scope, and whether vulnerable function calls are found for the alert.
![Screenshot of Sort dropdown with "Most important" sort](/assets/images/help/dependabot/dependabot-alerts-sort-dropdown.png)
{% endif %}
{% data reusables.dependabot.dependabot-alerts-filters %}
In addition to the filters available via the search bar, you can sort and filter {% data variables.product.prodname_dependabot_alerts %} using the dropdown menus at the top of the alert list. The search bar also allows for full text searching of alerts and related security advisories. You can search for part of a security advisory name or description to return the alerts in your repository that relate to that security advisory. For example, searching for `yaml.load() API could execute arbitrary code` will return {% data variables.product.prodname_dependabot_alerts %} linked to "[PyYAML insecurely deserializes YAML strings leading to arbitrary code execution](https://github.com/advisories/GHSA-rprw-h62v-c2w7)" as the search string appears in the advisory description.
{% endif %}
{% ifversion dependabot-bulk-alerts %}
![Screenshot of the filter and sort menus in the {% data variables.product.prodname_dependabot_alerts %} tab](/assets/images/help/graphs/dependabot-alerts-filters-checkbox.png){% elsif ghes = 3.5 %}
You can select a filter in a dropdown menu at the top of the list, then click the filter that you would like to apply.
![Screenshot of the filter and sort menus in the {% data variables.product.prodname_dependabot_alerts %} tab](/assets/images/enterprise/3.5/dependabot/dependabot-alerts-filters.png){% endif %}
{% ifversion dependabot-alerts-development-label %}
## Supported ecosystems and manifests for dependency scope
<!-- TODO: for now we'd have this table and heading as they are, but we're planning to replace this with at a later date a new heading containing all the available filters in one or more tables -->
{% data reusables.dependabot.dependabot-alerts-dependency-scope %}
Alerts for packages listed as development dependencies are marked with the `Development` label on the {% data variables.product.prodname_dependabot_alerts %} page and are also available for filtering via the `scope` filter.
![Screenshot showing the "Development" label in the list of alerts](/assets/images/help/repository/dependabot-alerts-development-label.png)
The alert details page of alerts on development-scoped packages shows a "Tags" section containing a `Development` label.
![Screenshot showing the "Tags" section in the alert details page](/assets/images/help/repository/dependabot-alerts-tags-section.png)
{% endif %}
@@ -92,20 +111,7 @@ For more information, see "[Reviewing and fixing alerts](#reviewing-and-fixing-a
{% data reusables.repositories.navigate-to-repo %}
{% data reusables.repositories.sidebar-security %}
{% data reusables.repositories.sidebar-dependabot-alerts %}
1. Optionally, to filter alerts, select a filter in a dropdown menu then click the filter that you would like to apply. You can also type filters into the search bar. For more information about filtering and sorting alerts, see "[Prioritizing {% data variables.product.prodname_dependabot_alerts %}](#prioritizing-across--data-variablesproductprodname_dependabot_alerts-)."
{%- ifversion dependabot-bulk-alerts %}
![Screenshot of the filter and sort menus in the {% data variables.product.prodname_dependabot_alerts %} tab](/assets/images/help/graphs/dependabot-alerts-filters-checkbox.png){% else %}
![Screenshot of the filter and sort menus in the {% data variables.product.prodname_dependabot_alerts %} tab](/assets/images/enterprise/3.5/dependabot/dependabot-alerts-filters.png){% endif %}
@@ -162,7 +168,11 @@ If you schedule extensive work to upgrade a dependency, or decide that an alert
1. View the details for an alert. For more information, see "[Viewing vulnerable dependencies](#viewing-dependabot-alerts)" (above).
1. Select the "Dismiss" dropdown, and click a reason for dismissing the alert.{% ifversion reopen-dependabot-alerts %} Unfixed dismissed alerts can be reopened later.{% endif %}
{% ifversion dependabot-alerts-dismissal-comment %}1. Optionally, add a dismissal comment. The dismissal comment will be added to the alert timeline and can be used as justification during auditing and reporting. You can retrieve or set a comment by using the GraphQL API. The comment is contained in the `dismissComment` field. For more information, see "[{% data variables.product.prodname_dependabot_alerts %}](/graphql/reference/objects#repositoryvulnerabilityalert)" in the GraphQL API documentation.
![Screenshot showing how to dismiss an alert via the "Dismiss" drop-down, with the option to add a dismissal comment](/assets/images/help/repository/dependabot-alerts-dismissal-comment.png)
1. Click **Dismiss alert**.
{% else %}
![Choosing reason for dismissing the alert via the "Dismiss" drop-down](/assets/images/help/repository/dependabot-alert-dismiss-drop-down-ungrouped.png){% endif %}
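For example, a query along the following lines retrieves dismissal data for a repository's alerts. This is a sketch using the `gh` CLI; `octo-org` and `octo-repo` are placeholders:
```shell
$ gh api graphql -f query='
  query {
    repository(owner: "octo-org", name: "octo-repo") {
      vulnerabilityAlerts(first: 10) {
        nodes { number dismissReason dismissComment }
      }
    }
  }'
```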
{% ifversion dependabot-bulk-alerts %}
### Dismissing multiple alerts at once

Some files were not shown because too many files have changed in this diff.