1
0
mirror of synced 2026-01-08 03:01:54 -05:00

Merge branch 'main' into rename-code-scanning-fgps

This commit is contained in:
Sarita Iyer
2021-12-08 15:28:54 -05:00
committed by GitHub
177 changed files with 2092 additions and 572 deletions

View File

@@ -2,10 +2,4 @@
import purgeEdgeCache from '../../script/deployment/purge-edge-cache.js'
try {
await purgeEdgeCache()
} catch (error) {
console.error(`Failed to purge the edge cache: ${error.message}`)
console.error(error)
throw error
}
await purgeEdgeCache()

View File

@@ -8,15 +8,17 @@ const github = getOctokit(token)
// https://docs.github.com/en/graphql/reference/enums#mergestatestatus
// https://docs.github.com/en/graphql/reference/enums#mergeablestate
/*
This script gets a list of automerge-enabled PRs and sorts them
/*
This script gets a list of automerge-enabled PRs and sorts them
by priority. The PRs with the skip-to-front-of-merge-queue label
are prioritized first. The rest of the PRs are sorted by the date
they were updated. This is basically a FIFO queue, while allowing
are prioritized first. The rest of the PRs are sorted by the date
they were updated. This is basically a FIFO queue, while allowing
writers the ability to skip the line when high-priority ships are
needed but a freeze isn't necessary.
*/
const DRY_RUN = Boolean(JSON.parse(process.env.DRY_RUN || 'false'))
main()
async function main() {
@@ -47,6 +49,15 @@ async function main() {
name
}
}
commits(last: 1) {
nodes {
commit {
statusCheckRollup {
state
}
}
}
}
}
}
pageInfo {
@@ -105,6 +116,14 @@ async function main() {
// a PR is green and the automerge is enabled
.filter((pr) => pr.mergeStateStatus !== 'DIRTY')
.filter((pr) => pr.mergeStateStatus !== 'UNSTABLE')
.filter((pr) => {
const nodes = pr.commits.nodes
if (!nodes || !nodes.length) {
// If it has no commits, why is it even here? Anyway, skip it.
return false
}
return nodes[0].commit.statusCheckRollup.state !== 'FAILURE'
})
autoMergeEnabledPRs.push(...filteredPrs)
}
@@ -120,11 +139,15 @@ async function main() {
if (prioritizedPrList.length) {
const nextInQueue = prioritizedPrList.shift()
// Update the branch for the next PR in the merge queue
github.rest.pulls.updateBranch({
owner: org,
repo,
pull_number: nextInQueue.number,
})
if (DRY_RUN) {
console.log('DRY RUN! But *would* update on next-in-queue')
} else {
github.rest.pulls.updateBranch({
owner: org,
repo,
pull_number: nextInQueue.number,
})
}
console.log(`⏱ Total PRs in the merge queue: ${prioritizedPrList.length + 1}`)
console.log(`🚂 Updated branch for PR #${JSON.stringify(nextInQueue, null, 2)}`)
}

View File

@@ -49,5 +49,17 @@ jobs:
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
run: npm ci --include=optional
- name: Cache nextjs build
uses: actions/cache@c64c572235d810460d0d6876e9c705ad5002b353
with:
path: .next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}-${{ hashFiles('.github/workflows/browser-test.yml') }}
- name: Cache lib/redirects/.redirects-cache_en_ja.json
uses: actions/cache@c64c572235d810460d0d6876e9c705ad5002b353
with:
path: lib/redirects/.redirects-cache_en_ja.json
key: ${{ runner.os }}-redirects-cache-${{ hashFiles('.github/workflows/browser-test.yml') }}
- name: Run browser-test
run: npm run browser-test

View File

@@ -1,9 +1,12 @@
name: Lint JS
name: Lint code
# **What it does**: Lints our JavaScript to ensure the code matches the specified code style.
# **Why we have it**: We want some level of consistency to our JavaScript.
# **What it does**: Lints our code to ensure the code matches the specified code style.
# **Why we have it**: We want some level of consistency to our code.
# **Who does it impact**: Docs engineering, open-source engineering contributors.
permissions:
contents: read
on:
workflow_dispatch:
push:
@@ -15,10 +18,13 @@ on:
- '**.mjs'
- '**.ts'
- '**.tsx'
- '**.yaml'
- '**.yml'
- '**.scss'
# In case something like eslint or tsc or prettier upgrades
- 'package-lock.json'
# Ultimately, for debugging this workflow itself
- .github/workflows/js-lint.yml
- .github/workflows/code-lint.yml
jobs:
lint:
@@ -39,5 +45,8 @@ jobs:
- name: Run linter
run: npm run lint
- name: Run Prettier
run: npm run prettier-check
- name: Run TypeScript
run: npm run tsc

View File

@@ -16,6 +16,7 @@ on:
jobs:
create-translation-batch:
name: Create translation batch
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
# A sync's average run time is ~3.2 hours.
# This sets a maximum execution time of 300 minutes (5 hours) to prevent the workflow from running longer than necessary.

View File

@@ -178,7 +178,7 @@ jobs:
env:
FASTLY_TOKEN: ${{ secrets.FASTLY_TOKEN }}
FASTLY_SERVICE_ID: ${{ secrets.FASTLY_SERVICE_ID }}
FASTLY_SURROGATE_KEY: 'all-the-things'
FASTLY_SURROGATE_KEY: 'every-deployment'
run: .github/actions-scripts/purge-fastly-edge-cache.js
- name: Send Slack notification if workflow failed

View File

@@ -1,2 +1,3 @@
translations/
includes/
includes/
data/release-notes/

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 45 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 101 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 165 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 78 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 55 KiB

View File

@@ -1,9 +1,6 @@
import React, { useEffect, useState } from 'react'
import {
ArticleGuide,
useProductGuidesContext,
} from 'components/context/ProductGuidesContext'
import { ArticleGuide, useProductGuidesContext } from 'components/context/ProductGuidesContext'
import { useTranslation } from 'components/hooks/useTranslation'
import { ArticleCard } from './ArticleCard'
import { DropdownMenu } from '@primer/components'
@@ -35,20 +32,16 @@ export const ArticleCards = () => {
const guides = isUserFiltering ? filteredResults : includeGuides || []
const types = Object.entries(guideTypes).map(([key, val]) => {
return (
{text: val, key: key}
)
}) as ItemInput[]
types.unshift({text: t('filters.all'), key: undefined})
const topics = allTopics?.map((topic) => {
return (
{text: topic, key: topic}
)
return { text: val, key: key }
}) as ItemInput[]
topics.unshift({text: t('filters.all'), key: undefined})
types.unshift({ text: t('filters.all'), key: undefined })
const topics = allTopics?.map((topic) => {
return { text: topic, key: topic }
}) as ItemInput[]
topics.unshift({ text: t('filters.all'), key: undefined })
return (
<div>
@@ -58,26 +51,28 @@ export const ArticleCards = () => {
<label htmlFor="type" className="text-uppercase f6 color-fg-muted d-block">
{t('filters.type')}
</label>
<DropdownMenu
aria-label="guide types"
data-testid="types-dropdown"
placeholder={t('filters.all')}
items={types}
selectedItem={typeFilter}
onChange={setTypeFilter} />
<DropdownMenu
aria-label="guide types"
data-testid="types-dropdown"
placeholder={t('filters.all')}
items={types}
selectedItem={typeFilter}
onChange={setTypeFilter}
/>
</div>
<div data-testid="card-filter-topics" className="mx-4">
<label htmlFor="topic" className="text-uppercase f6 color-fg-muted d-block">
{t('filters.topic')}
</label>
<DropdownMenu
aria-label="guide topics"
data-testid="topics-dropdown"
placeholder={t('filters.all')}
items={topics}
selectedItem={topicFilter}
onChange={setTopicFilter} />
<DropdownMenu
aria-label="guide topics"
data-testid="topics-dropdown"
placeholder={t('filters.all')}
items={topics}
selectedItem={topicFilter}
onChange={setTopicFilter}
/>
</div>
</form>

View File

@@ -70,7 +70,7 @@ export const ArticleList = ({ title, viewAllHref, articles }: ArticleListPropsT)
{link.date && (
<time
className="tooltipped tooltipped-n color-fg-muted text-mono mt-1"
aria-label={dayjs(link.date).format('LLL')}
aria-label={dayjs(link.date).format('MMMM DD')}
>
{dayjs(link.date).format('MMMM DD')}
</time>

View File

@@ -30,7 +30,7 @@ export const CodeExampleCard = ({ example }: Props) => {
</div>
</div>
<footer className="border-top p-4 color-fg-muted d-flex flex-items-center">
<RepoIcon className="flex-shrink-0" />
<RepoIcon aria-label="repository URL" className="flex-shrink-0" />
<TruncateLines
as="span"
maxLines={1}

View File

@@ -20,54 +20,71 @@ export const TableOfContents = (props: Props) => {
className={cx(variant === 'compact' ? 'list-style-outside pl-2' : 'list-style-none')}
>
<ActionList
items=
{(items || []).filter(item => typeof item !== 'undefined').map((item) => {
items={(items || [])
.filter((item) => typeof item !== 'undefined')
.map((item) => {
const { fullPath: href, title, intro, childTocItems } = item
const isActive = router.pathname === href
return (variant === 'compact') ? {
key: href,
text: title,
renderItem: () => (
<ActionList.Item>
<li key={href} className="f4 d-list-item width-full">
<Link className="d-block width-full" href={href}>{title}</Link>
{(childTocItems || []).length > 0 && (
<ul
className={cx(
variant === 'compact' ? 'list-style-circle pl-5 my-3' : 'list-style-none'
)}
return variant === 'compact'
? {
key: href,
text: title,
renderItem: () => (
<ActionList.Item>
<li key={href} className="f4 d-list-item width-full">
<Link className="d-block width-full" href={href}>
{title}
</Link>
{(childTocItems || []).length > 0 && (
<ul
className={cx(
variant === 'compact'
? 'list-style-circle pl-5 my-3'
: 'list-style-none'
)}
>
{(childTocItems || []).map((childItem) => {
if (!childItem) {
return null
}
return (
<li key={childItem.fullPath} className="f4 d-block width-full">
<Link className="d-block width-full" href={childItem.fullPath}>
{childItem.title}
</Link>
</li>
)
})}
</ul>
)}
</li>
</ActionList.Item>
),
}
: {
key: href,
title: title,
renderItem: () => (
<ActionList.Item className={cx('border-bottom')}>
<li key={href} className={cx('mt-2', isActive && 'color-fg-muted')}>
<BumpLink
as={Link}
href={href}
title={<h2 className="py-1 h4">{title}</h2>}
>
{(childTocItems || []).map((childItem) => {
if (!childItem) {
return null
}
return (
<li key={childItem.fullPath} className="f4 d-block width-full">
<Link className="d-block width-full" href={childItem.fullPath}>{childItem.title}</Link>
</li>
)
})}
</ul>
)}
</li>
</ActionList.Item>
)
} : {
key: href,
title: title,
renderItem: () => (
<ActionList.Item className={cx('border-bottom')}>
<li key={href} className={cx('mt-2', isActive && 'color-fg-muted')}>
<BumpLink as={Link} href={href} title={<h2 className="py-1 h4">{title}</h2>}>
{intro && (
<p className="f4 color-fg-muted" dangerouslySetInnerHTML={{ __html: intro }} />
)}
</BumpLink>
</li>
</ActionList.Item>
)
}
})} />
{intro && (
<p
className="f4 color-fg-muted"
dangerouslySetInnerHTML={{ __html: intro }}
/>
)}
</BumpLink>
</li>
</ActionList.Item>
),
}
})}
/>
</ul>
)
}

View File

@@ -4,7 +4,7 @@ import { ArrowLeftIcon } from '@primer/octicons-react'
export const AllProductsLink = () => {
const router = useRouter()
return (
<li title="Home">
<li>
<a href={`/${router.locale}`} className="f6 pl-4 pr-5 ml-n1 pb-1 color-fg-default">
<ArrowLeftIcon size="small" className="mr-1" />
All products

View File

@@ -36,7 +36,6 @@ redirect_from:
- /actions/automating-your-workflow-with-github-actions/
- /categories/automating-your-workflow-with-github-actions
- /marketplace/actions
- /actions/guides
- /actions/reference
layout: product-landing
versions:

View File

@@ -24,7 +24,7 @@ shortTitle: Node.js packages
## Introduction
This guide shows you how to create a workflow that publishes Node.js packages to the {% data variables.product.prodname_registry %} and npm registries after continuous integration (CI) tests pass. With a single workflow, you can publish packages to a single registry or multiple registries.
This guide shows you how to create a workflow that publishes Node.js packages to the {% data variables.product.prodname_registry %} and npm registries after continuous integration (CI) tests pass.
## Prerequisites
@@ -55,7 +55,7 @@ Each time you create a new release, you can trigger a workflow to publish your p
To perform authenticated operations against the npm registry in your workflow, you'll need to store your npm authentication token as a secret. For example, create a repository secret called `NPM_TOKEN`. For more information, see "[Creating and using encrypted secrets](/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)."
By default, npm uses the `name` field of the *package.json* file to determine the npm registry. When publishing to a global namespace, you only need to include the package name. For example, you would publish a package named `npm-hello-world-test` to the `https://www.npmjs.com/package/npm-hello-world-test`.
By default, npm uses the `name` field of the *package.json* file to determine the name of your published package. When publishing to a global namespace, you only need to include the package name. For example, you would publish a package named `npm-hello-world-test` to `https://www.npmjs.com/package/npm-hello-world-test`.
If you're publishing a package that includes a scope prefix, include the scope in the name of your *package.json* file. For example, if your npm scope prefix is octocat and the package name is hello-world, the `name` in your *package.json* file should be `@octocat/hello-world`. If your npm package uses a scope prefix and the package is public, you need to use the option `npm publish --access public`. This is an option that npm requires to prevent someone from publishing a private package unintentionally.
@@ -63,7 +63,7 @@ This example stores the `NPM_TOKEN` secret in the `NODE_AUTH_TOKEN` environment
{% raw %}
```yaml{:copy}
name: Node.js Package
name: Publish Package to npmjs
on:
release:
types: [created]
@@ -75,7 +75,7 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v2
with:
node-version: '12.x'
node-version: '16.x'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm publish
@@ -92,6 +92,8 @@ registry=https://registry.npmjs.org/
always-auth=true
```
Please note that you need to set the `registry-url` to `https://registry.npmjs.org/` in `setup-node` to properly configure your credentials.
## Publishing packages to {% data variables.product.prodname_registry %}
Each time you create a new release, you can trigger a workflow to publish your package. The workflow in the example below runs anytime the `release` event with type `created` occurs. The workflow publishes the package to {% data variables.product.prodname_registry %} if CI tests pass.
@@ -122,7 +124,7 @@ If you want to publish your package to a different repository, you must use a pe
This example stores the `GITHUB_TOKEN` secret in the `NODE_AUTH_TOKEN` environment variable. When the `setup-node` action creates an *.npmrc* file, it references the token from the `NODE_AUTH_TOKEN` environment variable.
```yaml{:copy}
name: Node.js Package
name: Publish package to GitHub Packages
on:
release:
types: [created]
@@ -137,7 +139,7 @@ jobs:
# Setup .npmrc file to publish to GitHub Packages
- uses: actions/setup-node@v2
with:
node-version: '12.x'
node-version: '16.x'
registry-url: 'https://npm.pkg.github.com'
# Defaults to the user or organization that owns the workflow file
scope: '@octocat'
@@ -161,7 +163,7 @@ If you use the Yarn package manager, you can install and publish packages using
{% raw %}
```yaml{:copy}
name: Node.js Package
name: Publish Package to npmjs
on:
release:
types: [created]
@@ -173,7 +175,7 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v2
with:
node-version: '12.x'
node-version: '16.x'
registry-url: 'https://registry.npmjs.org'
# Defaults to the user or organization that owns the workflow file
scope: '@octocat'
@@ -183,59 +185,3 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```
{% endraw %}
## Publishing packages to npm and {% data variables.product.prodname_registry %}
{% note %}
**Note:** If you need to publish to registries that have different scope prefixes, you'll need to modify the *package.json* file on the runner to change the scope prefix. For example, if you publish a package to the `@mona` scope for npm and `@octocat` scope for {% data variables.product.prodname_registry %}, you can replace the `@mona` scope with `@octocat` in the *package.json* file on the runner after publishing to npm and before publishing to {% data variables.product.prodname_registry %}.
{% endnote %}
You can publish your packages to both the npm registry and {% data variables.product.prodname_registry %} by using the `setup-node` action for each registry.
If you publish a package to both registries, you'll need to ensure that your scope prefix on npm matches your {% data variables.product.prodname_dotcom %} user or organization name. To publish packages to a public registry with a scope prefix, you can use the command `npm publish --access public`. For more information, see [`npm-scope`](https://docs.npmjs.com/misc/scope) and "[Creating and publishing scoped public packages](https://docs.npmjs.com/creating-and-publishing-scoped-public-packages)" in the npm documentation.
Ensure your *package.json* file includes the scope of your {% data variables.product.prodname_dotcom %} repository and npm registry. For example, if you plan to publish a package in the `octocat/npm-hello-world-test` repository to {% data variables.product.prodname_dotcom %} and https://www.npmjs.com/package/@octocat/npm-hello-world-test, the name in your *package.json* file would be `"name": "@octocat/npm-hello-world-test"`.
To perform authenticated operations against the {% data variables.product.prodname_registry %} registry in your workflow, you can use the `GITHUB_TOKEN`. {% data reusables.github-actions.github-token-permissions %}
When you use the `scope` input to the `setup-node` action, the action creates an *.npmrc* file that includes the scope prefix. By default, the `setup-node` action sets the scope in the *.npmrc* file to the user or organization that owns the workflow file.
This workflow calls the `setup-node` action two times. Each time the `setup-node` action runs, it overwrites the *.npmrc* file. The *.npmrc* file references the token that allows you to perform authenticated operations against the package registry from the `NODE_AUTH_TOKEN` environment variable. The workflow sets the `NODE_AUTH_TOKEN` environment variable each time the `npm publish` command is run, first with a token to publish to npm (`NPM_TOKEN`) and then with a token to publish to {% data variables.product.prodname_registry %} (`GITHUB_TOKEN`).
```yaml{:copy}
name: Node.js Package
on:
release:
types: [created]
jobs:
build:
runs-on: ubuntu-latest {% ifversion fpt or ghes > 3.1 or ghae or ghec %}
permissions:
contents: read
packages: write {% endif %}
steps:
- uses: actions/checkout@v2
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v2
with:
node-version: '10.x'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
# Publish to npm
- run: npm publish --access public
env:{% raw %}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
# Setup .npmrc file to publish to GitHub Packages
- uses: actions/setup-node@v2
with:
registry-url: 'https://npm.pkg.github.com'
# Defaults to the user or organization that owns the workflow file
scope: '@octocat'
# Publish to GitHub Packages
- run: npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}{% endraw %}
```

View File

@@ -0,0 +1,157 @@
---
title: Configuring authentication and provisioning for your enterprise using Okta
shortTitle: Configuring with Okta
intro: 'You can use Okta as an identity provider (IdP) to centrally manage authentication and user provisioning for {% data variables.product.prodname_ghe_managed %}.'
permissions: 'Enterprise owners can configure authentication and provisioning for {% data variables.product.prodname_ghe_managed %}.'
product: '{% data reusables.gated-features.saml-sso %}'
versions:
github-ae: '*'
type: how_to
topics:
- Accounts
- Authentication
- Enterprise
- Identity
- SSO
miniTocMaxHeadingLevel: 3
---
{% data reusables.saml.okta-ae-sso-beta %}
## About SAML and SCIM with Okta
You can use Okta as an Identity Provider (IdP) for {% data variables.product.prodname_ghe_managed %}, which allows your Okta users to sign in to {% data variables.product.prodname_ghe_managed %} using their Okta credentials.
To use Okta as your IdP for {% data variables.product.prodname_ghe_managed %}, you can add the {% data variables.product.prodname_ghe_managed %} app to Okta, configure Okta as your IdP in {% data variables.product.prodname_ghe_managed %}, and provision access for your Okta users and groups.
The following provisioning features are available for all Okta users that you assign to your {% data variables.product.prodname_ghe_managed %} application.
| Feature | Description |
| --- | --- |
| Push New Users | When you create a new user in Okta, the user is added to {% data variables.product.prodname_ghe_managed %}. |
| Push User Deactivation | When you deactivate a user in Okta, it will suspend the user from your enterprise on {% data variables.product.prodname_ghe_managed %}. |
| Push Profile Updates | When you update a user's profile in Okta, it will update the metadata for the user's membership in your enterprise on {% data variables.product.prodname_ghe_managed %}. |
| Reactivate Users | When you reactivate a user in Okta, it will unsuspend the user in your enterprise on {% data variables.product.prodname_ghe_managed %}. |
## Adding the {% data variables.product.prodname_ghe_managed %} application in Okta
{% data reusables.saml.okta-ae-applications-menu %}
1. Click **Browse App Catalog**
!["Browse App Catalog"](/assets/images/help/saml/okta-ae-browse-app-catalog.png)
1. In the search field, type "GitHub AE", then click **GitHub AE** in the results.
!["Search result"](/assets/images/help/saml/okta-ae-search.png)
1. Click **Add**.
!["Add GitHub AE app"](/assets/images/help/saml/okta-ae-add-github-ae.png)
1. For "Base URL", type the URL of your enterprise on {% data variables.product.prodname_ghe_managed %}.
!["Configure Base URL"](/assets/images/help/saml/okta-ae-configure-base-url.png)
1. Click **Done**.
## Enabling SAML SSO for {% data variables.product.prodname_ghe_managed %}
To enable single sign-on (SSO) for {% data variables.product.prodname_ghe_managed %}, you must configure {% data variables.product.prodname_ghe_managed %} to use the sign-on URL, issuer URL, and public certificate provided by Okta. You can find locate these details in the "GitHub AE" app.
{% data reusables.saml.okta-ae-applications-menu %}
{% data reusables.saml.okta-ae-configure-app %}
1. Click **Sign On**.
![Sign On tab](/assets/images/help/saml/okta-ae-sign-on-tab.png)
1. Click **View Setup Instructions**.
![Sign On tab](/assets/images/help/saml/okta-ae-view-setup-instructions.png)
1. Take note of the "Sign on URL", "Issuer", and "Public certificate" details.
1. Use the details to enable SAML SSO for your enterprise on {% data variables.product.prodname_ghe_managed %}. For more information, see "[Configuring SAML single sign-on for your enterprise](/admin/authentication/managing-identity-and-access-for-your-enterprise/configuring-saml-single-sign-on-for-your-enterprise)."
{% note %}
**Note:** To test your SAML configuration from {% data variables.product.prodname_ghe_managed %}, your Okta user account must be assigned to the {% data variables.product.prodname_ghe_managed %} app.
{% endnote %}
## Enabling API integration
The "GitHub AE" app in Okta uses the {% data variables.product.product_name %} API to interact with your enterprise for SCIM and SSO. This procedure explains how to enable and test access to the API by configuring Okta with a personal access token for {% data variables.product.prodname_ghe_managed %}.
1. In {% data variables.product.prodname_ghe_managed %}, generate a personal access token with the `admin:enterprise` scope. For more information, see "[Creating a personal access token](/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token)".
{% data reusables.saml.okta-ae-applications-menu %}
{% data reusables.saml.okta-ae-configure-app %}
{% data reusables.saml.okta-ae-provisioning-tab %}
1. Click **Configure API Integration**.
1. Select **Enable API integration**.
![Enable API integration](/assets/images/help/saml/okta-ae-enable-api-integration.png)
1. For "API Token", type the {% data variables.product.prodname_ghe_managed %} personal access token you generated previously.
1. Click **Test API Credentials**.
{% note %}
**Note:** If you see `Error authenticating: No results for users returned`, confirm that you have enabled SSO for {% data variables.product.prodname_ghe_managed %}. For more information see "[Enabling SAML SSO for {% data variables.product.prodname_ghe_managed %}](#enabling-saml-sso-for-github-ae)."
{% endnote %}
## Configuring SCIM provisioning settings
This procedure demonstrates how to configure the SCIM settings for Okta provisioning. These settings define which features will be used when automatically provisioning Okta user accounts to {% data variables.product.prodname_ghe_managed %}.
{% data reusables.saml.okta-ae-applications-menu %}
{% data reusables.saml.okta-ae-configure-app %}
{% data reusables.saml.okta-ae-provisioning-tab %}
1. Under "Settings", click **To App**.
!["To App" settings](/assets/images/help/saml/okta-ae-to-app-settings.png)
1. To the right of "Provisioning to App", click **Edit**.
1. To the right of "Create Users", select **Enable**.
1. To the right of "Update User Attributes", select **Enable**.
1. To the right of "Deactivate Users", select **Enable**.
1. Click **Save**.
## Allowing Okta users and groups to access {% data variables.product.prodname_ghe_managed %}
You can provision access to {% data variables.product.product_name %} for your individual Okta users, or for entire groups.
### Provisioning access for Okta users
Before your Okta users can use their credentials to sign in to {% data variables.product.prodname_ghe_managed %}, you must assign the users to the "GitHub AE" app in Okta.
{% data reusables.saml.okta-ae-applications-menu %}
{% data reusables.saml.okta-ae-configure-app %}
1. Click **Assignments**.
![Assignments tab](/assets/images/help/saml/okta-ae-assignments-tab.png)
1. Select the Assign drop-down menu and click **Assign to People**.
!["Assign to People" button](/assets/images/help/saml/okta-ae-assign-to-people.png)
1. To the right of the required user account, click **Assign**.
![List of users](/assets/images/help/saml/okta-ae-assign-user.png)
1. To the right of "Role", click a role for the user, then click **Save and go back**.
![Role selection](/assets/images/help/saml/okta-ae-assign-role.png)
1. Click **Done**.
### Provisioning access for Okta groups
You can map your Okta group to a team in {% data variables.product.prodname_ghe_managed %}. Members of the Okta group will then automatically become members of the mapped {% data variables.product.prodname_ghe_managed %} team. For more information, see "[Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
## Further reading
- [Understanding SAML](https://developer.okta.com/docs/concepts/saml/) in the Okta documentation.
- [Understanding SCIM](https://developer.okta.com/docs/concepts/scim/) in the Okta documentation.

View File

@@ -1,10 +1,12 @@
---
title: Configuring authentication and provisioning with your identity provider
intro: 'You can use an identity provider (IdP) that supports both SAML single sign-on (SSO) and System for Cross-domain Identity Management (SCIM) to configure authentication and user provisioning for {% data variables.product.product_location %}.'
intro: 'You can configure user authentication and provisioning by integrating with an identity provider (IdP) that supports SAML single sign-on (SSO) and SCIM.'
versions:
ghae: '*'
children:
- /configuring-authentication-and-provisioning-for-your-enterprise-using-azure-ad
- /configuring-authentication-and-provisioning-for-your-enterprise-using-okta
- /mapping-okta-groups-to-teams
shortTitle: Use an IdP for SSO & SCIM
---

View File

@@ -0,0 +1,101 @@
---
title: Mapping Okta groups to teams
intro: 'You can map your Okta groups to teams on {% data variables.product.prodname_ghe_managed %} to automatically add and remove team members.'
permissions: 'Enterprise owners can configure authentication and provisioning for {% data variables.product.prodname_ghe_managed %}.'
product: '{% data reusables.gated-features.saml-sso %}'
versions:
github-ae: '*'
type: how_to
topics:
- Accounts
- Authentication
- Enterprise
- Identity
- SSO
---
{% data reusables.saml.okta-ae-sso-beta %}
## About team mapping
If you use Okta as your IdP, you can map your Okta group to a team in {% data variables.product.prodname_ghe_managed %}. Members of the Okta group will automatically become members of the mapped {% data variables.product.prodname_ghe_managed %} team. To configure this mapping, you can configure the Okta "GitHub AE" app to push the group and its members to {% data variables.product.prodname_ghe_managed %}. You can then choose which team in {% data variables.product.prodname_ghe_managed %} will be mapped to the Okta group.
## Prerequisites
You or your Okta administrator must be a Global administrator or a Privileged Role administrator in Okta.
You must enable SAML single sign-on with Okta. For more information, see "[Configuring SAML single sign-on for your enterprise](/admin/authentication/managing-identity-and-access-for-your-enterprise/configuring-saml-single-sign-on-for-your-enterprise)."
You must authenticate to your enterprise account using SAML SSO and Okta. For more information, see "[Authenticating with SAML single sign-on](/github/authenticating-to-github/authenticating-with-saml-single-sign-on)."
## Assigning your Okta group to the "GitHub AE" app
1. In the Okta Dashboard, open your group's settings.
1. Click **Manage Apps**.
![Add group to app](/assets/images/help/saml/okta-ae-group-add-app.png)
1. To the right of "GitHub AE", click **Assign**.
![Assign app](/assets/images/help/saml/okta-ae-assign-group-to-app.png)
1. Click **Done**.
## Pushing the Okta group to {% data variables.product.prodname_ghe_managed %}
When you push an Okta group and map the group to a team, all of the group's members will be able to sign in to {% data variables.product.prodname_ghe_managed %}.
{% data reusables.saml.okta-ae-applications-menu %}
{% data reusables.saml.okta-ae-configure-app %}
1. Click **Push Groups**.
![Push Groups tab](/assets/images/help/saml/okta-ae-push-groups-tab.png)
1. Select the Push Groups drop-down menu and click **Find groups by name**.
![Add groups button](/assets/images/help/saml/okta-ae-push-groups-add.png)
1. Type the name of the group to push to {% data variables.product.prodname_ghe_managed %}, then click **Save**.
![Add group name](/assets/images/help/saml/okta-ae-push-groups-by-name.png)
## Mapping a team to the Okta group
You can map a team in your enterprise to an Okta group you previously pushed to {% data variables.product.prodname_ghe_managed %}. Members of the Okta group will then automatically becomes members of the {% data variables.product.prodname_ghe_managed %} team. Any subsequent changes to the Okta group's membership are automatically synchronized with the {% data variables.product.prodname_ghe_managed %} team.
{% data reusables.profile.access_org %}
{% data reusables.user_settings.access_org %}
{% data reusables.organizations.specific_team %}
{% data reusables.organizations.team_settings %}
6. Under "Identity Provider Group", select the drop-down menu and click an identity provider group.
![Drop-down menu to choose identity provider group](/assets/images/enterprise/github-ae/teams/choose-an-idp-group.png)
7. Click **Save changes**.
## Checking the status of your mapped teams
Enterprise owners can use the site admin dashboard to check how Okta groups are mapped to teams on {% data variables.product.prodname_ghe_managed %}.
1. To access the dashboard, in the upper-right corner of any page, click {% octicon "rocket" aria-label="The rocket ship" %}.
![Rocket ship icon for accessing site admin settings](/assets/images/enterprise/site-admin-settings/access-new-settings.png)
1. In the left pane, click **External groups**.
![Add group name](/assets/images/help/saml/okta-ae-site-admin-external-groups.png)
1. To view more details about a group, in the list of external groups, click on a group.
![List of external groups](/assets/images/help/saml/okta-ae-site-admin-list-groups.png)
1. The group's details includes the name of the Okta group, a list of the Okta users that are members of the group, and the corresponding mapped team on {% data variables.product.prodname_ghe_managed %}.
![List of external groups](/assets/images/help/saml/okta-ae-site-admin-group-details.png)
## Viewing audit log events for mapped groups
To monitor SSO activity for mapped groups, you can review the following events in the {% data variables.product.prodname_ghe_managed %} audit log.
{% data reusables.saml.external-group-audit-events %}
{% data reusables.saml.external-identity-audit-events %}
For more information, see "[Reviewing the audit log for your organization](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization)."

View File

@@ -54,12 +54,24 @@ Shibboleth | {% octicon "check-circle-fill" aria-label="The check icon" %} | |
{% data reusables.saml.ae-uses-saml-sso %} {% data reusables.saml.ae-enable-saml-sso-during-bootstrapping %}
After you configure the application for {% data variables.product.product_name %} on your IdP, you can grant access to {% data variables.product.product_location %} by assigning the application to users and groups on your IdP. For more information about SAML SSO for {% data variables.product.product_name %}, see "[Configuring SAML single sign-on for your enterprise](/admin/authentication/configuring-saml-single-sign-on-for-your-enterprise)."
After you configure the application for {% data variables.product.product_name %} on your identity provider (IdP), you can provision access to {% data variables.product.product_location %} by assigning the application to users and groups on your IdP. For more information about SAML SSO for {% data variables.product.product_name %}, see "[Configuring SAML single sign-on for your enterprise](/admin/authentication/configuring-saml-single-sign-on-for-your-enterprise)."
{% data reusables.scim.after-you-configure-saml %} For more information, see "[Configuring user provisioning for your enterprise](/admin/authentication/configuring-user-provisioning-for-your-enterprise)."
To learn how to configure both authentication and user provisioning for {% data variables.product.product_location %} with your specific IdP, see "[Configuring authentication and provisioning with your identity provider](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider)."
## Supported IdPs
The following IdPs are officially supported for integration with {% data variables.product.prodname_ghe_managed %}.
{% data reusables.saml.okta-ae-sso-beta %}
{% data reusables.github-ae.saml-idp-table %}
## Mapping {% data variables.product.prodname_ghe_managed %} teams to Okta groups
If you use Okta as your IdP, you can map your Okta groups to teams on {% data variables.product.prodname_ghe_managed %}. For more information, see "[Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
{% endif %}
## Further reading

View File

@@ -87,15 +87,14 @@ For more detailed information about how to enable SAML using Okta, see "[Configu
## Enabling SAML SSO
{% ifversion ghae %}
{% data reusables.saml.ae-enable-saml-sso-during-bootstrapping %}
The following IdPs provide documentation about configuring SAML SSO for {% data variables.product.product_name %}. If your IdP isn't listed, please contact your IdP to request support for {% data variables.product.product_name %}.
| IdP | More information |
| :- | :- |
| Azure AD | [Tutorial: Azure Active Directory single sign-on (SSO) integration with {% data variables.product.prodname_ghe_managed %}](https://docs.microsoft.com/azure/active-directory/saas-apps/github-ae-tutorial) in the Microsoft Docs |
| Azure AD | [Tutorial: Azure Active Directory single sign-on (SSO) integration with {% data variables.product.prodname_ghe_managed %}](https://docs.microsoft.com/azure/active-directory/saas-apps/github-ae-tutorial) in the Microsoft Docs. To configure Azure AD for {% data variables.product.prodname_ghe_managed %}, see "[Configuring authentication and provisioning for your enterprise using Azure AD](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-azure-ad)." |
| Okta (Beta) | To configure Okta for {% data variables.product.prodname_ghe_managed %}, see "[Configuring authentication and provisioning for your enterprise using Okta](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-okta)."|
During initialization for {% data variables.product.product_name %}, you must configure {% data variables.product.product_name %} as a SAML Service Provider (SP) on your IdP. You must enter several unique values on your IdP to configure {% data variables.product.product_name %} as a valid SP.
@@ -105,8 +104,6 @@ During initialization for {% data variables.product.product_name %}, you must co
| SP Assertion Consumer Service (ACS) URL | Reply URL | URL where IdP sends SAML responses | <code>https://<em>YOUR-GITHUB-AE-HOSTNAME</em>/saml/consume</code> |
| SP Single Sign-On (SSO) URL | | URL where IdP begins SSO | <code>https://<em>YOUR-GITHUB-AE-HOSTNAME</em>/sso</code> |
{% endif %}
## Editing the SAML SSO configuration
If the details for your IdP change, you'll need to edit the SAML SSO configuration for {% data variables.product.product_location %}. For example, if the certificate for your IdP expires, you can edit the value for the public certificate.
@@ -137,10 +134,10 @@ If the details for your IdP change, you'll need to edit the SAML SSO configurati
{% endif %}
## Disabling SAML SSO
{% ifversion ghae %}
## Disabling SAML SSO
{% warning %}
**Warning**: If you disable SAML SSO for {% data variables.product.product_location %}, users without existing SAML SSO sessions cannot sign into {% data variables.product.product_location %}. SAML SSO sessions on {% data variables.product.product_location %} end after 24 hours.

View File

@@ -34,9 +34,13 @@ The provisioning application on your IdP communicates with {% data variables.pro
## Supported identity providers
{% data reusables.scim.supported-idps %}
The following IdPs are supported for SSO with {% data variables.product.prodname_ghe_managed %}:
When you set up user provisioning with a supported IdP, you can also assign or unassign the application for {% data variables.product.product_name %} to groups of users. These groups are then available to organization owners and team maintainers in {% data variables.product.product_location %} to map to {% data variables.product.product_name %} teams. For more information, see "[Synchronizing a team with an identity provider group](/organizations/organizing-members-into-teams/synchronizing-a-team-with-an-identity-provider-group)."
{% data reusables.saml.okta-ae-sso-beta %}
{% data reusables.github-ae.saml-idp-table %}
For IdPs that support team mapping, you can assign or unassign the application for {% data variables.product.product_name %} to groups of users in your IdP. These groups are then available to organization owners and team maintainers in {% data variables.product.product_location %} to map to {% data variables.product.product_name %} teams. For more information, see "[Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
## Prerequisites
@@ -78,7 +82,8 @@ You must have administrative access on your IdP to configure the application for
| IdP | More information |
| :- | :- |
| Azure AD | [Tutorial: Configure {% data variables.product.prodname_ghe_managed %} for automatic user provisioning](https://docs.microsoft.com/azure/active-directory/saas-apps/github-ae-provisioning-tutorial) in the Microsoft Docs |
| Azure AD | [Tutorial: Configure {% data variables.product.prodname_ghe_managed %} for automatic user provisioning](https://docs.microsoft.com/azure/active-directory/saas-apps/github-ae-provisioning-tutorial) in the Microsoft Docs. To configure Azure AD for {% data variables.product.prodname_ghe_managed %}, see "[Configuring authentication and provisioning for your enterprise using Azure AD](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-azure-ad)."|
| Okta | (beta) To configure Okta for {% data variables.product.prodname_ghe_managed %}, see "[Configuring authentication and provisioning for your enterprise using Okta](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-okta)."|
The application on your IdP requires two values to provision or deprovision user accounts on {% data variables.product.product_location %}.

View File

@@ -16,6 +16,9 @@ topics:
- Networking
shortTitle: Use a load balancer
---
## About load balancers
{% data reusables.enterprise_clustering.load_balancer_intro %}
{% data reusables.enterprise_clustering.load_balancer_dns %}
@@ -32,6 +35,8 @@ Because client connections to {% data variables.product.prodname_ghe_server %} c
We strongly recommend enabling PROXY protocol support for both your appliance and the load balancer. Use the instructions provided by your vendor to enable the PROXY protocol on your load balancer. For more information, see [the PROXY protocol documentation](http://www.haproxy.org/download/1.8/doc/proxy-protocol.txt).
{% data reusables.enterprise_installation.proxy-incompatible-with-aws-nlbs %}
{% data reusables.enterprise_site_admin_settings.access-settings %}
{% data reusables.enterprise_site_admin_settings.management-console %}
{% data reusables.enterprise_management_console.privacy %}

View File

@@ -99,6 +99,8 @@ Because client connections to the cluster come from the load balancer, the clien
We strongly recommend enabling PROXY support for both your instance and the load balancer.
{% data reusables.enterprise_installation.proxy-incompatible-with-aws-nlbs %}
- For your instance, use this command:
```shell
$ ghe-config 'loadbalancer.proxy-protocol' 'true' && ghe-cluster-config-apply

View File

@@ -64,21 +64,29 @@ As more users join {% data variables.product.product_location %}, you may need t
{% endwarning %}
1. Attach a new disk to your {% data variables.product.prodname_ghe_server %} appliance.
2. Run the `parted` command to format the disk:
1. Run the `parted` command to format the disk:
```shell
$ sudo parted /dev/xvdg mklabel msdos
$ sudo parted /dev/xvdg mkpart primary ext4 0% 50%
$ sudo parted /dev/xvdg mkpart primary ext4 50% 100%
```
3. Run the `ghe-upgrade` command to install a full, platform specific package to the newly partitioned disk. A universal hotpatch upgrade package, such as `github-enterprise-2.11.9.hpkg`, will not work as expected. After the `ghe-upgrade` command completes, application services will automatically terminate.
1. To stop replication, run the `ghe-repl-stop` command.
```shell
$ ghe-repl-stop
```
1. Run the `ghe-upgrade` command to install a full, platform specific package to the newly partitioned disk. A universal hotpatch upgrade package, such as `github-enterprise-2.11.9.hpkg`, will not work as expected. After the `ghe-upgrade` command completes, application services will automatically terminate.
```shell
$ ghe-upgrade PACKAGE-NAME.pkg -s -t /dev/xvdg1
```
4. Shut down the appliance:
1. Shut down the appliance:
```shell
$ sudo poweroff
```
5. In the hypervisor, remove the old root disk and attach the new root disk at the same location as the old root disk.
6. Start the appliance.
7. Ensure system services are functioning correctly, then release maintenance mode. For more information, see "[Enabling and scheduling maintenance mode](/admin/guides/installation/enabling-and-scheduling-maintenance-mode)."
1. In the hypervisor, remove the old root disk and attach the new root disk at the same location as the old root disk.
1. Start the appliance.
1. Ensure system services are functioning correctly, then release maintenance mode. For more information, see "[Enabling and scheduling maintenance mode](/admin/guides/installation/enabling-and-scheduling-maintenance-mode)."
If your appliance is configured for high-availability or geo-replication, remember to start replication on each replica node using `ghe-repl-start` after the storage on all nodes has been upgraded.

View File

@@ -70,18 +70,32 @@ The peak quantity of concurrent jobs running without performance loss depends on
{%- endif %}
{%- ifversion ghes > 3.1 %}
{%- ifversion ghes = 3.2 %}
| vCPUs | Memory | Maximum Concurrency*|
| :--- | :--- | :--- |
| 32 | 128 GB | 1500 jobs |
| 64 | 256 GB | 1900 jobs |
| 32 | 128 GB | 1000 jobs |
| 64 | 256 GB | 1300 jobs |
| 96 | 384 GB | 2200 jobs |
*Maximum concurrency was measured using multiple repositories, job duration of approximately 10 minutes, and 10 MB artifact uploads. You may experience different performance depending on the overall levels of activity on your instance.
{%- endif %}
{%- ifversion ghes > 3.2 %}
| vCPUs | Memory | Maximum Concurrency*|
| :--- | :--- | :--- |
| 8 | 64 GB | 300 jobs |
| 16 | 160 GB | 700 jobs |
| 32 | 128 GB | 1300 jobs |
| 64 | 256 GB | 2000 jobs |
| 96 | 384 GB | 4000 jobs |
*Maximum concurrency was measured using multiple repositories, job duration of approximately 10 minutes, and 10 MB artifact uploads. You may experience different performance depending on the overall levels of activity on your instance.
{%- endif %}
If you plan to enable {% data variables.product.prodname_actions %} for the users of an existing instance, review the levels of activity for users and automations on the instance and ensure that you have provisioned adequate CPU and memory for your users. For more information about monitoring the capacity and performance of {% data variables.product.prodname_ghe_server %}, see "[Monitoring your appliance](/admin/enterprise-management/monitoring-your-appliance)."
For more information about minimum hardware requirements for {% data variables.product.product_location %}, see the hardware considerations for your instance's platform.

View File

@@ -1,6 +1,6 @@
---
title: Streaming the audit logs for organizations in your enterprise account
intro: 'Enterprise owners can stream audit and Git events data from {% data variables.product.prodname_dotcom %} to an external data management system.'
intro: 'You can stream audit and Git events data from {% data variables.product.prodname_dotcom %} to an external data management system.'
product: '{% data reusables.gated-features.enterprise-accounts %}'
miniTocMaxHeadingLevel: 3
versions:
@@ -14,6 +14,7 @@ topics:
shortTitle: Stream organization audit logs
redirect_from:
- /github/setting-up-and-managing-your-enterprise/managing-organizations-in-your-enterprise-account/streaming-the-audit-logs-for-organizations-in-your-enterprise-account
permissions: Enterprise owners can configure audit log streaming.
---
{% note %}
@@ -26,12 +27,16 @@ redirect_from:
You can extract audit log and Git events data from {% data variables.product.prodname_dotcom %} in multiple ways:
* Go to the Audit log page in {% data variables.product.prodname_dotcom %} and click **Export**. <br/>
For more information, see "[Viewing the audit logs for organizations in your enterprise account](/github/setting-up-and-managing-your-enterprise/managing-organizations-in-your-enterprise-account/viewing-the-audit-logs-for-organizations-in-your-enterprise-account)" and "[Exporting the audit log](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization#exporting-the-audit-log)."
* Use the API to poll for new audit log events. <br/>
For more information, see "[Using the audit log API](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization#using-the-audit-log-api)."
* Go to the Audit log page in {% data variables.product.prodname_dotcom %} and click **Export**. For more information, see "[Viewing the audit logs for organizations in your enterprise account](/github/setting-up-and-managing-your-enterprise/managing-organizations-in-your-enterprise-account/viewing-the-audit-logs-for-organizations-in-your-enterprise-account)" and "[Exporting the audit log](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization#exporting-the-audit-log)."
* Use the API to poll for new audit log events. For more information, see "[Using the audit log API](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization#using-the-audit-log-api)."
* Set up {% data variables.product.product_name %} to stream audit data as events are logged.
Currently, audit log streaming is supported for multiple storage providers.
- Amazon S3
- Azure Event Hubs
- Google Cloud Storage
- Splunk
## About audit log streaming
To help protect your intellectual property and maintain compliance for your organization, you can use streaming to keep copies of your audit log data and monitor:
@@ -47,24 +52,96 @@ Enterprise owners can set up, pause, or delete a stream at any time. The stream
## Setting up audit log streaming
{% data variables.product.prodname_dotcom %} supports streaming of audit data to Splunk, Azure Event Hubs, and Amazon S3.
You set up the audit log stream on {% data variables.product.product_name %} by following the instructions for your provider.
You set up the audit log stream from the Log streaming tab of the Audit log page in your enterprise settings.
- [Amazon S3](#setting-up-streaming-to-amazon-s3)
- [Azure Event Hubs](#setting-up-streaming-to-splunk)
- [Google Cloud Storage](#setting-up-streaming-to-google-cloud-storage)
- [Splunk](#setting-up-streaming-to-splunk)
### Navigating to the Log streaming tab
### Setting up streaming to Amazon S3
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.audit-log-tab %}
1. Click the **Log streaming** tab.
To stream audit logs to Amazon's S3 endpoint, you must have a bucket and access keys. For more information, see [Creating, configuring, and working with Amazon S3 buckets](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-buckets-s3.html) in the the AWS documentation. Make sure to block public access to the bucket to protect your audit log information.
From the Log streaming tab you can set up an audit log stream by following the steps in one of the sections below.
To set up audit log streaming from {% data variables.product.prodname_dotcom %} you will need:
* The name of your Amazon S3 bucket
* Your AWS access key ID
* Your AWS secret key
For information on creating or accessing your access key ID and secret key, see [Understanding and getting your AWS credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html) in the AWS documentation.
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Configure stream** and select **Amazon S3**.
![Choose Amazon S3 from the drop-down menu](/assets/images/help/enterprises/audit-stream-choice-s3.png)
1. On the configuration page, enter:
* The name of the bucket you want to stream to. For example, `auditlog-streaming-test`.
* Your access key ID. For example, `ABCAIOSFODNN7EXAMPLE1`.
* Your secret key. For example, `aBcJalrXUtnWXYZ/A1MDENG/zPxRfiCYEXAMPLEKEY`.
![Enter stream settings](/assets/images/help/enterprises/audit-stream-add-s3.png)
1. Click **Check endpoint** to verify that {% data variables.product.prodname_dotcom %} can connect to the Amazon S3 endpoint.
![Check the endpoint](/assets/images/help/enterprises/audit-stream-check.png)
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
### Setting up streaming to Azure Event Hubs
Before setting up a stream in {% data variables.product.prodname_dotcom %}, you must first have an event hub namespace in Microsoft Azure. Next, you must create an event hub instance within the namespace. You'll need the details of this event hub instance when you set up the stream. For details, see the Microsoft documentation, "[Quickstart: Create an event hub using Azure portal](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-create)."
You need two pieces of information about your event hub: its instance name and the connection string.
**On Microsoft Azure portal**:
1. In the left menu select **Entities**. Then select **Event Hubs**. The names of your event hubs are listed.
![A list of event hubs](/assets/images/help/enterprises/azure-event-hubs-list.png)
1. Make a note of the name of the event hub you want to stream to.
1. Click the required event hub. Then, in the left menu, select **Shared Access Policies**.
1. Select a shared access policy in the list of policies, or create a new policy.
![A list of shared access policies](/assets/images/help/enterprises/azure-shared-access-policies.png)
1. Click the button to the right of the **Connection string-primary key** field to copy the connection string.
![The event hub connection string](/assets/images/help/enterprises/azure-connection-string.png)
**On {% data variables.product.prodname_dotcom %}**:
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Configure stream** and select **Azure Event Hubs**.
![Choose Azure Events Hub from the drop-down menu](/assets/images/help/enterprises/audit-stream-choice-azure.png)
1. On the configuration page, enter:
* The name of the Azure Event Hubs instance.
* The connection string.
![Enter stream settings](/assets/images/help/enterprises/audit-stream-add-azure.png)
1. Click **Check endpoint** to verify that {% data variables.product.prodname_dotcom %} can connect to the Azure endpoint.
![Check the endpoint](/assets/images/help/enterprises/audit-stream-check.png)
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
### Setting up streaming to Google Cloud Storage
To set up streaming to Google Cloud Storage, you must create a service account in Google Cloud with the appropriate credentials and permissions, then configure audit log streaming in {% data variables.product.product_name %} using the service account's credentials for authentication.
1. Create a service account for Google Cloud. You do not need to set access controls or IAM roles for the service account. For more information, see [Creating and managing service accounts](https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating) in the Google Cloud documentation.
1. Create a JSON key for the service account, and store the key securely. For more information, see [Creating and managing service account keys](https://cloud.google.com/iam/docs/creating-managing-service-account-keys#creating) in the Google Cloud documentation.
1. If you haven't created a bucket yet, create the bucket. For more information, see [Creating storage buckets](https://cloud.google.com/storage/docs/creating-buckets) in the Google Cloud documentation.
1. Give the service account the Storage Object Creator role for the bucket. For more information, see [Using Cloud IAM permissions](https://cloud.google.com/storage/docs/access-control/using-iam-permissions#bucket-add) in the Google Cloud documentation.
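If you prefer to script the Google Cloud setup steps above, a minimal sketch with the `gcloud` and `gsutil` CLIs might look like this; the service account, project, and bucket names are all illustrative:

```shell
# Create a service account (no access controls or IAM roles needed at creation)
gcloud iam service-accounts create audit-log-streaming \
  --display-name="GitHub audit log streaming"

# Create and download a JSON key for the service account
gcloud iam service-accounts keys create key.json \
  --iam-account=audit-log-streaming@example-project.iam.gserviceaccount.com

# Grant the Storage Object Creator role on the target bucket
gsutil iam ch \
  "serviceAccount:audit-log-streaming@example-project.iam.gserviceaccount.com:roles/storage.objectCreator" \
  gs://example-audit-log-bucket
```

The contents of `key.json` are what you paste into the "JSON Credentials" field below.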
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Select the **Configure stream** drop-down menu and click **Google Cloud Storage**.
![Screenshot of the "Configure stream" drop-down menu](/assets/images/help/enterprises/audit-stream-choice-google-cloud-storage.png)
1. Under "Bucket", type the name of your Google Cloud Storage bucket.
![Screenshot of the "Bucket" text field](/assets/images/help/enterprises/audit-stream-bucket-google-cloud-storage.png)
1. Under "JSON Credentials", paste the entire contents of the file for your service account's JSON key.
![Screenshot of the "JSON Credentials" text field](/assets/images/help/enterprises/audit-stream-json-credentials-google-cloud-storage.png)
1. To verify that {% data variables.product.prodname_dotcom %} can connect and write to the Google Cloud Storage bucket, click **Check endpoint**.
![Screenshot of the "Check endpoint" button](/assets/images/help/enterprises/audit-stream-check-endpoint-google-cloud-storage.png)
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
### Setting up streaming to Splunk
To stream audit logs to Splunk's HTTP Event Collector (HEC) endpoint you must make sure that the endpoint is configured to accept HTTPS connections. For more information, see [Set up and use HTTP Event Collector in Splunk Web](https://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector) in the Splunk documentation.
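Before configuring the stream, you can confirm that the collector accepts events over HTTPS by sending a test event yourself. A minimal sketch with `curl` follows; the host, port, and token are illustrative:

```shell
# Send a test event to the Splunk HTTP Event Collector over HTTPS
curl https://splunk.example.com:8088/services/collector \
  -H "Authorization: Splunk 11111111-2222-3333-4444-555555555555" \
  -d '{"event": "GitHub audit log streaming test"}'
```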
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Configure stream** and select **Splunk**.
![Choose Splunk from the drop-down menu](/assets/images/help/enterprises/audit-stream-choice-splunk.png)
1. On the configuration page, enter:
Audit logs are always streamed as encrypted data; however, with this option selected, {% data variables.product.prodname_dotcom %} verifies the SSL certificate of your Splunk instance when delivering events. SSL verification helps ensure that events are delivered to your URL endpoint securely. You can clear the selection of this option, but we recommend you leave SSL verification enabled.
1. Click **Check endpoint** to verify that {% data variables.product.prodname_dotcom %} can connect to the Splunk endpoint.
![Check the endpoint](/assets/images/help/enterprises/audit-stream-check-splunk.png)
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
## Pausing audit log streaming
Pausing the stream allows you to perform maintenance on the receiving application without losing audit data. Audit logs are stored for up to seven days on {% data variables.product.product_location %} and are then exported when you unpause the stream.
1. Display the "Log streaming" tab, as described above.
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Pause stream**.
![Pause the stream](/assets/images/help/enterprises/audit-stream-pause.png)
1. A confirmation message is displayed. Click **Pause stream** to confirm.
When the application is ready to receive audit logs again, click **Resume stream**.
## Deleting the audit log stream
1. Display the "Log streaming" tab, as described above.
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Delete stream**.
![Delete the stream](/assets/images/help/enterprises/audit-stream-delete.png)
1. A confirmation message is displayed. Click **Delete stream** to confirm.
View File
@@ -29,6 +29,8 @@ Each audit log entry shows applicable information about an event, such as:
You can search the audit log for specific events and export audit log data. For more information on searching the audit log and on specific organization events, see "[Reviewing the audit log for your organization](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization)."
You can also stream audit and Git events data from {% data variables.product.prodname_dotcom %} to an external data management system. For more information, see "[Streaming the audit logs for organizations in your enterprise account](/admin/user-management/managing-organizations-in-your-enterprise/streaming-the-audit-logs-for-organizations-in-your-enterprise-account)."
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.audit-log-tab %}
View File
@@ -18,12 +18,5 @@ shortTitle: SSH key fingerprints
These are {% data variables.product.prodname_dotcom %}'s public key fingerprints:
- `SHA256:nThbg6kXUpJWGl7E1IGOCspRomTxdCARLviKw6E5SY8` (RSA)
- `SHA256:p2QAMXNIC1TJYWeIOttrVc98/R1BUFWu3/LiyKgUfQM` (ECDSA)
- `SHA256:+DiY3wvvV6TuJJhbpZisF/zLDA0zPMSvHdkr4UvCOqU` (Ed25519)
View File
@@ -41,27 +41,27 @@ To set up {% data variables.product.prodname_vss_ghe %}, members of your team must complete the following tasks.
One person may be able to complete the tasks because the person has all of the roles, but you may need to coordinate the tasks with multiple people. For more information, see "[Roles for {% data variables.product.prodname_vss_ghe %}](#roles-for-visual-studio-subscriptions-with-github-enterprise)."
1. An enterprise owner must create at least one organization in your enterprise on {% data variables.product.product_location %}. For more information, see "[Adding organizations to your enterprise](/admin/user-management/managing-organizations-in-your-enterprise/adding-organizations-to-your-enterprise)."
1. The subscription admin must assign a license for {% data variables.product.prodname_vs %} to a subscriber in {% data variables.product.prodname_vss_admin_portal_with_url %}. For more information, see [Overview of the {% data variables.product.prodname_vs %} Subscriptions Administrator Portal](https://docs.microsoft.com/en-us/visualstudio/subscriptions/using-admin-portal) and [Assign {% data variables.product.prodname_vs %} Licenses in the {% data variables.product.prodname_vs %} Subscriptions Administration Portal](https://docs.microsoft.com/en-us/visualstudio/subscriptions/assign-license) in Microsoft Docs.
1. Optionally, if the subscription admin assigned licenses to subscribers in {% data variables.product.prodname_vs %} before adding {% data variables.product.prodname_enterprise %} to the subscription, the subscription admin can move the subscribers to the combined offering in the {% data variables.product.prodname_vs %} administration portal. For more information, see [Manage {% data variables.product.prodname_vs %} subscriptions with {% data variables.product.prodname_enterprise %}](https://docs.microsoft.com/en-us/visualstudio/subscriptions/assign-github#moving-to-visual-studio-with-github-enterprise) in Microsoft Docs.
1. If the subscription admin has not disabled email notifications, the subscriber will receive two confirmation emails. For more information, see [{% data variables.product.prodname_vs %} subscriptions with {% data variables.product.prodname_enterprise %}](https://docs.microsoft.com/en-us/visualstudio/subscriptions/access-github#what-is-the-visual-studio-subscription-with-github-enterprise-setup-process) in Microsoft Docs.
1. An organization owner must invite the subscriber to the organization on {% data variables.product.product_location %} from step 1. The subscriber can accept the invitation with an existing user account on {% data variables.product.prodname_dotcom_the_website %} or create a new account. After the subscriber joins the organization, the subscriber becomes an enterprise member. For more information, see "[Inviting users to join your organization](/organizations/managing-membership-in-your-organization/inviting-users-to-join-your-organization)."
{% tip %}
**Tips**:
- While not required, we recommend that the organization owner send an invitation to the same email address used for the subscriber's User Primary Name (UPN). When the email address on {% data variables.product.product_location %} matches the subscriber's UPN, you can ensure that another enterprise member does not claim the subscriber's license.
- If the subscriber accepts the invitation to the organization with an existing user account on {% data variables.product.product_location %}, we recommend that the subscriber add the email address they use for {% data variables.product.prodname_vs %} to their user account on {% data variables.product.product_location %}. For more information, see "[Adding an email address to your {% data variables.product.prodname_dotcom %} account](/account-and-profile/setting-up-and-managing-your-github-user-account/managing-email-preferences/adding-an-email-address-to-your-github-account)."
- If the organization owner must invite a large number of subscribers, a script may make the process faster. For more information, see [the sample PowerShell script](https://github.com/github/platform-samples/blob/master/api/powershell/invite_members_to_org.ps1) in the `github/platform-samples` repository.
{% endtip %}
After {% data variables.product.prodname_vss_ghe %} is set up for subscribers on your team, enterprise owners can review licensing information on {% data variables.product.product_location %}. For more information, see "[Viewing the subscription and usage for your enterprise account](/billing/managing-billing-for-your-github-account/viewing-the-subscription-and-usage-for-your-enterprise-account)."
## Further reading
View File
@@ -0,0 +1,45 @@
---
title: Connecting to a private network
intro: 'You can connect {% data variables.product.prodname_codespaces %} to resources on a private network, including package registries, license servers, and on-premises databases.'
product: '{% data reusables.gated-features.codespaces %}'
versions:
fpt: '*'
ghec: '*'
type: how_to
topics:
- Codespaces
- Fundamentals
- Developer
---
## About codespace networking
By default, your codespaces have access to all resources on the public internet, including package managers, license servers, databases, and cloud platform APIs, but they have no access to resources on private networks.
## Connecting to resources on a private network
The currently supported method of accessing resources on a private network is to use a VPN. Allowlisting codespaces IP addresses is not recommended, because this would give all codespaces (both yours and those of other customers) access to your network-protected resources.
### Using a VPN to access resources behind a private network
The easiest way to access resources behind a private network is to VPN into that network from within your codespace.
We recommend VPN tools like [OpenVPN](https://openvpn.net/) to access resources on a private network. For more information, see "[Using the OpenVPN client from GitHub Codespaces](https://github.com/codespaces-contrib/codespaces-openvpn)."
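As a rough sketch, once you have copied a client configuration file into the codespace (the file name here is illustrative, and an Ubuntu-based image is assumed), you might install and start the client like this:

```shell
# Install the OpenVPN client inside the codespace
sudo apt-get update && sudo apt-get install -y openvpn

# Connect using a client configuration copied into the codespace
sudo openvpn --config client.ovpn --daemon
```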
There are also a number of third-party solutions that, while not explicitly endorsed by {% data variables.product.prodname_dotcom %}, have provided examples of how to integrate with {% data variables.product.prodname_codespaces %}.
These third-party solutions include:
- [Tailscale](https://tailscale.com/kb/1160/github-codespaces/)
### Allowlisting private resources for codespaces
While {% data variables.product.prodname_dotcom %} publishes IP ranges for several products on its Meta API, codespaces IPs are dynamically assigned, meaning your codespace is not guaranteed to have the same IP address from day to day. We highly discourage allowlisting an entire IP range, as this would give overly broad access to all codespaces, including codespaces belonging to users outside your organization.
For more information about the Meta API, see "[Meta](/rest/reference/meta)."
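For example, you can inspect the published ranges directly; unauthenticated requests like this one are rate limited:

```shell
# Fetch GitHub's published IP ranges from the Meta API
curl https://api.github.com/meta
```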
## Restricting access to the public internet
At present, there is no way to restrict codespaces from accessing the public internet, or to restrict appropriately authenticated users from accessing a forwarded port.
For more information on how to secure your codespaces, see "[Security in Codespaces](/codespaces/codespaces-reference/security-in-codespaces)."
View File
@@ -15,6 +15,7 @@ children:
- /using-codespaces-for-pull-requests
- /deleting-a-codespace
- /forwarding-ports-in-your-codespace
- /connecting-to-a-private-network
- /using-codespaces-in-visual-studio-code
- /using-codespaces-with-github-cli
---
View File
@@ -4,6 +4,7 @@ shortTitle: Integrate with an IDE
intro: 'You can preconfigure a supported integrated development environment (IDE) for assignments you create in {% data variables.product.prodname_classroom %}.'
versions:
fpt: '*'
permissions: Organization owners who are admins for a classroom can integrate {% data variables.product.prodname_classroom %} with an IDE. {% data reusables.classroom.classroom-admins-link %}
redirect_from:
- /education/manage-coursework-with-github-classroom/online-ide-integrations
- /education/manage-coursework-with-github-classroom/integrate-github-classroom-with-an-online-ide
View File
@@ -3,6 +3,7 @@ title: Connect a learning management system to GitHub Classroom
intro: 'You can configure an LTI-compliant learning management system (LMS) to connect to {% data variables.product.prodname_classroom %} so that you can import a roster for your classroom.'
versions:
fpt: '*'
permissions: Organization owners who are admins for a classroom can connect learning management systems to {% data variables.product.prodname_classroom %}. {% data reusables.classroom.classroom-admins-link %}
redirect_from:
- /education/manage-coursework-with-github-classroom/configuring-a-learning-management-system-for-github-classroom
- /education/manage-coursework-with-github-classroom/connect-to-lms
View File
@@ -3,6 +3,7 @@ title: Create a group assignment
intro: You can create a collaborative assignment for teams of students who participate in your course.
versions:
fpt: '*'
permissions: Organization owners who are admins for a classroom can create and manage group assignments for a classroom. {% data reusables.classroom.classroom-admins-link %}
redirect_from:
- /education/manage-coursework-with-github-classroom/create-group-assignments
- /education/manage-coursework-with-github-classroom/create-a-group-assignment
View File
@@ -1,6 +1,7 @@
---
title: Create an assignment from a template repository
intro: 'You can create an assignment from a template repository to provide starter code, documentation, and other resources to your students.'
permissions: Organization owners who are admins for a classroom can create an assignment from a template repository that is public or owned by the organization. {% data reusables.classroom.classroom-admins-link %}
versions:
fpt: '*'
redirect_from:
View File
@@ -3,6 +3,7 @@ title: Create an individual assignment
intro: You can create an assignment for students in your course to complete individually.
versions:
fpt: '*'
permissions: Organization owners who are admins for a classroom can create and manage individual assignments for a classroom. {% data reusables.classroom.classroom-admins-link %}
redirect_from:
- /education/manage-coursework-with-github-classroom/creating-an-individual-assignment
- /education/manage-coursework-with-github-classroom/create-an-individual-assignment
View File
@@ -1,7 +1,7 @@
---
title: Manage classrooms
intro: 'You can create and manage a classroom for each course that you teach using {% data variables.product.prodname_classroom %}.'
permissions: Organization owners can manage a classroom for an organization.
permissions: Organization owners who are admins for a classroom can manage the classroom for an organization. {% data reusables.classroom.classroom-admins-link %}
versions:
fpt: '*'
redirect_from:
View File
@@ -4,6 +4,7 @@ intro: You can automatically provide feedback on code submissions from your stud
miniTocMaxHeadingLevel: 3
versions:
fpt: '*'
permissions: Organization owners who are admins for a classroom can set up and use autograding on assignments in a classroom. {% data reusables.classroom.classroom-admins-link %}
redirect_from:
- /education/manage-coursework-with-github-classroom/adding-tests-for-auto-grading
- /education/manage-coursework-with-github-classroom/reviewing-auto-graded-work-teachers
View File
@@ -3,6 +3,7 @@ title: Use the Git and GitHub starter assignment
intro: 'You can use the Git & {% data variables.product.company_short %} starter assignment to give students an overview of Git and {% data variables.product.company_short %} fundamentals.'
versions:
fpt: '*'
permissions: Organization owners who are admins for a classroom can use Git & {% data variables.product.company_short %} starter assignments. {% data reusables.classroom.classroom-admins-link %}
redirect_from:
- /education/manage-coursework-with-github-classroom/use-the-git-and-github-starter-assignment
shortTitle: Starter assignment
@@ -40,11 +41,18 @@ The Git & {% data variables.product.company_short %} starter assignment is a pre
Import the starter course into your organization, name your assignment, decide whether to assign a deadline, and choose the visibility of assignment repositories.
- [Prerequisites](#prerequisites)
- [Creating the starter assignment](#creating-the-starter-assignment)
- [If there are no existing assignments in the classroom](#if-there-are-no-existing-assignments-in-the-classroom)
- [If there already are existing assignments in the classroom](#if-there-already-are-existing-assignments-in-the-classroom)
- [Setting up the basics for an assignment](#setting-up-the-basics-for-an-assignment)
- [Importing the assignment](#importing-the-assignment)
- [Naming the assignment](#naming-the-assignment)
- [Assigning a deadline for an assignment](#assigning-a-deadline-for-an-assignment)
- [Choosing a visibility for assignment repositories](#choosing-a-visibility-for-assignment-repositories)
- [Inviting students to an assignment](#inviting-students-to-an-assignment)
- [Next steps](#next-steps)
- [Further reading](#further-reading)
### Importing the assignment
View File
@@ -97,7 +97,7 @@ Now you are ready to start making queries.
## An example query using the Enterprise Accounts API
This GraphQL query requests the total number of {% ifversion not ghae %}`public`{% else %}`private`{% endif %} repositories in each of your appliance's organizations using the Enterprise Accounts API. To customize this query, replace `<enterprise-account-name>` with the handle for your enterprise account. For example, if your enterprise account is located at `https://github.com/enterprises/octo-enterprise`, replace `<enterprise-account-name>` with `octo-enterprise`.
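The full example query is not reproduced in this diff, but a sketch of that kind of query might look like the following. The enterprise slug and page size are illustrative values, and the `privacy` argument (`PUBLIC` or `PRIVATE`) follows the version of the article:

```graphql
# A sketch of an Enterprise Accounts API query; the slug and
# page size are illustrative values.
query RepositoryCountsByOrganization {
  enterprise(slug: "octo-enterprise") {
    organizations(first: 100) {
      nodes {
        name
        repositories(privacy: PUBLIC) {
          totalCount
        }
      }
    }
  }
}
```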
{% ifversion not ghae %}
View File
@@ -1,7 +1,26 @@
---
title: GitHub GraphQL API
intro: 'You can use the {% data variables.product.prodname_dotcom %} GraphQL API to create precise and flexible queries for the data you need to integrate with {% data variables.product.prodname_dotcom %}.'
intro: 'To create integrations, retrieve data, and automate your workflows, use the {% data variables.product.prodname_dotcom %} GraphQL API. The {% data variables.product.prodname_dotcom %} GraphQL API offers more precise and flexible queries than the {% data variables.product.prodname_dotcom %} REST API.'
shortTitle: GraphQL API
introLinks:
overview: /graphql/overview/about-the-graphql-api
featuredLinks:
guides:
- /graphql/guides/forming-calls-with-graphql
- /graphql/guides/introduction-to-graphql
- /graphql/guides/using-the-explorer
popular:
- /graphql/overview/explorer
- /graphql/overview/public-schema
- /graphql/overview/schema-previews
- /graphql/guides/using-the-graphql-api-for-discussions
guideCards:
- /graphql/guides/migrating-from-rest-to-graphql
- /graphql/guides/managing-enterprise-accounts
- /graphql/guides/using-global-node-ids
changelog:
label: 'api, apis'
layout: product-landing
redirect_from:
- /v4
versions:
View File
@@ -307,6 +307,20 @@ An overview of some of the most common actions that are recorded as events in the audit log.
| `update_actions_secret` | Triggered when a secret in an environment is updated. For more information, see "[Environment secrets](/actions/reference/environments#environment-secrets)."
{% endif %}
{% ifversion ghae %}
### `external_group` category actions
{% data reusables.saml.external-group-audit-events %}
{% endif %}
{% ifversion ghae %}
### `external_identity` category actions
{% data reusables.saml.external-identity-audit-events %}
{% endif %}
{% ifversion fpt or ghec %}
### `git` category actions
View File
@@ -1,7 +1,27 @@
---
title: GitHub REST API
shortTitle: REST API
intro: 'You can use the {% data variables.product.prodname_dotcom %} REST API to create calls to get the data you need to integrate with GitHub.'
intro: 'To create integrations, retrieve data, and automate your workflows, build with the {% data variables.product.prodname_dotcom %} REST API.'
introLinks:
quickstart: /rest/guides/getting-started-with-the-rest-api
featuredLinks:
guides:
- /rest/guides/getting-started-with-the-rest-api
- /rest/guides/basics-of-authentication
- /rest/guides/best-practices-for-integrators
popular:
- /rest/overview/resources-in-the-rest-api
- /rest/overview/other-authentication-methods
- /rest/overview/troubleshooting
- /rest/overview/endpoints-available-for-github-apps
- /rest/overview/openapi-description
guideCards:
- /rest/guides/delivering-deployments
- /rest/guides/getting-started-with-the-checks-api
- /rest/guides/traversing-with-pagination
changelog:
label: 'api, apis'
layout: product-landing
redirect_from:
- /v3
versions:
View File
@@ -34,6 +34,10 @@ REST API endpoints{% ifversion ghes %}—except [Management Console](#management-console) API endpoints{% endif %}—are prefixed with the following URL:

```
{% data variables.product.api_url_pre %}
```
{% ifversion fpt or ghec %}
When endpoints include `{enterprise}`, replace `{enterprise}` with the handle for your enterprise account, which is included in the URL for your enterprise settings. For example, if your enterprise account is located at `https://github.com/enterprises/octo-enterprise`, replace `{enterprise}` with `octo-enterprise`.
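For instance, a hypothetical call to one of these enterprise-scoped endpoints for the `octo-enterprise` example might look like this; the endpoint path and token are illustrative:

```shell
# List the GitHub Actions permissions policy for an enterprise account
curl -H "Authorization: Bearer YOUR-TOKEN" \
  "https://api.github.com/enterprises/octo-enterprise/actions/permissions"
```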
{% endif %}
{% ifversion ghes %}
[Management Console](#management-console) API endpoints are only prefixed with a hostname:
View File
@@ -0,0 +1,20 @@
date: '2021-12-07'
sections:
security_fixes:
- Support bundles could include sensitive files if they met a specific set of conditions.
bugs:
- Running `ghe-config-apply` could sometimes fail because of permission issues in `/data/user/tmp/pages`.
- A misconfiguration in the Management Console caused scheduling errors.
- Docker would hold log files open after a log rotation.
- GraphQL requests did not set the GITHUB_USER_IP variable in pre-receive hook environments.
changes:
- Clarifies explanation of Actions path-style in documentation.
- Updates support contact URLs to use the current support site, support.github.com.
known_issues:
- On a freshly set up {% data variables.product.prodname_ghe_server %} without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.
- When a replica node is offline in a high availability configuration, {% data variables.product.product_name %} may still route {% data variables.product.prodname_pages %} requests to the offline node, reducing the availability of {% data variables.product.prodname_pages %} for users.
- Resource limits that are specific to processing pre-receive hooks may cause some pre-receive hooks to fail.
View File
@@ -0,0 +1,21 @@
date: '2021-12-07'
sections:
security_fixes:
- Support bundles could include sensitive files if they met a specific set of conditions.
bugs:
- Running `ghe-config-apply` could sometimes fail because of permission issues in `/data/user/tmp/pages`.
- A misconfiguration in the Management Console caused scheduling errors.
- Docker would hold log files open after a log rotation.
- GraphQL requests did not set the GITHUB_USER_IP variable in pre-receive hook environments.
changes:
- Clarifies explanation of Actions path-style in documentation.
- Updates support contact URLs to use the current support site, support.github.com.
known_issues:
- The {% data variables.product.prodname_registry %} npm registry no longer returns a time value in metadata responses. This was done to allow for substantial performance improvements. We continue to have all the data necessary to return a time value as part of the metadata response and will resume returning this value in the future once we have solved the existing performance issues.
- On a freshly set up {% data variables.product.prodname_ghe_server %} without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.
- If {% data variables.product.prodname_actions %} is enabled for {% data variables.product.prodname_ghe_server %}, teardown of a replica node with `ghe-repl-teardown` will succeed, but may return `ERROR:Running migrations`.
- Resource limits that are specific to processing pre-receive hooks may cause some pre-receive hooks to fail.
View File
@@ -173,7 +173,9 @@ sections:
- The audit log now includes events associated with {% data variables.product.prodname_actions %} workflow runs. This data provides administrators with a greatly expanded data set for security and compliance audits. For more information, see "[Reviewing the audit log for your organization](/organizations/keeping-your-organization-secure/reviewing-the-audit-log-for-your-organization#workflows-category-actions)."
# https://github.com/github/releases/issues/1587
- |
{% data variables.product.prodname_ghe_server %} 3.2 contains performance improvements for job concurrency with {% data variables.product.prodname_actions %}. For more information about the new performance targets for a range of CPU and memory configurations, see "[Getting started with {% data variables.product.prodname_actions %} for {% data variables.product.prodname_ghe_server %}](/admin/github-actions/enabling-github-actions-for-github-enterprise-server/getting-started-with-github-actions-for-github-enterprise-server#review-hardware-considerations)."
* The "Maximum Concurrency" values were modified to reflect our most up to date performance testing. [Updated: 2021-12-07]
- The [{% data variables.product.prodname_actions %} Runner](https://github.com/actions/runner) application in {% data variables.product.prodname_ghe_server %} 3.2 has been updated to [v2.279.0](https://github.com/actions/runner/releases/tag/v2.279.0).
View File
@@ -0,0 +1,25 @@
date: '2021-12-07'
sections:
security_fixes:
- Support bundles could include sensitive files if they met a specific set of conditions.
bugs:
- In some cases when Actions was not enabled, `ghe-support-bundle` reported an unexpected message `Unable to find MS SQL container.`
- Running `ghe-config-apply` could sometimes fail because of permission issues in `/data/user/tmp/pages`.
- A misconfiguration in the Management Console caused scheduling errors.
- Docker would hold log files open after a log rotation.
- Migrations could get stuck due to incorrect handling of `blob_path` values that are not UTF-8 compatible.
- GraphQL requests did not set the GITHUB_USER_IP variable in pre-receive hook environments.
- Pagination links on org audit logs would not persist query parameters.
- During a hotpatch, it was possible for duplicate hashes to be created if a transition ran more than once.
changes:
- Clarifies explanation of Actions path-style in documentation.
- Updates support contact URLs to use the current support site, support.github.com.
- Additional troubleshooting provided when running `ghe-mssql-diagnostic`.
known_issues:
- On a freshly set up {% data variables.product.prodname_ghe_server %} without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.
- The {% data variables.product.prodname_registry %} npm registry no longer returns a time value in metadata responses. This was done to allow for substantial performance improvements. We continue to have all the data necessary to return a time value as part of the metadata response and will resume returning this value in the future once we have solved the existing performance issues.
- Resource limits that are specific to processing pre-receive hooks may cause some pre-receive hooks to fail.
View File
@@ -1,6 +1,6 @@
date: '2021-11-09'
release_candidate: true
deprecated: true
intro: |
{% note %}
View File
@@ -0,0 +1,293 @@
date: '2021-12-07'
intro: For upgrade instructions, see "[Upgrading {% data variables.product.prodname_ghe_server %}](/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server)."
sections:
features:
- heading: Security Manager role
notes:
# https://github.com/github/releases/issues/1610
- |
Organization owners can now grant teams the access to manage security alerts and settings on their repositories. The "security manager" role can be applied to any team and grants the team's members the following access:
- Read access on all repositories in the organization.
- Write access on all security alerts in the organization.
- Access to the organization-level security tab.
- Write access on security settings at the organization level.
- Write access on security settings at the repository level.
For more information, see "[Managing security managers in your organization](/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)."
- heading: 'Ephemeral self-hosted runners for GitHub Actions & new webhooks for auto-scaling'
notes:
# https://github.com/github/releases/issues/1378
- |
{% data variables.product.prodname_actions %} now supports ephemeral (single job) self-hosted runners and a new [`workflow_job`](/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job) webhook to make autoscaling runners easier.
Ephemeral runners are good for self-managed environments where each job is required to run on a clean image. After a job is run, ephemeral runners are automatically unregistered from {% data variables.product.product_location %}, allowing you to perform any post-job management.
You can combine ephemeral runners with the new `workflow_job` webhook to automatically scale self-hosted runners in response to {% data variables.product.prodname_actions %} job requests.
For more information, see "[Autoscaling with self-hosted runners](/actions/hosting-your-own-runners/autoscaling-with-self-hosted-runners)" and "[Webhook events and payloads](/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job)."
- heading: 'Dark high contrast theme'
notes:
# https://github.com/github/releases/issues/1539
- |
A dark high contrast theme, with greater contrast between foreground and background elements, is now available on {% data variables.product.prodname_ghe_server %} 3.3. This release also includes improvements to the color system across all {% data variables.product.company_short %} themes.
![Animated image of switching between dark default theme and dark high contrast on the appearance settings page](https://user-images.githubusercontent.com/334891/123645834-ad096c00-d7f4-11eb-85c9-b2c92b00d70a.gif)
For more information about changing your theme, see "[Managing your theme settings](/account-and-profile/setting-up-and-managing-your-github-user-account/managing-user-account-settings/managing-your-theme-settings)."
changes:
- heading: Administration Changes
notes:
# https://github.com/github/releases/issues/1666
- '{% data variables.product.prodname_ghe_server %} 3.3 includes improvements to the maintenance of repositories, especially for repositories that contain many unreachable objects. Note that the first maintenance cycle after upgrading to {% data variables.product.prodname_ghe_server %} 3.3 may take longer than usual to complete.'
# https://github.com/github/releases/issues/1533
- '{% data variables.product.prodname_ghe_server %} 3.3 includes the public beta of a repository cache for geographically-distributed teams and CI infrastructure. The repository cache keeps a read-only copy of your repositories available in additional geographies, which prevents clients from downloading duplicate Git content from your primary instance. For more information, see "[About repository caching](/admin/enterprise-management/caching-repositories/about-repository-caching)."'
# https://github.com/github/releases/issues/1616
- '{% data variables.product.prodname_ghe_server %} 3.3 includes improvements to the user impersonation process. An impersonation session now requires a justification for the impersonation, actions are recorded in the audit log as being performed as an impersonated user, and the user who is impersonated will receive an email notification that they have been impersonated by an enterprise administrator. For more information, see "[Impersonating a user](/enterprise-server@3.3/admin/user-management/managing-users-in-your-enterprise/impersonating-a-user)."'
# https://github.com/github/releases/issues/1609
- A new stream processing service has been added to facilitate the growing set of events that are published to the audit log, including events associated with Git and {% data variables.product.prodname_actions %} activity.
- heading: Token Changes
notes:
# https://github.com/github/releases/issues/1390
- |
An expiration date can now be set for new and existing personal access tokens. Setting an expiration date on personal access tokens is highly recommended to prevent older tokens from leaking and compromising security. Token owners will receive an email when it's time to renew a token that's about to expire. Tokens that have expired can be regenerated, giving users a duplicate token with the same properties as the original.
When using a personal access token with the {% data variables.product.company_short %} API, a new `GitHub-Authentication-Token-Expiration` header is included in the response, which indicates the token's expiration date. For more information, see "[Creating a personal access token](/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token)."
- heading: 'Notifications changes'
notes:
# https://github.com/github/releases/issues/1625
- 'Notification emails from discussions now include `(Discussion #xx)` in the subject, so you can recognize and filter emails that reference discussions.'
- heading: 'Repositories changes'
notes:
# https://github.com/github/releases/issues/1735
- Public repositories now have a `Public` label next to their names like private and internal repositories. This change makes it easier to identify public repositories and avoid accidentally committing private code.
# https://github.com/github/releases/issues/1733
- If you specify the exact name of a branch when using the branch selector menu, the result now appears at the top of the list of matching branches. Previously, exact branch name matches could appear at the bottom of the list.
# https://github.com/github/releases/issues/1673
- When viewing a branch that has a corresponding open pull request, {% data variables.product.prodname_ghe_server %} now links directly to the pull request. Previously, there would be a prompt to contribute using branch comparison or to open a new pull request.
# https://github.com/github/releases/issues/1670
- You can now click a button to copy the full raw contents of a file to the clipboard. Previously, you would need to open the raw file, select all, and then copy. To copy the contents of a file, navigate to the file and click {% octicon "copy" aria-label="The copy icon" %} in the toolbar. Note that this feature is currently only available in some browsers.
# https://github.com/github/releases/issues/1571
- When creating a new release, you can now select or create the tag using a dropdown selector, rather than specifying the tag in a text field. For more information, see "[Managing releases in a repository](/repositories/releasing-projects-on-github/managing-releases-in-a-repository)."
# https://github.com/github/releases/issues/1752
- A warning is now displayed when viewing a file that contains bidirectional Unicode text. Bidirectional Unicode text can be interpreted or compiled differently than it appears in a user interface. For example, hidden bidirectional Unicode characters can be used to swap segments of text in a file. For more information about replacing these characters, see the [{% data variables.product.prodname_dotcom %} changelog](https://github.blog/changelog/2021-10-31-warning-about-bidirectional-unicode-text/).
# https://github.com/github/releases/issues/1416
- You can now use `CITATION.cff` files to let others know how you would like them to cite your work. `CITATION.cff` files are plain text files with human- and machine-readable citation information. {% data variables.product.prodname_ghe_server %} parses this information into common citation formats such as [APA](https://apastyle.apa.org) and [BibTeX](https://en.wikipedia.org/wiki/BibTeX). For more information, see "[About CITATION files](/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-citation-files)."
- heading: 'Markdown changes'
notes:
# https://github.com/github/releases/issues/1645
- |
You can use new keyboard shortcuts for quotes and lists in Markdown files, issues, pull requests, and comments.
* To add quotes, use <kbd>cmd shift .</kbd> on Mac, or <kbd>ctrl shift .</kbd> on Windows and Linux.
* To add an ordered list, use <kbd>cmd shift 7</kbd> on Mac, or <kbd>ctrl shift 7</kbd> on Windows and Linux.
* To add an unordered list, use <kbd>cmd shift 8</kbd> on Mac, or <kbd>ctrl shift 8</kbd> on Windows and Linux.
See "[Keyboard shortcuts](/get-started/using-github/keyboard-shortcuts)" for a full list of available shortcuts.
# https://github.com/github/releases/issues/1684
- You can now use footnote syntax in any Markdown field. Footnotes are displayed as superscript links that you can click to jump to the referenced information, which is displayed in a new section at the bottom of the document. For more information about the syntax, see "[Basic writing and formatting syntax](/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#footnotes)."
# https://github.com/github/releases/issues/1647
- When viewing Markdown files, you can now click {% octicon "code" aria-label="The code icon" %} in the toolbar to view the source of a Markdown file. Previously, you needed to use the blame view to link to specific line numbers in the source of a Markdown file.
# https://github.com/github/releases/issues/1600
- You can now add images and videos to Markdown files in gists by pasting them into the Markdown body or selecting them from the dialog at the bottom of the Markdown file. For information about supported file types, see "[Attaching files](https://docs.github.com/en/github/writing-on-github/working-with-advanced-formatting/attaching-files)."
# https://github.com/github/releases/issues/1523
- '{% data variables.product.prodname_ghe_server %} now automatically generates a table of contents for Wikis, based on headings.'
# https://github.com/github/releases/issues/1626
- When dragging and dropping files into a Markdown editor, such as images and videos, {% data variables.product.prodname_ghe_server %} now uses the mouse pointer location instead of the cursor location when placing the file.
- heading: 'Issues and pull requests changes'
notes:
# https://github.com/github/releases/issues/1504
- You can now search issues by label using a logical OR operator. To filter issues using logical OR, use the comma syntax. For example, `label:"good first issue","bug"` will list all issues with a label of `good first issue` or `bug`. For more information, see "[Filtering and searching issues and pull requests](/issues/tracking-your-work-with-issues/filtering-and-searching-issues-and-pull-requests#about-search-terms)."
# https://github.com/github/releases/issues/1685
- |
Improvements have been made to help teams manage code review assignments. You can now:
- Limit assignment to only direct members of the team.
- Continue with automatic assignment even if one or more members of the team are already requested.
- Keep a team assigned to review even if one or more members is newly assigned.
The timeline and reviewers sidebar on the pull request page now indicate if a review request was automatically assigned to one or more team members.
For more information, see the [{% data variables.product.prodname_dotcom %} changelog](https://github.blog/changelog/2021-09-29-new-code-review-assignment-settings-and-team-filtering-improvements/).
- You can now filter pull request searches to only include pull requests you are directly requested to review.
# https://github.com/github/releases/issues/1683
- Filtered files in pull requests are now completely hidden from view, and are no longer shown as collapsed in the "Files Changed" tab. The "File Filter" menu has also been simplified. For more information, see "[Filtering files in a pull request](/github/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/filtering-files-in-a-pull-request)."
- heading: 'GitHub Actions changes'
notes:
# https://github.com/github/releases/issues/1593
- You can now create "composite actions" which combine multiple workflow steps into one action, and includes the ability to reference other actions. This makes it easier to reduce duplication in workflows. Previously, an action could only use scripts in its YAML definition. For more information, see "[Creating a composite action](/actions/creating-actions/creating-a-composite-action)."
# https://github.com/github/releases/issues/1694
- Managing self-hosted runners at the enterprise level no longer requires using personal access tokens with the `admin:enterprise` scope. You can instead use the new `manage_runners:enterprise` scope to restrict the permissions on your tokens. Tokens with this scope can authenticate to [many REST API endpoints](/rest/reference/enterprise-admin#list-self-hosted-runner-groups-for-an-enterprise) to manage your enterprise's self-hosted runners.
# https://github.com/github/releases/issues/1157
- |
The audit log now includes additional events for {% data variables.product.prodname_actions %}. Audit log entries are now recorded for the following events:
* A self-hosted runner is registered or removed.
* A self-hosted runner is added to a runner group, or removed from a runner group.
* A runner group is created or removed.
* A workflow run is created or completed.
* A workflow job is prepared. Importantly, this log includes the list of secrets that were provided to the runner.
For more information, see "[Security hardening for {% data variables.product.prodname_actions %}](/actions/security-guides/security-hardening-for-github-actions#auditing-github-actions-events)."
# https://github.com/github/releases/issues/1588
- '{% data variables.product.prodname_ghe_server %} 3.3 contains performance improvements for job concurrency with {% data variables.product.prodname_actions %}. For more information about the new performance targets for a range of CPU and memory configurations, see "[Getting started with {% data variables.product.prodname_actions %} for {% data variables.product.prodname_ghe_server %}](/admin/github-actions/enabling-github-actions-for-github-enterprise-server/getting-started-with-github-actions-for-github-enterprise-server#review-hardware-considerations)."'
# https://github.com/github/releases/issues/1556
- To mitigate insider man in the middle attacks when using actions resolved through {% data variables.product.prodname_github_connect %} to {% data variables.product.prodname_dotcom_the_website %} from {% data variables.product.prodname_ghe_server %}, the actions namespace (`owner/name`) is retired on use. Retiring the namespace prevents that namespace from being created on your {% data variables.product.prodname_ghe_server %} instance, and ensures all workflows referencing the action will download it from {% data variables.product.prodname_dotcom_the_website %}.
- heading: 'GitHub Packages changes'
notes:
# https://github.com/github/docs-content/issues/5554
- When a repository is deleted, any associated package files are now immediately deleted from your {% data variables.product.prodname_registry %} external storage.
- heading: 'Dependabot and Dependency graph changes'
notes:
# https://github.com/github/releases/issues/1141
- Dependency review is out of beta and is now generally available for {% data variables.product.prodname_GH_advanced_security %} customers. Dependency review provides an easy-to-understand view of dependency changes and their security impact in the "Files changed" tab of pull requests. It informs you of which dependencies were added, removed, or updated, along with vulnerability information. For more information, see "[Reviewing dependency changes in a pull request](/github/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/reviewing-dependency-changes-in-a-pull-request)."
# https://github.com/github/releases/issues/1630
- '{% data variables.product.prodname_dependabot %} is now available as a private beta, offering both version updates and security updates for several popular ecosystems. {% data variables.product.prodname_dependabot %} on {% data variables.product.prodname_ghe_server %} requires {% data variables.product.prodname_actions %} and a pool of self-hosted runners configured for {% data variables.product.prodname_dependabot %} use. {% data variables.product.prodname_dependabot %} on {% data variables.product.prodname_ghe_server %} also requires {% data variables.product.prodname_github_connect %} to be enabled. To learn more and sign up for the beta, contact the GitHub Sales team.'
- heading: 'Code scanning and secret scanning changes'
notes:
# https://github.com/github/releases/issues/1724
- The depth of {% data variables.product.prodname_codeql %}'s analysis has been improved by adding support for more [libraries and frameworks](https://codeql.github.com/docs/codeql-overview/supported-languages-and-frameworks/) and increasing the coverage of our existing library and framework models. [JavaScript](https://github.com/github/codeql/tree/main/javascript) analysis now supports most common templating languages, and [Java](https://github.com/github/codeql/tree/main/java) now covers more than three times the endpoints of previous {% data variables.product.prodname_codeql %} versions. As a result, {% data variables.product.prodname_codeql %} can now detect even more potential sources of untrusted user data, steps through which that data flows, and potentially dangerous sinks where the data could end up. This results in an overall improvement of the quality of {% data variables.product.prodname_code_scanning %} alerts.
# https://github.com/github/releases/issues/1639
- '{% data variables.product.prodname_codeql %} now supports scanning standard language features in Java 16, such as records and pattern matching. {% data variables.product.prodname_codeql %} is able to analyze code written in Java version 7 through 16. For more information about supported languages and frameworks, see the [{% data variables.product.prodname_codeql %} documentation](https://codeql.github.com/docs/codeql-overview/supported-languages-and-frameworks/#id5).'
# https://github.com/github/releases/issues/1655
- |
Improvements have been made to the {% data variables.product.prodname_code_scanning %} `on:push` trigger when code is pushed to a pull request. If an `on:push` scan returns results that are associated with a pull request, {% data variables.product.prodname_code_scanning %} will now show these alerts on the pull request.
Some other CI/CD systems can be exclusively configured to trigger a pipeline when code is pushed to a branch, or even exclusively for every commit. Whenever such an analysis pipeline is triggered and results are uploaded to the SARIF API, {% data variables.product.prodname_code_scanning %} will also try to match the analysis results to an open pull request. If an open pull request is found, the results will be published as described above. For more information, see the [{% data variables.product.prodname_dotcom %} changelog](https://github.blog/changelog/2021-09-27-showing-code-scanning-alerts-on-pull-requests/).
# https://github.com/github/releases/issues/1546
- You can now use the new pull request filter on the {% data variables.product.prodname_code_scanning %} alerts page to find all the {% data variables.product.prodname_code_scanning %} alerts associated with a pull request. A new "View all branch alerts" link on the pull request "Checks" tab allows you to directly view {% data variables.product.prodname_code_scanning %} alerts with the specific pull request filter already applied. For more information, see the [{% data variables.product.prodname_dotcom %} changelog](https://github.blog/changelog/2021-08-23-pull-request-filter-for-code-scanning-alerts/).
# https://github.com/github/releases/issues/1562
- User defined patterns for {% data variables.product.prodname_secret_scanning %} is out of beta and is now generally available for {% data variables.product.prodname_GH_advanced_security %} customers. Also new in this release is the ability to edit custom patterns defined at the repository, organization, and enterprise levels. After editing and saving a pattern, {% data variables.product.prodname_secret_scanning %} searches for matches both in a repository's entire Git history and in any new commits. Editing a pattern will close alerts previously associated with the pattern if they no longer match the updated version. Other improvements, such as dry-runs, are planned in future releases. For more information, see "[Defining custom patterns for secret scanning](/code-security/secret-scanning/defining-custom-patterns-for-secret-scanning)."
- heading: API and webhook changes
notes:
# https://github.com/github/releases/issues/1744
- Most REST API previews have graduated and are now an official part of the API. Preview headers are no longer required for most REST API endpoints, but will still function as expected if you specify a graduated preview in the `Accept` header of a request. For previews that still require specifying the preview in the `Accept` header of a request, see "[API previews](/rest/overview/api-previews)."
# https://github.com/github/releases/issues/1513
- You can now use the REST API to configure custom autolinks to external resources. The REST API now provides beta `GET`/`POST`/`DELETE` endpoints which you can use to view, add, or delete custom autolinks associated with a repository. For more information, see "[Autolinks](/rest/reference/repos#autolinks)."
# https://github.com/github/releases/issues/1578
- You can now use the REST API to sync a forked repository with its upstream repository. For more information, see "[Repositories](/rest/reference/repos#sync-a-fork-branch-with-the-upstream-repository)" in the REST API documentation.
# https://github.com/github/releases/issues/1527
- Enterprise administrators on GitHub Enterprise Server can now use the REST API to enable or disable Git LFS for a repository. For more information, see "[Repositories](/rest/reference/repos#git-lfs)."
# https://github.com/github/releases/issues/1476
- You can now use the REST API to query the audit log for an enterprise. While audit log forwarding provides the ability to retain and analyze data with your own toolkit and determine patterns over time, the new endpoint can help you perform limited analysis on recent events. For more information, see "[{% data variables.product.prodname_enterprise %} administration](/rest/reference/enterprise-admin#get-the-audit-log-for-an-enterprise)" in the REST API documentation.
# https://github.com/github/releases/issues/1485
- GitHub App user-to-server API requests can now read public resources using the REST API. This includes, for example, the ability to list a public repository's issues and pull requests, and to access a public repository's comments and content.
# https://github.com/github/releases/issues/1734
- When creating or updating a repository, you can now configure whether forking is allowed using the REST and GraphQL APIs. Previously, APIs for creating and updating repositories didn't include the fields `allow_forking` (REST) or `forkingAllowed` (GraphQL). For more information, see "[Repositories](/rest/reference/repos)" in the REST API documentation and "[Repositories](/graphql/reference/objects#repository)" in the GraphQL API documentation.
# https://github.com/github/releases/issues/1637
- |
A new GraphQL mutation [`createCommitOnBranch`](/graphql/reference/mutations#createcommitonbranch) makes it easier to add, update, and delete files in a branch of a repository. Compared to the REST API, you do not need to manually create blobs and trees before creating the commit. This allows you to add, update, or delete multiple files in a single API call.
Commits authored using the new API are automatically GPG signed and are [marked as verified](/github/authenticating-to-github/managing-commit-signature-verification/about-commit-signature-verification) in the {% data variables.product.prodname_ghe_server %} UI. GitHub Apps can use the mutation to author commits directly or [on behalf of users](/developers/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests).
# https://github.com/github/releases/issues/1665
- When a new tag is created, the [push](/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push) webhook payload now always includes a `head_commit` object that contains the data of the commit that the new tag points to. As a result, the `head_commit` object will always contain the commit data of the payload's `after` commit.
- heading: 'Performance Changes'
notes:
# https://github.com/github/releases/issues/1823
- Page loads and jobs are now significantly faster for repositories with many Git refs.
# No security/bug fixes for the RC release
# security_fixes:
# - PLACEHOLDER
# bugs:
# - PLACEHOLDER
known_issues:
- On a freshly set up {% data variables.product.prodname_ghe_server %} instance without any users, an attacker could create the first admin user.
- Custom firewall rules are removed during the upgrade process.
- Git LFS tracked files [uploaded through the web interface](https://github.com/blog/2105-upload-files-to-your-repositories) are incorrectly added directly to the repository.
- Issues cannot be closed if they contain a permalink to a blob in the same repository, where the blob's file path is longer than 255 characters.
- When "Users can search GitHub.com" is enabled with GitHub Connect, issues in private and internal repositories are not included in GitHub.com search results.
- The {% data variables.product.prodname_registry %} npm registry no longer returns a time value in metadata responses. This was done to allow for substantial performance improvements. We continue to have all the data necessary to return a time value as part of the metadata response and will resume returning this value in the future once we have solved the existing performance issues.
- Resource limits that are specific to processing pre-receive hooks may cause some pre-receive hooks to fail.
deprecations:
- heading: Deprecation of GitHub Enterprise Server 2.22
notes:
- '**{% data variables.product.prodname_ghe_server %} 2.22 was discontinued on September 23, 2021**. This means that no patch releases will be made, even for critical security issues, after this date. For better performance, improved security, and new features, [upgrade to the newest version of {% data variables.product.prodname_ghe_server %}](/enterprise-server@3.3/admin/enterprise-management/upgrading-github-enterprise-server) as soon as possible.'
- heading: Deprecation of GitHub Enterprise Server 3.0
notes:
- '**{% data variables.product.prodname_ghe_server %} 3.0 will be discontinued on February 16, 2022**. This means that no patch releases will be made, even for critical security issues, after this date. For better performance, improved security, and new features, [upgrade to the newest version of {% data variables.product.prodname_ghe_server %}](/enterprise-server@3.3/admin/enterprise-management/upgrading-github-enterprise-server) as soon as possible.'
- heading: Deprecation of XenServer Hypervisor support
notes:
# https://github.com/github/docs-content/issues/4439
- Starting with {% data variables.product.prodname_ghe_server %} 3.3, {% data variables.product.prodname_ghe_server %} on XenServer is deprecated and is no longer supported. Please contact [GitHub Support](https://support.github.com) with questions or concerns.
- heading: Deprecation of OAuth Application API endpoints and API authentication using query parameters
notes:
# https://github.com/github/releases/issues/1316
- |
To prevent accidental logging or exposure of `access_tokens`, we discourage the use of OAuth Application API endpoints and the use of API authentication using query parameters. View the following posts to see the proposed replacements:
* [Replacement OAuth Application API endpoints](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/#changes-to-make)
* [Replacement authentication using headers instead of query param](https://developer.github.com/changes/2020-02-10-deprecating-auth-through-query-param/#changes-to-make)
These endpoints and authentication route are planned to be removed from {% data variables.product.prodname_ghe_server %} in {% data variables.product.prodname_ghe_server %} 3.4.
- heading: Deprecation of the CodeQL runner
notes:
# https://github.com/github/releases/issues/1632
- The {% data variables.product.prodname_codeql %} runner is being deprecated. {% data variables.product.prodname_ghe_server %} 3.3 will be the final release series that supports the {% data variables.product.prodname_codeql %} runner. Starting with {% data variables.product.prodname_ghe_server %} 3.4, the {% data variables.product.prodname_codeql %} runner will be removed and no longer supported. The {% data variables.product.prodname_codeql %} CLI version 2.6.2 or greater is a feature-complete replacement for the {% data variables.product.prodname_codeql %} runner. For more information, see the [{% data variables.product.prodname_dotcom %} changelog](https://github.blog/changelog/2021-09-21-codeql-runner-deprecation/).
- heading: Deprecation of custom bit-cache extensions
notes:
# https://github.com/github/releases/issues/1415
- |
Starting in {% data variables.product.prodname_ghe_server %} 3.1, support for {% data variables.product.company_short %}'s proprietary bit-cache extensions began to be phased out. These extensions are now deprecated in {% data variables.product.prodname_ghe_server %} 3.3.
Any repositories that were already present and active on {% data variables.product.product_location %} running version 3.1 or 3.2 will have been automatically updated.
Repositories which were not present and active before upgrading to {% data variables.product.prodname_ghe_server %} 3.3 may not perform optimally until a repository maintenance task is run and has successfully completed.
To start a repository maintenance task manually, browse to `https://<hostname>/stafftools/repositories/<owner>/<repository>/network` for each affected repository and click the **Schedule** button.
backups:
- '{% data variables.product.prodname_ghe_server %} 3.3 requires at least [GitHub Enterprise Backup Utilities 3.3.0](https://github.com/github/backup-utils) for [Backups and Disaster Recovery](/admin/configuration/configuring-your-enterprise/configuring-backups-on-your-appliance).'

View File

@@ -61,6 +61,8 @@ sections:
- A self-hosted runner's version is updated.
- heading: 'Authentication'
notes:
- |
GitHub AE now officially supports Okta for SAML single sign-on (SSO) and user provisioning with SCIM. You can also map groups in Okta to teams on GitHub AE. For more information, see "[Configuring authentication and provisioning for your enterprise using Okta](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-okta)" and "[Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
- |
The format of authentication tokens for {% data variables.product.product_name %} has changed. The change affects the format of personal access tokens and access tokens for OAuth Apps, as well as user-to-server, server-to-server, and refresh tokens for GitHub Apps. {% data variables.product.company_short %} recommends updating existing tokens as soon as possible to improve security and allow secret scanning to detect the tokens. For more information, see "[About authentication to {% data variables.product.prodname_dotcom %}](/github/authenticating-to-github/keeping-your-account-and-data-secure/about-authentication-to-github#githubs-token-formats)" and "[About secret scanning](/code-security/secret-security/about-secret-scanning)."
- |

View File

@@ -0,0 +1 @@
For more information on classroom admins, see "[About management of classrooms](/education/manage-coursework-with-github-classroom/teach-with-github-classroom/manage-classrooms#about-management-of-classrooms)."

View File

@@ -0,0 +1,4 @@
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.audit-log-tab %}
1. Click the **Log streaming** tab.

View File

@@ -0,0 +1 @@
1. Verify the endpoint, then click **Save**.

View File

@@ -0,0 +1,5 @@
{% note %}
**Note:** {% data variables.product.prodname_ghe_server %} supports PROXY Protocol V1, which is incompatible with AWS Network Load Balancers. If you use AWS Network Load Balancers with {% data variables.product.prodname_ghe_server %}, do not enable PROXY support.
{% endnote %}

View File

@@ -6,7 +6,7 @@
{% elsif ghec or ghes or ghae %}
1. Navigate to where your self-hosted runner groups are located:
* **In an organization**: navigate to the main page and click {% octicon "gear" aria-label="The Settings gear" %} **Settings**.{% ifversion ghec %}
* **If using an enterprise account**: navigate to your enterprise account by visiting `https://github.com/enterprises/ENTERPRISE-NAME`, replacing `ENTERPRISE-NAME` with your enterprise account's name.{% elsif ghes or ghae %}
* **If using an enterprise account**: navigate to your enterprise account by clicking your profile photo in the top-right corner of {% data variables.product.prodname_dotcom_the_website %}, then clicking **Your enterprises**, then clicking the enterprise.{% elsif ghes or ghae %}
* **If using an enterprise-level runner**:
1. In the upper-right corner of any page, click {% octicon "rocket" aria-label="The rocket ship" %}.
2. In the left sidebar, click **Enterprise overview**.

View File

@@ -6,7 +6,7 @@
{% elsif ghec or ghes or ghae %}
1. Navigate to where your self-hosted runner is registered:
* **In an organization**: navigate to the main page and click {% octicon "gear" aria-label="The Settings gear" %} **Settings**.
* {% ifversion ghec %}**If using an enterprise account**: navigate to your enterprise account by visiting `https://github.com/enterprises/ENTERPRISE-NAME`, replacing `ENTERPRISE-NAME` with your enterprise account's name.{% elsif ghes or ghae %}**If using an enterprise-level runner**:
* {% ifversion ghec %}**If using an enterprise account**: navigate to your enterprise account by clicking your profile photo in the top-right corner of {% data variables.product.prodname_dotcom_the_website %}, then clicking **Your enterprises**, then clicking the enterprise.{% elsif ghes or ghae %}**If using an enterprise-level runner**:
1. In the upper-right corner of any page, click {% octicon "rocket" aria-label="The rocket ship" %}.
1. In the left sidebar, click **Enterprise overview**.

View File

@@ -6,7 +6,7 @@
{% elsif ghec or ghes or ghae %}
1. Navigate to where your self-hosted runner is registered:
* **In an organization or repository**: navigate to the main page and click {% octicon "gear" aria-label="The Settings gear" %} **Settings**. {% ifversion ghec %}
* **If using an enterprise account**: navigate to your enterprise account by visiting `https://github.com/enterprises/ENTERPRISE-NAME`, replacing `ENTERPRISE-NAME` with your enterprise account's name.{% elsif ghes or ghae %}
* **If using an enterprise account**: navigate to your enterprise account by clicking your profile photo in the top-right corner of {% data variables.product.prodname_dotcom_the_website %}, then clicking **Your enterprises**, then clicking the enterprise.{% elsif ghes or ghae %}
* **If using an enterprise-level runner**:
1. In the upper-right corner of any page, click {% octicon "rocket" aria-label="The rocket ship" %}.
2. In the left sidebar, click **Enterprise overview**.

View File

@@ -0,0 +1,4 @@
IdP | SAML | User provisioning | Team mapping|
--- | --- | ---------------- | --------- |
[Azure Active Directory (Azure AD)](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-azure-ad) | {% octicon "check-circle-fill" aria-label="The check icon" %} | {% octicon "check-circle-fill" aria-label="The check icon" %}| {% octicon "check-circle-fill" aria-label="The check icon" %} |
[Okta](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-okta) | {% octicon "check-circle-fill" aria-label="The check icon" %}[<sup>Beta</sup>](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-okta)| {% octicon "check-circle-fill" aria-label="The check icon" %}[<sup>Beta</sup>](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/configuring-authentication-and-provisioning-for-your-enterprise-using-okta)| {% octicon "check-circle-fill" aria-label= "The check icon" %}[<sup>Beta</sup>](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams) |

View File

@@ -3,7 +3,8 @@ Organizations include:
- The ability to give members [a range of access permissions to your organization's repositories](/articles/repository-permission-levels-for-an-organization)
- [Nested teams that reflect your company or group's structure](/articles/about-teams) with cascading access permissions and mentions{% ifversion not ghae %}
- The ability for organization owners to view members' [two-factor authentication (2FA) status](/articles/about-two-factor-authentication)
- The option to [require all organization members to use two-factor authentication](/articles/requiring-two-factor-authentication-in-your-organization){% endif %}
- The option to [require all organization members to use two-factor authentication](/articles/requiring-two-factor-authentication-in-your-organization){% endif %}{% ifversion fpt%}
- The ability to [create and administer classrooms with GitHub Classroom](/education/manage-coursework-with-github-classroom/teach-with-github-classroom/manage-classrooms){% endif %}
{% ifversion fpt or ghec %}
You can use organizations for free, with {% data variables.product.prodname_free_team %}, which includes unlimited collaborators on unlimited public repositories with full features, and unlimited private repositories with limited features.

View File

@@ -0,0 +1,7 @@
| Action | Description
|------------------|-------------------
| `external_group.delete` | Triggered when your Okta group is deleted. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
| `external_group.link` | Triggered when your Okta group is mapped to your {% data variables.product.prodname_ghe_managed %} team. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
| `external_group.provision` | Triggered when an Okta group is mapped to your team on {% data variables.product.prodname_ghe_managed %}. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
| `external_group.unlink` | Triggered when your Okta group is unmapped from your {% data variables.product.prodname_ghe_managed %} team. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
| `external_group.update` | Triggered when your Okta group's settings are updated. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."

View File

@@ -0,0 +1,5 @@
| Action | Description
|------------------|-------------------
| `external_identity.deprovision` | Triggered when a user is removed from your Okta group and is subsequently deprovisioned from {% data variables.product.prodname_ghe_managed %}. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
| `external_identity.provision` | Triggered when an Okta user is added to your Okta group and is subsequently provisioned to the mapped team on {% data variables.product.prodname_ghe_managed %}. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."
| `external_identity.update` | Triggered when an Okta user's settings are updated. For more information, see ["Mapping Okta groups to teams](/admin/authentication/configuring-authentication-and-provisioning-with-your-identity-provider/mapping-okta-groups-to-teams)."

View File

@@ -0,0 +1,3 @@
1. In the Okta Dashboard, expand the **Applications** menu, then click **Applications**.
!["Applications" menu navigation](/assets/images/help/saml/okta-ae-add-application.png)

View File

@@ -0,0 +1,3 @@
1. Click on the {% data variables.product.prodname_ghe_managed %} app.
![Configure app](/assets/images/help/saml/okta-ae-configure-app.png)

View File

@@ -0,0 +1,3 @@
1. Click **Provisioning**.
![Configure app](/assets/images/help/saml/okta-ae-provisioning-tab.png)

View File

@@ -0,0 +1,5 @@
{% note %}
**Note:** {% data variables.product.prodname_ghe_managed %} single sign-on (SSO) support for Okta is currently in beta.
{% endnote %}

View File

@@ -11,4 +11,5 @@
- Shibboleth
{% elsif ghae %}
- Azure Active Directory (Azure AD)
- Okta (beta)
{% endif %}

View File

@@ -2,4 +2,5 @@ The following IdPs can provision or deprovision user accounts on {% data variabl
{% ifversion ghae %}
- Azure AD
- Okta (currently in beta)
{% endif %}

View File

@@ -1 +1 @@
version: enterprise-server@3.3
version: ''

View File

@@ -30414,18 +30414,6 @@
"rawType": "boolean",
"rawDescription": "Requires all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. Set to `false` to disable. Default: `false`.",
"childParamsGroups": []
},
"contexts": {
"type": "array of strings",
"description": "<p>The list of status checks to require in order to merge into this branch.</p>",
"items": {
"type": "string"
},
"name": "contexts",
"in": "body",
"rawType": "array",
"rawDescription": "The list of status checks to require in order to merge into this branch.",
"childParamsGroups": []
}
},
"required": [
@@ -31108,18 +31096,6 @@
"rawType": "boolean",
"rawDescription": "Requires all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. Set to `false` to disable. Default: `false`.",
"childParamsGroups": []
},
{
"type": "array of strings",
"description": "<p>The list of status checks to require in order to merge into this branch.</p>",
"items": {
"type": "string"
},
"name": "contexts",
"in": "body",
"rawType": "array",
"rawDescription": "The list of status checks to require in order to merge into this branch.",
"childParamsGroups": []
}
]
},
@@ -32330,15 +32306,76 @@
"contexts": {
"type": "array of strings",
"deprecated": true,
"description": "<p>The list of status checks to require in order to merge into this branch</p>",
"description": "<p><strong>Deprecated</strong>: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use <code>checks</code> instead of <code>contexts</code> for more fine-grained control.</p>",
"items": {
"type": "string"
},
"name": "contexts",
"in": "body",
"rawType": "array",
"rawDescription": "The list of status checks to require in order to merge into this branch",
"rawDescription": "**Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control.\n",
"childParamsGroups": []
},
"checks": {
"type": "array of objects",
"description": "<p>The list of status checks to require in order to merge into this branch.</p>",
"items": {
"type": "object",
"required": [
"context"
],
"properties": {
"context": {
"type": "string",
"description": "<p><strong>Required</strong>. The name of the required check</p>",
"name": "context",
"in": "body",
"rawType": "string",
"rawDescription": "The name of the required check",
"childParamsGroups": []
},
"app_id": {
"type": "integer",
"description": "<p>The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.</p>",
"name": "app_id",
"in": "body",
"rawType": "integer",
"rawDescription": "The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.",
"childParamsGroups": []
}
}
},
"name": "checks",
"in": "body",
"rawType": "array",
"rawDescription": "The list of status checks to require in order to merge into this branch.",
"childParamsGroups": [
{
"parentName": "checks",
"parentType": "items",
"id": "checks-items",
"params": [
{
"type": "string",
"description": "<p><strong>Required</strong>. The name of the required check</p>",
"name": "context",
"in": "body",
"rawType": "string",
"rawDescription": "The name of the required check",
"childParamsGroups": []
},
{
"type": "integer",
"description": "<p>The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.</p>",
"name": "app_id",
"in": "body",
"rawType": "integer",
"rawDescription": "The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.",
"childParamsGroups": []
}
]
}
]
}
}
},
@@ -32365,30 +32402,6 @@
"contentType": "application/json",
"notes": [],
"descriptionHTML": "<p>Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see <a href=\"https://help.github.com/github/getting-started-with-github/githubs-products\">GitHub's products</a> in the GitHub Help documentation.</p>\n<p>Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled.</p>",
"bodyParameters": [
{
"type": "boolean",
"description": "<p>Require branches to be up to date before merging.</p>",
"name": "strict",
"in": "body",
"rawType": "boolean",
"rawDescription": "Require branches to be up to date before merging.",
"childParamsGroups": []
},
{
"type": "array of strings",
"deprecated": true,
"description": "<p>The list of status checks to require in order to merge into this branch</p>",
"items": {
"type": "string"
},
"name": "contexts",
"in": "body",
"rawType": "array",
"rawDescription": "The list of status checks to require in order to merge into this branch",
"childParamsGroups": []
}
],
"responses": [
{
"httpStatusCode": "200",
@@ -32406,6 +32419,91 @@
"httpStatusMessage": "Unprocessable Entity",
"description": "Validation failed"
}
],
"bodyParameters": [
{
"type": "boolean",
"description": "<p>Require branches to be up to date before merging.</p>",
"name": "strict",
"in": "body",
"rawType": "boolean",
"rawDescription": "Require branches to be up to date before merging.",
"childParamsGroups": []
},
{
"type": "array of strings",
"deprecated": true,
"description": "<p><strong>Deprecated</strong>: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use <code>checks</code> instead of <code>contexts</code> for more fine-grained control.</p>",
"items": {
"type": "string"
},
"name": "contexts",
"in": "body",
"rawType": "array",
"rawDescription": "**Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control.\n",
"childParamsGroups": []
},
{
"type": "array of objects",
"description": "<p>The list of status checks to require in order to merge into this branch.</p>",
"items": {
"type": "object",
"required": [
"context"
],
"properties": {
"context": {
"type": "string",
"description": "<p><strong>Required</strong>. The name of the required check</p>",
"name": "context",
"in": "body",
"rawType": "string",
"rawDescription": "The name of the required check",
"childParamsGroups": []
},
"app_id": {
"type": "integer",
"description": "<p>The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.</p>",
"name": "app_id",
"in": "body",
"rawType": "integer",
"rawDescription": "The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.",
"childParamsGroups": []
}
}
},
"name": "checks",
"in": "body",
"rawType": "array",
"rawDescription": "The list of status checks to require in order to merge into this branch.",
"childParamsGroups": [
{
"parentName": "checks",
"parentType": "items",
"id": "checks-items",
"params": [
{
"type": "string",
"description": "<p><strong>Required</strong>. The name of the required check</p>",
"name": "context",
"in": "body",
"rawType": "string",
"rawDescription": "The name of the required check",
"childParamsGroups": []
},
{
"type": "integer",
"description": "<p>The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.</p>",
"name": "app_id",
"in": "body",
"rawType": "integer",
"rawDescription": "The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status.",
"childParamsGroups": []
}
]
}
]
}
]
},
{
@@ -40513,6 +40611,113 @@
}
]
},
{
"verb": "delete",
"requestPath": "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}",
"serverUrl": "https://{hostname}/api/v3",
"parameters": [
{
"name": "owner",
"in": "path",
"required": true,
"schema": {
"type": "string"
},
"descriptionHTML": ""
},
{
"name": "repo",
"in": "path",
"required": true,
"schema": {
"type": "string"
},
"descriptionHTML": ""
},
{
"name": "analysis_id",
"in": "path",
"description": "The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation.",
"required": true,
"schema": {
"type": "integer"
},
"descriptionHTML": "<p>The ID of the analysis, as returned from the <code>GET /repos/{owner}/{repo}/code-scanning/analyses</code> operation.</p>"
},
{
"name": "confirm_delete",
"in": "query",
"description": "Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to `true`, you'll get a 400 response with the message: `Analysis is last of its type and deletion may result in the loss of historical alert data. Please specify confirm_delete.`",
"required": false,
"schema": {
"type": "string",
"nullable": true
},
"descriptionHTML": "<p>Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to <code>true</code>, you'll get a 400 response with the message: <code>Analysis is last of its type and deletion may result in the loss of historical alert data. Please specify confirm_delete.</code></p>"
}
],
"x-codeSamples": [
{
"lang": "Shell",
"source": "curl \\\n -X DELETE \\\n -H \"Accept: application/vnd.github.v3+json\" \\\n https://{hostname}/api/v3/repos/octocat/hello-world/code-scanning/analyses/42",
"html": "<pre><code class=\"hljs language-shell\">curl \\\n -X DELETE \\\n -H \"Accept: application/vnd.github.v3+json\" \\\n https://{hostname}/api/v3/repos/octocat/hello-world/code-scanning/analyses/42</code></pre>"
},
{
"lang": "JavaScript",
"source": "await octokit.request('DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}', {\n owner: 'octocat',\n repo: 'hello-world',\n analysis_id: 42\n})",
"html": "<pre><code class=\"hljs language-javascript\"><span class=\"hljs-keyword\">await</span> octokit.<span class=\"hljs-title hljs-function\">request</span>(<span class=\"hljs-string\">'DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}'</span>, {\n <span class=\"hljs-attr\">owner</span>: <span class=\"hljs-string\">'octocat'</span>,\n <span class=\"hljs-attr\">repo</span>: <span class=\"hljs-string\">'hello-world'</span>,\n <span class=\"hljs-attr\">analysis_id</span>: <span class=\"hljs-number\">42</span>\n})\n</code></pre>"
}
],
"summary": "Delete a code scanning analysis from a repository",
"description": "Deletes a specified code scanning analysis from a repository. For\nprivate repositories, you must use an access token with the `repo` scope. For public repositories,\nyou must use an access token with `public_repo` and `repo:security_events` scopes.\nGitHub Apps must have the `security_events` write permission to use this endpoint.\n\nYou can delete one analysis at a time.\nTo delete a series of analyses, start with the most recent analysis and work backwards.\nConceptually, the process is similar to the undo function in a text editor.\n\nWhen you list the analyses for a repository,\none or more will be identified as deletable in the response:\n\n```\n\"deletable\": true\n```\n\nAn analysis is deletable when it's the most recent in a set of analyses.\nTypically, a repository will have multiple sets of analyses\nfor each enabled code scanning tool,\nwhere a set is determined by a unique combination of analysis values:\n\n* `ref`\n* `tool`\n* `analysis_key`\n* `environment`\n\nIf you attempt to delete an analysis that is not the most recent in a set,\nyou'll get a 400 response with the message:\n\n```\nAnalysis specified is not deletable.\n```\n\nThe response from a successful `DELETE` operation provides you with\ntwo alternative URLs for deleting the next analysis in the set:\n`next_analysis_url` and `confirm_delete_url`.\nUse the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis\nin a set. This is a useful option if you want to preserve at least one analysis\nfor the specified tool in your repository.\nUse the `confirm_delete_url` URL if you are content to remove all analyses for a tool.\nWhen you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url`\nin the 200 response is `null`.\n\nAs an example of the deletion process,\nlet's imagine that you added a workflow that configured a particular code scanning tool\nto analyze the code in a repository. This tool has added 15 analyses:\n10 on the default branch, and another 5 on a topic branch.\nYou therefore have two separate sets of analyses for this tool.\nYou've now decided that you want to remove all of the analyses for the tool.\nTo do this you must make 15 separate deletion requests.\nTo start, you must find an analysis that's identified as deletable.\nEach set of analyses always has one that's identified as deletable.\nHaving found the deletable analysis for one of the two sets,\ndelete this analysis and then continue deleting the next analysis in the set until they're all deleted.\nThen repeat the process for the second set.\nThe procedure therefore consists of a nested loop:\n\n**Outer loop**:\n* List the analyses for the repository, filtered by tool.\n* Parse this list to find a deletable analysis. If found:\n\n **Inner loop**:\n * Delete the identified analysis.\n * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration.\n\nThe above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely.",
"operationId": "code-scanning/delete-analysis",
"tags": [
"code-scanning"
],
"externalDocs": {
"description": "API method documentation",
"url": "https://docs.github.com/github-ae@latest/rest/reference/code-scanning#delete-a-code-scanning-analysis-from-a-repository"
},
"x-github": {
"enabledForGitHubApps": true,
"githubCloudOnly": false,
"category": "code-scanning"
},
"slug": "delete-a-code-scanning-analysis-from-a-repository",
"category": "code-scanning",
"categoryLabel": "Code scanning",
"notes": [],
"bodyParameters": [],
"descriptionHTML": "<p>Deletes a specified code scanning analysis from a repository. For\nprivate repositories, you must use an access token with the <code>repo</code> scope. For public repositories,\nyou must use an access token with <code>public_repo</code> and <code>repo:security_events</code> scopes.\nGitHub Apps must have the <code>security_events</code> write permission to use this endpoint.</p>\n<p>You can delete one analysis at a time.\nTo delete a series of analyses, start with the most recent analysis and work backwards.\nConceptually, the process is similar to the undo function in a text editor.</p>\n<p>When you list the analyses for a repository,\none or more will be identified as deletable in the response:</p>\n<pre><code>\"deletable\": true\n</code></pre>\n<p>An analysis is deletable when it's the most recent in a set of analyses.\nTypically, a repository will have multiple sets of analyses\nfor each enabled code scanning tool,\nwhere a set is determined by a unique combination of analysis values:</p>\n<ul>\n<li><code>ref</code></li>\n<li><code>tool</code></li>\n<li><code>analysis_key</code></li>\n<li><code>environment</code></li>\n</ul>\n<p>If you attempt to delete an analysis that is not the most recent in a set,\nyou'll get a 400 response with the message:</p>\n<pre><code>Analysis specified is not deletable.\n</code></pre>\n<p>The response from a successful <code>DELETE</code> operation provides you with\ntwo alternative URLs for deleting the next analysis in the set:\n<code>next_analysis_url</code> and <code>confirm_delete_url</code>.\nUse the <code>next_analysis_url</code> URL if you want to avoid accidentally deleting the final analysis\nin a set. This is a useful option if you want to preserve at least one analysis\nfor the specified tool in your repository.\nUse the <code>confirm_delete_url</code> URL if you are content to remove all analyses for a tool.\nWhen you delete the last analysis in a set, the value of <code>next_analysis_url</code> and <code>confirm_delete_url</code>\nin the 200 response is <code>null</code>.</p>\n<p>As an example of the deletion process,\nlet's imagine that you added a workflow that configured a particular code scanning tool\nto analyze the code in a repository. This tool has added 15 analyses:\n10 on the default branch, and another 5 on a topic branch.\nYou therefore have two separate sets of analyses for this tool.\nYou've now decided that you want to remove all of the analyses for the tool.\nTo do this you must make 15 separate deletion requests.\nTo start, you must find an analysis that's identified as deletable.\nEach set of analyses always has one that's identified as deletable.\nHaving found the deletable analysis for one of the two sets,\ndelete this analysis and then continue deleting the next analysis in the set until they're all deleted.\nThen repeat the process for the second set.\nThe procedure therefore consists of a nested loop:</p>\n<p><strong>Outer loop</strong>:</p>\n<ul>\n<li>\n<p>List the analyses for the repository, filtered by tool.</p>\n</li>\n<li>\n<p>Parse this list to find a deletable analysis. 
If found:</p>\n<p><strong>Inner loop</strong>:</p>\n<ul>\n<li>Delete the identified analysis.</li>\n<li>Parse the response for the value of <code>confirm_delete_url</code> and, if found, use this in the next iteration.</li>\n</ul>\n</li>\n</ul>\n<p>The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the <code>confirm_delete_url</code> value. Alternatively, you could use the <code>next_analysis_url</code> value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely.</p>",
"responses": [
{
"httpStatusCode": "200",
"httpStatusMessage": "OK",
"description": "Default response",
"payload": "<pre><code class=\"hljs language-json\"><span class=\"hljs-punctuation\">{</span>\n <span class=\"hljs-attr\">\"next_analysis_url\"</span><span class=\"hljs-punctuation\">:</span> <span class=\"hljs-string\">\"https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/41\"</span><span class=\"hljs-punctuation\">,</span>\n <span class=\"hljs-attr\">\"confirm_delete_url\"</span><span class=\"hljs-punctuation\">:</span> <span class=\"hljs-string\">\"https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/41?confirm_delete\"</span>\n<span class=\"hljs-punctuation\">}</span>\n</code></pre>"
},
{
"httpStatusCode": "400",
"httpStatusMessage": "Bad Request",
"description": "Bad Request"
},
{
"httpStatusCode": "403",
"httpStatusMessage": "Forbidden",
"description": "Response if the repository is archived or if github advanced security is not enabled for this repository"
},
{
"httpStatusCode": "404",
"httpStatusMessage": "Not Found",
"description": "Resource not found"
},
{
"httpStatusCode": "503",
"httpStatusMessage": "Service Unavailable",
"description": "Service unavailable"
}
]
},
{
"verb": "post",
"requestPath": "/repos/{owner}/{repo}/code-scanning/sarifs",
@@ -40550,7 +40755,7 @@
}
],
"summary": "Upload an analysis as SARIF data",
"description": "Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint.\n\nThere are two places where you can upload code scanning results.\n - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see \"[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests).\"\n - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see \"[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository).\"\n\nYou must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example:\n\n```\ngzip -c analysis-data.sarif | base64 -w0\n```\n\nSARIF upload supports a maximum of 1000 results per analysis run. Any results over this limit are ignored. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries.\n\nThe `202 Accepted`, response includes an `id` value.\nYou can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint.\nFor more information, see \"[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload).\"",
"description": "Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint.\n\nThere are two places where you can upload code scanning results.\n - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see \"[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests).\"\n - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see \"[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository).\"\n\nYou must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example:\n\n```\ngzip -c analysis-data.sarif | base64 -w0\n```\n\nSARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries.\n\nThe `202 Accepted`, response includes an `id` value.\nYou can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint.\nFor more information, see \"[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload).\"",
"operationId": "code-scanning/upload-sarif",
"tags": [
"code-scanning"
@@ -40646,7 +40851,7 @@
"categoryLabel": "Code scanning",
"contentType": "application/json",
"notes": [],
"descriptionHTML": "<p>Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the <code>security_events</code> scope to use this endpoint. GitHub Apps must have the <code>security_events</code> write permission to use this endpoint.</p>\n<p>There are two places where you can upload code scanning results.</p>\n<ul>\n<li>If you upload to a pull request, for example <code>--ref refs/pull/42/merge</code> or <code>--ref refs/pull/42/head</code>, then the results appear as alerts in a pull request check. For more information, see \"<a href=\"/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests\">Triaging code scanning alerts in pull requests</a>.\"</li>\n<li>If you upload to a branch, for example <code>--ref refs/heads/my-branch</code>, then the results appear in the <strong>Security</strong> tab for your repository. For more information, see \"<a href=\"/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository\">Managing code scanning alerts for your repository</a>.\"</li>\n</ul>\n<p>You must compress the SARIF-formatted analysis data that you want to upload, using <code>gzip</code>, and then encode it as a Base64 format string. For example:</p>\n<pre><code>gzip -c analysis-data.sarif | base64 -w0\n</code></pre>\n<p>SARIF upload supports a maximum of 1000 results per analysis run. Any results over this limit are ignored. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries.</p>\n<p>The <code>202 Accepted</code>, response includes an <code>id</code> value.\nYou can use this ID to check the status of the upload by using this for the <code>/sarifs/{sarif_id}</code> endpoint.\nFor more information, see \"<a href=\"/rest/reference/code-scanning#get-information-about-a-sarif-upload\">Get information about a SARIF upload</a>.\"</p>",
"descriptionHTML": "<p>Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the <code>security_events</code> scope to use this endpoint. GitHub Apps must have the <code>security_events</code> write permission to use this endpoint.</p>\n<p>There are two places where you can upload code scanning results.</p>\n<ul>\n<li>If you upload to a pull request, for example <code>--ref refs/pull/42/merge</code> or <code>--ref refs/pull/42/head</code>, then the results appear as alerts in a pull request check. For more information, see \"<a href=\"/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests\">Triaging code scanning alerts in pull requests</a>.\"</li>\n<li>If you upload to a branch, for example <code>--ref refs/heads/my-branch</code>, then the results appear in the <strong>Security</strong> tab for your repository. For more information, see \"<a href=\"/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository\">Managing code scanning alerts for your repository</a>.\"</li>\n</ul>\n<p>You must compress the SARIF-formatted analysis data that you want to upload, using <code>gzip</code>, and then encode it as a Base64 format string. For example:</p>\n<pre><code>gzip -c analysis-data.sarif | base64 -w0\n</code></pre>\n<p>SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries.</p>\n<p>The <code>202 Accepted</code>, response includes an <code>id</code> value.\nYou can use this ID to check the status of the upload by using this for the <code>/sarifs/{sarif_id}</code> endpoint.\nFor more information, see \"<a href=\"/rest/reference/code-scanning#get-information-about-a-sarif-upload\">Get information about a SARIF upload</a>.\"</p>",
"bodyParameters": [
{
"description": "<p><strong>Required</strong>. The SHA of the commit to which the analysis you are uploading relates.</p>",

View File

@@ -107536,13 +107536,6 @@
"required_conversation_resolution": {
"type": "boolean",
"description": "Requires all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. Set to `false` to disable. Default: `false`."
},
"contexts": {
"type": "array",
"description": "The list of status checks to require in order to merge into this branch.",
"items": {
"type": "string"
}
}
},
"required": [
@@ -107629,6 +107622,26 @@
"type": "string"
}
},
"checks": {
"type": "array",
"items": {
"type": "object",
"properties": {
"context": {
"type": "string",
"example": "continuous-integration/travis-ci"
},
"app_id": {
"type": "integer",
"nullable": true
}
},
"required": [
"context",
"app_id"
]
}
},
"contexts_url": {
"type": "string",
"format": "uri",
@@ -107639,7 +107652,8 @@
"url",
"contexts_url",
"strict",
"contexts"
"contexts",
"checks"
]
},
"required_pull_request_reviews": {
@@ -110222,6 +110236,26 @@
"type": "string"
}
},
"checks": {
"type": "array",
"items": {
"type": "object",
"properties": {
"context": {
"type": "string",
"example": "continuous-integration/travis-ci"
},
"app_id": {
"type": "integer",
"nullable": true
}
},
"required": [
"context",
"app_id"
]
}
},
"contexts_url": {
"type": "string",
"format": "uri",
@@ -110232,7 +110266,8 @@
"url",
"contexts_url",
"strict",
"contexts"
"contexts",
"checks"
]
},
"examples": {
@@ -110337,10 +110372,30 @@
"contexts": {
"type": "array",
"deprecated": true,
"description": "The list of status checks to require in order to merge into this branch",
"description": "**Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control.\n",
"items": {
"type": "string"
}
},
"checks": {
"type": "array",
"description": "The list of status checks to require in order to merge into this branch.",
"items": {
"type": "object",
"required": [
"context"
],
"properties": {
"context": {
"type": "string",
"description": "The name of the required check"
},
"app_id": {
"type": "integer",
"description": "The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status."
}
}
}
}
}
},
@@ -110381,6 +110436,26 @@
"type": "string"
}
},
"checks": {
"type": "array",
"items": {
"type": "object",
"properties": {
"context": {
"type": "string",
"example": "continuous-integration/travis-ci"
},
"app_id": {
"type": "integer",
"nullable": true
}
},
"required": [
"context",
"app_id"
]
}
},
"contexts_url": {
"type": "string",
"format": "uri",
@@ -110391,7 +110466,8 @@
"url",
"contexts_url",
"strict",
"contexts"
"contexts",
"checks"
]
},
"examples": {
@@ -133103,12 +133179,242 @@
"githubCloudOnly": false,
"category": "code-scanning"
}
},
"delete": {
"summary": "Delete a code scanning analysis from a repository",
"description": "Deletes a specified code scanning analysis from a repository. For\nprivate repositories, you must use an access token with the `repo` scope. For public repositories,\nyou must use an access token with `public_repo` and `repo:security_events` scopes.\nGitHub Apps must have the `security_events` write permission to use this endpoint.\n\nYou can delete one analysis at a time.\nTo delete a series of analyses, start with the most recent analysis and work backwards.\nConceptually, the process is similar to the undo function in a text editor.\n\nWhen you list the analyses for a repository,\none or more will be identified as deletable in the response:\n\n```\n\"deletable\": true\n```\n\nAn analysis is deletable when it's the most recent in a set of analyses.\nTypically, a repository will have multiple sets of analyses\nfor each enabled code scanning tool,\nwhere a set is determined by a unique combination of analysis values:\n\n* `ref`\n* `tool`\n* `analysis_key`\n* `environment`\n\nIf you attempt to delete an analysis that is not the most recent in a set,\nyou'll get a 400 response with the message:\n\n```\nAnalysis specified is not deletable.\n```\n\nThe response from a successful `DELETE` operation provides you with\ntwo alternative URLs for deleting the next analysis in the set:\n`next_analysis_url` and `confirm_delete_url`.\nUse the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis\nin a set. This is a useful option if you want to preserve at least one analysis\nfor the specified tool in your repository.\nUse the `confirm_delete_url` URL if you are content to remove all analyses for a tool.\nWhen you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url`\nin the 200 response is `null`.\n\nAs an example of the deletion process,\nlet's imagine that you added a workflow that configured a particular code scanning tool\nto analyze the code in a repository. This tool has added 15 analyses:\n10 on the default branch, and another 5 on a topic branch.\nYou therefore have two separate sets of analyses for this tool.\nYou've now decided that you want to remove all of the analyses for the tool.\nTo do this you must make 15 separate deletion requests.\nTo start, you must find an analysis that's identified as deletable.\nEach set of analyses always has one that's identified as deletable.\nHaving found the deletable analysis for one of the two sets,\ndelete this analysis and then continue deleting the next analysis in the set until they're all deleted.\nThen repeat the process for the second set.\nThe procedure therefore consists of a nested loop:\n\n**Outer loop**:\n* List the analyses for the repository, filtered by tool.\n* Parse this list to find a deletable analysis. If found:\n\n **Inner loop**:\n * Delete the identified analysis.\n * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration.\n\nThe above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely.",
"operationId": "code-scanning/delete-analysis",
"tags": [
"code-scanning"
],
"externalDocs": {
"description": "API method documentation",
"url": "https://docs.github.com/github-ae@latest/rest/reference/code-scanning#delete-a-code-scanning-analysis-from-a-repository"
},
"parameters": [
{
"name": "owner",
"in": "path",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "repo",
"in": "path",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "analysis_id",
"in": "path",
"description": "The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation.",
"required": true,
"schema": {
"type": "integer"
}
},
{
"name": "confirm_delete",
"in": "query",
"description": "Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to `true`, you'll get a 400 response with the message: `Analysis is last of its type and deletion may result in the loss of historical alert data. Please specify confirm_delete.`",
"required": false,
"schema": {
"type": "string",
"nullable": true
}
}
],
"responses": {
"200": {
"description": "Response",
"content": {
"application/json": {
"schema": {
"title": "Analysis deletion",
"description": "Successful deletion of a code scanning analysis",
"type": "object",
"properties": {
"next_analysis_url": {
"type": "string",
"description": "Next deletable analysis in chain, without last analysis deletion confirmation",
"format": "uri",
"readOnly": true,
"nullable": true
},
"confirm_delete_url": {
"type": "string",
"description": "Next deletable analysis in chain, with last analysis deletion confirmation",
"format": "uri",
"readOnly": true,
"nullable": true
}
},
"required": [
"next_analysis_url",
"confirm_delete_url"
]
},
"examples": {
"default-response": {
"summary": "Default response",
"value": {
"next_analysis_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/41",
"confirm_delete_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/41?confirm_delete"
}
}
}
}
}
},
"400": {
"description": "Bad Request",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
},
"application/scim+json": {
"schema": {
"title": "Scim Error",
"description": "Scim Error",
"type": "object",
"properties": {
"message": {
"type": "string",
"nullable": true
},
"documentation_url": {
"type": "string",
"nullable": true
},
"detail": {
"type": "string",
"nullable": true
},
"status": {
"type": "integer"
},
"scimType": {
"type": "string",
"nullable": true
},
"schemas": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
}
}
},
"403": {
"description": "Response if the repository is archived or if github advanced security is not enabled for this repository",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
},
"404": {
"description": "Resource not found",
"content": {
"application/json": {
"schema": {
"title": "Basic Error",
"description": "Basic Error",
"type": "object",
"properties": {
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
},
"url": {
"type": "string"
},
"status": {
"type": "string"
}
}
}
}
}
},
"503": {
"description": "Service unavailable",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"code": {
"type": "string"
},
"message": {
"type": "string"
},
"documentation_url": {
"type": "string"
}
}
}
}
}
}
},
"x-github": {
"enabledForGitHubApps": true,
"githubCloudOnly": false,
"category": "code-scanning"
}
}
},
"/repos/{owner}/{repo}/code-scanning/sarifs": {
"post": {
"summary": "Upload an analysis as SARIF data",
"description": "Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint.\n\nThere are two places where you can upload code scanning results.\n - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see \"[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests).\"\n - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see \"[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository).\"\n\nYou must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example:\n\n```\ngzip -c analysis-data.sarif | base64 -w0\n```\n\nSARIF upload supports a maximum of 1000 results per analysis run. Any results over this limit are ignored. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries.\n\nThe `202 Accepted`, response includes an `id` value.\nYou can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint.\nFor more information, see \"[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload).\"",
"description": "Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint.\n\nThere are two places where you can upload code scanning results.\n - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see \"[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests).\"\n - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see \"[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository).\"\n\nYou must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example:\n\n```\ngzip -c analysis-data.sarif | base64 -w0\n```\n\nSARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries.\n\nThe `202 Accepted`, response includes an `id` value.\nYou can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint.\nFor more information, see \"[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload).\"",
"operationId": "code-scanning/upload-sarif",
"tags": [
"code-scanning"

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7cafabad17e1a97057be1b8d590ed0cfa947f880a020a8d24c5f3541c408be1a
size 628268
oid sha256:9c2c73f4a2b06107b5f007c4f1b134d4669a638649dbb7dae0147b1270344927
size 624988

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dd4253038257244436f230eec1e2fe4734a65740e0b442d54a7de6ed81c01aaa
size 1357450
oid sha256:5036d45db925a4ea465767fdfc855dd3606a73948a4576f20e61896a64b39488
size 1366309

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ed1bb5929cf73689c7a06555251e2151321182dc033a677d2248d0474d8207cf
size 948157
oid sha256:3974113f62f15b35c0af1b265db7c851f3ad62f7665cc956a756b5c1d572a227
size 947384

Some files were not shown because too many files have changed in this diff Show More