
Merge branch 'main' into patch-2

Author: Sophie
Date: 2022-08-30 15:36:41 +02:00
Committed by: GitHub
296 changed files with 1426 additions and 876 deletions

View File

@@ -53,6 +53,14 @@ jobs:
# Don't care about CDN caching image URLs
DISABLE_REWRITE_ASSET_URLS: true
run: |
        # Note: as of Aug 2022, we *don't* check external links
# on the pages you touched in the PR. We could enable that
# but it has the added risk of false positives blocking CI.
# We are using this script for the daily/nightly checker that
# checks external links too. Once we're confident it really works
# well, we can consider enabling it here on every content PR too.
./script/rendered-content-link-checker.js \
--language en \
--max 100 \

Three new binary image files added (previews not shown): 27 KiB, 7.6 KiB, and 12 KiB.

View File

@@ -45,7 +45,7 @@ export function getShellExample(operation: Operation, codeSample: CodeSample) {
const args = [
operation.verb !== 'get' && `-X ${operation.verb.toUpperCase()}`,
`-H "Accept: ${defaultAcceptHeader}" \\ \n -H "Authorization: token <TOKEN>"`,
`-H "Accept: ${defaultAcceptHeader}" \\ \n -H "Authorization: Bearer <YOUR-TOKEN>"`,
`${operation.serverUrl}${requestPath}`,
requestBodyParams,
].filter(Boolean)
@@ -141,11 +141,7 @@ export function getJSExample(operation: Operation, codeSample: CodeSample) {
}
}
const comment = `// Octokit.js\n// https://github.com/octokit/core.js#readme\n`
const require = `const octokit = new Octokit(${stringify(
{ auth: 'personal-access-token123' },
null,
2
)})\n\n`
const require = `const octokit = new Octokit(${stringify({ auth: 'YOUR-TOKEN' }, null, 2)})\n\n`
return `${comment}${require}await octokit.request('${operation.verb.toUpperCase()} ${
operation.requestPath

View File

@@ -13,6 +13,6 @@
table-layout: fixed !important;
}
.codeBlock code {
.codeBlock code:not(td *) {
word-break: break-all;
}

View File

@@ -1,4 +1,3 @@
import React from 'react'
import { Heading, NavList } from '@primer/react'
import cx from 'classnames'

View File

@@ -145,11 +145,11 @@ With OIDC, a {% data variables.product.prodname_actions %} workflow requires a t
Audience and Subject claims are typically used in combination while setting conditions on the cloud role/resources to scope its access to the GitHub workflows.
- **Audience**: By default, this value uses the URL of the organization or repository owner. This can be used to set a condition that only the workflows in the specific organization can access the cloud role.
- **Subject**: Has a predefined format and is a concatenation of some of the key metadata about the workflow, such as the {% data variables.product.prodname_dotcom %} organization, repository, branch, or associated [`job`](/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idenvironment) environment. See "[Example subject claims](#example-subject-claims)" to see how the subject claim is assembled from concatenated metadata.
- **Subject**: By default, has a predefined format and is a concatenation of some of the key metadata about the workflow, such as the {% data variables.product.prodname_dotcom %} organization, repository, branch, or associated [`job`](/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idenvironment) environment. See "[Example subject claims](#example-subject-claims)" to see how the subject claim is assembled from concatenated metadata.
There are also many additional claims supported in the OIDC token that can also be used for setting these conditions.
If you need more granular trust conditions, you can customize the issuer (`iss`) and subject (`sub`) claims that are included with the JWT. For more information, see "[Customizing the token claims](#customizing-the-token-claims)".
In addition, your cloud provider could allow you to assign a role to the access tokens, letting you specify even more granular permissions.
There are also many additional claims supported in the OIDC token that can be used for setting these conditions. In addition, your cloud provider could allow you to assign a role to the access tokens, letting you specify even more granular permissions.
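To make the audience and subject conditions above concrete, here is a minimal JavaScript sketch of the kind of check a cloud provider's trust policy effectively performs. The claim values are placeholders, and the real evaluation happens inside the cloud provider's own policy engine, not in your code.

```javascript
// Illustration only — not any provider's real implementation.
function isTrusted(claims) {
  // Audience: by default the URL of the organization or repository owner.
  const audienceOk = claims.aud === 'https://github.com/octo-org';
  // Subject: the default concatenated format, here scoped to one repo and environment.
  const subjectOk = claims.sub === 'repo:octo-org/octo-repo:environment:prod';
  return audienceOk && subjectOk;
}

// Example token claims (placeholder values following the documented shape):
console.log(isTrusted({
  aud: 'https://github.com/octo-org',
  sub: 'repo:octo-org/octo-repo:environment:prod',
}));
```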
{% note %}
@@ -243,9 +243,13 @@ curl -H "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" "$ACTIONS_ID_TOK
{% ifversion actions-oidc-hardening-config %}
## Customizing the token claims
You can security harden your OIDC configuration by customizing the claims that are included with the JWT. This allows your cloud provider to apply more granular trust conditions when determining whether to grant access to its resources. For example, {% ifversion ghec %}you can customize the issuer (`iss`) claim to only allow access from a specific enterprise URL, and {% endif %}you can customize the subject (`sub`) value to require that requests originate from a specific repository, reusable workflow, or other source.
You can security harden your OIDC configuration by customizing the claims that are included with the JWT. These customizations allow you to define more granular trust conditions on your cloud roles when allowing your workflows to access resources hosted in the cloud:
To configure the claim conditions on {% data variables.product.prodname_dotcom %}, you can use the REST API endpoints described in the following sections.
{% ifversion ghec %} - For an additional layer of security, you can append the `issuer` url with your enterprise slug. This lets you set conditions on the issuer (`iss`) claim, configuring it to only accept JWT tokens from a unique `issuer` URL that must include your enterprise slug.{% endif %}
- You can standardize your OIDC configuration by setting conditions on the subject (`sub`) claim that require JWT tokens to originate from a specific repository, reusable workflow, or other source.
- You can define granular OIDC policies by using additional OIDC token claims, such as `repository_id` and `repo_visibility`. For more information, see "[Understanding the OIDC token](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#understanding-the-oidc-token)".
To customize these claim formats, organization and repository admins can use the REST API endpoints described in the following sections.
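As a hedged illustration of the REST API mentioned above, the following Octokit sketch sets an organization-level subject claim template. The endpoint path is the one linked in the sections below; the organization name, token, and claim keys are placeholders.

```javascript
// Sketch: set which claims are concatenated into the sub claim of future OIDC tokens.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "YOUR-TOKEN" });

await octokit.request("PUT /orgs/{org}/actions/oidc/customization/sub", {
  org: "octo-org", // placeholder
  include_claim_keys: ["repo", "context", "job_workflow_ref"],
});
```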
{% ifversion ghec %}
@@ -280,19 +284,21 @@ After this setting is applied, the JWT will contain the updated `iss` value. In
To configure organization-wide security, compliance, and standardization, you can customize the standard claims to suit your required access conditions. If your cloud provider supports conditions on subject claims, you can create a condition that checks whether the `sub` value matches the path of the reusable workflow, such as `"job_workflow_ref: "octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main""`. The exact format will vary depending on your cloud provider's OIDC configuration. To configure the matching condition on {% data variables.product.prodname_dotcom %}, you can use the REST API to require that the `sub` claim must always include a specific custom claim, such as `job_workflow_ref`. For more information, see "[Set the customization template for an OIDC subject claim for an organization](/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization)."
Customizing the claims results in a new format for the entire `sub` claim, which replaces the default predefined `sub` format in the token described in "[Example subject claims](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims)."
The following example templates demonstrate various ways to customize the subject claim. To configure these settings on {% data variables.product.prodname_dotcom %}, organization admins use the REST API to specify a list of claims that must be included in the subject (`sub`) claim. {% data reusables.actions.use-request-body-api %}
To customize your subject claims, you should first create a matching condition in your cloud provider's OIDC configuration, before adding the configuration using the REST API. Once the configuration is completed, each time a new job runs, the OIDC token generated during that job will follow the new customization template. If the matching condition doesn't exist in the cloud provider's OIDC configuration before the job runs, the generated token might not be accepted by the cloud provider, since the cloud conditions may not be synchronized.
To customize your subject claims, you should first create a matching condition in your cloud provider's OIDC configuration, before customizing the configuration using the REST API. Once the configuration is completed, each time a new job runs, the OIDC token generated during that job will follow the new customization template. If the matching condition doesn't exist in the cloud provider's OIDC configuration before the job runs, the generated token might not be accepted by the cloud provider, since the cloud conditions may not be synchronized.
{% note %}
**Note**: When the organization template is applied, it will not affect any existing repositories that already use OIDC. For new repositories that are created after the template has been applied, the repository owner will need to opt-in to receive this configuration. For more information, see "[Set the opt-in flag of an OIDC subject claim customization for a repository](/rest/actions/oidc#set-the-opt-in-flag-of-an-oidc-subject-claim-customization-for-a-repository)."
**Note**: When the organization template is applied, it will not affect any existing repositories that already use OIDC. For existing repositories, as well as any new repositories that are created after the template has been applied, the repository owner will need to opt-in to receive this configuration. For more information, see "[Set the opt-in flag of an OIDC subject claim customization for a repository](/rest/actions/oidc#set-the-opt-in-flag-of-an-oidc-subject-claim-customization-for-a-repository)."
{% endnote %}
#### Example: Allowing repository based on visibility and owner
This example template enables cloud access based on repository visibility and owner, letting you restrict cloud role access to only private repositories within an organization or enterprise. {% data reusables.actions.use-request-body-api %}
This example template allows the `sub` claim to have a new format, using `repository_owner` and `repository_visibility`:
```json
{
@@ -303,11 +309,11 @@ This example template enables cloud access based on repository visibility and ow
}
```
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include specific values for `repository_owner` and `repository_visibility`. For example: `"repository_owner: "monalisa":repository_visibility:private"`.
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include specific values for `repository_owner` and `repository_visibility`. For example: `"repository_owner: "monalisa":repository_visibility:private"`. The approach lets you restrict cloud role access to only private repositories within an organization or enterprise.
#### Example: Allowing access to all repositories with a specific owner
This example template grants access to all repositories with a specified `repository_owner`. {% data reusables.actions.use-request-body-api %}
This example template enables the `sub` claim to have a new format with only the value of `repository_owner`. {% data reusables.actions.use-request-body-api %}
```json
{
@@ -322,7 +328,9 @@ In your cloud provider's OIDC configuration, configure the `sub` condition to re
#### Example: Requiring a reusable workflow
This example template requires a specific reusable workflow in a claim, letting an enterprise enforce consistent deployments across its enterprise, organizations, and repositories. {% data reusables.actions.use-request-body-api %}
This example template allows the `sub` claim to have a new format that contains the value of the `job_workflow_ref` claim. This enables an enterprise to use [reusable workflows](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims) to enforce consistent deployments across its organizations and repositories.
{% data reusables.actions.use-request-body-api %}
```json
{
@@ -336,7 +344,9 @@ In your cloud provider's OIDC configuration, configure the `sub` condition to re
#### Example: Requiring a reusable workflow and other claims
This example template combines the requirement of a specific reusable workflow with additional claims. {% data reusables.actions.use-request-body-api %}
The following example template combines the requirement of a specific reusable workflow with additional claims. {% data reusables.actions.use-request-body-api %}
This example also demonstrates how to use `"context"` to define your conditions. This is the part that follows the repository in the [default `sub` format](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims). For example, when the job references an environment, the context contains: `environment:<environmentName>`.
```json
{
@@ -350,6 +360,9 @@ This example template combines the requirement of a specific reusable workflow w
In your cloud provider's OIDC configuration, configure the `sub` condition to require that claims must include specific values for `repo`, `context`, and `job_workflow_ref`.
This customization template requires that the `sub` uses the following format: `repo:<orgName/repoName>:environment:<environmentName>:job_workflow_ref:<reusableWorkflowPath>`.
For example: `"sub": "repo:octo-org/octo-repo:environment:prod:job_workflow_ref:octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main"`
#### Example: Granting access to a specific repository
This example template lets you grant cloud access to all the workflows in a specific repository, across all branches/tags and environments. To help improve security, combine this template with the custom issuer URL described in "[Customizing the token URL for an enterprise](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-url-for-an-enterprise)."

View File

@@ -23,11 +23,18 @@ topics:
Rather than copying and pasting deployment jobs from one workflow to another, you can create a reusable workflow that performs the deployment steps. A reusable workflow can be used by another workflow if it meets one of the access requirements described in "[Reusing workflows](/actions/learn-github-actions/reusing-workflows#access-to-reusable-workflows)."
When combined with OpenID Connect (OIDC), reusable workflows let you enforce consistent deployments across your repository, organization, or enterprise. You can do this by defining trust conditions on cloud roles based on reusable workflows.
You should be familiar with the concepts described in "[Reusing workflows](/actions/learn-github-actions/reusing-workflows)" and "[About security hardening with OpenID Connect](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect)."
In order to create trust conditions based on reusable workflows, your cloud provider must support custom claims for `job_workflow_ref`. This allows your cloud provider to identify which repository the job originally came from. If your cloud provider only supports the standard claims (_audience_ and _subject_), it will not be able to determine that the job originated from the reusable workflow repository. Cloud providers that support `job_workflow_ref` include Google Cloud Platform and HashiCorp Vault.
## Defining the trust conditions
Before proceeding, you should be familiar with the concepts of [reusable workflows](/actions/learn-github-actions/reusing-workflows) and [OpenID Connect](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect).
When combined with OpenID Connect (OIDC), reusable workflows let you enforce consistent deployments across your repository, organization, or enterprise. You can do this by defining trust conditions on cloud roles based on reusable workflows. The available options will vary depending on your cloud provider:
- **Using `job_workflow_ref`**:
- To create trust conditions based on reusable workflows, your cloud provider must support custom claims for `job_workflow_ref`. This allows your cloud provider to identify which repository the job originally came from.
- For clouds that only support the standard claims (audience (`aud`) and subject (`sub`)), you can use the API to customize the `sub` claim to include `job_workflow_ref`. For more information, see "[Customizing the token claims](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-claims)". Support for custom claims is currently available for Google Cloud Platform and HashiCorp Vault.
- **Customizing the token claims**:
- You can configure more granular trust conditions by customizing the issuer (`iss`) and subject (`sub`) claims included with the JWT. For more information, see "[Customizing the token claims](/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-claims)".
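Following on from the options above, here is a minimal JavaScript sketch of the comparison a cloud provider's trust condition effectively performs when it pins deployments to one reusable workflow via `job_workflow_ref`. The workflow path is a placeholder, and real providers evaluate this inside their own policy engines.

```javascript
// Illustration only — not a cloud provider's real implementation.
const ALLOWED_JOB_WORKFLOW_REF =
  "octo-org/octo-automation/.github/workflows/oidc.yml@refs/heads/main"; // placeholder

function isDeploymentAllowed(claims) {
  // job_workflow_ref identifies the reusable workflow the job actually ran from.
  return claims.job_workflow_ref === ALLOWED_JOB_WORKFLOW_REF;
}
```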
## How the token works with reusable workflows

View File

@@ -51,7 +51,7 @@ For more information on workflow run artifacts, see "[Persisting workflow data u
A workflow can access and restore a cache created in the current branch, the base branch (including base branches of forked repositories), or the default branch (usually `main`). For example, a cache created on the default branch would be accessible from any pull request. Also, if the branch `feature-b` has the base branch `feature-a`, a workflow triggered on `feature-b` would have access to caches created in the default branch (`main`), `feature-a`, and `feature-b`.
Access restrictions provide cache isolation and security by creating a logical boundary between different branches. For example, a cache created for the branch `feature-a` (with the base `main`) would not be accessible to a pull request for the branch `feature-c` (with the base `main`).
Access restrictions provide cache isolation and security by creating a logical boundary between different branches or tags. For example, a cache created for the branch `feature-a` (with the base `main`) would not be accessible to a pull request for the branch `feature-c` (with the base `main`). Similarly, a cache created for the tag `release-a` (with the base `main`) would not be accessible to a workflow triggered for the tag `release-b` (with the base `main`).
Multiple workflows within a repository share cache entries. A cache created for a branch within a workflow can be accessed and restored from another workflow for the same repository and branch.
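As a rough illustration of the branch-scoping rule described above, here is a minimal JavaScript sketch: a run can restore caches created on its own branch, its base branch, or the default branch, within the same repository. This is an illustration of the rule only, not the actual cache service logic, and the branch names are placeholders.

```javascript
// Rough sketch of the cache visibility rule (illustration only).
function canRestoreCache(run, cache) {
  const visibleRefs = [run.ref, run.baseRef, "main"]; // current, base, and default branch
  return cache.repo === run.repo && visibleRefs.includes(cache.ref);
}

// A run on feature-b (base feature-a) can use caches from feature-b, feature-a, or main:
console.log(canRestoreCache(
  { repo: "octo-org/octo-repo", ref: "feature-b", baseRef: "feature-a" },
  { repo: "octo-org/octo-repo", ref: "feature-a" }
)); // true
```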

View File

@@ -44,7 +44,8 @@ You set up the audit log stream on {% data variables.product.product_name %} by
- [Amazon S3](#setting-up-streaming-to-amazon-s3)
- [Azure Blob Storage](#setting-up-streaming-to-azure-blob-storage)
- [Azure Event Hubs](#setting-up-streaming-to-azure-event-hubs)
- [Azure Event Hubs](#setting-up-streaming-to-azure-event-hubs){% ifversion streaming-datadog %}
- [Datadog](#setting-up-streaming-to-datadog){% endif %}
- [Google Cloud Storage](#setting-up-streaming-to-google-cloud-storage)
- [Splunk](#setting-up-streaming-to-splunk)
@@ -60,7 +61,7 @@ You can set up streaming to S3 with access keys or, to avoid storing long-lived
#### Setting up streaming to S3 with access keys
{% endif %}
To stream audit logs to Amazon's S3 endpoint, you must have a bucket and access keys. For more information, see [Creating, configuring, and working with Amazon S3 buckets](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-buckets-s3.html) in the the AWS documentation. Make sure to block public access to the bucket to protect your audit log information.
To stream audit logs to Amazon's S3 endpoint, you must have a bucket and access keys. For more information, see [Creating, configuring, and working with Amazon S3 buckets](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-buckets-s3.html) in the AWS documentation. Make sure to block public access to the bucket to protect your audit log information.
To set up audit log streaming from {% data variables.product.prodname_dotcom %} you will need:
* The name of your Amazon S3 bucket
@@ -231,6 +232,32 @@ You need two pieces of information about your event hub: its instance name and t
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
{% ifversion streaming-datadog %}
### Setting up streaming to Datadog
To set up streaming to Datadog, you must create a client token or an API key in Datadog, then configure audit log streaming in {% data variables.product.product_name %} using the token for authentication. You do not need to create a bucket or other storage container in Datadog.
After you set up streaming to Datadog, you can see your audit log data by filtering by "github.audit.streaming." For more information, see [Log Management](https://docs.datadoghq.com/logs/).
1. If you don't already have a Datadog account, create one.
1. In Datadog, generate a client token or an API key, then click **Copy key**. For more information, see [API and Application Keys](https://docs.datadoghq.com/account_management/api-app-keys/) in Datadog Docs.
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Select the **Configure stream** dropdown menu and click **Datadog**.
![Screenshot of the "Configure stream" dropdown menu with "Datadog" highlighted](/assets/images/help/enterprises/audit-stream-choice-datadog.png)
1. Under "Token", paste the token you copied earlier.
![Screenshot of the "Token" field](/assets/images/help/enterprises/audit-stream-datadog-token.png)
1. Select the "Site" dropdown menu and click your Datadog site. To determine your Datadog site, compare your Datadog URL to the table in [Datadog sites](https://docs.datadoghq.com/getting_started/site/) in Datadog Docs.
![Screenshot of the "Site" dropdown menu](/assets/images/help/enterprises/audit-stream-datadog-site.png)
1. To verify that {% data variables.product.prodname_dotcom %} can connect and write to the Datadog endpoint, click **Check endpoint**.
![Check the endpoint](/assets/images/help/enterprises/audit-stream-check.png)
{% data reusables.enterprise.verify-audit-log-streaming-endpoint %}
1. After a few minutes, confirm that audit log data is appearing on the **Logs** tab in Datadog. If audit log data is not appearing, confirm that your token and site are correct in {% data variables.product.prodname_dotcom %}.
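If you want to confirm your Datadog API key and site accept logs before configuring the stream, a minimal Node.js sketch is shown below. The intake hostname varies by Datadog site; the URL here assumes the default US1 site, and the `DD_API_KEY` environment variable and log fields are placeholders — check Datadog's documentation for the intake endpoint that matches your site.

```javascript
// Hedged sketch: send one test log entry to Datadog's logs intake (Node 18+ for fetch).
const response = await fetch("https://http-intake.logs.datadoghq.com/api/v2/logs", {
  method: "POST",
  headers: {
    "DD-API-KEY": process.env.DD_API_KEY, // the key you generated earlier
    "Content-Type": "application/json",
  },
  body: JSON.stringify([
    { ddsource: "github.audit.streaming", message: "audit log streaming connectivity test" },
  ]),
});
console.log(response.status); // a 2xx status indicates the intake accepted the payload
```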
{% endif %}
### Setting up streaming to Google Cloud Storage
To set up streaming to Google Cloud Storage, you must create a service account in Google Cloud with the appropriate credentials and permissions, then configure audit log streaming in {% data variables.product.product_name %} using the service account's credentials for authentication.
@@ -292,6 +319,10 @@ To stream audit logs to Splunk's HTTP Event Collector (HEC) endpoint you must ma
Pausing the stream allows you to perform maintenance on the receiving application without losing audit data. Audit logs are stored for up to seven days on {% data variables.product.product_location %} and are then exported when you unpause the stream.
{% ifversion streaming-datadog %}
Datadog only accepts logs from up to 18 hours in the past. If you pause a stream to a Datadog endpoint for more than 18 hours, you risk losing logs that Datadog won't accept after you resume streaming.
{% endif %}
{% data reusables.enterprise.navigate-to-log-streaming-tab %}
1. Click **Pause stream**.

View File

@@ -81,6 +81,8 @@ You can also configure allowed IP addresses for an individual organization. For
{% data reusables.identity-and-permissions.about-adding-ip-allow-list-entries %}
{% data reusables.identity-and-permissions.ipv6-allow-lists %}
{% data reusables.enterprise-accounts.access-enterprise %}
{% data reusables.enterprise-accounts.settings-tab %}
{% data reusables.enterprise-accounts.security-tab %}

View File

@@ -52,7 +52,7 @@ topics:
{% data reusables.gpg.copy-gpg-key-id %}
10. Paste the text below, substituting in the GPG key ID you'd like to use. In this example, the GPG key ID is `3AA5C34371567BD2`:
```shell{:copy}
$ gpg --armor --export <em>3AA5C34371567BD2</em>
$ gpg --armor --export 3AA5C34371567BD2
# Prints the GPG key ID, in ASCII armor format
```
11. Copy your GPG key, beginning with `-----BEGIN PGP PUBLIC KEY BLOCK-----` and ending with `-----END PGP PUBLIC KEY BLOCK-----`.

View File

@@ -83,6 +83,12 @@ You can check a SARIF file is compatible with {% data variables.product.prodname
If you use a code analysis engine other than {% data variables.product.prodname_codeql %}, you can review the supported SARIF properties to optimize how your analysis results will appear on {% data variables.product.prodname_dotcom %}.
{% note %}
**Note:** You must supply an explicit value for any property marked as "required". The empty string is not supported for required properties.
{% endnote %}
Any valid SARIF 2.1.0 output file can be uploaded; however, {% data variables.product.prodname_code_scanning %} will only use the following supported properties.
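For context on how such a file reaches {% data variables.product.prodname_dotcom %}, here is a hedged Octokit sketch of uploading a SARIF file via the code scanning SARIF upload endpoint. The owner, repository, commit SHA, ref, file path, and token are placeholders; the API expects the SARIF file gzip-compressed and then Base64-encoded.

```javascript
// Sketch: upload a SARIF file for a specific commit (values below are placeholders).
import { readFileSync } from "node:fs";
import { gzipSync } from "node:zlib";
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "YOUR-TOKEN" });
const sarif = gzipSync(readFileSync("results.sarif")).toString("base64");

await octokit.request("POST /repos/{owner}/{repo}/code-scanning/sarifs", {
  owner: "octo-org",
  repo: "octo-repo",
  commit_sha: "COMMIT-SHA", // placeholder
  ref: "refs/heads/main",
  sarif,
});
```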
### `sarifLog` object
@@ -210,7 +216,7 @@ These example SARIF output files show supported properties and example values.
### Example with minimum required properties
This SARIF output file has example values to show the minimum required properties for {% data variables.product.prodname_code_scanning %} results to work as expected. If you remove any properties or don't include values, this data will not be displayed correctly or sync on {% data variables.product.prodname_dotcom %}.
This SARIF output file has example values to show the minimum required properties for {% data variables.product.prodname_code_scanning %} results to work as expected. If you remove any properties, omit values, or use an empty string, this data will not be displayed correctly or sync on {% data variables.product.prodname_dotcom %}.
```json
{

View File

@@ -23,6 +23,8 @@ When an organization has an allow list, third-party applications that connect vi
## Adding an IP address allow list to a {% data variables.product.prodname_github_app %}
{% data reusables.identity-and-permissions.ipv6-allow-lists %}
{% data reusables.apps.settings-step %}
{% data reusables.user-settings.developer_settings %}
{% data reusables.user-settings.github_apps %}

View File

@@ -47,7 +47,7 @@ Rate limits for {% data variables.product.prodname_github_apps %} and {% data va
{% ifversion fpt or ghec %}
{% data variables.product.prodname_github_apps %} that are installed on an organization or a repository within an enterprise on {% data variables.product.product_location %} are subject to a limit of 15,000 requests per hour.
{% data variables.product.prodname_github_apps %} that are installed on an organization within an enterprise on {% data variables.product.product_location %} are subject to a limit of 15,000 requests per hour per organization that has installed the app.
{% endif %}

View File

@@ -37,12 +37,12 @@ $ export SECRET_TOKEN=<em>your_token</em>
## Validating payloads from GitHub
When your secret token is set, {% data variables.product.product_name %} uses it to create a hash signature with each payload. This hash signature is included with the headers of each request as `X-Hub-Signature-256`.
When your secret token is set, {% data variables.product.product_name %} uses it to create a hash signature with each payload. This hash signature is included with the headers of each request as `x-hub-signature-256`.
{% ifversion fpt or ghes or ghec %}
{% note %}
**Note:** For backward-compatibility, we also include the `X-Hub-Signature` header that is generated using the SHA-1 hash function. If possible, we recommend that you use the `X-Hub-Signature-256` header for improved security. The example below demonstrates using the `X-Hub-Signature-256` header.
**Note:** For backward-compatibility, we also include the `x-hub-signature` header that is generated using the SHA-1 hash function. If possible, we recommend that you use the `x-hub-signature-256` header for improved security. The example below demonstrates using the `x-hub-signature-256` header.
{% endnote %}
{% endif %}
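As a minimal Node.js sketch of the validation described above: compute an HMAC-SHA256 of the raw request body with your secret token, prefix it with `sha256=`, and compare it to the `x-hub-signature-256` header using a constant-time comparison. The function and variable names are illustrative, not part of any library.

```javascript
// Sketch: verify the x-hub-signature-256 header against the raw request body.
import { createHmac, timingSafeEqual } from "node:crypto";

function verifySignature(secret, rawBody, signatureHeader) {
  // signatureHeader looks like "sha256=<hex digest>"
  const expected = "sha256=" + createHmac("sha256", secret).update(rawBody).digest("hex");
  return (
    signatureHeader.length === expected.length &&
    timingSafeEqual(Buffer.from(signatureHeader), Buffer.from(expected))
  );
}
```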

View File

@@ -38,6 +38,8 @@ You can also configure allowed IP addresses for the organizations in an enterpri
{% data reusables.identity-and-permissions.about-adding-ip-allow-list-entries %}
{% data reusables.identity-and-permissions.ipv6-allow-lists %}
{% data reusables.profile.access_org %}
{% data reusables.profile.org_settings %}
{% data reusables.organizations.security %}

View File

@@ -1,6 +1,6 @@
---
title: Enabling or disabling GitHub Discussions for an organization
intro: 'You can use {% data variables.product.prodname_discussions %} in a organization as a place for your organization to have conversations that aren''t specific to a single repository within your organization.'
intro: 'You can use {% data variables.product.prodname_discussions %} in an organization as a place for your organization to have conversations that aren''t specific to a single repository within your organization.'
permissions: 'Organization owners can enable {% data variables.product.prodname_discussions %} for their organization.'
versions:
feature: discussions

View File

@@ -47,7 +47,7 @@ To set up a `www` or custom subdomain, such as `www.example.com` or `blog.exampl
{% data reusables.pages.sidebar-pages %}
4. Under "Custom domain", type your custom domain, then click **Save**. If you are publishing your site from a branch, this will create a commit that adds a `CNAME` file to the root of your source branch. If you are publishing your site with a custom {% data variables.product.prodname_actions %} workflow , no `CNAME` file is created. For more information about your publishing source, see "[Configuring a publishing source for your GitHub Pages site](/pages/getting-started-with-github-pages/configuring-a-publishing-source-for-your-github-pages-site)."
![Save custom domain button](/assets/images/help/pages/save-custom-subdomain.png)
5. Navigate to your DNS provider and create a `CNAME` record that points your subdomain to the default domain for your site. For example, if you want to use the subdomain `www.example.com` for your user site, create a `CNAME` record that points `www.example.com` to `<user>.github.io`. If you want to use the subdomain `www.anotherexample.com` for your organization site, create a `CNAME` record that points `www.anotherexample.com` to `<organization>.github.io`. The `CNAME` record should always point to `<user>.github.io` or `<organization>.github.io`, excluding the repository name. {% data reusables.pages.contact-dns-provider %} {% data reusables.pages.default-domain-information %}
5. Navigate to your DNS provider and create a `CNAME` record that points your subdomain to the default domain for your site. For example, if you want to use the subdomain `www.example.com` for your user site, create a `CNAME` record that points `www.example.com` to `<user>.github.io`. If you want to use the subdomain `another.example.com` for your organization site, create a `CNAME` record that points `another.example.com` to `<organization>.github.io`. The `CNAME` record should always point to `<user>.github.io` or `<organization>.github.io`, excluding the repository name. {% data reusables.pages.contact-dns-provider %} {% data reusables.pages.default-domain-information %}
{% indented_data_reference reusables.pages.wildcard-dns-warning spaces=3 %}
{% data reusables.command_line.open_the_multi_os_terminal %}
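After creating the record in step 5, a minimal Node.js sketch for double-checking that the `CNAME` record resolves to the default domain for your site is shown below; the domain names are placeholders, and DNS changes can take time to propagate.

```javascript
// Sketch: confirm the subdomain's CNAME points at <user>.github.io or <organization>.github.io.
import { resolveCname } from "node:dns/promises";

const records = await resolveCname("www.example.com"); // placeholder subdomain
console.log(records); // e.g. [ 'octocat.github.io' ]
console.log(records.some((record) => record.endsWith(".github.io")));
```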

View File

@@ -41,7 +41,7 @@ A check run is an individual test that is part of a check suite. Each run includ
![Check runs workflow](/assets/images/check_runs.png)
If a check run is in a incomplete state for more than 14 days, then the check run's `conclusion` becomes `stale` and appears on {% data variables.product.prodname_dotcom %} as stale with {% octicon "issue-reopened" aria-label="The issue-reopened icon" %}. Only {% data variables.product.prodname_dotcom %} can mark check runs as `stale`. For more information about possible conclusions of a check run, see the [`conclusion` parameter](/rest/reference/checks#create-a-check-run--parameters).
If a check run is in an incomplete state for more than 14 days, then the check run's `conclusion` becomes `stale` and appears on {% data variables.product.prodname_dotcom %} as stale with {% octicon "issue-reopened" aria-label="The issue-reopened icon" %}. Only {% data variables.product.prodname_dotcom %} can mark check runs as `stale`. For more information about possible conclusions of a check run, see the [`conclusion` parameter](/rest/reference/checks#create-a-check-run--parameters).
As soon as you receive the [`check_suite`](/webhooks/event-payloads/#check_suite) webhook, you can create the check run, even if the check is not complete. You can update the `status` of the check run as it completes with the values `queued`, `in_progress`, or `completed`, and you can update the `output` as more details become available. A check run can contain timestamps, a link to more details on your external site, detailed annotations for specific lines of code, and information about the analysis performed.
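To illustrate that flow, here is a hedged Octokit sketch that creates a check run when the `check_suite` webhook arrives and later marks it completed. It assumes you are authenticated as a {% data variables.product.prodname_github_app %} installation; the owner, repository, SHA, and output values are placeholders.

```javascript
// Sketch: create a check run, then report its conclusion (placeholder values throughout).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "INSTALLATION-ACCESS-TOKEN" });

const { data: checkRun } = await octokit.request("POST /repos/{owner}/{repo}/check-runs", {
  owner: "octo-org",
  repo: "octo-repo",
  name: "example-check",
  head_sha: "HEAD-SHA-FROM-CHECK_SUITE-PAYLOAD", // placeholder
  status: "in_progress",
});

// ...run the analysis, then complete the check run:
await octokit.request("PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", {
  owner: "octo-org",
  repo: "octo-repo",
  check_run_id: checkRun.id,
  status: "completed",
  conclusion: "success",
  output: { title: "Example check", summary: "All checks passed." },
});
```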

View File

@@ -19,7 +19,7 @@ To use this API, the authenticated user must be a team maintainer or an owner of
**Notes:**
- The external groups API is only available for organizations that are part of a enterprise using {% data variables.product.prodname_emus %}. For more information, see "[About Enterprise Managed Users](/admin/authentication/managing-your-enterprise-users-with-your-identity-provider/about-enterprise-managed-users)."
- The external groups API is only available for organizations that are part of an enterprise using {% data variables.product.prodname_emus %}. For more information, see "[About Enterprise Managed Users](/admin/authentication/managing-your-enterprise-users-with-your-identity-provider/about-enterprise-managed-users)."
- If your organization uses team synchronization, you can use the Team Synchronization API. For more information, see "[Team synchronization API](#team-synchronization)."
{% endnote %}
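As a hedged sketch of using the external groups API mentioned above, the following Octokit call lists an organization's external groups; the organization name and token are placeholders, and the response shape shown in the comment is an assumption based on the linked REST reference.

```javascript
// Sketch: list external groups for an organization in an enterprise using Enterprise Managed Users.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "YOUR-TOKEN" });

const { data } = await octokit.request("GET /orgs/{org}/external-groups", {
  org: "octo-org", // placeholder
});
console.log(data.groups.map((group) => group.group_name)); // assumes a `groups` array in the response
```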

View File

@@ -25,7 +25,7 @@ shortTitle: Sponsor a contributor
You can sponsor an account on behalf of your personal account to invest in projects that you personally benefit from. You can sponsor an account on behalf of your organization for many reasons.
- Sustaining specific libraries that your organization's work depends on
- Investing in the ecosystem you rely on as a organization (such as blockchain)
- Investing in the ecosystem you rely on as an organization (such as blockchain)
- Developing brand awareness as an organization that values open source
- Thanking open source developers for building libraries that complement the product your organization offers

View File

@@ -0,0 +1,4 @@
# Reference #7495
# Documentation for audit log streaming to a Datadog endpoint
versions:
ghec: '*'

View File

@@ -2,7 +2,7 @@ You can sort and filter {% data variables.product.prodname_dependabot_alerts %}
| Option | Description | Example |
|:---|:---|:---|
| `ecosystem` | Displays alerts for the selected ecosystem | Use `ecosystem:npm` to show {% data variables.product.prodname_dependabot_alerts %} for npm |{% ifversion fpt or ghec or ghes > 3.5 %}
| `ecosystem` | Displays alerts for the selected ecosystem | Use `ecosystem:npm` to show {% data variables.product.prodname_dependabot_alerts %} for npm |{% ifversion fpt or ghec or ghes > 3.5 or ghae-issue-7891 %}
| `has` | Displays alerts meeting the selected filter criteria | Use `has:patch` to show alerts related to advisories that have a patch{% ifversion dependabot-alerts-vulnerable-calls %}</br>Use `has:vulnerable-calls` to show alerts relating to calls to vulnerable functions{% endif %} |{% endif %}
| `is` | Displays alerts based on their state | Use `is:open` to show open alerts |
| `manifest` | Displays alerts for the selected manifest | Use `manifest:webwolf/pom.xml` to show alerts on the pom.xml file of the webwolf application |

View File

@@ -1,4 +1,4 @@
1. Define the key as a environment variable for {% data variables.product.product_name %}, replacing `<YOUR-KEY-ID>` with the GPG key ID.
1. Define the key as an environment variable for {% data variables.product.product_name %}, replacing `<YOUR-KEY-ID>` with the GPG key ID.
```bash{:copy}
ghe-config "secrets.gpgverify.web-signing-key" "$(gpg --export-secret-keys -a <YOUR-KEY-ID> | awk '{printf "%s\\n", $0}')"

View File

@@ -0,0 +1,7 @@
{% ifversion fpt or ghec %}
{% note %}
**Note:** {% data variables.product.company_short %} is gradually rolling out support for IPv6. As {% data variables.product.prodname_dotcom %} services continue to add IPv6 support, we will start recognizing IPv6 addresses of {% data variables.product.prodname_dotcom %} users. To prevent possible access interruptions, please ensure you have added any necessary IPv6 addresses to your IP allow list.
{% endnote %}
{% endif %}

The remaining changed files in this diff are Git LFS pointer files for binary image assets; each hunk replaces the previous `oid sha256:` and `size` lines with new values referencing the updated binary content.
Some files were not shown because too many files have changed in this diff.