From a8a6e4554a5ac974417dd45dadb9972902a6f35c Mon Sep 17 00:00:00 2001
From: Grace Park
Date: Mon, 26 Jun 2023 10:21:48 -0700
Subject: [PATCH] Fix for blank lines around code fences (#38255)

---
 .../setting-your-commit-email-address.md | 8 +++
 .../managing-multiple-accounts.md | 2 +
 .../building-and-testing-go.md | 2 +
 .../building-and-testing-powershell.md | 1 +
 .../building-and-testing-ruby.md | 4 ++
 .../building-and-testing-swift.md | 2 +
 .../creating-a-composite-action.md | 3 +
 .../creating-a-docker-container-action.md | 8 +++
 .../creating-a-javascript-action.md | 5 ++
 .../developing-a-third-party-cli-action.md | 2 +
 .../metadata-syntax-for-github-actions.md | 6 ++
 ...ing-to-amazon-elastic-container-service.md | 2 +
 .../deploying-docker-to-azure-app-service.md | 1 +
 .../deploying-to-google-kubernetes-engine.md | 14 ++++
 ...-on-macos-runners-for-xcode-development.md | 3 +
 ...ting-custom-deployment-protection-rules.md | 1 +
 .../configuring-openid-connect-in-azure.md | 2 +
 ...openid-connect-in-google-cloud-platform.md | 2 +
 ...uring-openid-connect-in-hashicorp-vault.md | 1 +
 ...ncurrency-expressions-and-a-test-matrix.md | 28 ++++++++
 ...g-scripts-to-test-your-code-on-a-runner.md | 21 ++++++
 .../using-the-github-cli-on-a-runner.md | 19 ++++++
 ...-hosted-runner-application-as-a-service.md | 25 ++++++++
 .../actions/learn-github-actions/contexts.md | 11 ++++
 .../learn-github-actions/expressions.md | 10 +++
 .../actions/learn-github-actions/variables.md | 6 ++
 ...nting-on-an-issue-when-a-label-is-added.md | 1 +
 ...oving-assigned-issues-on-project-boards.md | 1 +
 .../scheduling-issue-creation.md | 1 +
 ...ure-devops-with-github-actions-importer.md | 1 +
 ...rom-bamboo-with-github-actions-importer.md | 2 +
 ...m-circleci-with-github-actions-importer.md | 1 +
 ...rom-gitlab-with-github-actions-importer.md | 1 +
 ...om-jenkins-with-github-actions-importer.md | 1 +
 ...-travis-ci-with-github-actions-importer.md | 1 +
 .../supplemental-arguments-and-settings.md | 1 +
 ...-from-azure-pipelines-to-github-actions.md | 18 ++++++
 ...grating-from-circleci-to-github-actions.md | 12 ++++
 ...ting-from-gitlab-cicd-to-github-actions.md | 30 +++++++++
 ...rating-from-travis-ci-to-github-actions.md | 20 ++++++
 .../publishing-java-packages-with-gradle.md | 6 ++
 .../publishing-java-packages-with-maven.md | 4 ++
 content/actions/quickstart.md | 1 +
 .../security-guides/encrypted-secrets.md | 8 +++
 .../security-hardening-for-github-actions.md | 8 +++
 .../about-service-containers.md | 4 ++
 .../customizing-github-hosted-runners.md | 2 +
 .../using-workflows/about-workflows.md | 2 +
 ...hing-dependencies-to-speed-up-workflows.md | 9 +++
 ...starter-workflows-for-your-organization.md | 3 +
 .../events-that-trigger-workflows.md | 2 +
 .../using-workflows/reusing-workflows.md | 18 ++++++
 .../workflow-commands-for-github-actions.md | 7 ++
 .../workflow-syntax-for-github-actions.md | 23 +++++++
 ...b-advanced-security-for-your-enterprise.md | 13 ++++
 ...ot-to-work-with-limited-internet-access.md | 1 +
 ...he-dependency-graph-for-your-enterprise.md | 1 +
 .../accessing-the-management-console.md | 2 +
 ...onfiguring-an-outbound-web-proxy-server.md | 1 +
 .../configuring-built-in-firewall-rules.md | 9 +++
 .../command-line-utilities.md | 35 ++++++++++
 .../configuring-backups-on-your-appliance.md | 6 ++
 ...configuring-host-keys-for-your-instance.md | 2 +
 .../configuring-rate-limits.md | 3 +
 ...guring-ssh-connections-to-your-instance.md | 1 +
 .../configuring-web-commit-signing.md | 3 +
 .../troubleshooting-tls-errors.md | 8 +++
 ...-approving-a-domain-for-your-enterprise.md | 2 +
 .../configuring-a-repository-cache.md | 12 +++-
 .../cluster-network-configuration.md | 2 +
 ...ng-a-cluster-node-running-data-services.md | 8 +++
 ...ter-nodes-with-node-eligibility-service.md | 11 ++++
 .../monitoring-the-health-of-your-cluster.md | 7 ++
 .../rebalancing-cluster-workloads.md | 3 +
 .../upgrading-a-cluster.md | 7 ++
 .../creating-a-high-availability-replica.md | 19 ++++++
 ...ng-a-failover-to-your-replica-appliance.md | 5 ++
 ...ering-a-high-availability-configuration.md | 6 ++
 .../removing-a-high-availability-replica.md | 6 ++
 .../monitoring-using-snmp.md | 3 +
 .../increasing-storage-capacity.md | 11 ++++
 ...n-issues-with-upgrades-to-your-instance.md | 1 +
 ...ng-from-github-enterprise-1110x-to-2123.md | 1 +
 .../upgrade-requirements.md | 2 +
 .../upgrading-github-enterprise-server.md | 8 +++
 ...rise-server-with-github-actions-enabled.md | 2 +
 ...ting-github-actions-for-your-enterprise.md | 12 ++++
 .../using-a-staging-environment.md | 1 +
 ...g-github-actions-with-amazon-s3-storage.md | 3 +
 ...ithub-actions-with-google-cloud-storage.md | 1 +
 ...-hosted-runners-without-internet-access.md | 1 +
 ...talling-github-enterprise-server-on-aws.md | 5 ++
 ...lling-github-enterprise-server-on-azure.md | 3 +
 ...erprise-server-on-google-cloud-platform.md | 7 ++
 ...ing-github-enterprise-server-on-hyper-v.md | 10 +++
 .../setting-up-a-staging-instance.md | 2 +
 .../viewing-push-logs.md | 2 +
 ...aming-the-audit-log-for-your-enterprise.md | 2 +
 ...inio-storage-bucket-for-github-packages.md | 6 ++
 ...creating-a-pre-receive-hook-environment.md | 4 ++
 .../creating-a-pre-receive-hook-script.md | 1 +
 ...-large-file-storage-for-your-enterprise.md | 7 ++
 .../migrating-to-internal-repositories.md | 4 ++
 ...moting-or-demoting-a-site-administrator.md | 2 +
 .../suspending-and-unsuspending-users.md | 2 +
 .../authenticating-as-a-github-app.md | 1 +
 .../managing-private-keys-for-github-apps.md | 2 +
 .../creating-ci-tests-with-the-checks-api.md | 1 +
 ...ment-environment-to-create-a-github-app.md | 1 +
 ...-ssh-key-and-adding-it-to-the-ssh-agent.md | 15 +++++
 .../testing-your-ssh-connection.md | 3 +
 .../about-anonymized-urls.md | 4 ++
 ...moving-sensitive-data-from-a-repository.md | 13 ++++
 .../reviewing-your-ssh-keys.md | 3 +
 .../associating-an-email-with-your-gpg-key.md | 10 +++
 .../generating-a-new-gpg-key.md | 6 ++
 .../signing-commits.md | 4 ++
 .../signing-tags.md | 3 +
 .../telling-git-about-your-signing-key.md | 7 ++
 .../error-permission-denied-publickey.md | 17 +++++
 .../error-were-doing-an-ssh-key-audit.md | 1 +
 ...nformation-for-github-advanced-security.md | 1 +
 .../customizing-code-scanning.md | 7 ++
 ...onfiguring-codeql-cli-in-your-ci-system.md | 2 +
 ...installing-codeql-cli-in-your-ci-system.md | 1 +
 ...ng-from-the-codeql-runner-to-codeql-cli.md | 21 ++++++
 .../about-codeql-packs.md | 17 +++++
 .../creating-codeql-databases.md | 1 +
 .../creating-codeql-query-suites.md | 1 +
 .../publishing-and-using-codeql-packs.md | 5 ++
 .../testing-custom-queries.md | 4 ++
 ...ion-options-for-the-dependabot.yml-file.md | 55 ++++++++++++++++
 ...ss-to-private-registries-for-dependabot.md | 2 +
 ...-dependabot-access-to-public-registries.md | 4 ++
 .../troubleshooting-dependabot-errors.md | 2 +
 .../secret-scanning-partner-program.md | 5 ++
 .../about-the-github-advisory-database.md | 1 +
 .../configuring-dependency-review.md | 6 ++
 ...ing-the-machine-type-for-your-codespace.md | 1 +
 .../changing-the-shell-in-a-codespace.md | 1 +
 .../adding-features-to-a-devcontainer-file.md | 2 +
 ...mplate-repository-for-github-codespaces.md | 1 +
 ...ing-creation-and-deletion-of-codespaces.md | 1 +
 ...-gpg-verification-for-github-codespaces.md | 1 +
 .../troubleshooting-included-usage.md | 2 +
 ...ngle-issue-template-for-your-repository.md | 1 +
 .../getting-started-with-github-copilot.md | 14 ++++
 .../associating-text-editors-with-git.md | 8 +++
 .../caching-your-github-credentials-in-git.md | 3 +
 .../configuring-git-to-handle-line-endings.md | 3 +
 .../ignoring-files.md | 4 ++
 .../managing-remote-repositories.md | 13 ++++
 .../setting-your-username-in-git.md | 4 ++
 .../quickstart/contributing-to-projects.md | 2 +
 content/get-started/quickstart/fork-a-repo.md | 5 ++
 .../using-git/about-git-subtree-merges.md | 13 ++++
 ...g-a-subfolder-out-into-a-new-repository.md | 5 ++
 .../support-for-subversion-clients.md | 5 ++
 .../basic-writing-and-formatting-syntax.md | 1 +
 .../quickstart-for-writing-on-github.md | 6 ++
 .../autolinked-references-and-urls.md | 1 +
 .../writing-mathematical-expressions.md | 2 +
 content/github-cli/github-cli/quickstart.md | 4 ++
 .../guides/managing-enterprise-accounts.md | 5 ++
 .../guides/migrating-from-rest-to-graphql.md | 4 ++
 .../migrating-graphql-global-node-ids.md | 3 +
 .../using-the-api-to-manage-projects.md | 64 +++++++++++++++++++
 .../adding-locally-hosted-code-to-github.md | 12 ++++
 .../importing-a-mercurial-repository.md | 2 +
 .../importing-a-subversion-repository.md | 2 +
 ...l-git-repository-using-the-command-line.md | 5 ++
 ...l-systems-with-the-administrative-shell.md | 15 +++++
 ...tion-data-from-github-enterprise-server.md | 14 ++++
 ...exporting-migration-data-from-githubcom.md | 6 ++
 ...rating-data-to-github-enterprise-server.md | 3 +
 ...igrate-data-to-github-enterprise-server.md | 5 ++
 ...nnequins-for-github-enterprise-importer.md | 2 +
 ...om-githubcom-to-github-enterprise-cloud.md | 1 +
 ...pproving-a-domain-for-your-organization.md | 2 +
 .../connecting-a-repository-to-a-package.md | 12 ++++
 ...nstalling-a-package-with-github-actions.md | 1 +
 content/packages/quickstart.md | 16 +++++
 .../working-with-the-apache-maven-registry.md | 5 ++
 .../working-with-the-container-registry.md | 5 ++
 .../working-with-the-docker-registry.md | 36 +++++++++
 .../working-with-the-gradle-registry.md | 6 ++
 .../working-with-the-npm-registry.md | 8 +++
 .../working-with-the-nuget-registry.md | 14 ++++
 .../working-with-the-rubygems-registry.md | 5 ++
 ...ustom-domain-for-your-github-pages-site.md | 3 +
 ...ing-your-custom-domain-for-github-pages.md | 4 ++
 ...tom-404-page-for-your-github-pages-site.md | 2 +
 ...yll-build-errors-for-github-pages-sites.md | 3 +
 ...-to-your-github-pages-site-using-jekyll.md | 2 +
 ...-to-your-github-pages-site-using-jekyll.md | 4 ++
 ...reating-a-github-pages-site-with-jekyll.md | 20 ++++++
 ...r-github-pages-site-locally-with-jekyll.md | 2 +
 ...yll-build-errors-for-github-pages-sites.md | 1 +
 .../about-merge-conflicts.md | 1 +
 ...a-merge-conflict-using-the-command-line.md | 18 ++++++
 .../about-status-checks.md | 3 +
 ...pull-request-branch-created-from-a-fork.md | 10 +++
 .../checking-out-pull-requests-locally.md | 7 ++
 ...figuring-a-remote-repository-for-a-fork.md | 5 ++
 .../working-with-forks/syncing-a-fork.md | 2 +
 .../changing-a-commit-message.md | 6 ++
 ...g-a-commit-on-behalf-of-an-organization.md | 2 +
 ...creating-a-commit-with-multiple-authors.md | 3 +
 .../renaming-a-branch.md | 1 +
 .../troubleshooting-required-status-checks.md | 2 +
 .../duplicating-a-repository.md | 22 +++++++
 .../about-citation-files.md | 4 ++
 .../about-code-owners.md | 1 +
...ing-a-sponsor-button-in-your-repository.md | 1 + .../automatically-generated-release-notes.md | 4 ++ .../adding-a-file-to-a-repository.md | 4 ++ .../moving-a-file-to-a-new-location.md | 8 +++ .../managing-files/renaming-a-file.md | 6 ++ .../about-large-files-on-github.md | 5 ++ .../configuring-git-large-file-storage.md | 7 ++ .../installing-git-large-file-storage.md | 14 ++++ ...oving-files-from-git-large-file-storage.md | 3 + ...-git-large-file-storage-upload-failures.md | 3 + content/rest/enterprise-admin/index.md | 1 + content/rest/quickstart.md | 6 ++ content/rest/search.md | 1 + ...understanding-github-code-search-syntax.md | 4 ++ .../providing-data-to-github-support.md | 6 ++ .../events/github-event-types.md | 1 + .../actions/actions-group-concurrency.md | 10 +++ .../actions-vars-context-example-usage.md | 2 + ...gure-storage-provider-platform-commands.md | 3 + .../actions/configure-storage-provider.md | 3 + .../actions/example-github-runner.md | 4 ++ .../actions/installing-actions-importer.md | 1 + .../section-choosing-the-runner-for-a-job.md | 4 ++ .../jobs/section-defining-outputs-for-jobs.md | 2 + ...running-jobs-in-a-container-credentials.md | 2 + .../section-using-environments-for-jobs.md | 6 ++ .../actions/matrix-variable-example.md | 2 + .../pass-inputs-to-reusable-workflows.md | 4 ++ .../using-context-or-environment-variables.md | 2 + .../workflow-basic-example-and-explanation.md | 12 ++++ data/reusables/audit_log/create-s3-policy.md | 1 + .../troubleshooting-simple-browser.md | 1 + data/reusables/command_line/git-clone-url.md | 1 + .../command_line/local-clone-created.md | 1 + .../windows_git_bash_turn_on_ssh_agent.md | 1 + ...ndows_git_for_windows_turn_on_ssh_agent.md | 1 + .../grant-migrator-role-pat.md | 3 + .../migrate-multiple-repos.md | 3 + .../enterprise/apply-configuration.md | 1 + .../enterprise-backup-utils-verify-upgrade.md | 2 + .../configuration-recognized.md | 1 + .../download-package.md | 1 + .../generate-replication-key-pair.md | 1 + .../enterprise_installation/replica-ssh.md | 1 + .../replication-command.md | 2 + .../ssh-into-target-instance.md | 1 + .../verify-replication-channel.md | 1 + .../unlocking-on-instances.md | 1 + .../add-key-to-web-flow-user.md | 1 + data/reusables/git/git-push.md | 1 + data/reusables/gpg/configure-ssh-signing.md | 1 + data/reusables/gpg/copy-gpg-key-id.md | 1 + data/reusables/gpg/paste-gpg-key-id.md | 2 + data/reusables/gpg/paste-ssh-public-key.md | 1 + data/reusables/gpg/x-509-key.md | 15 +++++ ...uthenticate-to-container-registry-steps.md | 4 ++ .../package_registry/checksum-maven-plugin.md | 3 + .../create-npmrc-owner-step.md | 6 ++ data/reusables/pages/remove-submodule.md | 1 + 272 files changed, 1552 insertions(+), 2 deletions(-) diff --git a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/setting-your-commit-email-address.md b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/setting-your-commit-email-address.md index 126963c313..8ce8447412 100644 --- a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/setting-your-commit-email-address.md +++ b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/setting-your-commit-email-address.md @@ -75,14 +75,18 @@ You can use the `git config` command to change the email address you associate w {% data 
reusables.command_line.open_the_multi_os_terminal %} 2. {% data reusables.user-settings.set_your_email_address_in_git %} + ```shell git config --global user.email "YOUR_EMAIL" ``` + 3. {% data reusables.user-settings.confirm_git_email_address_correct %} + ```shell $ git config --global user.email email@example.com ``` + 4. {% data reusables.user-settings.link_email_with_your_account %} ### Setting your email address for a single repository @@ -94,12 +98,16 @@ You can change the email address associated with commits you make in a single re {% data reusables.command_line.open_the_multi_os_terminal %} 2. Change the current working directory to the local repository where you want to configure the email address that you associate with your Git commits. 3. {% data reusables.user-settings.set_your_email_address_in_git %} + ```shell git config user.email "YOUR_EMAIL" ``` + 4. {% data reusables.user-settings.confirm_git_email_address_correct %} + ```shell $ git config user.email email@example.com ``` + 5. {% data reusables.user-settings.link_email_with_your_account %} diff --git a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-personal-account/managing-multiple-accounts.md b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-personal-account/managing-multiple-accounts.md index 15f6ce1fbb..836463974d 100644 --- a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-personal-account/managing-multiple-accounts.md +++ b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-personal-account/managing-multiple-accounts.md @@ -51,6 +51,7 @@ Alternatively, if you want to use the HTTPS protocol for both accounts, you can ```shell copy git credential-osxkeychain erase https://github.com ``` + {% data reusables.git.clear-stored-gcm-credentials %} {% data reusables.git.cache-on-repository-path %} {% data reusables.accounts.create-personal-access-tokens %} @@ -70,6 +71,7 @@ Alternatively, if you want to use the HTTPS protocol for both accounts, you can ```shell copy cmdkey /delete:LegacyGeneric:target=git:https://github.com ``` + {% data reusables.git.cache-on-repository-path %} {% data reusables.accounts.create-personal-access-tokens %} {% data reusables.git.provide-credentials %} diff --git a/content/actions/automating-builds-and-tests/building-and-testing-go.md b/content/actions/automating-builds-and-tests/building-and-testing-go.md index 62b5ed9438..3df9bdb168 100644 --- a/content/actions/automating-builds-and-tests/building-and-testing-go.md +++ b/content/actions/automating-builds-and-tests/building-and-testing-go.md @@ -141,6 +141,7 @@ You can use the `cache-dependency-path` parameter for cases when multiple depend go-version: '1.17' cache-dependency-path: subdir/go.sum ``` + {% else %} When caching is enabled, the `setup-go` action searches for the dependency file, `go.sum`, in the repository root and uses the hash of the dependency file as a part of the cache key. @@ -162,6 +163,7 @@ Alternatively, you can use the `cache-dependency-path` parameter for cases when cache: true cache-dependency-path: subdir/go.sum ``` + {% endif %} If you have a custom requirement or need finer controls for caching, you can use the [`cache` action](https://github.com/marketplace/actions/cache). For more information, see "[AUTOTITLE](/actions/using-workflows/caching-dependencies-to-speed-up-workflows)." 
diff --git a/content/actions/automating-builds-and-tests/building-and-testing-powershell.md b/content/actions/automating-builds-and-tests/building-and-testing-powershell.md index b22b4de348..be94371cd3 100644 --- a/content/actions/automating-builds-and-tests/building-and-testing-powershell.md +++ b/content/actions/automating-builds-and-tests/building-and-testing-powershell.md @@ -73,6 +73,7 @@ jobs: ![Screenshot of a workflow run failure for a Pester test. Test reports "Expected $true, but got $false" and "Error: Process completed with exit code 1."](/assets/images/help/repository/actions-failed-pester-test-updated.png) - `Invoke-Pester Unit.Tests.ps1 -Passthru` - Uses Pester to execute tests defined in a file called `Unit.Tests.ps1`. For example, to perform the same test described above, the `Unit.Tests.ps1` will contain the following: + ``` Describe "Check results file is present" { It "Check results file is present" { diff --git a/content/actions/automating-builds-and-tests/building-and-testing-ruby.md b/content/actions/automating-builds-and-tests/building-and-testing-ruby.md index cf0e4d9db2..95a81e63af 100644 --- a/content/actions/automating-builds-and-tests/building-and-testing-ruby.md +++ b/content/actions/automating-builds-and-tests/building-and-testing-ruby.md @@ -89,11 +89,13 @@ Alternatively, you can check a `.ruby-version` file into the root of your repos You can add a matrix strategy to run your workflow with more than one version of Ruby. For example, you can test your code against the latest patch releases of versions 3.1, 3.0, and 2.7. {% raw %} + ```yaml strategy: matrix: ruby-version: ['3.1', '3.0', '2.7'] ``` + {% endraw %} Each version of Ruby specified in the `ruby-version` array creates a job that runs the same steps. The {% raw %}`${{ matrix.ruby-version }}`{% endraw %} context is used to access the current job's version. For more information about matrix strategies and contexts, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions)" and "[AUTOTITLE](/actions/learn-github-actions/contexts)." @@ -156,12 +158,14 @@ The `setup-ruby` actions provides a method to automatically handle the caching o To enable caching, set the following. {% raw %} + ```yaml steps: - uses: ruby/setup-ruby@ec02537da5712d66d4d50a0f33b7eb52773b5ed1 with: bundler-cache: true ``` + {% endraw %} This will configure bundler to install your gems to `vendor/cache`. For each successful run of your workflow, this folder will be cached by {% data variables.product.prodname_actions %} and re-downloaded for subsequent workflow runs. A hash of your gemfile.lock and the Ruby version are used as the cache key. If you install any new gems, or change a version, the cache will be invalidated and bundler will do a fresh install. diff --git a/content/actions/automating-builds-and-tests/building-and-testing-swift.md b/content/actions/automating-builds-and-tests/building-and-testing-swift.md index 89c24d5f81..329088898e 100644 --- a/content/actions/automating-builds-and-tests/building-and-testing-swift.md +++ b/content/actions/automating-builds-and-tests/building-and-testing-swift.md @@ -101,6 +101,7 @@ jobs: You can configure your job to use a single specific version of Swift, such as `5.3.3`. 
{% raw %} + ```yaml copy steps: - uses: swift-actions/setup-swift@65540b95f51493d65f5e59e97dcef9629ddf11bf @@ -109,6 +110,7 @@ steps: - name: Get swift version run: swift --version # Swift 5.3.3 ``` + {% endraw %} ## Building and testing your code diff --git a/content/actions/creating-actions/creating-a-composite-action.md b/content/actions/creating-actions/creating-a-composite-action.md index 807977f3c9..3bc61819e0 100644 --- a/content/actions/creating-actions/creating-a-composite-action.md +++ b/content/actions/creating-actions/creating-a-composite-action.md @@ -51,6 +51,7 @@ Before you begin, you'll create a repository on {% ifversion ghae %}{% data vari ``` 1. From your terminal, check in your `goodbye.sh` file. + ```shell copy git add goodbye.sh git commit -m "Add goodbye script" @@ -63,6 +64,7 @@ Before you begin, you'll create a repository on {% ifversion ghae %}{% data vari {% raw %} **action.yml** + ```yaml copy name: 'Hello World' description: 'Greet someone' @@ -121,6 +123,7 @@ The following workflow code uses the completed hello world action that you made Copy the workflow code into a `.github/workflows/main.yml` file in another repository, but replace `actions/hello-world-composite-action@v1` with the repository and tag you created. You can also replace the `who-to-greet` input with your name. **.github/workflows/main.yml** + ```yaml copy on: [push] diff --git a/content/actions/creating-actions/creating-a-docker-container-action.md b/content/actions/creating-actions/creating-a-docker-container-action.md index 80a3b465a2..796b81e96c 100644 --- a/content/actions/creating-actions/creating-a-docker-container-action.md +++ b/content/actions/creating-actions/creating-a-docker-container-action.md @@ -58,6 +58,7 @@ Before you begin, you'll need to create a {% data variables.product.prodname_dot In your new `hello-world-docker-action` directory, create a new `Dockerfile` file. Make sure that your filename is capitalized correctly (use a capital `D` but not a capital `f`) if you're having issues. For more information, see "[AUTOTITLE](/actions/creating-actions/dockerfile-support-for-github-actions)." **Dockerfile** + ```Dockerfile copy # Container image that runs your code FROM alpine:3.10 @@ -75,6 +76,7 @@ Create a new `action.yml` file in the `hello-world-docker-action` directory you {% raw %} **action.yml** + ```yaml copy # action.yml name: 'Hello World' @@ -93,6 +95,7 @@ runs: args: - ${{ inputs.who-to-greet }} ``` + {% endraw %} This metadata defines one `who-to-greet` input and one `time` output parameter. To pass inputs to the Docker container, you should declare the input using `inputs` and pass the input in the `args` keyword. Everything you include in `args` is passed to the container, but for better discoverability for users of your action, we recommended using inputs. @@ -110,6 +113,7 @@ Next, the script gets the current time and sets it as an output variable that ac 1. Add the following code to your `entrypoint.sh` file. **entrypoint.sh** + ```shell copy #!/bin/sh -l @@ -120,6 +124,7 @@ Next, the script gets the current time and sets it as an output variable that ac {%- else %} echo "::set-output name=time::$time" {%- endif %} + ``` If `entrypoint.sh` executes without any errors, the action's status is set to `success`. You can also explicitly set exit codes in your action's code to provide an action's status. For more information, see "[AUTOTITLE](/actions/creating-actions/setting-exit-codes-for-actions)." 
@@ -153,6 +158,7 @@ In your `hello-world-docker-action` directory, create a `README.md` file that sp - An example of how to use your action in a workflow. **README.md** + ```markdown copy # Hello world docker action @@ -205,6 +211,7 @@ Now you're ready to test your action out in a workflow. The following workflow code uses the completed _hello world_ action in the public [`actions/hello-world-docker-action`](https://github.com/actions/hello-world-docker-action) repository. Copy the following workflow example code into a `.github/workflows/main.yml` file, but replace the `actions/hello-world-docker-action` with your repository and action name. You can also replace the `who-to-greet` input with your name. {% ifversion fpt or ghec %}Public actions can be used even if they're not published to {% data variables.product.prodname_marketplace %}. For more information, see "[AUTOTITLE](/actions/creating-actions/publishing-actions-in-github-marketplace#publishing-an-action)." {% endif %} **.github/workflows/main.yml** + ```yaml copy on: [push] @@ -228,6 +235,7 @@ jobs: Copy the following example workflow code into a `.github/workflows/main.yml` file in your action's repository. You can also replace the `who-to-greet` input with your name. {% ifversion fpt or ghec %}This private action can't be published to {% data variables.product.prodname_marketplace %}, and can only be used in this repository.{% endif %} **.github/workflows/main.yml** + ```yaml copy on: [push] diff --git a/content/actions/creating-actions/creating-a-javascript-action.md b/content/actions/creating-actions/creating-a-javascript-action.md index 3eb5b29e3e..8e85411fd2 100644 --- a/content/actions/creating-actions/creating-a-javascript-action.md +++ b/content/actions/creating-actions/creating-a-javascript-action.md @@ -105,6 +105,7 @@ GitHub Actions provide context information about the webhook event, Git refs, wo Add a new file called `index.js`, with the following code. {% raw %} + ```javascript copy const core = require('@actions/core'); const github = require('@actions/github'); @@ -122,6 +123,7 @@ try { core.setFailed(error.message); } ``` + {% endraw %} If an error is thrown in the above `index.js` example, `core.setFailed(error.message);` uses the actions toolkit [`@actions/core`](https://github.com/actions/toolkit/tree/main/packages/core) package to log a message and set a failing exit code. For more information, see "[AUTOTITLE](/actions/creating-actions/setting-exit-codes-for-actions)." @@ -224,6 +226,7 @@ This example demonstrates how your new public action can be run from within an e Copy the following YAML into a new file at `.github/workflows/main.yml`, and update the `uses: octocat/hello-world-javascript-action@v1.1` line with your username and the name of the public repository you created above. You can also replace the `who-to-greet` input with your name. {% raw %} + ```yaml copy on: [push] @@ -241,6 +244,7 @@ jobs: - name: Get the output time run: echo "The time was ${{ steps.hello.outputs.time }}" ``` + {% endraw %} When this workflow is triggered, the runner will download the `hello-world-javascript-action` action from your public repository and then execute it. @@ -250,6 +254,7 @@ When this workflow is triggered, the runner will download the `hello-world-javas Copy the workflow code into a `.github/workflows/main.yml` file in your action's repository. You can also replace the `who-to-greet` input with your name. 
**.github/workflows/main.yml** + ```yaml copy on: [push] diff --git a/content/actions/creating-actions/developing-a-third-party-cli-action.md b/content/actions/creating-actions/developing-a-third-party-cli-action.md index afa557c4af..7dd4352ca8 100644 --- a/content/actions/creating-actions/developing-a-third-party-cli-action.md +++ b/content/actions/creating-actions/developing-a-third-party-cli-action.md @@ -36,6 +36,7 @@ The following script demonstrates how you can get a user-specified version as in {% data variables.product.prodname_dotcom %} provides [`actions/toolkit`](https://github.com/actions/toolkit), which is a set of packages that helps you create actions. This example uses the [`actions/core`](https://github.com/actions/toolkit/tree/main/packages/core) and [`actions/tool-cache`](https://github.com/actions/toolkit/tree/main/packages/tool-cache) packages. {% raw %} + ```javascript copy const core = require('@actions/core'); const tc = require('@actions/tool-cache'); @@ -56,6 +57,7 @@ async function setup() { module.exports = setup ``` + {% endraw %} To use this script, replace `getDownloadURL` with a function that downloads your CLI. You will also need to create an actions metadata file (`action.yml`) that accepts a `version` input and that runs this script. For full details about how to create an action, see "[AUTOTITLE](/actions/creating-actions/creating-a-javascript-action)." diff --git a/content/actions/creating-actions/metadata-syntax-for-github-actions.md b/content/actions/creating-actions/metadata-syntax-for-github-actions.md index fbb8b39108..7dff981d34 100644 --- a/content/actions/creating-actions/metadata-syntax-for-github-actions.md +++ b/content/actions/creating-actions/metadata-syntax-for-github-actions.md @@ -115,6 +115,7 @@ outputs: ### Example: Declaring outputs for composite actions {% raw %} + ```yaml outputs: random-number: @@ -131,6 +132,7 @@ runs: {%- endif %}{% raw %} shell: bash ``` + {% endraw %} ### `outputs..value` @@ -235,6 +237,7 @@ For example, this `cleanup.js` will only run on Linux-based runners: **Optional** The command you want to run. This can be inline or a script in your action repository: {% raw %} + ```yaml runs: using: "composite" @@ -242,6 +245,7 @@ runs: - run: ${{ github.action_path }}/test/script.sh shell: bash ``` + {% endraw %} Alternatively, you can use `$GITHUB_ACTION_PATH`: @@ -447,6 +451,7 @@ For more information about using the `CMD` instruction with {% data variables.pr #### Example: Defining arguments for the Docker container {% raw %} + ```yaml runs: using: 'docker' @@ -456,6 +461,7 @@ runs: - 'foo' - 'bar' ``` + {% endraw %} ## `branding` diff --git a/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-amazon-elastic-container-service.md b/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-amazon-elastic-container-service.md index fdcc1babb0..4c24ef2517 100644 --- a/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-amazon-elastic-container-service.md +++ b/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-amazon-elastic-container-service.md @@ -47,6 +47,7 @@ Before creating your {% data variables.product.prodname_actions %} workflow, you aws ecr create-repository \ --repository-name MY_ECR_REPOSITORY \ --region MY_AWS_REGION + ```{% endraw %} Ensure that you use the same Amazon ECR repository name (represented here by `MY_ECR_REPOSITORY`) for the `ECR_REPOSITORY` variable in the workflow below. 
@@ -65,6 +66,7 @@ Before creating your {% data variables.product.prodname_actions %} workflow, you {% raw %}```bash copy aws ecs register-task-definition --generate-cli-skeleton + ```{% endraw %} Ensure that you set the `ECS_TASK_DEFINITION` variable in the workflow below as the path to the JSON file. diff --git a/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-azure/deploying-docker-to-azure-app-service.md b/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-azure/deploying-docker-to-azure-app-service.md index b188ee8766..253ac247bf 100644 --- a/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-azure/deploying-docker-to-azure-app-service.md +++ b/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-azure/deploying-docker-to-azure-app-service.md @@ -63,6 +63,7 @@ Before creating your {% data variables.product.prodname_actions %} workflow, you --name MY_WEBAPP_NAME \ --resource-group MY_RESOURCE_GROUP \ --settings DOCKER_REGISTRY_SERVER_URL=https://ghcr.io DOCKER_REGISTRY_SERVER_USERNAME=MY_REPOSITORY_OWNER DOCKER_REGISTRY_SERVER_PASSWORD=MY_PERSONAL_ACCESS_TOKEN + ``` 5. Optionally, configure a deployment environment. {% data reusables.actions.about-environments %} diff --git a/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-google-kubernetes-engine.md b/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-google-kubernetes-engine.md index 38f8331f1d..e59f9ec2bd 100644 --- a/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-google-kubernetes-engine.md +++ b/content/actions/deployment/deploying-to-your-cloud-provider/deploying-to-google-kubernetes-engine.md @@ -49,11 +49,13 @@ To create the GKE cluster, you will first need to authenticate using the `gcloud For example: {% raw %} + ```bash copy $ gcloud container clusters create $GKE_CLUSTER \ --project=$GKE_PROJECT \ --zone=$GKE_ZONE ``` + {% endraw %} ### Enabling the APIs @@ -61,11 +63,13 @@ $ gcloud container clusters create $GKE_CLUSTER \ Enable the Kubernetes Engine and Container Registry APIs. For example: {% raw %} + ```bash copy $ gcloud services enable \ containerregistry.googleapis.com \ container.googleapis.com ``` + {% endraw %} ### Configuring a service account and storing its credentials @@ -74,18 +78,23 @@ This procedure demonstrates how to create the service account for your GKE integ 1. Create a new service account: {% raw %} + ``` gcloud iam service-accounts create $SA_NAME ``` + {% endraw %} 1. Retrieve the email address of the service account you just created: {% raw %} + ``` gcloud iam service-accounts list ``` + {% endraw %} 1. Add roles to the service account. Note: Apply more restrictive roles to suit your requirements. {% raw %} + ``` $ gcloud projects add-iam-policy-binding $GKE_PROJECT \ --member=serviceAccount:$SA_EMAIL \ @@ -97,18 +106,23 @@ This procedure demonstrates how to create the service account for your GKE integ --member=serviceAccount:$SA_EMAIL \ --role=roles/container.clusterViewer ``` + {% endraw %} 1. Download the JSON keyfile for the service account: {% raw %} + ``` gcloud iam service-accounts keys create key.json --iam-account=$SA_EMAIL ``` + {% endraw %} 1. Store the service account key as a secret named `GKE_SA_KEY`: {% raw %} + ``` export GKE_SA_KEY=$(cat key.json | base64) ``` + {% endraw %} For more information about how to store a secret, see "[AUTOTITLE](/actions/security-guides/encrypted-secrets)." 
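One way to finish that last step, sketched here only as an illustration and not taken from the patch above, is to pass the encoded key to `gh secret set`. This assumes the `gh` CLI is installed and authenticated, and `OWNER/REPO` is a placeholder for your repository.

```shell
# Sketch only: store the base64-encoded service account key as a repository
# secret named GKE_SA_KEY. OWNER/REPO is a placeholder for your repository.
export GKE_SA_KEY=$(cat key.json | base64)
gh secret set GKE_SA_KEY --repo OWNER/REPO --body "$GKE_SA_KEY"
```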
diff --git a/content/actions/deployment/deploying-xcode-applications/installing-an-apple-certificate-on-macos-runners-for-xcode-development.md b/content/actions/deployment/deploying-xcode-applications/installing-an-apple-certificate-on-macos-runners-for-xcode-development.md index 8351b997eb..13c21cdee4 100644 --- a/content/actions/deployment/deploying-xcode-applications/installing-an-apple-certificate-on-macos-runners-for-xcode-development.md +++ b/content/actions/deployment/deploying-xcode-applications/installing-an-apple-certificate-on-macos-runners-for-xcode-development.md @@ -50,6 +50,7 @@ Create secrets in your repository or organization for the following items: ```shell base64 -i BUILD_CERTIFICATE.p12 | pbcopy ``` + - The password for your Apple signing certificate. - In this example, the secret is named `P12_PASSWORD`. @@ -131,6 +132,7 @@ On self-hosted runners, the `$RUNNER_TEMP` directory is cleaned up at the end of If you use self-hosted runners, you should add a final step to your workflow to help ensure that these sensitive files are deleted at the end of the job. The workflow step shown below is an example of how to do this. {% raw %} + ```yaml - name: Clean up keychain and provisioning profile if: ${{ always() }} @@ -138,4 +140,5 @@ If you use self-hosted runners, you should add a final step to your workflow to security delete-keychain $RUNNER_TEMP/app-signing.keychain-db rm ~/Library/MobileDevice/Provisioning\ Profiles/build_pp.mobileprovision ``` + {% endraw %} diff --git a/content/actions/deployment/protecting-deployments/creating-custom-deployment-protection-rules.md b/content/actions/deployment/protecting-deployments/creating-custom-deployment-protection-rules.md index 7975c81c8a..72ea6a5b84 100644 --- a/content/actions/deployment/protecting-deployments/creating-custom-deployment-protection-rules.md +++ b/content/actions/deployment/protecting-deployments/creating-custom-deployment-protection-rules.md @@ -68,6 +68,7 @@ Once a workflow reaches a job that references an environment that has the custom } \ }' ``` + 1. Optionally, to add a status report without taking any other action to {% data variables.product.prodname_dotcom_the_website %}, send a `POST` request to `/repos/OWNER/REPO/actions/runs/RUN_ID/deployment_protection_rule`. In the request body, omit the `state`. For more information, see "[AUTOTITLE](/rest/actions/workflow-runs#review-custom-deployment-protection-rules-for-a-workflow-run)." You can post a status report on the same deployment up to 10 times. Status reports support Markdown formatting and can be up to 1024 characters long. 1. To approve or reject a request, send a `POST` request to `/repos/OWNER/REPO/actions/runs/RUN_ID/deployment_protection_rule`. In the request body, set the `state` property to either `approved` or `rejected`. For more information, see "[AUTOTITLE](/rest/actions/workflow-runs#review-custom-deployment-protection-rules-for-a-workflow-run)." 
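For illustration, a request that approves a pending deployment could look like the following sketch. The token, environment name, and run ID are placeholders, and the exact request body fields should be confirmed against the REST reference linked above rather than read from this patch.

```shell
# Sketch only: approve a deployment gated by a custom protection rule.
# OWNER, REPO, and RUN_ID are placeholders; the token must belong to the
# integration that provides the protection rule.
curl -L -X POST \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer $TOKEN" \
  "https://api.github.com/repos/OWNER/REPO/actions/runs/RUN_ID/deployment_protection_rule" \
  -d '{"environment_name": "production", "state": "approved", "comment": "Approved"}'
```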
diff --git a/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-azure.md b/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-azure.md index 8b514a7fbb..5c2f501bf9 100644 --- a/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-azure.md +++ b/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-azure.md @@ -57,6 +57,7 @@ The [`azure/login`](https://github.com/Azure/login) action receives a JWT from t The following example exchanges an OIDC ID token with Azure to receive an access token, which can then be used to access cloud resources. {% raw %} + ```yaml copy name: Run Azure Login with OIDC on: [push] @@ -80,4 +81,5 @@ jobs: az account show az group list ``` + {% endraw %} diff --git a/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-google-cloud-platform.md b/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-google-cloud-platform.md index 1ad5a36426..829de6dfa2 100644 --- a/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-google-cloud-platform.md +++ b/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-google-cloud-platform.md @@ -62,6 +62,7 @@ This example has a job called `Get_OIDC_ID_token` that uses actions to request a This action exchanges a {% data variables.product.prodname_dotcom %} OIDC token for a Google Cloud access token, using [Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation). {% raw %} + ```yaml copy name: List services in GCP on: @@ -89,4 +90,5 @@ jobs: gcloud auth login --brief --cred-file="${{ steps.auth.outputs.credentials_file_path }}" gcloud services list ``` + {% endraw %} diff --git a/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-hashicorp-vault.md b/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-hashicorp-vault.md index 603ee4fc7e..e91601ff45 100644 --- a/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-hashicorp-vault.md +++ b/content/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-hashicorp-vault.md @@ -63,6 +63,7 @@ To configure your Vault server to accept JSON Web Tokens (JWT) for authenticatio } EOF ``` + 3. Configure roles to group different policies together. If the authentication is successful, these policies are attached to the resulting Vault access token. 
```sh copy diff --git a/content/actions/examples/using-concurrency-expressions-and-a-test-matrix.md b/content/actions/examples/using-concurrency-expressions-and-a-test-matrix.md index f0847c56cc..c4dcb5f9ff 100644 --- a/content/actions/examples/using-concurrency-expressions-and-a-test-matrix.md +++ b/content/actions/examples/using-concurrency-expressions-and-a-test-matrix.md @@ -217,6 +217,7 @@ jobs: ```yaml copy name: Node.js Tests ``` + @@ -229,6 +230,7 @@ name: Node.js Tests ```yaml copy on: ``` + @@ -241,6 +243,7 @@ The `on` keyword lets you define the events that trigger when the workflow is ru ```yaml copy workflow_dispatch: ``` + @@ -253,6 +256,7 @@ Add the `workflow_dispatch` event if you want to be able to manually run this wo ```yaml copy pull_request: ``` + @@ -267,6 +271,7 @@ Add the `pull_request` event, so that the workflow runs automatically every time branches: - main ``` + @@ -281,6 +286,7 @@ permissions: contents: read pull-requests: read ``` + @@ -294,6 +300,7 @@ Modifies the default permissions granted to `GITHUB_TOKEN`. This will vary depen concurrency: group: {% raw %}'${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}'{% endraw %} ``` + @@ -306,6 +313,7 @@ Creates a concurrency group for specific events, and uses the `||` operator to d ```yaml copy cancel-in-progress: true ``` + @@ -318,6 +326,7 @@ Cancels any currently running job or workflow in the same concurrency group. ```yaml copy jobs: ``` + @@ -330,6 +339,7 @@ Groups together all the jobs that run in the workflow file. ```yaml copy test: ``` + @@ -342,6 +352,7 @@ Defines a job with the ID `test` that is stored within the `jobs` key. ```yaml copy runs-on: {% raw %}${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }}{% endraw %} ``` + @@ -354,6 +365,7 @@ Configures the job to run on a {% data variables.product.prodname_dotcom %}-host ```yaml copy timeout-minutes: 60 ``` + @@ -366,6 +378,7 @@ Sets the maximum number of minutes to let the job run before it is automatically ```yaml copy strategy: ``` + This section defines the build matrix for your jobs. @@ -377,6 +390,7 @@ Sets the maximum number of minutes to let the job run before it is automatically ```yaml copy fail-fast: false ``` + @@ -400,6 +414,7 @@ Setting `fail-fast` to `false` prevents {% data variables.product.prodname_dotco translations, ] ``` + @@ -412,6 +427,7 @@ Creates a matrix named `test-group`, with an array of test groups. These values ```yaml copy steps: ``` + @@ -428,6 +444,7 @@ Groups together all the steps that will run as part of the `test` job. Each job lfs: {% raw %}${{ matrix.test-group == 'content' }}{% endraw %} persist-credentials: 'false' ``` + @@ -468,6 +485,7 @@ The `uses` keyword tells the job to retrieve the action named `actions/checkout` throw err } ``` + @@ -487,6 +505,7 @@ If the current repository is the `github/docs-internal` repository, this step us path: docs-early-access ref: {% raw %}${{ steps.check-early-access.outputs.result }}{% endraw %} ``` + @@ -504,6 +523,7 @@ If the current repository is the `github/docs-internal` repository, this step ch mv docs-early-access/data data/early-access rm -r docs-early-access ``` + @@ -517,6 +537,7 @@ If the current repository is the `github/docs-internal` repository, this step us - name: Checkout LFS objects run: git lfs checkout ``` + @@ -535,6 +556,7 @@ This step runs a command to check out LFS objects from the repository. 
# a string like `foo.js path/bar.md` output: ' ' ``` + @@ -549,6 +571,7 @@ This step uses the `trilom/file-changes-action` action to gather the files chang run: | echo {% raw %}"${{ steps.get_diff_files.outputs.files }}" > get_diff_files.txt{% endraw %} ``` + @@ -565,6 +588,7 @@ This step runs a shell command that uses an output from the previous step to cre node-version: 16.14.x cache: npm ``` + @@ -578,6 +602,7 @@ This step uses the `actions/setup-node` action to install the specified version - name: Install dependencies run: npm ci ``` + @@ -594,6 +619,7 @@ This step runs the `npm ci` shell command to install the npm software packages f path: .next/cache key: {% raw %}${{ runner.os }}-nextjs-${{ hashFiles('package*.json') }}{% endraw %} ``` + @@ -608,6 +634,7 @@ This step uses the `actions/cache` action to cache the Next.js build, so that th - name: Run build script run: npm run build ``` + {% endif %} @@ -625,6 +652,7 @@ This step runs the build script. CHANGELOG_CACHE_FILE_PATH: tests/fixtures/changelog-feed.json run: npm test -- {% raw %}tests/${{ matrix.test-group }}/{% endraw %} ``` + diff --git a/content/actions/examples/using-scripts-to-test-your-code-on-a-runner.md b/content/actions/examples/using-scripts-to-test-your-code-on-a-runner.md index ae6194ac4a..5e3109b9af 100644 --- a/content/actions/examples/using-scripts-to-test-your-code-on-a-runner.md +++ b/content/actions/examples/using-scripts-to-test-your-code-on-a-runner.md @@ -133,6 +133,7 @@ jobs: ```yaml copy name: 'Link Checker: All English' ``` + @@ -145,6 +146,7 @@ name: 'Link Checker: All English' ```yaml copy on: ``` + @@ -157,6 +159,7 @@ The `on` keyword lets you define the events that trigger when the workflow is ru ```yaml copy workflow_dispatch: ``` + @@ -171,6 +174,7 @@ Add the `workflow_dispatch` event if you want to be able to manually run this wo branches: - main ``` + @@ -183,6 +187,7 @@ Add the `push` event, so that the workflow runs automatically every time a commi ```yaml copy pull_request: ``` + @@ -197,6 +202,7 @@ permissions: contents: read pull-requests: read ``` + @@ -207,10 +213,12 @@ Modifies the default permissions granted to `GITHUB_TOKEN`. This will vary depen {% raw %} + ```yaml copy concurrency: group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' ``` + {% endraw %} @@ -224,6 +232,7 @@ Creates a concurrency group for specific events, and uses the `||` operator to d ```yaml copy cancel-in-progress: true ``` + @@ -236,6 +245,7 @@ Cancels any currently running job or workflow in the same concurrency group. ```yaml copy jobs: ``` + @@ -248,6 +258,7 @@ Groups together all the jobs that run in the workflow file. ```yaml copy check-links: ``` + @@ -258,9 +269,11 @@ Defines a job with the ID `check-links` that is stored within the `jobs` key. {% raw %} + ```yaml copy runs-on: ${{ fromJSON('["ubuntu-latest", "self-hosted"]')[github.repository == 'github/docs-internal'] }} ``` + {% endraw %} @@ -274,6 +287,7 @@ Configures the job to run on a {% data variables.product.prodname_dotcom %}-host ```yaml copy steps: ``` + @@ -287,6 +301,7 @@ Groups together all the steps that will run as part of the `check-links` job. 
Ea - name: Checkout uses: {% data reusables.actions.action-checkout %} ``` + @@ -303,6 +318,7 @@ The `uses` keyword tells the job to retrieve the action named `actions/checkout` node-version: 16.13.x cache: npm ``` + @@ -317,6 +333,7 @@ This step uses the `actions/setup-node` action to install the specified version - name: Install run: npm ci ``` + @@ -333,6 +350,7 @@ The `run` keyword tells the job to execute a command on the runner. In this case with: fileOutput: 'json' ``` + @@ -347,6 +365,7 @@ Uses the `trilom/file-changes-action` action to gather all the changed files. Th - name: Show files changed run: cat $HOME/files.json ``` + @@ -367,6 +386,7 @@ Lists the contents of `files.json`. This will be visible in the workflow run's l --verbose \ --list $HOME/files.json ``` + @@ -386,6 +406,7 @@ This step uses `run` command to execute a script that is stored in the repositor --check-images \ --level critical ``` + diff --git a/content/actions/examples/using-the-github-cli-on-a-runner.md b/content/actions/examples/using-the-github-cli-on-a-runner.md index f183d35288..4ead377334 100644 --- a/content/actions/examples/using-the-github-cli-on-a-runner.md +++ b/content/actions/examples/using-the-github-cli-on-a-runner.md @@ -163,6 +163,7 @@ jobs: fi done ``` + ## Understanding the example {% data reusables.actions.example-explanation-table-intro %} @@ -181,6 +182,7 @@ jobs: ```yaml copy name: Check all English links ``` + @@ -196,6 +198,7 @@ on: schedule: - cron: '40 20 * * *' # once a day at 20:40 UTC / 12:40 PST ``` + @@ -213,6 +216,7 @@ permissions: contents: read issues: write ``` + @@ -225,6 +229,7 @@ Modifies the default permissions granted to `GITHUB_TOKEN`. This will vary depen ```yaml copy jobs: ``` + @@ -238,6 +243,7 @@ Groups together all the jobs that run in the workflow file. check_all_english_links: name: Check all links ``` + @@ -250,6 +256,7 @@ Defines a job with the ID `check_all_english_links`, and the name `Check all lin ```yaml copy if: github.repository == 'github/docs-internal' ``` + @@ -262,6 +269,7 @@ Only run the `check_all_english_links` job if the repository is named `docs-inte ```yaml copy runs-on: ubuntu-latest ``` + @@ -278,6 +286,7 @@ Configures the job to run on an Ubuntu Linux runner. This means that the job wil REPORT_LABEL: broken link report REPORT_REPOSITORY: github/docs-content ``` + @@ -290,6 +299,7 @@ Creates custom environment variables, and redefines the built-in `GITHUB_TOKEN` ```yaml copy steps: ``` + @@ -303,6 +313,7 @@ Groups together all the steps that will run as part of the `check_all_english_li - name: Check out repo's default branch uses: {% data reusables.actions.action-checkout %} ``` + @@ -319,6 +330,7 @@ The `uses` keyword tells the job to retrieve the action named `actions/checkout` node-version: 16.8.x cache: npm ``` + @@ -334,6 +346,7 @@ This step uses the `actions/setup-node` action to install the specified version - name: Run the "npm run build" command run: npm run build ``` + @@ -348,6 +361,7 @@ The `run` keyword tells the job to execute a command on the runner. 
In this case run: | script/check-english-links.js > broken_links.md ``` + @@ -367,6 +381,7 @@ This `run` command executes a script that is stored in the repository at `script run: echo "::set-output name=title::$(head -1 broken_links.md)" {%- endif %} ``` + @@ -389,6 +404,7 @@ If the `check-english-links.js` script detects broken links and returns a non-ze repository: {% raw %}${{ env.REPORT_REPOSITORY }}{% endraw %} labels: {% raw %}${{ env.REPORT_LABEL }}{% endraw %} ``` + @@ -417,6 +433,7 @@ Uses the `peter-evans/create-issue-from-file` action to create a new {% data var gh issue comment {% raw %}${{ env.NEW_REPORT_URL }}{% endraw %} --body "⬅️ [Previous report]($previous_report_url)" ``` + @@ -437,6 +454,7 @@ Uses [`gh issue list`](https://cli.github.com/manual/gh_issue_list) to locate th fi done ``` + @@ -458,6 +476,7 @@ If an issue from a previous run is open and assigned to someone, then use [`gh i fi done ``` + diff --git a/content/actions/hosting-your-own-runners/managing-self-hosted-runners/configuring-the-self-hosted-runner-application-as-a-service.md b/content/actions/hosting-your-own-runners/managing-self-hosted-runners/configuring-the-self-hosted-runner-application-as-a-service.md index 8ffbac1aac..2e5ec6419a 100644 --- a/content/actions/hosting-your-own-runners/managing-self-hosted-runners/configuring-the-self-hosted-runner-application-as-a-service.md +++ b/content/actions/hosting-your-own-runners/managing-self-hosted-runners/configuring-the-self-hosted-runner-application-as-a-service.md @@ -92,6 +92,7 @@ You can manage the runner service in the Windows **Services** application, or yo ```shell ./svc.sh install ``` + {% endmac %} ## Starting the service @@ -99,19 +100,25 @@ You can manage the runner service in the Windows **Services** application, or yo Start the service with the following command: {% linux %} + ```shell sudo ./svc.sh start ``` + {% endlinux %} {% windows %} + ```shell Start-Service "{{ service_win_name }}" ``` + {% endwindows %} {% mac %} + ```shell ./svc.sh start ``` + {% endmac %} ## Checking the status of the service @@ -119,19 +126,25 @@ Start-Service "{{ service_win_name }}" Check the status of the service with the following command: {% linux %} + ```shell sudo ./svc.sh status ``` + {% endlinux %} {% windows %} + ```shell Get-Service "{{ service_win_name }}" ``` + {% endwindows %} {% mac %} + ```shell ./svc.sh status ``` + {% endmac %} For more information on viewing the status of your self-hosted runner, see "[AUTOTITLE](/actions/hosting-your-own-runners/managing-self-hosted-runners/monitoring-and-troubleshooting-self-hosted-runners)." @@ -141,19 +154,25 @@ Get-Service "{{ service_win_name }}" Stop the service with the following command: {% linux %} + ```shell sudo ./svc.sh stop ``` + {% endlinux %} {% windows %} + ```shell Stop-Service "{{ service_win_name }}" ``` + {% endwindows %} {% mac %} + ```shell ./svc.sh stop ``` + {% endmac %} ## Uninstalling the service @@ -162,19 +181,25 @@ Stop-Service "{{ service_win_name }}" 1. 
Uninstall the service with the following command: {% linux %} + ```shell sudo ./svc.sh uninstall ``` + {% endlinux %} {% windows %} + ```shell Remove-Service "{{ service_win_name }}" ``` + {% endwindows %} {% mac %} + ```shell ./svc.sh uninstall ``` + {% endmac %} {% linux %} diff --git a/content/actions/learn-github-actions/contexts.md b/content/actions/learn-github-actions/contexts.md index bd6cbe8321..460cbd9d59 100644 --- a/content/actions/learn-github-actions/contexts.md +++ b/content/actions/learn-github-actions/contexts.md @@ -114,6 +114,7 @@ You can print the contents of contexts to the log for debugging. The [`toJSON` f {% data reusables.actions.github-context-warning %} {% raw %} + ```yaml copy name: Context testing on: push @@ -147,6 +148,7 @@ jobs: MATRIX_CONTEXT: ${{ toJson(matrix) }} run: echo '$MATRIX_CONTEXT' ``` + {% endraw %} ## `github` context @@ -312,6 +314,7 @@ This example workflow shows how the `env` context can be configured at the workf {% data reusables.repositories.actions-env-var-note %} {% raw %} + ```yaml copy name: Hi Mascot on: push @@ -334,6 +337,7 @@ jobs: steps: - run: echo 'Hi ${{ env.mascot }}' # Hi Tux ``` + {% endraw %} {% ifversion actions-configuration-variables %} @@ -458,6 +462,7 @@ This example `jobs` context contains the result and outputs of a job from a reus This example reusable workflow uses the `jobs` context to set outputs for the reusable workflow. Note how the outputs flow up from the steps, to the job, then to the `workflow_call` trigger. For more information, see "[AUTOTITLE](/actions/using-workflows/reusing-workflows#using-outputs-from-a-reusable-workflow)." {% raw %} + ```yaml copy name: Reusable workflow @@ -494,6 +499,7 @@ jobs: run: echo "::set-output name=secondword::world" {%- endif %}{% raw %} ``` + {% endraw %} ## `steps` context @@ -813,6 +819,7 @@ jobs: - uses: {% data reusables.actions.action-checkout %} - run: ./debug ``` + ## `inputs` context The `inputs` context contains input properties passed to an action{% ifversion actions-unified-inputs %},{% else %} or{% endif %} to a reusable workflow{% ifversion actions-unified-inputs %}, or to a manually triggered workflow{% endif %}. {% ifversion actions-unified-inputs %}For reusable workflows, the{% else %}The{% endif %} input names and types are defined in the [`workflow_call` event configuration](/actions/using-workflows/events-that-trigger-workflows#workflow-reuse-events) of a reusable workflow, and the input values are passed from [`jobs..with`](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idwith) in an external workflow that calls the reusable workflow. {% ifversion actions-unified-inputs %}For manually triggered workflows, the inputs are defined in the [`workflow_dispatch` event configuration](/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) of a workflow.{% endif %} @@ -843,6 +850,7 @@ The following example contents of the `inputs` context is from a workflow that h This example reusable workflow uses the `inputs` context to get the values of the `build_id`, `deploy_target`, and `perform_deploy` inputs that were passed to the reusable workflow from the caller workflow. 
{% raw %} + ```yaml copy name: Reusable deploy workflow on: @@ -866,6 +874,7 @@ jobs: - name: Deploy build to target run: deploy --build ${{ inputs.build_id }} --target ${{ inputs.deploy_target }} ``` + {% endraw %} {% ifversion actions-unified-inputs %} @@ -874,6 +883,7 @@ jobs: This example workflow triggered by a `workflow_dispatch` event uses the `inputs` context to get the values of the `build_id`, `deploy_target`, and `perform_deploy` inputs that were passed to the workflow. {% raw %} + ```yaml copy on: workflow_dispatch: @@ -896,5 +906,6 @@ jobs: - name: Deploy build to target run: deploy --build ${{ inputs.build_id }} --target ${{ inputs.deploy_target }} ``` + {% endraw %} {% endif %} diff --git a/content/actions/learn-github-actions/expressions.md b/content/actions/learn-github-actions/expressions.md index b7dd43e5d4..26056b5946 100644 --- a/content/actions/learn-github-actions/expressions.md +++ b/content/actions/learn-github-actions/expressions.md @@ -38,10 +38,12 @@ steps: ### Example setting an environment variable {% raw %} + ```yaml env: MY_ENV_VAR: ${{ }} ``` + {% endraw %} ## Literals @@ -184,9 +186,11 @@ Replaces values in the `string`, with the variable `replaceValueN`. Variables in #### Example of `format` {% raw %} + ```js format('Hello {0} {1} {2}', 'Mona', 'the', 'Octocat') ``` + {% endraw %} Returns 'Hello Mona the Octocat'. @@ -194,9 +198,11 @@ Returns 'Hello Mona the Octocat'. #### Example escaping braces {% raw %} + ```js format('{{Hello {0} {1} {2}!}}', 'Mona', 'the', 'Octocat') ``` + {% endraw %} Returns '{Hello Mona the Octocat!}'. @@ -232,6 +238,7 @@ Returns a JSON object or JSON data type for `value`. You can use this function t This workflow sets a JSON matrix in one job, and passes it to the next job using an output and `fromJSON`. {% raw %} + ```yaml name: build on: push @@ -255,6 +262,7 @@ jobs: steps: - run: build ``` + {% endraw %} #### Example returning a JSON data type @@ -262,6 +270,7 @@ jobs: This workflow uses `fromJSON` to convert environment variables from a string to a Boolean or integer. {% raw %} + ```yaml name: print on: push @@ -276,6 +285,7 @@ jobs: timeout-minutes: ${{ fromJSON(env.time) }} run: echo ... ``` + {% endraw %} ### hashFiles diff --git a/content/actions/learn-github-actions/variables.md b/content/actions/learn-github-actions/variables.md index 3eeeac9fda..b9de05a8dc 100644 --- a/content/actions/learn-github-actions/variables.md +++ b/content/actions/learn-github-actions/variables.md @@ -52,6 +52,7 @@ To set a custom environment variable{% ifversion actions-configuration-variables - A specific step within a job, by using [`jobs..steps[*].env`](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsenv). {% raw %} + ```yaml copy name: Greeting on variable day @@ -72,6 +73,7 @@ jobs: env: First_Name: Mona ``` + {% endraw %} You can access `env` variable values using runner environment variables or using contexts. The example above shows three custom variables being used as environment variables in an `echo` command: `$DAY_OF_WEEK`, `$Greeting`, and `$First_Name`. The values for these variables are set, and scoped, at the workflow, job, and step level respectively. For more information on accessing variable values using contexts, see "[Using contexts to access variable values](#using-contexts-to-access-variable-values)." @@ -222,6 +224,7 @@ In addition to runner environment variables, {% data variables.product.prodname_ Runner environment variables are always interpolated on the runner machine. 
However, parts of a workflow are processed by {% data variables.product.prodname_actions %} and are not sent to the runner. You cannot use environment variables in these parts of a workflow file. Instead, you can use contexts. For example, an `if` conditional, which determines whether a job or step is sent to the runner, is always processed by {% data variables.product.prodname_actions %}. You can use a context in an `if` conditional statement to access the value of an variable. {% raw %} + ```yaml copy env: DAY_OF_WEEK: Monday @@ -238,6 +241,7 @@ jobs: env: First_Name: Mona ``` + {% endraw %} In this modification of the earlier example, we've introduced an `if` conditional. The workflow step is now only run if `DAY_OF_WEEK` is set to "Monday". We access this value from the `if` conditional statement by using the [`env` context](/actions/learn-github-actions/contexts#env-context). @@ -343,6 +347,7 @@ We strongly recommend that actions use variables to access the filesystem rather You can write a single workflow file that can be used for different operating systems by using the `RUNNER_OS` default environment variable and the corresponding context property {% raw %}`${{ runner.os }}`{% endraw %}. For example, the following workflow could be run successfully if you changed the operating system from `macos-latest` to `windows-latest` without having to alter the syntax of the environment variables, which differs depending on the shell being used by the runner. {% raw %} + ```yaml copy jobs: if-Windows-else: @@ -355,6 +360,7 @@ jobs: if: runner.os != 'Windows' run: echo "The operating system on the runner is not Windows, it's $RUNNER_OS." ``` + {% endraw %} In this example, the two `if` statements check the `os` property of the `runner` context to determine the operating system of the runner. `if` conditionals are processed by {% data variables.product.prodname_actions %}, and only steps where the check resolves as `true` are sent to the runner. Here one of the checks will always be `true` and the other `false`, so only one of these steps is sent to the runner. Once the job is sent to the runner, the step is executed and the environment variable in the `echo` command is interpolated using the appropriate syntax (`$env:NAME` for PowerShell on Windows, and `$NAME` for bash and sh on Linux and MacOS). In this example, the statement `runs-on: macos-latest` means that the second step will be run. diff --git a/content/actions/managing-issues-and-pull-requests/commenting-on-an-issue-when-a-label-is-added.md b/content/actions/managing-issues-and-pull-requests/commenting-on-an-issue-when-a-label-is-added.md index d3159b7dc2..983db10e7d 100644 --- a/content/actions/managing-issues-and-pull-requests/commenting-on-an-issue-when-a-label-is-added.md +++ b/content/actions/managing-issues-and-pull-requests/commenting-on-an-issue-when-a-label-is-added.md @@ -30,6 +30,7 @@ In the tutorial, you will first make a workflow file that uses the [`peter-evans 3. Copy the following YAML contents into your workflow file. 
```yaml copy + {% indented_data_reference reusables.actions.actions-not-certified-by-github-comment spaces=4 %} {% indented_data_reference reusables.actions.actions-use-sha-pinning-comment spaces=4 %} diff --git a/content/actions/managing-issues-and-pull-requests/moving-assigned-issues-on-project-boards.md b/content/actions/managing-issues-and-pull-requests/moving-assigned-issues-on-project-boards.md index 44bd8becda..d83b7c55be 100644 --- a/content/actions/managing-issues-and-pull-requests/moving-assigned-issues-on-project-boards.md +++ b/content/actions/managing-issues-and-pull-requests/moving-assigned-issues-on-project-boards.md @@ -31,6 +31,7 @@ In the tutorial, you will first make a workflow file that uses the [`alex-page/g 4. Copy the following YAML contents into your workflow file. ```yaml copy + {% indented_data_reference reusables.actions.actions-not-certified-by-github-comment spaces=4 %} {% indented_data_reference reusables.actions.actions-use-sha-pinning-comment spaces=4 %} diff --git a/content/actions/managing-issues-and-pull-requests/scheduling-issue-creation.md b/content/actions/managing-issues-and-pull-requests/scheduling-issue-creation.md index 20ae03fabb..27cff80ef1 100644 --- a/content/actions/managing-issues-and-pull-requests/scheduling-issue-creation.md +++ b/content/actions/managing-issues-and-pull-requests/scheduling-issue-creation.md @@ -30,6 +30,7 @@ In the tutorial, you will first make a workflow file that uses the [`imjohnbo/is 3. Copy the following YAML contents into your workflow file. ```yaml copy + {% indented_data_reference reusables.actions.actions-not-certified-by-github-comment spaces=4 %} {% indented_data_reference reusables.actions.actions-use-sha-pinning-comment spaces=4 %} diff --git a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-azure-devops-with-github-actions-importer.md b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-azure-devops-with-github-actions-importer.md index cddc9b240d..b4eb592a42 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-azure-devops-with-github-actions-importer.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-azure-devops-with-github-actions-importer.md @@ -107,6 +107,7 @@ The `configure` CLI command is used to set required credentials and options for ✔ Azure DevOps organization name: :organization ✔ Azure DevOps project name: :project Environment variables successfully updated. + ``` 3. 
In your terminal, run the {% data variables.product.prodname_actions_importer %} `update` CLI command to connect to the {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} and ensure that the container image is updated to the latest version: diff --git a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-bamboo-with-github-actions-importer.md b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-bamboo-with-github-actions-importer.md index 7933af4c42..63d3d2f4c3 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-bamboo-with-github-actions-importer.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-bamboo-with-github-actions-importer.md @@ -105,6 +105,7 @@ The `configure` CLI command is used to set required credentials and options for ✔ Base url of the Bamboo instance: https://bamboo.example.com Environment variables successfully updated. ``` + 1. In your terminal, run the {% data variables.product.prodname_actions_importer %} `update` CLI command to connect to {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} and ensure that the container image is updated to the latest version: ```shell @@ -182,6 +183,7 @@ You can use the `dry-run` command to convert a Bamboo pipeline to an equivalent ### Running a dry-run migration for a build plan To perform a dry run of migrating your Bamboo build plan to {% data variables.product.prodname_actions %}, run the following command in your terminal, replacing `:my_plan_slug` with the plan's project and plan key in the format `-` (for example: `PAN-SCRIP`). + ```shell gh actions-importer dry-run bamboo build --plan-slug :my_plan_slug --output-dir tmp/dry-run ``` diff --git a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-circleci-with-github-actions-importer.md b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-circleci-with-github-actions-importer.md index 917ddc34c2..cda0c00394 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-circleci-with-github-actions-importer.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-circleci-with-github-actions-importer.md @@ -86,6 +86,7 @@ The `configure` CLI command is used to set required credentials and options for ✔ CircleCI organization name: mycircleciorganization Environment variables successfully updated. ``` + 1. 
In your terminal, run the {% data variables.product.prodname_actions_importer %} `update` CLI command to connect to {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} and ensure that the container image is updated to the latest version: ```shell diff --git a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-gitlab-with-github-actions-importer.md b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-gitlab-with-github-actions-importer.md index a478e8cad1..ddf9a914d6 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-gitlab-with-github-actions-importer.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-gitlab-with-github-actions-importer.md @@ -85,6 +85,7 @@ The `configure` CLI command is used to set required credentials and options for ✔ Private token for GitLab: *************** ✔ Base url of the GitLab instance: http://localhost Environment variables successfully updated. + ``` 1. In your terminal, run the {% data variables.product.prodname_actions_importer %} `update` CLI command to connect to {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} and ensure that the container image is updated to the latest version: diff --git a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-jenkins-with-github-actions-importer.md b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-jenkins-with-github-actions-importer.md index e3e2e60e6f..29b7564bb8 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-jenkins-with-github-actions-importer.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-jenkins-with-github-actions-importer.md @@ -81,6 +81,7 @@ The `configure` CLI command is used to set required credentials and options for ✔ Username of Jenkins user: admin ✔ Base url of the Jenkins instance: https://localhost Environment variables successfully updated. + ``` 1. In your terminal, run the {% data variables.product.prodname_actions_importer %} `update` CLI command to connect to {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} and ensure that the container image is updated to the latest version: diff --git a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-travis-ci-with-github-actions-importer.md b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-travis-ci-with-github-actions-importer.md index 3d35396c85..1df67db6b3 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-travis-ci-with-github-actions-importer.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/migrating-from-travis-ci-with-github-actions-importer.md @@ -88,6 +88,7 @@ The `configure` CLI command is used to set required credentials and options for ✔ Base url of the Travis CI instance: https://travis-ci.com ✔ Travis CI organization name: actions-importer-labs Environment variables successfully updated. + ``` 1. 
In your terminal, run the {% data variables.product.prodname_actions_importer %} `update` CLI command to connect to {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} and ensure that the container image is updated to the latest version: diff --git a/content/actions/migrating-to-github-actions/automated-migrations/supplemental-arguments-and-settings.md b/content/actions/migrating-to-github-actions/automated-migrations/supplemental-arguments-and-settings.md index 44e86bcd42..3711fc2e38 100644 --- a/content/actions/migrating-to-github-actions/automated-migrations/supplemental-arguments-and-settings.md +++ b/content/actions/migrating-to-github-actions/automated-migrations/supplemental-arguments-and-settings.md @@ -101,6 +101,7 @@ The supported values for `--features` are: - `ghes-`, where `` is the version of {% data variables.product.prodname_ghe_server %}, `3.0` or later. For example, `ghes-3.3`. You can view the list of available feature flags by {% data variables.product.prodname_actions_importer %} by running the `list-features` command. For example: + ```shell copy gh actions-importer list-features ``` diff --git a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-azure-pipelines-to-github-actions.md b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-azure-pipelines-to-github-actions.md index 42c4af1386..8a1b34ab03 100644 --- a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-azure-pipelines-to-github-actions.md +++ b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-azure-pipelines-to-github-actions.md @@ -59,6 +59,7 @@ Below is an example of the syntax for each system. ### Azure Pipelines syntax for script steps {% raw %} + ```yaml jobs: - job: scripts @@ -72,11 +73,13 @@ jobs: inputs: script: Write-Host "This step runs in PowerShell" ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for script steps {% raw %} + ```yaml jobs: scripts: @@ -90,6 +93,7 @@ jobs: - run: Write-Host "This step runs in PowerShell" shell: powershell ``` + {% endraw %} ## Differences in script error handling @@ -109,6 +113,7 @@ Below is an example of the syntax for each system. ### Azure Pipelines syntax using CMD by default {% raw %} + ```yaml jobs: - job: run_command @@ -117,11 +122,13 @@ jobs: steps: - script: echo "This step runs in CMD on Windows by default" ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for specifying CMD {% raw %} + ```yaml jobs: run_command: @@ -131,6 +138,7 @@ jobs: - run: echo "This step runs in CMD on Windows explicitly" shell: cmd ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#using-a-specific-shell)." @@ -146,6 +154,7 @@ Below is an example of the syntax for each system. 
### Azure Pipelines syntax for conditional expressions {% raw %} + ```yaml jobs: - job: conditional @@ -155,11 +164,13 @@ jobs: - script: echo "This step runs with str equals 'ABC' and num equals 123" condition: and(eq(variables.str, 'ABC'), eq(variables.num, 123)) ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for conditional expressions {% raw %} + ```yaml jobs: conditional: @@ -168,6 +179,7 @@ jobs: - run: echo "This step runs with str equals 'ABC' and num equals 123" if: ${{ env.str == 'ABC' && env.num == 123 }} ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/learn-github-actions/expressions)." @@ -181,6 +193,7 @@ Below is an example of the syntax for each system. The workflows start a first j ### Azure Pipelines syntax for dependencies between jobs {% raw %} + ```yaml jobs: - job: initial @@ -207,11 +220,13 @@ jobs: steps: - script: echo "This job will run after fanout1 and fanout2 have finished." ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for dependencies between jobs {% raw %} + ```yaml jobs: initial: @@ -234,6 +249,7 @@ jobs: steps: - run: echo "This job will run after fanout1 and fanout2 have finished." ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idneeds)." @@ -247,6 +263,7 @@ Below is an example of the syntax for each system. ### Azure Pipelines syntax for tasks {% raw %} + ```yaml jobs: - job: run_python @@ -259,6 +276,7 @@ jobs: architecture: 'x64' - script: python script.py ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for actions diff --git a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-circleci-to-github-actions.md b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-circleci-to-github-actions.md index d3c5eb7561..7157db174f 100644 --- a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-circleci-to-github-actions.md +++ b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-circleci-to-github-actions.md @@ -87,12 +87,14 @@ Below is an example of the syntax for each system. ### CircleCI syntax for caching {% raw %} + ```yaml - restore_cache: keys: - v1-npm-deps-{{ checksum "package-lock.json" }} - v1-npm-deps- ``` + {% endraw %} ### GitHub Actions syntax for caching @@ -123,6 +125,7 @@ Below is an example in CircleCI and {% data variables.product.prodname_actions % ### CircleCI syntax for persisting data between jobs {% raw %} + ```yaml - persist_to_workspace: root: workspace @@ -134,11 +137,13 @@ Below is an example in CircleCI and {% data variables.product.prodname_actions % - attach_workspace: at: /tmp/workspace ``` + {% endraw %} ### GitHub Actions syntax for persisting data between jobs {% raw %} + ```yaml - name: Upload math result for job 1 uses: {% data reusables.actions.action-upload-artifact %} @@ -153,6 +158,7 @@ Below is an example in CircleCI and {% data variables.product.prodname_actions % with: name: homework ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/storing-workflow-data-as-artifacts)." 
@@ -168,6 +174,7 @@ Below is an example in CircleCI and {% data variables.product.prodname_actions % ### CircleCI syntax for using databases and service containers {% raw %} + ```yaml --- version: 2.1 @@ -218,11 +225,13 @@ workflows: - attach_workspace: at: /tmp/workspace ``` + {% endraw %} ### GitHub Actions syntax for using databases and service containers {% raw %} + ```yaml name: Containers @@ -267,6 +276,7 @@ jobs: - name: Run tests run: bundle exec rake ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-containerized-services/about-service-containers)." @@ -278,6 +288,7 @@ Below is a real-world example. The left shows the actual CircleCI _config.yml_ f ### Complete example for CircleCI {% raw %} + ```yaml --- version: 2.1 @@ -359,6 +370,7 @@ workflows: - ruby-26 - ruby-25 ``` + {% endraw %} ### Complete example for GitHub Actions diff --git a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-gitlab-cicd-to-github-actions.md b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-gitlab-cicd-to-github-actions.md index 4437526774..46386a10cb 100644 --- a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-gitlab-cicd-to-github-actions.md +++ b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-gitlab-cicd-to-github-actions.md @@ -46,6 +46,7 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for jobs {% raw %} + ```yaml job1: variables: @@ -53,11 +54,13 @@ job1: script: - echo "Run your script here" ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for jobs {% raw %} + ```yaml jobs: job1: @@ -65,6 +68,7 @@ jobs: - uses: {% data reusables.actions.action-checkout %} - run: echo "Run your script here" ``` + {% endraw %} ## Runners @@ -76,6 +80,7 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for runners {% raw %} + ```yaml windows_job: tags: @@ -89,11 +94,13 @@ linux_job: script: - echo "Hello, $USER!" ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for runners {% raw %} + ```yaml windows_job: runs-on: windows-latest @@ -105,6 +112,7 @@ linux_job: steps: - run: echo "Hello, $USER!" ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on)." @@ -118,20 +126,24 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for Docker images {% raw %} + ```yaml my_job: image: node:10.16-jessie ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for Docker images {% raw %} + ```yaml jobs: my_job: container: node:10.16-jessie ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontainer)." @@ -145,6 +157,7 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for conditions and expressions {% raw %} + ```yaml deploy_prod: stage: deploy @@ -153,11 +166,13 @@ deploy_prod: rules: - if: '$CI_COMMIT_BRANCH == "master"' ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for conditions and expressions {% raw %} + ```yaml jobs: deploy_prod: @@ -166,6 +181,7 @@ jobs: steps: - run: echo "Deploy to production server" ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/learn-github-actions/expressions)." @@ -179,6 +195,7 @@ Below is an example of the syntax for each system. 
The workflows start with two ### GitLab CI/CD syntax for dependencies between jobs {% raw %} + ```yaml stages: - build @@ -205,11 +222,13 @@ deploy_ab: script: - echo "This job will run after test_ab is complete" ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for dependencies between jobs {% raw %} + ```yaml jobs: build_a: @@ -234,6 +253,7 @@ jobs: steps: - run: echo "This job will run after test_ab is complete" ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idneeds)." @@ -261,6 +281,7 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for caching {% raw %} + ```yaml image: node:latest @@ -276,6 +297,7 @@ test_async: script: - node ./specs/start.js ./specs/async.spec.js ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for caching @@ -308,17 +330,20 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for artifacts {% raw %} + ```yaml script: artifacts: paths: - math-homework.txt ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for artifacts {% raw %} + ```yaml - name: Upload math result for job 1 uses: {% data reusables.actions.action-upload-artifact %} @@ -326,6 +351,7 @@ artifacts: name: homework path: math-homework.txt ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/storing-workflow-data-as-artifacts)." @@ -341,6 +367,7 @@ Below is an example of the syntax for each system. ### GitLab CI/CD syntax for databases and service containers {% raw %} + ```yaml container-job: variables: @@ -363,11 +390,13 @@ container-job: tags: - docker ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for databases and service containers {% raw %} + ```yaml jobs: container-job: @@ -400,6 +429,7 @@ jobs: # The default PostgreSQL port POSTGRES_PORT: 5432 ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-containerized-services/about-service-containers)." diff --git a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-travis-ci-to-github-actions.md b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-travis-ci-to-github-actions.md index db96378bf4..5309c097eb 100644 --- a/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-travis-ci-to-github-actions.md +++ b/content/actions/migrating-to-github-actions/manual-migrations/migrating-from-travis-ci-to-github-actions.md @@ -69,17 +69,20 @@ Below is an example comparing the syntax for each system. #### Travis CI syntax for a matrix {% raw %} + ```yaml matrix: include: - rvm: 2.5 - rvm: 2.6.3 ``` + {% endraw %} #### {% data variables.product.prodname_actions %} syntax for a matrix {% raw %} + ```yaml jobs: build: @@ -87,6 +90,7 @@ jobs: matrix: ruby: [2.5, 2.6.3] ``` + {% endraw %} ### Targeting specific branches @@ -98,17 +102,20 @@ Below is an example of the syntax for each system. #### Travis CI syntax for targeting specific branches {% raw %} + ```yaml branches: only: - main - 'mona/octocat' ``` + {% endraw %} #### {% data variables.product.prodname_actions %} syntax for targeting specific branches {% raw %} + ```yaml on: push: @@ -116,6 +123,7 @@ on: - main - 'mona/octocat' ``` + {% endraw %} ### Checking out submodules @@ -127,20 +135,24 @@ Below is an example of the syntax for each system. 
#### Travis CI syntax for checking out submodules {% raw %} + ```yaml git: submodules: false ``` + {% endraw %} #### {% data variables.product.prodname_actions %} syntax for checking out submodules {% raw %} + ```yaml - uses: {% data reusables.actions.action-checkout %} with: submodules: false ``` + {% endraw %} ### Using environment variables in a matrix @@ -232,6 +244,7 @@ Below is an example of the syntax for each system. ### Travis CI syntax for phases and steps {% raw %} + ```yaml language: python python: @@ -240,11 +253,13 @@ python: script: - python script.py ``` + {% endraw %} ### {% data variables.product.prodname_actions %} syntax for steps and actions {% raw %} + ```yaml jobs: run_python: @@ -256,6 +271,7 @@ jobs: architecture: 'x64' - run: python script.py ``` + {% endraw %} ## Caching dependencies @@ -269,10 +285,12 @@ These examples demonstrate the cache syntax for each system. ### Travis CI syntax for caching {% raw %} + ```yaml language: node_js cache: npm ``` + {% endraw %} ### GitHub Actions syntax for caching @@ -321,6 +339,7 @@ jobs: ##### Travis CI for building with Node.js {% raw %} + ```yaml install: - npm install @@ -328,6 +347,7 @@ script: - npm run build - npm test ``` + {% endraw %} ##### {% data variables.product.prodname_actions %} workflow for building with Node.js diff --git a/content/actions/publishing-packages/publishing-java-packages-with-gradle.md b/content/actions/publishing-packages/publishing-java-packages-with-gradle.md index 073df742e4..b70c3dc2fa 100644 --- a/content/actions/publishing-packages/publishing-java-packages-with-gradle.md +++ b/content/actions/publishing-packages/publishing-java-packages-with-gradle.md @@ -50,6 +50,7 @@ Each time you create a new release, you can trigger a workflow to publish your p You can define a new Maven repository in the publishing block of your _build.gradle_ file that points to your package repository. For example, if you were deploying to the Maven Central Repository through the OSSRH hosting project, your _build.gradle_ could specify a repository with the name `"OSSRH"`. {% raw %} + ```groovy copy plugins { ... @@ -71,6 +72,7 @@ publishing { } } ``` + {% endraw %} With this configuration, you can create a workflow that publishes your package to the Maven Central Repository by running the `gradle publish` command. In the deploy step, you’ll need to set environment variables for the username and password or token that you use to authenticate to the Maven repository. For more information, see "[AUTOTITLE](/actions/security-guides/encrypted-secrets)." @@ -122,6 +124,7 @@ You can define a new Maven repository in the publishing block of your _build.gra For example, if your organization is named "octocat" and your repository is named "hello-world", then the {% data variables.product.prodname_registry %} configuration in _build.gradle_ would look similar to the below example. {% raw %} + ```groovy copy plugins { ... @@ -143,6 +146,7 @@ publishing { } } ``` + {% endraw %} With this configuration, you can create a workflow that publishes your package to {% data variables.product.prodname_registry %} by running the `gradle publish` command. @@ -195,6 +199,7 @@ For example, if you deploy to the Central Repository through the OSSRH hosting p If your organization is named "octocat" and your repository is named "hello-world", then the configuration in _build.gradle_ would look similar to the below example. {% raw %} + ```groovy copy plugins { ... 
@@ -224,6 +229,7 @@ publishing { } } ``` + {% endraw %} With this configuration, you can create a workflow that publishes your package to both the Maven Central Repository and {% data variables.product.prodname_registry %} by running the `gradle publish` command. diff --git a/content/actions/publishing-packages/publishing-java-packages-with-maven.md b/content/actions/publishing-packages/publishing-java-packages-with-maven.md index 24fa2ff30e..b9fcd7a216 100644 --- a/content/actions/publishing-packages/publishing-java-packages-with-maven.md +++ b/content/actions/publishing-packages/publishing-java-packages-with-maven.md @@ -54,6 +54,7 @@ In this workflow, you can use the `setup-java` action. This action installs the For example, if you were deploying to the Maven Central Repository through the OSSRH hosting project, your _pom.xml_ could specify a distribution management repository with the `id` of `ossrh`. {% raw %} + ```xml copy ... @@ -66,6 +67,7 @@ For example, if you were deploying to the Maven Central Repository through the O ``` + {% endraw %} With this configuration, you can create a workflow that publishes your package to the Maven Central Repository by specifying the repository management `id` to the `setup-java` action. You’ll also need to provide environment variables that contain the username and password to authenticate to the repository. @@ -118,6 +120,7 @@ For a Maven-based project, you can make use of these settings by creating a dist For example, if your organization is named "octocat" and your repository is named "hello-world", then the {% data variables.product.prodname_registry %} configuration in _pom.xml_ would look similar to the below example. {% raw %} + ```xml copy ... @@ -130,6 +133,7 @@ For example, if your organization is named "octocat" and your repository is name ``` + {% endraw %} With this configuration, you can create a workflow that publishes your package to {% data variables.product.prodname_registry %} by making use of the automatically generated _settings.xml_. diff --git a/content/actions/quickstart.md b/content/actions/quickstart.md index 2fe15f43fd..a0ab4b7a74 100644 --- a/content/actions/quickstart.md +++ b/content/actions/quickstart.md @@ -51,6 +51,7 @@ The following example shows you how {% data variables.product.prodname_actions % ls {% raw %}${{ github.workspace }}{% endraw %} - run: echo "🍏 This job's status is {% raw %}${{ job.status }}{% endraw %}." ``` + 1. Scroll to the bottom of the page and select **Create a new branch for this commit and start a pull request**. Then, to create a pull request, click **Propose new file**. ![Screenshot of the "Commit new file" area of the page.](/assets/images/help/repository/actions-quickstart-commit-new-file.png) diff --git a/content/actions/security-guides/encrypted-secrets.md b/content/actions/security-guides/encrypted-secrets.md index 433ab762cd..850255b414 100644 --- a/content/actions/security-guides/encrypted-secrets.md +++ b/content/actions/security-guides/encrypted-secrets.md @@ -232,6 +232,7 @@ You can check which access policies are being applied to a secret in your organi To provide an action with a secret as an input or environment variable, you can use the `secrets` context to access secrets you've created in your repository. For more information, see "[AUTOTITLE](/actions/learn-github-actions/contexts)" and "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions)." 
{% raw %} + ```yaml steps: - name: Hello world action @@ -240,6 +241,7 @@ steps: env: # Or as an environment variable super_secret: ${{ secrets.SuperSecret }} ``` + {% endraw %} Secrets cannot be directly referenced in `if:` conditionals. Instead, consider setting secrets as job-level environment variables, then referencing the environment variables to conditionally run steps in the job. For more information, see "[AUTOTITLE](/actions/learn-github-actions/contexts#context-availability)" and [`jobs..steps[*].if`](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsif). @@ -253,6 +255,7 @@ If you must pass secrets within a command line, then enclose them within the pro ### Example using Bash {% raw %} + ```yaml steps: - shell: bash @@ -261,11 +264,13 @@ steps: run: | example-command "$SUPER_SECRET" ``` + {% endraw %} ### Example using PowerShell {% raw %} + ```yaml steps: - shell: pwsh @@ -274,11 +279,13 @@ steps: run: | example-command "$env:SUPER_SECRET" ``` + {% endraw %} ### Example using Cmd.exe {% raw %} + ```yaml steps: - shell: cmd @@ -287,6 +294,7 @@ steps: run: | example-command "%SUPER_SECRET%" ``` + {% endraw %} ## Limits for secrets diff --git a/content/actions/security-guides/security-hardening-for-github-actions.md b/content/actions/security-guides/security-hardening-for-github-actions.md index f0f57bc961..135ac85206 100644 --- a/content/actions/security-guides/security-hardening-for-github-actions.md +++ b/content/actions/security-guides/security-hardening-for-github-actions.md @@ -75,6 +75,7 @@ The following sections explain how you can help mitigate the risk of script inje A script injection attack can occur directly within a workflow's inline script. In the following example, an action uses an expression to test the validity of a pull request title, but also adds the risk of script injection: {% raw %} + ``` - name: Check PR title run: | @@ -87,6 +88,7 @@ A script injection attack can occur directly within a workflow's inline script. exit 1 fi ``` + {% endraw %} This example is vulnerable to script injection because the `run` command executes within a temporary shell script on the runner. Before the shell script is run, the expressions inside {% raw %}`${{ }}`{% endraw %} are evaluated and then substituted with the resulting values, which can make it vulnerable to shell command injection. @@ -113,11 +115,13 @@ There are a number of different approaches available to help you mitigate the ri The recommended approach is to create an action that processes the context value as an argument. 
This approach is not vulnerable to the injection attack, as the context value is not used to generate a shell script, but is instead passed to the action as an argument: {% raw %} + ``` uses: fakeaction/checktitle@v3 with: title: ${{ github.event.pull_request.title }} ``` + {% endraw %} ### Using an intermediate environment variable @@ -127,6 +131,7 @@ For inline scripts, the preferred approach to handling untrusted input is to set The following example uses Bash to process the `github.event.pull_request.title` value as an environment variable: {% raw %} + ``` - name: Check PR title env: @@ -140,6 +145,7 @@ The following example uses Bash to process the `github.event.pull_request.title` exit 1 fi ``` + {% endraw %} In this example, the attempted script injection is unsuccessful, which is reflected by the following lines in the log: @@ -244,11 +250,13 @@ Workflows triggered using the `pull_request` event have read-only permissions an - For a custom action, the risk can vary depending on how a program is using the secret it obtained from the argument: {% raw %} + ``` uses: fakeaction/publish@v3 with: key: ${{ secrets.PUBLISH_KEY }} ``` + {% endraw %} Although {% data variables.product.prodname_actions %} scrubs secrets from memory that are not referenced in the workflow (or an included action), the `GITHUB_TOKEN` and any referenced secrets can be harvested by a determined attacker. diff --git a/content/actions/using-containerized-services/about-service-containers.md b/content/actions/using-containerized-services/about-service-containers.md index 3a4c2a5a4f..b39fdd1906 100644 --- a/content/actions/using-containerized-services/about-service-containers.md +++ b/content/actions/using-containerized-services/about-service-containers.md @@ -51,6 +51,7 @@ You can use the `services` keyword to create service containers that are part of This example creates a service called `redis` in a job called `container-job`. The Docker host in this example is the `node:16-bullseye` container. {% raw %} + ```yaml copy name: Redis container example on: push @@ -70,6 +71,7 @@ jobs: # Docker Hub image image: redis ``` + {% endraw %} ## Mapping Docker host and service container ports @@ -93,6 +95,7 @@ When you specify the Docker host port but not the container port, the container This example maps the service container `redis` port 6379 to the Docker host port 6379. {% raw %} + ```yaml copy name: Redis Service Example on: push @@ -114,6 +117,7 @@ jobs: # Opens tcp port 6379 on the host and service container - 6379:6379 ``` + {% endraw %} ## Further reading diff --git a/content/actions/using-github-hosted-runners/customizing-github-hosted-runners.md b/content/actions/using-github-hosted-runners/customizing-github-hosted-runners.md index 4042227c3b..0f56c3cccb 100644 --- a/content/actions/using-github-hosted-runners/customizing-github-hosted-runners.md +++ b/content/actions/using-github-hosted-runners/customizing-github-hosted-runners.md @@ -73,6 +73,7 @@ jobs: The following example demonstrates how to use [Chocolatey](https://community.chocolatey.org/packages) to install the {% data variables.product.prodname_dotcom %} CLI as part of a job. 
{% raw %} + ```yaml name: Build on Windows on: push @@ -83,4 +84,5 @@ jobs: - run: choco install gh - run: gh version ``` + {% endraw %} diff --git a/content/actions/using-workflows/about-workflows.md b/content/actions/using-workflows/about-workflows.md index 1f13edcbea..6eb8edef45 100644 --- a/content/actions/using-workflows/about-workflows.md +++ b/content/actions/using-workflows/about-workflows.md @@ -62,6 +62,7 @@ If your workflows use sensitive data, such as passwords or certificates, you can This example job demonstrates how to reference an existing secret as an environment variable, and send it as a parameter to an example command. {% raw %} + ```yaml jobs: example-job: @@ -73,6 +74,7 @@ jobs: run: | example-command "$super_secret" ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/security-guides/encrypted-secrets)." diff --git a/content/actions/using-workflows/caching-dependencies-to-speed-up-workflows.md b/content/actions/using-workflows/caching-dependencies-to-speed-up-workflows.md index fb926f5a1f..f1d1c2357e 100644 --- a/content/actions/using-workflows/caching-dependencies-to-speed-up-workflows.md +++ b/content/actions/using-workflows/caching-dependencies-to-speed-up-workflows.md @@ -80,17 +80,20 @@ You cannot change the contents of an existing cache. Instead, you can create a n ~/.gradle/caches ~/.gradle/wrapper ``` + - You can specify either directories or single files, and glob patterns are supported. - You can specify absolute paths, or paths relative to the workspace directory. - `restore-keys`: **Optional** A string containing alternative restore keys, with each restore key placed on a new line. If no cache hit occurs for `key`, these restore keys are used sequentially in the order provided to find and restore a cache. For example: {% raw %} + ```yaml restore-keys: | npm-feature-${{ hashFiles('package-lock.json') }} npm-feature- npm- ``` + {% endraw %} - `enableCrossOsArchive`: **Optional** A boolean value that when enabled, allows Windows runners to save or restore caches independent of the operating system the cache was created on. If this parameter is not set, it defaults to `false`. For more information, see [Cross OS cache](https://github.com/actions/cache/blob/main/tips-and-workarounds.md#cross-os-cache) in the Actions Cache documentation. @@ -165,9 +168,11 @@ Using expressions to create a `key` allows you to automatically create a new cac For example, you can create a `key` using an expression that calculates the hash of an npm `package-lock.json` file. So, when the dependencies that make up the `package-lock.json` file change, the cache key changes and a new cache is automatically created. {% raw %} + ```yaml npm-${{ hashFiles('package-lock.json') }} ``` + {% endraw %} {% data variables.product.prodname_dotcom %} evaluates the expression `hash "package-lock.json"` to derive the final `key`. @@ -200,23 +205,27 @@ Cache version is a way to stamp a cache with metadata of the `path` and the comp ### Example using multiple restore keys {% raw %} + ```yaml restore-keys: | npm-feature-${{ hashFiles('package-lock.json') }} npm-feature- npm- ``` + {% endraw %} The runner evaluates the expressions, which resolve to these `restore-keys`: {% raw %} + ```yaml restore-keys: | npm-feature-d5ea0750 npm-feature- npm- ``` + {% endraw %} The restore key `npm-feature-` matches any key that starts with the string `npm-feature-`. For example, both of the keys `npm-feature-fd3052de` and `npm-feature-a9b253ff` match the restore key. 
The cache with the most recent creation date would be used. The keys in this example are searched in the following order: diff --git a/content/actions/using-workflows/creating-starter-workflows-for-your-organization.md b/content/actions/using-workflows/creating-starter-workflows-for-your-organization.md index 226006e0c5..640088c126 100644 --- a/content/actions/using-workflows/creating-starter-workflows-for-your-organization.md +++ b/content/actions/using-workflows/creating-starter-workflows-for-your-organization.md @@ -68,7 +68,9 @@ This procedure demonstrates how to create a starter workflow and metadata file. - name: Run a one-line script run: echo Hello from Octo Organization ``` + 4. Create a metadata file inside the `workflow-templates` directory. The metadata file must have the same name as the workflow file, but instead of the `.yml` extension, it must be appended with `.properties.json`. For example, this file named `octo-organization-ci.properties.json` contains the metadata for a workflow file named `octo-organization-ci.yml`: + ```json copy { "name": "Octo Organization Workflow", @@ -84,6 +86,7 @@ This procedure demonstrates how to create a starter workflow and metadata file. ] } ``` + - `name` - **Required.** The name of the workflow. This is displayed in the list of available workflows. - `description` - **Required.** The description of the workflow. This is displayed in the list of available workflows. - `iconName` - **Optional.** Specifies an icon for the workflow that is displayed in the list of workflows. `iconName` can one of the following types: diff --git a/content/actions/using-workflows/events-that-trigger-workflows.md b/content/actions/using-workflows/events-that-trigger-workflows.md index 2258368210..844c85edf5 100644 --- a/content/actions/using-workflows/events-that-trigger-workflows.md +++ b/content/actions/using-workflows/events-that-trigger-workflows.md @@ -1046,11 +1046,13 @@ on: **Note**: When pushing multi-architecture container images, this event occurs once per manifest, so you might observe your workflow triggering multiple times. To mitigate this, and only run your workflow job for the event that contains the actual image tag information, use a conditional: {% raw %} + ```yaml jobs: job_name: if: ${{ github.event.registry_package.package_version.container_metadata.tag.name != '' }} ``` + {% endraw %} {% endnote %} diff --git a/content/actions/using-workflows/reusing-workflows.md b/content/actions/using-workflows/reusing-workflows.md index 2ce1cd7a9e..c676a97ca1 100644 --- a/content/actions/using-workflows/reusing-workflows.md +++ b/content/actions/using-workflows/reusing-workflows.md @@ -110,6 +110,7 @@ You can define inputs and secrets, which can be passed from the caller workflow 1. In the reusable workflow, use the `inputs` and `secrets` keywords to define inputs or secrets that will be passed from a caller workflow. {% raw %} + ```yaml on: workflow_call: @@ -121,6 +122,7 @@ You can define inputs and secrets, which can be passed from the caller workflow envPAT: required: true ``` + {% endraw %} For details of the syntax for defining inputs and secrets, see [`on.workflow_call.inputs`](/actions/using-workflows/workflow-syntax-for-github-actions#onworkflow_callinputs) and [`on.workflow_call.secrets`](/actions/using-workflows/workflow-syntax-for-github-actions#onworkflow_callsecrets). 
{% ifversion actions-inherit-secrets-reusable-workflows %} @@ -136,6 +138,7 @@ You can define inputs and secrets, which can be passed from the caller workflow {%- endif %} {% raw %} + ```yaml jobs: reusable_workflow_job: @@ -147,6 +150,7 @@ You can define inputs and secrets, which can be passed from the caller workflow repo-token: ${{ secrets.envPAT }} configuration-path: ${{ inputs.config-path }} ``` + {% endraw %} In the example above, `envPAT` is an environment secret that's been added to the `production` environment. This environment is therefore referenced within the job. @@ -165,6 +169,7 @@ You can define inputs and secrets, which can be passed from the caller workflow This reusable workflow file named `workflow-B.yml` (we'll refer to this later in the [example caller workflow](#example-caller-workflow)) takes an input string and a secret from the caller workflow and uses them in an action. {% raw %} + ```yaml copy name: Reusable workflow example @@ -187,6 +192,7 @@ jobs: repo-token: ${{ secrets.token }} configuration-path: ${{ inputs.config-path }} ``` + {% endraw %} ## Calling a reusable workflow @@ -217,6 +223,7 @@ A matrix strategy lets you use variables in a single job definition to automatic This example job below calls a reusable workflow and references the matrix context by defining the variable `target` with the values `[dev, stage, prod]`. It will run three jobs, one for each value in the variable. {% raw %} + ```yaml copy jobs: ReuseableMatrixJobForDeployment: @@ -227,6 +234,7 @@ jobs: with: target: ${{ matrix.target }} ``` + {% endraw %} {% endif %} @@ -265,6 +273,7 @@ When you call a reusable workflow, you can only use the following keywords in th This workflow file calls two workflow files. The second of these, `workflow-B.yml` (shown in the [example reusable workflow](#example-reusable-workflow)), is passed an input (`config-path`) and a secret (`token`). {% raw %} + ```yaml copy name: Call a reusable workflow @@ -287,6 +296,7 @@ jobs: secrets: token: ${{ secrets.GITHUB_TOKEN }} ``` + {% endraw %} {% ifversion nested-reusable-workflow %} @@ -297,6 +307,7 @@ You can connect a maximum of four levels of workflows - that is, the top-level c From within a reusable workflow you can call another reusable workflow. {% raw %} + ```yaml copy name: Reusable workflow @@ -307,6 +318,7 @@ jobs: call-another-reusable: uses: octo-org/example-repo/.github/workflows/another-reusable.yml@v1 ``` + {% endraw %} ### Passing secrets to nested workflows @@ -316,6 +328,7 @@ You can use `jobs..secrets` in a calling workflow to pass named secrets In the following example, workflow A passes all of its secrets to workflow B, by using the `inherit` keyword, but workflow B only passes one secret to workflow C. Any of the other secrets passed to workflow B are not available to workflow C. {% raw %} + ```yaml jobs: workflowA-calls-workflowB: @@ -330,6 +343,7 @@ jobs: secrets: envPAT: ${{ secrets.envPAT }} # pass just this secret ``` + {% endraw %} ### Access and permissions @@ -351,6 +365,7 @@ That means if the last successful completing reusable workflow sets an empty str The following reusable workflow has a single job containing two steps. In each of these steps we set a single word as the output: "hello" and "world." In the `outputs` section of the job, we map these step outputs to job outputs called: `output1` and `output2`. 
In the `on.workflow_call.outputs` section we then define two outputs for the workflow itself, one called `firstword` which we map to `output1`, and one called `secondword` which we map to `output2`. {% raw %} + ```yaml copy name: Reusable workflow @@ -387,11 +402,13 @@ jobs: run: echo "::set-output name=secondword::world" {%- endif %}{% raw %} ``` + {% endraw %} We can now use the outputs in the caller workflow, in the same way you would use the outputs from a job within the same workflow. We reference the outputs using the names defined at the workflow level in the reusable workflow: `firstword` and `secondword`. In this workflow, `job1` calls the reusable workflow and `job2` prints the outputs from the reusable workflow ("hello world") to standard output in the workflow log. {% raw %} + ```yaml copy name: Call a reusable workflow and use its outputs @@ -408,6 +425,7 @@ jobs: steps: - run: echo ${{ needs.job1.outputs.firstword }} ${{ needs.job1.outputs.secondword }} ``` + {% endraw %} For more information on using job outputs, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idoutputs)." diff --git a/content/actions/using-workflows/workflow-commands-for-github-actions.md b/content/actions/using-workflows/workflow-commands-for-github-actions.md index 0739066a54..55ed36594b 100644 --- a/content/actions/using-workflows/workflow-commands-for-github-actions.md +++ b/content/actions/using-workflows/workflow-commands-for-github-actions.md @@ -256,6 +256,7 @@ Creates a warning message and prints the message to the log. {% data reusables.a ```bash copy echo "::warning file=app.js,line=1,col=5,endColumn=7::Missing semicolon" ``` + {% endbash %} {% powershell %} @@ -524,6 +525,7 @@ jobs: echo "::add-mask::$RETRIEVED_SECRET" echo "We retrieved our masked secret: $RETRIEVED_SECRET" ``` + {% endbash %} {% powershell %} @@ -563,6 +565,7 @@ jobs: echo "::add-mask::$Retrieved_Secret" echo "We retrieved our masked secret: $Retrieved_Secret" ``` + {% endpowershell %} ## Stopping and starting workflow commands @@ -603,6 +606,7 @@ jobs: echo "::$stopMarker::" echo '::warning:: This is a warning again, because stop-commands has been turned off.' ``` + {% endbash %} {% powershell %} @@ -715,6 +719,7 @@ This example uses JavaScript to run the `save-state` command. The resulting envi ```javascript copy console.log('::save-state name=processID::12345') ``` + {% endif %} The `STATE_processID` variable is then exclusively available to the cleanup script running under the `main` action. This example runs in `main` and uses JavaScript to display the value assigned to the `STATE_processID` environment variable: @@ -888,6 +893,7 @@ Sets a step's output parameter. 
Note that the step will need an `id` to be defin ```bash copy echo "{name}={value}" >> "$GITHUB_OUTPUT" ``` + {% endbash %} {% powershell %} @@ -1087,6 +1093,7 @@ Prepends a directory to the system `PATH` variable and automatically makes it av ```bash copy echo "{path}" >> $GITHUB_PATH ``` + {% endbash %} {% powershell %} diff --git a/content/actions/using-workflows/workflow-syntax-for-github-actions.md b/content/actions/using-workflows/workflow-syntax-for-github-actions.md index 213982f8c4..7f9a3bccda 100644 --- a/content/actions/using-workflows/workflow-syntax-for-github-actions.md +++ b/content/actions/using-workflows/workflow-syntax-for-github-actions.md @@ -37,9 +37,11 @@ This value can include expressions and can reference the [`github`](/actions/lea ### Example of `run-name` {% raw %} + ```yaml run-name: Deploy to ${{ inputs.deploy_target }} by @${{ github.actor }} ``` + {% endraw %} {% endif %} @@ -88,6 +90,7 @@ If a caller workflow passes an input that is not specified in the called workflo ### Example of `on.workflow_call.inputs` {% raw %} + ```yaml on: workflow_call: @@ -106,6 +109,7 @@ jobs: - name: Print the input name to STDOUT run: echo The username is ${{ inputs.username }} ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/using-workflows/reusing-workflows)." @@ -123,6 +127,7 @@ In the example below, two outputs are defined for this reusable workflow: `workf ### Example of `on.workflow_call.outputs` {% raw %} + ```yaml on: workflow_call: @@ -135,6 +140,7 @@ on: description: "The second job output" value: ${{ jobs.my_job.outputs.job_output2 }} ``` + {% endraw %} For information on how to reference a job output, see [`jobs..outputs`](#jobsjob_idoutputs). For more information, see "[AUTOTITLE](/actions/using-workflows/reusing-workflows)." @@ -156,6 +162,7 @@ If a caller workflow passes a secret that is not specified in the called workflo ### Example of `on.workflow_call.secrets` {% raw %} + ```yaml on: workflow_call: @@ -181,6 +188,7 @@ jobs: secrets: token: ${{ secrets.access-token }} ``` + {% endraw %} ## `on.workflow_call.secrets.` @@ -326,6 +334,7 @@ You can run an unlimited number of steps as long as you are within the workflow ### Example of `jobs..steps` {% raw %} + ```yaml name: Greeting from Mona @@ -345,6 +354,7 @@ jobs: run: | echo $MY_VAR $FIRST_NAME $MIDDLE_NAME $LAST_NAME. ``` + {% endraw %} ## `jobs..steps[*].id` @@ -388,6 +398,7 @@ Secrets cannot be directly referenced in `if:` conditionals. Instead, consider s If a secret has not been set, the return value of an expression referencing the secret (such as {% raw %}`${{ secrets.SuperSecret }}`{% endraw %} in the example) will be an empty string. {% raw %} + ```yaml name: Run a step if a secret has been set on: push @@ -402,6 +413,7 @@ jobs: - if: ${{ env.super_secret == '' }} run: echo 'This step will only run if the secret does not have a value set.' ``` + {% endraw %} For more information, see "[AUTOTITLE](/actions/learn-github-actions/contexts#context-availability)" and "[AUTOTITLE](/actions/security-guides/encrypted-secrets)." @@ -513,6 +525,7 @@ jobs: - name: My first step uses: docker://ghcr.io/OWNER/IMAGE_NAME ``` + {% endif %} ### Example: Using a Docker public registry action @@ -714,6 +727,7 @@ A `string` that defines the inputs for a Docker container. {% data variables.pro ### Example of `jobs..steps[*].with.args` {% raw %} + ```yaml steps: - name: Explain why this job ran @@ -722,6 +736,7 @@ steps: entrypoint: /bin/echo args: The ${{ github.event_name }} event triggered this step. 
``` + {% endraw %} The `args` are used in place of the `CMD` instruction in a `Dockerfile`. If you use `CMD` in your `Dockerfile`, use the guidelines ordered by preference: @@ -757,6 +772,7 @@ Public actions may specify expected variables in the README file. If you are set ### Example of `jobs..steps[*].env` {% raw %} + ```yaml steps: - name: My first action @@ -765,6 +781,7 @@ steps: FIRST_NAME: Mona LAST_NAME: Octocat ``` + {% endraw %} ## `jobs..steps[*].continue-on-error` @@ -840,6 +857,7 @@ Prevents a workflow run from failing when a job fails. Set to `true` to allow a You can allow specific jobs in a job matrix to fail without failing the workflow run. For example, if you wanted to only allow an experimental job with `node` set to `15` to fail without failing the workflow run. {% raw %} + ```yaml runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} @@ -854,6 +872,7 @@ strategy: os: ubuntu-latest experimental: true ``` + {% endraw %} ## `jobs..container` @@ -927,6 +946,7 @@ The Docker image to use as the service container to run the action. The value ca ### Example of `jobs..services..credentials` {% raw %} + ```yaml services: myservice1: @@ -940,6 +960,7 @@ services: username: ${{ secrets.DOCKER_USER }} password: ${{ secrets.DOCKER_PASSWORD }} ``` + {% endraw %} ## `jobs..services..env` @@ -1026,6 +1047,7 @@ Any secrets that you pass must match the names defined in the called workflow. ### Example of `jobs..secrets` {% raw %} + ```yaml jobs: call-workflow: @@ -1033,6 +1055,7 @@ jobs: secrets: access-token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} ``` + {% endraw %} {% ifversion actions-inherit-secrets-reusable-workflows %} diff --git a/content/admin/code-security/managing-github-advanced-security-for-your-enterprise/enabling-github-advanced-security-for-your-enterprise.md b/content/admin/code-security/managing-github-advanced-security-for-your-enterprise/enabling-github-advanced-security-for-your-enterprise.md index d4ca74c3c4..e32151dab6 100644 --- a/content/admin/code-security/managing-github-advanced-security-for-your-enterprise/enabling-github-advanced-security-for-your-enterprise.md +++ b/content/admin/code-security/managing-github-advanced-security-for-your-enterprise/enabling-github-advanced-security-for-your-enterprise.md @@ -78,42 +78,55 @@ For example, you can enable any {% data variables.product.prodname_GH_advanced_s 1. Enable features for {% data variables.product.prodname_GH_advanced_security %}. - To enable {% data variables.product.prodname_code_scanning_caps %}, enter the following commands. + ```shell ghe-config app.minio.enabled true ghe-config app.code-scanning.enabled true ``` + - To enable {% data variables.product.prodname_secret_scanning_caps %}, enter the following command. + ```shell ghe-config app.secret-scanning.enabled true ``` + - To enable the dependency graph, enter the following {% ifversion ghes %}command{% else %}commands{% endif %}. {% ifversion ghes %}```shell ghe-config app.dependency-graph.enabled true + ``` {% else %}```shell ghe-config app.github.dependency-graph-enabled true ghe-config app.github.vulnerability-alerting-and-settings-enabled true ```{% endif %} + 2. Optionally, disable features for {% data variables.product.prodname_GH_advanced_security %}. - To disable {% data variables.product.prodname_code_scanning %}, enter the following commands. 
+ ```shell ghe-config app.minio.enabled false ghe-config app.code-scanning.enabled false ``` + - To disable {% data variables.product.prodname_secret_scanning %}, enter the following command. + ```shell ghe-config app.secret-scanning.enabled false ``` + - To disable the dependency graph, enter the following {% ifversion ghes %}command{% else %}commands{% endif %}. {% ifversion ghes %}```shell ghe-config app.dependency-graph.enabled false + ``` {% else %}```shell ghe-config app.github.dependency-graph-enabled false ghe-config app.github.vulnerability-alerting-and-settings-enabled false ```{% endif %} + 3. Apply the configuration. + ```shell ghe-config-apply ``` diff --git a/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/configuring-dependabot-to-work-with-limited-internet-access.md b/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/configuring-dependabot-to-work-with-limited-internet-access.md index 391a961d21..026ab757e3 100644 --- a/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/configuring-dependabot-to-work-with-limited-internet-access.md +++ b/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/configuring-dependabot-to-work-with-limited-internet-access.md @@ -40,6 +40,7 @@ Before configuring {% data variables.product.prodname_dependabot %}, install Doc docker pull ghcr.io/dependabot/dependabot-updater-github-actions:VERSION@SHA docker pull ghcr.io/dependabot/dependabot-updater-npm:VERSION@SHA ``` + {%- endif %} {% note %} diff --git a/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/enabling-the-dependency-graph-for-your-enterprise.md b/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/enabling-the-dependency-graph-for-your-enterprise.md index 84d65b401d..53ed2de520 100644 --- a/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/enabling-the-dependency-graph-for-your-enterprise.md +++ b/content/admin/code-security/managing-supply-chain-security-for-your-enterprise/enabling-the-dependency-graph-for-your-enterprise.md @@ -42,6 +42,7 @@ If {% data variables.location.product_location %} uses clustering, you cannot en 1. In the administrative shell, enable the dependency graph on {% data variables.location.product_location %}: {% ifversion ghes %}```shell ghe-config app.dependency-graph.enabled true + ``` {% else %}```shell ghe-config app.github.dependency-graph-enabled true diff --git a/content/admin/configuration/administering-your-instance-from-the-management-console/accessing-the-management-console.md b/content/admin/configuration/administering-your-instance-from-the-management-console/accessing-the-management-console.md index ee09202988..69fcd197d8 100644 --- a/content/admin/configuration/administering-your-instance-from-the-management-console/accessing-the-management-console.md +++ b/content/admin/configuration/administering-your-instance-from-the-management-console/accessing-the-management-console.md @@ -24,8 +24,10 @@ The first time that you access the {% data variables.enterprise.management_conso ## Accessing the {% data variables.enterprise.management_console %} as an unauthenticated user 1. 
Visit this URL in your browser, replacing `hostname` with your actual {% data variables.product.prodname_ghe_server %} hostname or IP address: + ```shell http(s)://HOSTNAME/setup ``` + {% data reusables.enterprise_management_console.type-management-console-password %} {% data reusables.enterprise_management_console.click-continue-authentication %} diff --git a/content/admin/configuration/configuring-network-settings/configuring-an-outbound-web-proxy-server.md b/content/admin/configuration/configuring-network-settings/configuring-an-outbound-web-proxy-server.md index 31a4d27052..511df3a291 100644 --- a/content/admin/configuration/configuring-network-settings/configuring-an-outbound-web-proxy-server.md +++ b/content/admin/configuration/configuring-network-settings/configuring-an-outbound-web-proxy-server.md @@ -65,4 +65,5 @@ Your instance validates the hostnames for proxy exclusion using the list of IANA ```shell ghe-config noproxy.exception-tld-list "COMMA-SEPARATED-TLD-LIST" ``` + {% data reusables.enterprise.apply-configuration %} diff --git a/content/admin/configuration/configuring-network-settings/configuring-built-in-firewall-rules.md b/content/admin/configuration/configuring-network-settings/configuring-built-in-firewall-rules.md index de550031ab..964dd31036 100644 --- a/content/admin/configuration/configuring-network-settings/configuring-built-in-firewall-rules.md +++ b/content/admin/configuration/configuring-network-settings/configuring-built-in-firewall-rules.md @@ -30,6 +30,7 @@ We do not recommend customizing UFW as it can complicate some troubleshooting is {% data reusables.enterprise_installation.ssh-into-instance %} 2. To view the default firewall rules, use the `sudo ufw status` command. You should see output similar to this: + ```shell $ sudo ufw status > Status: active @@ -67,10 +68,13 @@ We do not recommend customizing UFW as it can complicate some troubleshooting is 1. Configure a custom firewall rule. 2. Check the status of each new rule with the `status numbered` command. + ```shell sudo ufw status numbered ``` + 3. To back up your custom firewall rules, use the `cp`command to move the rules to a new file. + ```shell sudo cp -r /etc/ufw ~/ufw.backup ``` @@ -89,14 +93,19 @@ If something goes wrong after you change the firewall rules, you can reset the r {% data reusables.enterprise_installation.ssh-into-instance %} 2. To restore the previous backup rules, copy them back to the firewall with the `cp` command. + ```shell sudo cp -f ~/ufw.backup/*rules /etc/ufw ``` + 3. Restart the firewall with the `systemctl` command. + ```shell sudo systemctl restart ufw ``` + 4. Confirm that the rules are back to their defaults with the `ufw status` command. + ```shell $ sudo ufw status > Status: active diff --git a/content/admin/configuration/configuring-your-enterprise/command-line-utilities.md b/content/admin/configuration/configuring-your-enterprise/command-line-utilities.md index 58f337466e..7b63527169 100644 --- a/content/admin/configuration/configuring-your-enterprise/command-line-utilities.md +++ b/content/admin/configuration/configuring-your-enterprise/command-line-utilities.md @@ -33,6 +33,7 @@ $ ghe-announce -u {% ifversion ghe-announce-dismiss %} To allow each user to dismiss the announcement for themselves, use the `-d` flag. 
+ ```shell # Sets a user-dismissible message that's visible to everyone $ ghe-announce -d -s MESSAGE @@ -41,6 +42,7 @@ $ ghe-announce -d -s MESSAGE $ ghe-announce -u > Removed the announcement message, which was user dismissible: MESSAGE ``` + {% endif %} You can also set an announcement banner using the enterprise settings on {% data variables.product.product_name %}. For more information, see "[AUTOTITLE](/admin/user-management/managing-users-in-your-enterprise/customizing-user-messages-for-your-enterprise#creating-a-global-announcement-banner)." @@ -88,6 +90,7 @@ This utility cleans up a variety of caches that might potentially take up extra ```shell ghe-cleanup-caches ``` + ### ghe-cleanup-settings This utility wipes all existing {% data variables.enterprise.management_console %} settings. @@ -114,6 +117,7 @@ $ ghe-config core.github-hostname URL $ ghe-config -l # Lists all the configuration values ``` + Allows you to find the universally unique identifier (UUID) of your node in `cluster.conf`. ```shell @@ -157,6 +161,7 @@ ghe-dbconsole This utility returns a summary of Elasticsearch indexes in CSV format. Print an index summary with a header row to `STDOUT`: + ```shell $ ghe-es-index-status -do > warning: parser/current is loading parser/ruby23, which recognizes @@ -424,12 +429,14 @@ ghe-ssh-check-host-keys ``` If a leaked host key is found the utility exits with status `1` and a message: + ```shell > One or more of your SSH host keys were found in the blacklist. > Please reset your host keys using ghe-ssh-roll-host-keys. ``` If a leaked host key was not found, the utility exits with status `0` and a message: + ```shell > The SSH host keys were not found in the SSH host key blacklist. > No additional steps are needed/recommended at this time. @@ -568,6 +575,7 @@ ghe-webhook-logs -f -a YYYY-MM-DD The date format should be `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS`, or `YYYY-MM-DD HH:MM:SS (+/-) HH:M`. 
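For example, a minimal sketch using the `-f` and `-a` options shown above (the timestamp is illustrative, and the quoting assumes a timestamp that contains a space):

```shell
# Sketch: list failed hook deliveries recorded on or after an example date and time
ghe-webhook-logs -f -a "2023-06-01 00:00:00"
```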
To show the full hook payload, result, and any exceptions for the delivery: + ```shell ghe-webhook-logs -g DELIVERY_GUID ``` @@ -639,21 +647,25 @@ By default, the command creates the tarball in _/tmp_, but you can also have it {% data reusables.enterprise.bundle-utility-period-argument-availability-note %} To create a standard bundle: + ```shell ssh -p 122 admin@HOSTNAME -- 'ghe-cluster-support-bundle -o' > cluster-support-bundle.tgz ``` To create a standard bundle including data from the last 3 hours: + ```shell ssh -p 122 admin@HOSTNAME -- "ghe-cluster-support-bundle -p {% ifversion bundle-cli-syntax-no-quotes %}3hours {% elsif ghes < 3.9 %}'3 hours' {% endif %} -o" > support-bundle.tgz ``` To create a standard bundle including data from the last 2 days: + ```shell ssh -p 122 admin@HOSTNAME -- "ghe-cluster-support-bundle -p {% ifversion bundle-cli-syntax-no-quotes %}2days {% elsif ghes < 3.9 %}'2 days' {% endif %} -o" > support-bundle.tgz ``` To create a standard bundle including data from the last 4 days and 8 hours: + ```shell ssh -p 122 admin@HOSTNAME -- "ghe-cluster-support-bundle -p {% ifversion bundle-cli-syntax-no-quotes %}4days8hours {% elsif ghes < 3.9 %}'4 days 8 hours' {% endif %} -o" > support-bundle.tgz ``` @@ -665,11 +677,13 @@ ssh -p 122 admin@HOSTNAME -- ghe-cluster-support-bundle -x -o' > cluster-support ``` To send a bundle to {% data variables.contact.github_support %}: + ```shell ssh -p 122 admin@HOSTNAME -- 'ghe-cluster-support-bundle -u' ``` To send a bundle to {% data variables.contact.github_support %} and associate the bundle with a ticket: + ```shell ssh -p 122 admin@HOSTNAME -- 'ghe-cluster-support-bundle -t TICKET_ID' ``` @@ -683,11 +697,13 @@ ghe-dpages ``` To show a summary of repository location and health: + ```shell ghe-dpages status ``` To evacuate a {% data variables.product.prodname_pages %} storage service before evacuating a cluster node: + ```shell ghe-dpages evacuate pages-server-UUID ``` @@ -709,6 +725,7 @@ ghe-spokesctl routes ``` To evacuate storage services on a cluster node: + ```shell ghe-spokesctl server set evacuating git-server-UUID ``` @@ -983,6 +1000,7 @@ For more information, please see our guides on [migrating data to and from your ### git-import-detect Given a URL, detect which type of source control management system is at the other end. During a manual import this is likely already known, but this can be very useful in automated scripts. + ```shell git-import-detect ``` @@ -990,6 +1008,7 @@ git-import-detect ### git-import-hg-raw This utility imports a Mercurial repository to this Git repository. For more information, see "[AUTOTITLE](/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell)." + ```shell git-import-hg-raw ``` @@ -997,6 +1016,7 @@ git-import-hg-raw ### git-import-svn-raw This utility imports Subversion history and file data into a Git branch. This is a straight copy of the tree, ignoring any trunk or branch distinction. For more information, see "[AUTOTITLE](/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell)." + ```shell git-import-svn-raw ``` @@ -1004,6 +1024,7 @@ git-import-svn-raw ### git-import-tfs-raw This utility imports from Team Foundation Version Control (TFVC). 
For more information, see "[AUTOTITLE](/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell)."
+
```shell
git-import-tfs-raw
```
@@ -1011,6 +1032,7 @@ git-import-tfs-raw
### git-import-rewrite
This utility rewrites the imported repository. This gives you a chance to rename authors and, for Subversion and TFVC, produces Git branches based on folders. For more information, see "[AUTOTITLE](/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell)."
+
```shell
git-import-rewrite
```
@@ -1047,31 +1069,37 @@ By default, the command creates the tarball in _/tmp_, but you can also have it
{% data reusables.enterprise.bundle-utility-period-argument-availability-note %}
To create a standard bundle:
+
```shell
ssh -p 122 admin@HOSTNAME -- 'ghe-support-bundle -o' > support-bundle.tgz
```
To create a standard bundle including data from the last 3 hours:
+
```shell
ssh -p 122 admin@HOSTNAME -- "ghe-support-bundle -p {% ifversion bundle-cli-syntax-no-quotes %}3hours {% elsif ghes < 3.9 %}'3 hours' {% endif %} -o" > support-bundle.tgz
```
To create a standard bundle including data from the last 2 days:
+
```shell
ssh -p 122 admin@HOSTNAME -- "ghe-support-bundle -p {% ifversion bundle-cli-syntax-no-quotes %}2days {% elsif ghes < 3.9 %}'2 days' {% endif %} -o" > support-bundle.tgz
```
To create a standard bundle including data from the last 4 days and 8 hours:
+
```shell
ssh -p 122 admin@HOSTNAME -- "ghe-support-bundle -p {% ifversion bundle-cli-syntax-no-quotes %}4days8hours {% elsif ghes < 3.9 %}'4 days 8 hours' {% endif %} -o" > support-bundle.tgz
```
To create an extended bundle including data from the last 8 days:
+
```shell
ssh -p 122 admin@HOSTNAME -- 'ghe-support-bundle -x -o' > support-bundle.tgz
```
To send a bundle to {% data variables.contact.github_support %}:
+
```shell
ssh -p 122 admin@HOSTNAME -- 'ghe-support-bundle -u'
```
@@ -1087,11 +1115,13 @@ ssh -p 122 admin@HOSTNAME -- 'ghe-support-bundle -t TICKET_ID'
This utility sends information from your appliance to {% data variables.product.prodname_enterprise %} support. You can either specify a local file, or provide a stream of up to 100MB of data via `STDIN`. The uploaded data can optionally be associated with a support ticket.
To send a file to {% data variables.contact.github_support %} and associate the file with a ticket:
+
```shell
ghe-support-upload -f FILE_PATH -t TICKET_ID
```
To upload data via `STDIN` and associate the data with a ticket:
+
```shell
ghe-repl-status -vv | ghe-support-upload -t TICKET_ID -d "Verbose Replication Status"
```
@@ -1143,11 +1173,13 @@ ssh -p 122 admin@HOSTNAME -- 'ghe-update-check'
This utility installs or verifies an upgrade package. You can also use this utility to roll back a patch release if an upgrade fails or is interrupted. For more information, see "[AUTOTITLE](/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server)."
To verify an upgrade package:
+
```shell
ghe-upgrade --verify UPGRADE-PACKAGE-FILENAME
```
To install an upgrade package:
+
```shell
ghe-upgrade UPGRADE-PACKAGE-FILENAME
```
@@ -1161,17 +1193,20 @@ This utility manages scheduled installation of upgrade packages. You can show, c
The `ghe-upgrade-scheduler` utility is best suited for scheduling hotpatch upgrades, which do not require maintenance mode or a reboot in most cases.
This utility is not practical for full package upgrades, which require an administrator to manually set maintenance mode, reboot the instance, and unset maintenance mode. For more information about the different types of upgrades, see "[AUTOTITLE](/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server#upgrading-with-an-upgrade-package)" To schedule a new installation for a package: + ```shell ghe-upgrade-scheduler -c "0 2 15 12 *" UPGRADE-PACKAGE-FILENAME ``` To show scheduled installations for a package: + ```shell $ ghe-upgrade-scheduler -s UPGRADE PACKAGE FILENAME > 0 2 15 12 * /usr/local/bin/ghe-upgrade -y -s UPGRADE-PACKAGE-FILENAME > /data/user/common/UPGRADE-PACKAGE-FILENAME.log 2>&1 ``` To remove scheduled installations for a package: + ```shell ghe-upgrade-scheduler -r UPGRADE PACKAGE FILENAME ``` diff --git a/content/admin/configuration/configuring-your-enterprise/configuring-backups-on-your-appliance.md b/content/admin/configuration/configuring-your-enterprise/configuring-backups-on-your-appliance.md index c59c8fc329..d352a9b151 100644 --- a/content/admin/configuration/configuring-your-enterprise/configuring-backups-on-your-appliance.md +++ b/content/admin/configuration/configuring-your-enterprise/configuring-backups-on-your-appliance.md @@ -73,17 +73,20 @@ Backup snapshots are written to the disk path set by the `GHE_DATA_DIR` data dir ``` git clone https://github.com/github/backup-utils.git /path/to/target/directory/backup-utils ``` + 1. To change into the local repository directory, run the following command. ``` cd backup-utils ``` + {% data reusables.enterprise_backup_utilities.enterprise-backup-utils-update-repo %} 1. To copy the included `backup.config-example` file to `backup.config`, run the following command. ```shell cp backup.config-example backup.config ``` + 1. To customize your configuration, edit `backup.config` in a text editor. 1. Set the `GHE_HOSTNAME` value to your primary {% data variables.product.prodname_ghe_server %} instance's hostname or IP address. @@ -101,6 +104,7 @@ Backup snapshots are written to the disk path set by the `GHE_DATA_DIR` data dir ```shell ./bin/ghe-host-check ``` + 1. To create an initial full backup, run the following command. ```shell @@ -168,11 +172,13 @@ If your backup host has internet connectivity and you previously used a compress ``` git clone https://github.com/github/backup-utils.git ``` + 1. To change into the cloned repository, run the following command. ``` cd backup-utils ``` + {% data reusables.enterprise_backup_utilities.enterprise-backup-utils-update-repo %} 1. To restore your backup configuration from earlier, copy your existing backup configuration file to the local repository directory. Replace the path in the command with the location of the file saved in step 2. diff --git a/content/admin/configuration/configuring-your-enterprise/configuring-host-keys-for-your-instance.md b/content/admin/configuration/configuring-your-enterprise/configuring-host-keys-for-your-instance.md index 6c4cc51477..750f5bfb4d 100644 --- a/content/admin/configuration/configuring-your-enterprise/configuring-host-keys-for-your-instance.md +++ b/content/admin/configuration/configuring-your-enterprise/configuring-host-keys-for-your-instance.md @@ -39,9 +39,11 @@ To improve security for clients that connect to {% data variables.location.produ ```shell ghe-config app.babeld.host-key-ed25519 true ``` + 1. 
Optionally, enter the following command to disable generation and advertisement of the Ed25519 host key. ```shell ghe-config app.babeld.host-key-ed25519 false ``` + {% data reusables.enterprise.apply-configuration %} diff --git a/content/admin/configuration/configuring-your-enterprise/configuring-rate-limits.md b/content/admin/configuration/configuring-your-enterprise/configuring-rate-limits.md index f1aafd2fde..b8b10670e7 100644 --- a/content/admin/configuration/configuring-your-enterprise/configuring-rate-limits.md +++ b/content/admin/configuration/configuring-your-enterprise/configuring-rate-limits.md @@ -101,16 +101,19 @@ By default, the rate limit for {% data variables.product.prodname_actions %} is ghe-config actions-rate-limiting.enabled true ghe-config actions-rate-limiting.queue-runs-per-minute RUNS-PER-MINUTE ``` + 1. To disable the rate limit after it's been enabled, run the following command. ``` ghe-config actions-rate-limiting.enabled false ``` + 1. To apply the configuration, run the following command. ``` ghe-config-apply ``` + 1. Wait for the configuration run to complete. {% endif %} diff --git a/content/admin/configuration/configuring-your-enterprise/configuring-ssh-connections-to-your-instance.md b/content/admin/configuration/configuring-your-enterprise/configuring-ssh-connections-to-your-instance.md index bd635a2144..2b768d7a39 100644 --- a/content/admin/configuration/configuring-your-enterprise/configuring-ssh-connections-to-your-instance.md +++ b/content/admin/configuration/configuring-your-enterprise/configuring-ssh-connections-to-your-instance.md @@ -49,4 +49,5 @@ For more information, see [{% data variables.product.prodname_blog %}](https://g ```shell ghe-config app.gitauth.rsa-sha1 false ``` + {% data reusables.enterprise.apply-configuration %} diff --git a/content/admin/configuration/configuring-your-enterprise/configuring-web-commit-signing.md b/content/admin/configuration/configuring-your-enterprise/configuring-web-commit-signing.md index 83df7becee..2381905a6b 100644 --- a/content/admin/configuration/configuring-your-enterprise/configuring-web-commit-signing.md +++ b/content/admin/configuration/configuring-your-enterprise/configuring-web-commit-signing.md @@ -33,11 +33,13 @@ You can enable web commit signing, rotate the private key used for web commit si ```bash copy ghe-config app.github.web-commit-signing-enabled true ``` + 1. Apply the configuration, then wait for the configuration run to complete. ```bash copy ghe-config-apply ``` + 1. Create a new user on {% data variables.location.product_location %} via built-in authentication or external authentication. For more information, see "[AUTOTITLE](/admin/identity-and-access-management/managing-iam-for-your-enterprise/about-authentication-for-your-enterprise)." - The user's username must be the same username you used when creating the PGP key in step 1 above, for example, `web-flow`. - The user's email address must be the same address you used when creating the PGP key. @@ -71,6 +73,7 @@ You can disable web commit signing for {% data variables.location.product_locati ```bash copy ghe-config app.github.web-commit-signing-enabled false ``` + 1. Apply the configuration. 
```bash copy diff --git a/content/admin/configuration/configuring-your-enterprise/troubleshooting-tls-errors.md b/content/admin/configuration/configuring-your-enterprise/troubleshooting-tls-errors.md index 7edaa2807b..3f0f406173 100644 --- a/content/admin/configuration/configuring-your-enterprise/troubleshooting-tls-errors.md +++ b/content/admin/configuration/configuring-your-enterprise/troubleshooting-tls-errors.md @@ -25,10 +25,13 @@ shortTitle: Troubleshoot TLS errors If you have a Linux machine with OpenSSL installed, you can remove your passphrase. 1. Rename your original key file. + ```shell mv yourdomain.key yourdomain.key.orig ``` + 2. Generate a new key without a passphrase. + ```shell openssl rsa -in yourdomain.key.orig -out yourdomain.key ``` @@ -69,14 +72,19 @@ If your {% data variables.product.prodname_ghe_server %} appliance interacts wit 1. Obtain the CA's root certificate from your local certificate authority and ensure it is in PEM format. 2. Copy the file to your {% data variables.product.prodname_ghe_server %} appliance over SSH as the "admin" user on port 122. + ```shell scp -P 122 rootCA.crt admin@HOSTNAME:/home/admin ``` + 3. Connect to the {% data variables.product.prodname_ghe_server %} administrative shell over SSH as the "admin" user on port 122. + ```shell ssh -p 122 admin@HOSTNAME ``` + 4. Import the certificate into the system-wide certificate store. + ```shell ghe-ssl-ca-certificate-install -c rootCA.crt ``` diff --git a/content/admin/configuration/configuring-your-enterprise/verifying-or-approving-a-domain-for-your-enterprise.md b/content/admin/configuration/configuring-your-enterprise/verifying-or-approving-a-domain-for-your-enterprise.md index aa6c41bbd4..5f2cf700d0 100644 --- a/content/admin/configuration/configuring-your-enterprise/verifying-or-approving-a-domain-for-your-enterprise.md +++ b/content/admin/configuration/configuring-your-enterprise/verifying-or-approving-a-domain-for-your-enterprise.md @@ -63,9 +63,11 @@ To verify your enterprise account's domain, you must have access to modify domai {% data reusables.organizations.add-domain %} {% data reusables.organizations.add-dns-txt-record %} 1. Wait for your DNS configuration to change, which may take up to 72 hours. You can confirm your DNS configuration has changed by running the `dig` command on the command line, replacing `ENTERPRISE-ACCOUNT` with the name of your enterprise account, and `example.com` with the domain you'd like to verify. You should see your new TXT record listed in the command output. + ```shell dig _github-challenge-ENTERPRISE-ACCOUNT.DOMAIN-NAME +nostats +nocomments +nocmd TXT ``` + 1. After confirming your TXT record is added to your DNS, follow steps one through four above to navigate to your enterprise account's approved and verified domains. {% data reusables.enterprise-accounts.continue-verifying-domain %} 1. Optionally, after the "Verified" badge is visible on your organizations' profiles, delete the TXT entry from the DNS record at your domain hosting service. 
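As a concrete sketch of the `dig` check above, with a hypothetical enterprise account slug `octo-enterprise` and the domain `example.com` substituted into the placeholders:

```shell
# Hypothetical values: ENTERPRISE-ACCOUNT = octo-enterprise, DOMAIN-NAME = example.com
dig _github-challenge-octo-enterprise.example.com +nostats +nocomments +nocmd TXT
```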
diff --git a/content/admin/enterprise-management/caching-repositories/configuring-a-repository-cache.md b/content/admin/enterprise-management/caching-repositories/configuring-a-repository-cache.md index 0c56556219..5b6e7a7838 100644 --- a/content/admin/enterprise-management/caching-repositories/configuring-a-repository-cache.md +++ b/content/admin/enterprise-management/caching-repositories/configuring-a-repository-cache.md @@ -45,24 +45,30 @@ Then, when told to fetch `https://github.example.com/myorg/myrepo`, Git will ins {% data reusables.enterprise_installation.add-ssh-key-to-primary %} 1. To verify the connection to the primary and enable replica mode for the repository cache, run `ghe-repl-setup` again. - If the repository cache is your only additional node, no arguments are required. + ```shell ghe-repl-setup PRIMARY-IP ``` + - If you're configuring a repository cache in addition to one or more existing replicas, use the `-a` or `--add` argument. + ``` ghe-repl-setup -a PRIMARY-IP ``` + {% ifversion ghes < 3.6 %} 1. If you haven't already, set the datacenter name on the primary and any replica appliances, replacing DC-NAME with a datacenter name. ``` ghe-repl-node --datacenter DC-NAME ``` -1. Set a `cache-location` for the repository cache, replacing _CACHE-LOCATION_ with an alphanumeric identifier, such as the region where the cache is deployed. Also set a datacenter name for this cache; new caches will attempt to seed from another cache in the same datacenter. + +1. Set a `cache-location` for the repository cache, replacing CACHE-LOCATION with an alphanumeric identifier, such as the region where the cache is deployed. Also set a datacenter name for this cache; new caches will attempt to seed from another cache in the same datacenter. ```shell ghe-repl-node --cache CACHE-LOCATION --datacenter REPLICA-DC-NAME ``` + {% else %} 1. To configure the repository cache, use the `ghe-repl-node` command and include the necessary parameters. - Set a `cache-location` for the repository cache, replacing _CACHE-LOCATION_ with an alphanumeric identifier, such as the region where the cache is deployed. The _CACHE-LOCATION_ value must not be any of the subdomains reserved for use with subdomain isolation, such as `assets` or `media`. For a list of reserved names, see "[AUTOTITLE](/admin/configuration/configuring-network-settings/enabling-subdomain-isolation#about-subdomain-isolation)." @@ -72,11 +78,13 @@ Then, when told to fetch `https://github.example.com/myorg/myrepo`, Git will ins ``` ghe-repl-node --datacenter DC-NAME ``` - - New caches will attempt to seed from another cache in the same datacenter. Set a `datacenter` for the repository cache, replacing _REPLICA-DC-NAME_ with the name of the datacenter where you're deploying the node. + + - New caches will attempt to seed from another cache in the same datacenter. Set a `datacenter` for the repository cache, replacing REPLICA-DC-NAME with the name of the datacenter where you're deploying the node. 
```shell ghe-repl-node --cache CACHE-LOCATION --cache-domain EXTERNAL-CACHE-DOMAIN --datacenter REPLICA-DC-NAME ``` + {% endif %} {% data reusables.enterprise_installation.replication-command %} diff --git a/content/admin/enterprise-management/configuring-clustering/cluster-network-configuration.md b/content/admin/enterprise-management/configuring-clustering/cluster-network-configuration.md index cec704b1ac..4286b385cc 100644 --- a/content/admin/enterprise-management/configuring-clustering/cluster-network-configuration.md +++ b/content/admin/enterprise-management/configuring-clustering/cluster-network-configuration.md @@ -112,9 +112,11 @@ We strongly recommend enabling PROXY support for both your instance and the load {% data reusables.enterprise_installation.proxy-incompatible-with-aws-nlbs %} - For your instance, use this command: + ```shell ghe-config 'loadbalancer.proxy-protocol' 'true' && ghe-cluster-config-apply ``` + - For the load balancer, use the instructions provided by your vendor. {% data reusables.enterprise_clustering.proxy_protocol_ports %} diff --git a/content/admin/enterprise-management/configuring-clustering/evacuating-a-cluster-node-running-data-services.md b/content/admin/enterprise-management/configuring-clustering/evacuating-a-cluster-node-running-data-services.md index 74f7c185f3..f9761f6ca2 100644 --- a/content/admin/enterprise-management/configuring-clustering/evacuating-a-cluster-node-running-data-services.md +++ b/content/admin/enterprise-management/configuring-clustering/evacuating-a-cluster-node-running-data-services.md @@ -95,13 +95,17 @@ If you plan to take a node offline and the node runs any of the following roles, - Command (replace REASON FOR EVACUATION with the reason for evacuation): {% ifversion ghe-spokes-deprecation-phase-1 %} + ```shell ghe-spokesctl server set evacuating git-server-UUID 'REASON FOR EVACUATION' ``` + {% else %} + ```shell ghe-spokes server evacuate git-server-UUID 'REASON FOR EVACUATION' ``` + {% endif %} - `pages-server`: @@ -136,13 +140,17 @@ If you plan to take a node offline and the node runs any of the following roles, - `git-server`: {% ifversion ghe-spokes-deprecation-phase-1 %} + ```shell ghe-spokesctl server evac-status git-server-UUID ``` + {% else %} + ```shell ghe-spokes evac-status git-server-UUID ``` + {% endif %} - `pages-server`: diff --git a/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster-nodes-with-node-eligibility-service.md b/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster-nodes-with-node-eligibility-service.md index 458fe1b653..be64e9c43e 100644 --- a/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster-nodes-with-node-eligibility-service.md +++ b/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster-nodes-with-node-eligibility-service.md @@ -56,11 +56,13 @@ By default, {% data variables.product.prodname_nes %} is disabled. You can enabl ```shell copy ghe-config app.nes.enabled ``` + 1. To enable {% data variables.product.prodname_nes %}, run the following command. ```shell copy ghe-config app.nes.enabled true ``` + {% data reusables.enterprise.apply-configuration %} 1. To verify that {% data variables.product.prodname_nes %} is running, from any node, run the following command. @@ -78,11 +80,13 @@ To determine how {% data variables.product.prodname_nes %} notifies you, you can ```shell copy nes get-node-ttl all ``` + 1. 
To set the TTL for the `fail` state, run the following command. Replace MINUTES with the number of minutes to use for failures. ```shell copy nes set-node-ttl fail MINUTES ``` + 1. To set the TTL for the `warn` state, run the following command. Replace MINUTES with the number of minutes to use for warnings. ```shell copy @@ -104,6 +108,7 @@ To manage whether {% data variables.product.prodname_nes %} can take a node and ```shell copy nes set-node-adminaction approved HOSTNAME ``` + - To revoke {% data variables.product.prodname_nes %}'s ability to take a node offline, run the following command. Replace HOSTNAME with the node's hostname. ```shell copy @@ -127,11 +132,13 @@ After {% data variables.product.prodname_nes %} detects that a node has exceeded ```shell copy nes get-node-adminaction HOSTNAME ``` + 1. If the `adminaction` state is currently set to `approved`, change the state to `none` by running the following command. Replace HOSTNAME with the hostname of the ineligible node. ```shell copy nes set-node-adminaction none HOSTNAME ``` + 1. To ensure the node is in a healthy state, run the following command and confirm that the node's status is `ready`. ```shell copy @@ -143,11 +150,13 @@ After {% data variables.product.prodname_nes %} detects that a node has exceeded ```shell copy nomad node eligibility -enable -self ``` + 1. To update the node's eligibility in {% data variables.product.prodname_nes %}, run the following command. Replace HOSTNAME with the node's hostname. ```shell copy nes set-node-eligibility eligible HOSTNAME ``` + 1. Wait 30 seconds, then check the cluster's health to confirm the target node is eligible by running the following command. ```shell copy @@ -164,6 +173,7 @@ You can view logs for {% data variables.product.prodname_nes %} from any node in ```shell copy nomad alloc logs -job nes ``` + 1. Alternatively, you can view logs for {% data variables.product.prodname_nes %} on the node that runs the service. The service writes logs to the systemd journal. - To determine which node runs {% data variables.product.prodname_nes %}, run the following command. @@ -171,6 +181,7 @@ You can view logs for {% data variables.product.prodname_nes %} from any node in ```shell copy nomad job status "nes" | grep running | grep "${nomad_node_id}" | awk 'NR==2{ print $1 }' | xargs nomad alloc status | grep "Node Name" ``` + - To view logs on the node, connect to the node via SSH, then run the following command. ```shell copy diff --git a/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster.md b/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster.md index ccca58675c..975906c09d 100644 --- a/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster.md +++ b/content/admin/enterprise-management/configuring-clustering/monitoring-the-health-of-your-cluster.md @@ -39,6 +39,7 @@ admin@ghe-data-node-0:~$ ghe-cluster-status | grep error > mysql-replication ghe-data-node-0: error Stopped > mysql cluster: error ``` + {% note %} **Note:** If there are no failing tests, this command produces no output. This indicates the cluster is healthy. @@ -55,6 +56,7 @@ You can configure [Nagios](https://www.nagios.org/) to monitor {% data variables ### Configuring the Nagios host 1. Generate an SSH key with a blank passphrase. Nagios uses this to authenticate to the {% data variables.product.prodname_ghe_server %} cluster. 
+ ```shell nagiosuser@nagios:~$ ssh-keygen -t ed25519 > Generating public/private ed25519 key pair. @@ -64,6 +66,7 @@ You can configure [Nagios](https://www.nagios.org/) to monitor {% data variables > Your identification has been saved in /home/nagiosuser/.ssh/id_ed25519. > Your public key has been saved in /home/nagiosuser/.ssh/id_ed25519.pub. ``` + {% danger %} **Security Warning:** An SSH key without a passphrase can pose a security risk if authorized for full access to a host. Limit this key's authorization to a single read-only command. @@ -72,12 +75,14 @@ You can configure [Nagios](https://www.nagios.org/) to monitor {% data variables {% note %} **Note:** If you're using a distribution of Linux that doesn't support the Ed25519 algorithm, use the command: + ```shell nagiosuser@nagios:~$ ssh-keygen -t rsa -b 4096 ``` {% endnote %} 2. Copy the private key (`id_ed25519`) to the `nagios` home folder and set the appropriate ownership. + ```shell nagiosuser@nagios:~$ sudo cp .ssh/id_ed25519 /var/lib/nagios/.ssh/ nagiosuser@nagios:~$ sudo chown nagios:nagios /var/lib/nagios/.ssh/id_ed25519 @@ -95,6 +100,7 @@ You can configure [Nagios](https://www.nagios.org/) to monitor {% data variables ``` 5. To test that the Nagios plugin can successfully execute the command, run it interactively from Nagios host. + ```shell nagiosuser@nagios:~$ /usr/lib/nagios/plugins/check_by_ssh -l admin -p 122 -H HOSTNAME -C "ghe-cluster-status -n" -t 30 > OK - No errors detected @@ -110,6 +116,7 @@ You can configure [Nagios](https://www.nagios.org/) to monitor {% data variables command_line $USER1$/check_by_ssh -H $HOSTADDRESS$ -C "ghe-cluster-status -n" -l admin -p 122 -t 30 } ``` + 7. Add this command to a service definition for a node in the {% data variables.product.prodname_ghe_server %} cluster. **Example definition** diff --git a/content/admin/enterprise-management/configuring-clustering/rebalancing-cluster-workloads.md b/content/admin/enterprise-management/configuring-clustering/rebalancing-cluster-workloads.md index d5190d9865..6da5f97e94 100644 --- a/content/admin/enterprise-management/configuring-clustering/rebalancing-cluster-workloads.md +++ b/content/admin/enterprise-management/configuring-clustering/rebalancing-cluster-workloads.md @@ -31,6 +31,7 @@ In some cases, such as hardware failure, the underlying software that that manag ```shell copy ghe-cluster-balance status ``` + 1. If a job is not properly distributed, inspect the allocations by running the following command. Replace JOB with a single job or comma-delimited list of jobs. ```shell copy @@ -71,11 +72,13 @@ You can schedule rebalancing of jobs on your cluster by setting and applying con ```shell copy ghe-config app.cluster-rebalance.enabled true ``` + 1. Optionally, you can override the default schedule by defining a cron expression. For example, run the following command to balance jobs every three hours. ```shell copy ghe-config app.cluster-rebalance.schedule '0 */3 * * *' ``` + {% data reusables.enterprise.apply-configuration %} ## Further reading diff --git a/content/admin/enterprise-management/configuring-clustering/upgrading-a-cluster.md b/content/admin/enterprise-management/configuring-clustering/upgrading-a-cluster.md index 9174dbc146..d3c9c4ca44 100644 --- a/content/admin/enterprise-management/configuring-clustering/upgrading-a-cluster.md +++ b/content/admin/enterprise-management/configuring-clustering/upgrading-a-cluster.md @@ -25,6 +25,7 @@ topics: 1. 
Back up your data with [{% data variables.product.prodname_enterprise_backup_utilities %}](https://github.com/github/backup-utils#readme). 2. From the administrative shell of any node, use the `ghe-cluster-hotpatch` command to install the latest hotpatch. You can provide a URL for a hotpatch, or manually download the hotpatch and specify a local filename. + ```shell ghe-cluster-hotpatch https://HOTPATCH-URL/FILENAME.hpkg ``` @@ -39,6 +40,7 @@ Use an upgrade package to upgrade a {% data variables.product.prodname_ghe_serve 3. Schedule a maintenance window for end users of your {% data variables.product.prodname_ghe_server %} cluster, as it will be unavailable for normal use during the upgrade. Maintenance mode blocks user access and prevents data changes while the cluster upgrade is in progress. 4. On the [{% data variables.product.prodname_ghe_server %} Download Page](https://enterprise.github.com/download), copy the URL for the upgrade _.pkg_ file to the clipboard. 5. From the administrative shell of any node, use the `ghe-cluster-each` command combined with `curl` to download the release package to each node in a single step. Use the URL you copied in the previous step as an argument. + ```shell $ ghe-cluster-each -- "cd /home/admin && curl -L -O https://PACKAGE-URL.pkg" > ghe-app-node-1: % Total % Received % Xferd Average Speed Time Time Time Current @@ -57,6 +59,7 @@ Use an upgrade package to upgrade a {% data variables.product.prodname_ghe_serve > ghe-data-node-3: Dload Upload Total Spent Left Speed > 100 496M 100 496M 0 0 19.7M 0 0:00:25 0:00:25 --:--:-- 25.5M ``` + 6. Identify the primary MySQL node, which is defined as `mysql-master = ` in `cluster.conf`. This node will be upgraded last. ### Upgrading the cluster nodes @@ -64,6 +67,7 @@ Use an upgrade package to upgrade a {% data variables.product.prodname_ghe_serve 1. Enable maintenance mode according to your scheduled window by connecting to the administrative shell of any cluster node and running `ghe-cluster-maintenance -s`. 2. **With the exception of the primary MySQL node**, connect to the administrative shell of each of the {% data variables.product.prodname_ghe_server %} nodes. Run the `ghe-upgrade` command, providing the package file name you downloaded in Step 4 of [Preparing to upgrade](#preparing-to-upgrade): + ```shell $ ghe-upgrade PACKAGE-FILENAME.pkg > *** verifying upgrade package signature... @@ -74,8 +78,10 @@ Run the `ghe-upgrade` command, providing the package file name you downloaded in > gpg: depth: 0 valid: 1 signed: 0 trust: 0-, 0q, 0n, 0m, 0f, 1u > gpg: Good signature from "GitHub Enterprise (Upgrade Package Key) > " ``` + 3. The upgrade process will reboot the node once it completes. Verify that you can `ping` each node after it reboots. 4. Connect to the administrative shell of the primary MySQL node. Run the `ghe-upgrade` command, providing the package file name you downloaded in Step 4 of [Preparing to upgrade](#preparing-to-upgrade): + ```shell $ ghe-upgrade PACKAGE-FILENAME.pkg > *** verifying upgrade package signature... @@ -86,6 +92,7 @@ Run the `ghe-upgrade` command, providing the package file name you downloaded in > gpg: depth: 0 valid: 1 signed: 0 trust: 0-, 0q, 0n, 0m, 0f, 1u > gpg: Good signature from "GitHub Enterprise (Upgrade Package Key) > " ``` + 5. The upgrade process will reboot the primary MySQL node once it completes. Verify that you can `ping` each node after it reboots.{% ifversion ghes %} 6. 
Connect to the administrative shell of the primary MySQL node and run the `ghe-cluster-config-apply` command. 7. When `ghe-cluster-config-apply` is complete, check that the services are in a healthy state by running `ghe-cluster-status`.{% endif %} diff --git a/content/admin/enterprise-management/configuring-high-availability/creating-a-high-availability-replica.md b/content/admin/enterprise-management/configuring-high-availability/creating-a-high-availability-replica.md index 4492f83eae..653e6e20f5 100644 --- a/content/admin/enterprise-management/configuring-high-availability/creating-a-high-availability-replica.md +++ b/content/admin/enterprise-management/configuring-high-availability/creating-a-high-availability-replica.md @@ -23,15 +23,19 @@ shortTitle: Create HA replica 1. In a browser, navigate to the new replica appliance's IP address and upload your {% data variables.product.prodname_enterprise %} license. {% data reusables.enterprise_installation.replica-steps %} 1. Connect to the replica appliance's IP address using SSH. + ```shell ssh -p 122 admin@REPLICA_IP ``` + {% data reusables.enterprise_installation.generate-replication-key-pair %} {% data reusables.enterprise_installation.add-ssh-key-to-primary %} 1. To verify the connection to the primary and enable replica mode for the new replica, run `ghe-repl-setup` again. + ```shell ghe-repl-setup PRIMARY_IP ``` + {% data reusables.enterprise_installation.replication-command %} {% data reusables.enterprise_installation.verify-replication-channel %} @@ -42,29 +46,39 @@ This example configuration uses a primary and two replicas, which are located in {% data reusables.enterprise_clustering.network-latency %} If latency is more than 70 milliseconds, we recommend cache replica nodes instead. For more information, see "[AUTOTITLE](/admin/enterprise-management/caching-repositories/configuring-a-repository-cache)." 1. Create the first replica the same way you would for a standard two node configuration by running `ghe-repl-setup` on the first replica. + ```shell (replica1)$ ghe-repl-setup PRIMARY_IP (replica1)$ ghe-repl-start ``` + 2. Create a second replica and use the `ghe-repl-setup --add` command. The `--add` flag prevents it from overwriting the existing replication configuration and adds the new replica to the configuration. + ```shell (replica2)$ ghe-repl-setup --add PRIMARY_IP (replica2)$ ghe-repl-start ``` + 3. By default, replicas are configured to the same datacenter, and will now attempt to seed from an existing node in the same datacenter. Configure the replicas for different datacenters by setting a different value for the datacenter option. The specific values can be anything you would like as long as they are different from each other. Run the `ghe-repl-node` command on each node and specify the datacenter. On the primary: + ```shell (primary)$ ghe-repl-node --datacenter [PRIMARY DC NAME] ``` + On the first replica: + ```shell (replica1)$ ghe-repl-node --datacenter [FIRST REPLICA DC NAME] ``` + On the second replica: + ```shell (replica2)$ ghe-repl-node --datacenter [SECOND REPLICA DC NAME] ``` + {% tip %} **Tip:** You can set the `--datacenter` and `--active` options at the same time. @@ -73,14 +87,19 @@ This example configuration uses a primary and two replicas, which are located in 4. An active replica node will store copies of the appliance data and service end user requests. An inactive node will store copies of the appliance data but will be unable to service end user requests. 
Enable active mode using the `--active` flag or inactive mode using the `--inactive` flag. On the first replica: + ```shell (replica1)$ ghe-repl-node --active ``` + On the second replica: + ```shell (replica2)$ ghe-repl-node --active ``` + 5. To apply the configuration, use the `ghe-config-apply` command on the primary. + ```shell (primary)$ ghe-config-apply ``` diff --git a/content/admin/enterprise-management/configuring-high-availability/initiating-a-failover-to-your-replica-appliance.md b/content/admin/enterprise-management/configuring-high-availability/initiating-a-failover-to-your-replica-appliance.md index 9b875bdd5d..59b9ee293a 100644 --- a/content/admin/enterprise-management/configuring-high-availability/initiating-a-failover-to-your-replica-appliance.md +++ b/content/admin/enterprise-management/configuring-high-availability/initiating-a-failover-to-your-replica-appliance.md @@ -25,6 +25,7 @@ The time required to failover depends on how long it takes to manually promote t - To use the management console, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode)" - You can also use the `ghe-maintenance -s` command. + ```shell ghe-maintenance -s ``` @@ -44,6 +45,7 @@ The time required to failover depends on how long it takes to manually promote t ``` 4. On the replica appliance, to stop replication and promote the replica appliance to primary status, use the `ghe-repl-promote` command. This will also automatically put the primary node in maintenance mode if it’s reachable. + ```shell ghe-repl-promote ``` @@ -59,10 +61,13 @@ The time required to failover depends on how long it takes to manually promote t 7. If desired, set up replication from the new primary to existing appliances and the previous primary. For more information, see "[AUTOTITLE](/admin/enterprise-management/configuring-high-availability/about-high-availability-configuration#utilities-for-replication-management)." 8. Appliances you do not intend to setup replication to that were part of the high availability configuration prior the failover, need to be removed from the high availability configuration by UUID. - On the former appliances, get their UUID via `cat /data/user/common/uuid`. + ```shell cat /data/user/common/uuid ``` + - On the new primary, remove the UUIDs using `ghe-repl-teardown`. Please replace *`UUID`* with a UUID you retrieved in the previous step. + ```shell ghe-repl-teardown -u UUID ``` diff --git a/content/admin/enterprise-management/configuring-high-availability/recovering-a-high-availability-configuration.md b/content/admin/enterprise-management/configuring-high-availability/recovering-a-high-availability-configuration.md index 87250c02a1..e5fef36a85 100644 --- a/content/admin/enterprise-management/configuring-high-availability/recovering-a-high-availability-configuration.md +++ b/content/admin/enterprise-management/configuring-high-availability/recovering-a-high-availability-configuration.md @@ -28,17 +28,23 @@ You can use the former primary appliance as the new replica appliance if the fai ## Configuring a former primary appliance as a new replica 1. Connect to the former primary appliance's IP address using SSH. + ```shell ssh -p 122 admin@ FORMER_PRIMARY_IP ``` + 1. Enable maintenance mode on the former primary appliance. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode)." 1. 
On the former primary appliance, run `ghe-repl-setup` with the IP address of the former replica. + ```shell ghe-repl-setup FORMER_REPLICA_IP ``` + {% data reusables.enterprise_installation.add-ssh-key-to-primary %} 1. To verify the connection to the new primary and enable replica mode for the new replica, run `ghe-repl-setup` again. + ```shell ghe-repl-setup FORMER_REPLICA_IP ``` + {% data reusables.enterprise_installation.replication-command %} diff --git a/content/admin/enterprise-management/configuring-high-availability/removing-a-high-availability-replica.md b/content/admin/enterprise-management/configuring-high-availability/removing-a-high-availability-replica.md index fce2d60624..58f246bd4d 100644 --- a/content/admin/enterprise-management/configuring-high-availability/removing-a-high-availability-replica.md +++ b/content/admin/enterprise-management/configuring-high-availability/removing-a-high-availability-replica.md @@ -19,10 +19,13 @@ shortTitle: Remove a HA replica 1. If necessary, stop a geo-replication replica from serving user traffic by removing the Geo DNS entries for the replica. 2. On the replica where you wish to temporarily stop replication, run ghe-repl-stop. + ```shell ghe-repl-stop ``` + 3. To start replication again, run `ghe-repl-start`. + ```shell ghe-repl-start ``` @@ -31,10 +34,13 @@ shortTitle: Remove a HA replica 1. If necessary, stop a geo-replication replica from serving user traffic by removing the Geo DNS entries for the replica. 2. On the replica you wish to remove replication from, run `ghe-repl-stop`. + ```shell ghe-repl-stop ``` + 3. On the replica, to tear down the replication state, run `ghe-repl-teardown`. + ```shell ghe-repl-teardown ``` diff --git a/content/admin/enterprise-management/monitoring-your-appliance/monitoring-using-snmp.md b/content/admin/enterprise-management/monitoring-your-appliance/monitoring-using-snmp.md index 7b3d2d3fc0..a2fb0544a9 100644 --- a/content/admin/enterprise-management/monitoring-your-appliance/monitoring-using-snmp.md +++ b/content/admin/enterprise-management/monitoring-your-appliance/monitoring-using-snmp.md @@ -28,6 +28,7 @@ SNMP is a common standard for monitoring devices over a network. We strongly rec 4. In the **Community string** field, enter a new community string. If left blank, this defaults to `public`. {% data reusables.enterprise_management_console.save-settings %} 5. 
Test your SNMP configuration by running the following command on a separate workstation with SNMP support in your network: + ```shell # community-string is your community string # hostname is the IP or domain of your Enterprise instance @@ -87,6 +88,7 @@ Of the available MIBs for SNMP, the most useful is `HOST-RESOURCES-MIB` (1.3.6.1 | hrStorageAllocationUnits.1 | 1.3.6.1.2.1.25.2.3.1.4.1 | The size, in bytes, of an hrStorageAllocationUnit | For example, to query for `hrMemorySize` with SNMP v3, run the following command on a separate workstation with SNMP support in your network: + ```shell # username is the unique username of your SNMP v3 user # auth password is the authentication password @@ -99,6 +101,7 @@ $ snmpget -v 3 -u USERNAME -l authPriv \ ``` With SNMP v2c, to query for `hrMemorySize`, run the following command on a separate workstation with SNMP support in your network: + ```shell # community-string is your community string # hostname is the IP or domain of your Enterprise instance diff --git a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/increasing-storage-capacity.md b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/increasing-storage-capacity.md index 6aed93c77c..7288154f60 100644 --- a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/increasing-storage-capacity.md +++ b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/increasing-storage-capacity.md @@ -37,22 +37,28 @@ As more users join {% data variables.location.product_location %}, you may need {% data reusables.enterprise_installation.ssh-into-instance %} 3. Put the appliance in maintenance mode. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode)." 4. Reboot the appliance to detect the new storage allocation: + ```shell sudo reboot ``` + 5. Run the `ghe-storage-extend` command to expand the `/data/user` filesystem: + ```shell ghe-storage-extend ``` + 6. Ensure system services are functioning correctly, then release maintenance mode. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode)." ## Increasing the root partition size using a new appliance 1. Set up a new {% data variables.product.prodname_ghe_server %} instance with a larger root disk using the same version as your current appliance. For more information, see "[AUTOTITLE](/admin/installation/setting-up-a-github-enterprise-server-instance)." 2. Shut down the current appliance: + ```shell sudo poweroff ``` + 3. Detach the data disk from the current appliance using your virtualization platform's tools. 4. Attach the data disk to the new appliance with the larger root disk. @@ -67,11 +73,13 @@ As more users join {% data variables.location.product_location %}, you may need 1. Attach a new disk to your {% data variables.product.prodname_ghe_server %} appliance. 1. Run the `lsblk` command to identify the new disk's device name. 1. Run the `parted` command to format the disk, substituting your device name for `/dev/xvdg`: + ```shell sudo parted /dev/xvdg mklabel msdos sudo parted /dev/xvdg mkpart primary ext4 0% 50% sudo parted /dev/xvdg mkpart primary ext4 50% 100% ``` + 1. 
If your appliance is configured for high-availability or geo-replication, to stop replication run the `ghe-repl-stop` command on each replica node: ```shell @@ -83,10 +91,13 @@ As more users join {% data variables.location.product_location %}, you may need ```shell ghe-upgrade PACKAGE-NAME.pkg -s -t /dev/xvdg1 ``` + 1. Shut down the appliance: + ```shell sudo poweroff ``` + 1. In the hypervisor, remove the old root disk and attach the new root disk at the same location as the old root disk. 1. Start the appliance. 1. Ensure system services are functioning correctly, then release maintenance mode. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode)." diff --git a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/known-issues-with-upgrades-to-your-instance.md b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/known-issues-with-upgrades-to-your-instance.md index 0eb52380ce..cb29e73eb2 100644 --- a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/known-issues-with-upgrades-to-your-instance.md +++ b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/known-issues-with-upgrades-to-your-instance.md @@ -95,6 +95,7 @@ The following instructions are only intended for {% data variables.product.prod ```shell copy ghe-config mysql.innodb-flush-no-fsync true ``` + {% data reusables.enterprise.apply-configuration %} #### Upgrade your instance's storage diff --git a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/migrating-from-github-enterprise-1110x-to-2123.md b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/migrating-from-github-enterprise-1110x-to-2123.md index a9c5b73bf8..9d66211588 100644 --- a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/migrating-from-github-enterprise-1110x-to-2123.md +++ b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/migrating-from-github-enterprise-1110x-to-2123.md @@ -63,6 +63,7 @@ To upgrade to the latest version of {% data variables.product.prodname_enterpris 10. On the backup host, run the `ghe-backup` command to take a final backup snapshot. This ensures that all data from the old instance is captured. 11. On the backup host, run the `ghe-restore` command you copied on the new instance's restore status screen to restore the latest snapshot. + ```shell $ ghe-restore 169.254.1.1 The authenticity of host '169.254.1.1:122' can't be established. diff --git a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrade-requirements.md b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrade-requirements.md index d44ae04226..e2ee56f962 100644 --- a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrade-requirements.md +++ b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrade-requirements.md @@ -42,9 +42,11 @@ topics: - Additional root storage must be available when upgrading through hotpatching, as it installs multiple versions of certain services until the upgrade is complete. Pre-flight checks will notify you if you don't have enough root disk storage. 
- When upgrading through hotpatching, your instance cannot be too heavily loaded, as it may impact the hotpatching process. - Upgrading to {% data variables.product.prodname_ghe_server %} 2.17 migrates your audit logs from Elasticsearch to MySQL. This migration also increases the amount of time and disk space it takes to restore a snapshot. Before migrating, check the number of bytes in your Elasticsearch audit log indices with this command: + ``` shell curl -s http://localhost:9201/audit_log/_stats/store | jq ._all.primaries.store.size_in_bytes ``` + Use the number to estimate the amount of disk space the MySQL audit logs will need. The script also monitors your free disk space while the import is in progress. Monitoring this number is especially useful if your free disk space is close to the amount of disk space necessary for migration. {% ifversion mysql-8-upgrade %} diff --git a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server.md b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server.md index 784dd4e839..f72d1a51eb 100644 --- a/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server.md +++ b/content/admin/enterprise-management/updating-the-virtual-machine-and-physical-resources/upgrading-github-enterprise-server.md @@ -145,10 +145,12 @@ If the upgrade target you're presented with is a feature release instead of a pa 1. {% data reusables.enterprise_installation.enterprise-download-upgrade-pkg %} Copy the URL for the upgrade hotpackage (_.hpkg_ file). {% data reusables.enterprise_installation.download-package %} 1. Run the `ghe-upgrade` command using the package file name: + ```shell admin@HOSTNAME:~$ ghe-upgrade GITHUB-UPGRADE.hpkg *** verifying upgrade package signature... ``` + 1. If at least one service or system component requires a reboot, the hotpatch upgrade script notifies you. For example, updates to the kernel, MySQL, or Elasticsearch may require a reboot. ### Upgrading an instance with multiple nodes using a hotpatch @@ -194,11 +196,14 @@ While you can use a hotpatch to upgrade to the latest patch release within a fea {% endnote %} 1. Run the `ghe-upgrade` command using the package file name: + ```shell admin@HOSTNAME:~$ ghe-upgrade GITHUB-UPGRADE.pkg *** verifying upgrade package signature... ``` + 1. Confirm that you'd like to continue with the upgrade and restart after the package signature verifies. The new root filesystem writes to the secondary partition and the instance automatically restarts in maintenance mode: + ```shell *** applying update... This package will upgrade your installation to version VERSION-NUMBER @@ -206,6 +211,7 @@ While you can use a hotpatch to upgrade to the latest patch release within a fea Target root partition: /dev/xvda2 Proceed with installation? [y/N] ``` + {%- ifversion ghe-migrations-cli-utility %} 1. Optionally, during an upgrade to a feature release, you can monitor the status of database migrations using the `ghe-migrations` utility. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/command-line-utilities#ghe-migrations)." {%- endif %} @@ -214,6 +220,7 @@ While you can use a hotpatch to upgrade to the latest patch release within a fea ```shell tail -f /data/user/common/ghe-config.log ``` + {% ifversion ip-exception-list %} 1. 
Optionally, after the upgrade, validate the upgrade by configuring an IP exception list to allow access to a specified list of IP addresses. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/enabling-and-scheduling-maintenance-mode#validating-changes-in-maintenance-mode-using-the-ip-exception-list)." {% endif %} @@ -262,6 +269,7 @@ To upgrade an instance that comprises multiple nodes using an upgrade package, y ``` CRITICAL: git replication is behind the primary by more than 1007 repositories and/or gists ``` + {% endnote %} {%- ifversion ghes = 3.4 or ghes = 3.5 or ghes = 3.6 %} diff --git a/content/admin/github-actions/advanced-configuration-and-troubleshooting/backing-up-and-restoring-github-enterprise-server-with-github-actions-enabled.md b/content/admin/github-actions/advanced-configuration-and-troubleshooting/backing-up-and-restoring-github-enterprise-server-with-github-actions-enabled.md index 4174e522d8..c66ae69f3f 100644 --- a/content/admin/github-actions/advanced-configuration-and-troubleshooting/backing-up-and-restoring-github-enterprise-server-with-github-actions-enabled.md +++ b/content/admin/github-actions/advanced-configuration-and-troubleshooting/backing-up-and-restoring-github-enterprise-server-with-github-actions-enabled.md @@ -31,6 +31,7 @@ To restore a backup of {% data variables.location.product_location %} with {% da ```shell copy ssh -p 122 admin@HOSTNAME ``` + 1. Configure the destination instance to use the same external storage service for {% data variables.product.prodname_actions %} as the source instance by entering one of the following commands. {% indented_data_reference reusables.actions.configure-storage-provider-platform-commands spaces=3 %} {% data reusables.actions.configure-storage-provider %} @@ -39,6 +40,7 @@ To restore a backup of {% data variables.location.product_location %} with {% da ```shell copy ghe-config app.actions.enabled true ``` + {% data reusables.actions.apply-configuration-and-enable %} 1. After {% data variables.product.prodname_actions %} is configured and enabled, to restore the rest of the data from the backup, use the `ghe-restore` command. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/configuring-backups-on-your-appliance#restoring-a-backup)." 1. Re-register your self-hosted runners on the destination instance. For more information, see "[AUTOTITLE](/actions/hosting-your-own-runners/managing-self-hosted-runners/adding-self-hosted-runners)." diff --git a/content/admin/github-actions/advanced-configuration-and-troubleshooting/troubleshooting-github-actions-for-your-enterprise.md b/content/admin/github-actions/advanced-configuration-and-troubleshooting/troubleshooting-github-actions-for-your-enterprise.md index 119c6cfbc8..3df86e9e5c 100644 --- a/content/admin/github-actions/advanced-configuration-and-troubleshooting/troubleshooting-github-actions-for-your-enterprise.md +++ b/content/admin/github-actions/advanced-configuration-and-troubleshooting/troubleshooting-github-actions-for-your-enterprise.md @@ -150,6 +150,7 @@ If any of these services are at or near 100% CPU utilization, or the memory is n } } ``` + 1. Save and exit the file. 1. Run `ghe-config-apply` to apply the changes. @@ -175,13 +176,17 @@ There are three ways to resolve this problem: 1. Log in to the administrative shell using SSH. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/accessing-the-administrative-shell-ssh)." 1. 
To remove the limitations on workflows triggered by {% data variables.product.prodname_dependabot %} on {% data variables.location.product_location %}, use the following command. + ``` shell ghe-config app.actions.disable-dependabot-enforcement true ``` + 1. Apply the configuration. + ```shell ghe-config-apply ``` + 1. Return to {% data variables.product.prodname_ghe_server %}. {% endif %} @@ -204,18 +209,25 @@ To install the official bundled actions and starter workflows within a designate 1. Log in to the administrative shell using SSH. For more information, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/accessing-the-administrative-shell-ssh)." 1. To designate your organization as the location to store the bundled actions, use the `ghe-config` command, replacing `ORGANIZATION` with the name of your organization. + ```shell ghe-config app.actions.actions-org ORGANIZATION ``` + and: + ```shell ghe-config app.actions.github-org ORGANIZATION ``` + 1. To add the bundled actions to your organization, unset the SHA. + ```shell ghe-config --unset 'app.actions.actions-repos-sha1sum' ``` + 1. Apply the configuration. + ```shell ghe-config-apply ``` diff --git a/content/admin/github-actions/advanced-configuration-and-troubleshooting/using-a-staging-environment.md b/content/admin/github-actions/advanced-configuration-and-troubleshooting/using-a-staging-environment.md index c5584c4a12..047d5c25e6 100644 --- a/content/admin/github-actions/advanced-configuration-and-troubleshooting/using-a-staging-environment.md +++ b/content/admin/github-actions/advanced-configuration-and-troubleshooting/using-a-staging-environment.md @@ -45,6 +45,7 @@ To more accurately mirror your production environment, you can optionally copy f ```shell azcopy copy 'https://SOURCE-STORAGE-ACCOUNT-NAME.blob.core.windows.net/SAS-TOKEN' 'https://DESTINATION-STORAGE-ACCOUNT-NAME.blob.core.windows.net/' --recursive ``` + - For Amazon S3 buckets, you can use [`aws s3 sync`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/s3/sync.html). For example: ```shell diff --git a/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-amazon-s3-storage.md b/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-amazon-s3-storage.md index 02563b175f..8fea435d17 100644 --- a/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-amazon-s3-storage.md +++ b/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-amazon-s3-storage.md @@ -66,6 +66,7 @@ To configure {% data variables.product.prodname_ghe_server %} to use OIDC with a ``` SHA1 Fingerprint=AB:12:34:56:78:90:AB:CD:EF:12:34:56:78:90:AB:CD:EF:12:34:56 ``` + 1. Remove the colons (`:`) from the thumbprint value, and save the value to use later. For example, the thumbprint for the value returned in the previous step is: @@ -73,6 +74,7 @@ To configure {% data variables.product.prodname_ghe_server %} to use OIDC with a ``` AB1234567890ABCDEF1234567890ABCDEF123456 ``` + 1. Using the AWS CLI, use the following command to create an OIDC provider for {% data variables.location.product_location_enterprise %}. Replace `HOSTNAME` with the public hostname for {% data variables.location.product_location_enterprise %}, and `THUMBPRINT` with the thumbprint value from the previous step. 
```shell copy @@ -139,6 +141,7 @@ To configure {% data variables.product.prodname_ghe_server %} to use OIDC with a } ... ``` + 1. Click **Update policy**. ### 3. Configure {% data variables.product.prodname_ghe_server %} to connect to Amazon S3 using OIDC diff --git a/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-google-cloud-storage.md b/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-google-cloud-storage.md index 9d64c3a801..3c897ea646 100644 --- a/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-google-cloud-storage.md +++ b/content/admin/github-actions/enabling-github-actions-for-github-enterprise-server/enabling-github-actions-with-google-cloud-storage.md @@ -73,6 +73,7 @@ To configure {% data variables.product.prodname_ghe_server %} to use OIDC with G ``` https://my-ghes-host.example.com/_services/token ``` + - Under "Audiences", leave **Default audience** selected, but note the identity provider URL, as it is needed later. The identity provider URL is in the format `https://iam.googleapis.com/projects/PROJECT-NUMBER/locations/global/workloadIdentityPools/POOL-NAME/providers/PROVIDER-NAME`. - Click **Continue**. 1. Under "Configure provider attributes": diff --git a/content/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access.md b/content/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access.md index 5e8c0886ab..13a1a6a26c 100644 --- a/content/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access.md +++ b/content/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access.md @@ -74,6 +74,7 @@ You can populate the runner tool cache by running a {% data variables.product.pr with: path: {% raw %}${{runner.tool_cache}}/tool_cache.tar.gz{% endraw %} ``` + 1. Download the tool cache artifact from the workflow run. For instructions on downloading artifacts, see "[AUTOTITLE](/actions/managing-workflow-runs/downloading-workflow-artifacts)." 1. Transfer the tool cache artifact to your self hosted runner and extract it to the local tool cache directory. The default tool cache directory is `RUNNER_DIR/_work/_tool`. If the runner hasn't processed any jobs yet, you might need to create the `_work/_tool` directories. diff --git a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-aws.md b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-aws.md index cf8efab302..04bb726a43 100644 --- a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-aws.md +++ b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-aws.md @@ -68,12 +68,14 @@ AMIs for {% data variables.product.prodname_ghe_server %} are available in the A ### Using the AWS CLI to select an AMI 1. 
Using the AWS CLI, get a list of {% data variables.product.prodname_ghe_server %} images published by {% data variables.product.prodname_dotcom %}'s AWS owner IDs (`025577942450` for GovCloud, and `895557238572` for other regions). For more information, see "[describe-images](https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html)" in the AWS documentation. + ```shell aws ec2 describe-images \ --owners OWNER_ID \ --query 'sort_by(Images,&Name)[*].{Name:Name,ImageID:ImageId}' \ --output=text ``` + 2. Take note of the AMI ID for the latest {% data variables.product.prodname_ghe_server %} image. ## Creating a security group @@ -81,6 +83,7 @@ AMIs for {% data variables.product.prodname_ghe_server %} are available in the A If you're setting up your AMI for the first time, you will need to create a security group and add a new security group rule for each port in the table below. For more information, see the AWS guide "[Using Security Groups](https://docs.aws.amazon.com/cli/latest/userguide/cli-ec2-sg.html)." 1. Using the AWS CLI, create a new security group. For more information, see "[create-security-group](https://docs.aws.amazon.com/cli/latest/reference/ec2/create-security-group.html)" in the AWS documentation. + ```shell aws ec2 create-security-group --group-name SECURITY_GROUP_NAME --description "SECURITY GROUP DESCRIPTION" ``` @@ -88,9 +91,11 @@ If you're setting up your AMI for the first time, you will need to create a secu 2. Take note of the security group ID (`sg-xxxxxxxx`) of your newly created security group. 3. Create a security group rule for each of the ports in the table below. For more information, see "[authorize-security-group-ingress](https://docs.aws.amazon.com/cli/latest/reference/ec2/authorize-security-group-ingress.html)" in the AWS documentation. + ```shell aws ec2 authorize-security-group-ingress --group-id SECURITY_GROUP_ID --protocol PROTOCOL --port PORT_NUMBER --cidr SOURCE IP RANGE ``` + This table identifies what each port is used for. {% data reusables.enterprise_installation.necessary_ports %} diff --git a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-azure.md b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-azure.md index f3d88ec2d8..4de686cc0d 100644 --- a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-azure.md +++ b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-azure.md @@ -40,6 +40,7 @@ Before launching {% data variables.location.product_location %} on Azure, you'll {% data reusables.enterprise_installation.create-ghe-instance %} 1. Find the most recent {% data variables.product.prodname_ghe_server %} appliance image. For more information about the `vm image list` command, see "[`az vm image list`](https://docs.microsoft.com/cli/azure/vm/image?view=azure-cli-latest#az_vm_image_list)" in the Microsoft documentation. + ```shell az vm image list --all -f GitHub-Enterprise | grep '"urn":' | sort -V ``` @@ -83,6 +84,7 @@ To configure the instance, you must confirm the instance's status, upload a lice {% data reusables.enterprise_installation.new-instance-attack-vector-warning %} 1. Before configuring the VM, you must wait for it to enter ReadyRole status. Check the status of the VM with the `vm list` command. 
For more information, see "[`az vm list`](https://docs.microsoft.com/cli/azure/vm?view=azure-cli-latest#az_vm_list)" in the Microsoft documentation. + ```shell $ az vm list -d -g RESOURCE_GROUP -o table > Name ResourceGroup PowerState PublicIps Fqdns Location Zones @@ -90,6 +92,7 @@ To configure the instance, you must confirm the instance's status, upload a lice > VM_NAME RESOURCE_GROUP VM running 40.76.79.202 eastus ``` + {% note %} **Note:** Azure does not automatically create a FQDNS entry for the VM. For more information, see Azure's guide on how to "[Create a fully qualified domain name in the Azure portal for a Linux VM](https://docs.microsoft.com/azure/virtual-machines/linux/portal-create-fqdn)." diff --git a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-google-cloud-platform.md b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-google-cloud-platform.md index 0b58ab27a6..c08b885d6a 100644 --- a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-google-cloud-platform.md +++ b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-google-cloud-platform.md @@ -36,6 +36,7 @@ Before launching {% data variables.location.product_location %} on Google Cloud ## Selecting the {% data variables.product.prodname_ghe_server %} image 1. Using the [gcloud compute](https://cloud.google.com/compute/docs/gcloud-compute/) command-line tool, list the public {% data variables.product.prodname_ghe_server %} images: + ```shell gcloud compute images list --project github-enterprise-public --no-standard-images ``` @@ -47,15 +48,19 @@ Before launching {% data variables.location.product_location %} on Google Cloud GCE virtual machines are created as a member of a network, which has a firewall. For the network associated with the {% data variables.product.prodname_ghe_server %} VM, you'll need to configure the firewall to allow the required ports listed in the table below. For more information about firewall rules on Google Cloud Platform, see the Google guide "[Firewall Rules Overview](https://cloud.google.com/vpc/docs/firewalls)." 1. Using the gcloud compute command-line tool, create the network. For more information, see "[gcloud compute networks create](https://cloud.google.com/sdk/gcloud/reference/compute/networks/create)" in the Google documentation. + ```shell gcloud compute networks create NETWORK-NAME --subnet-mode auto ``` + 2. Create a firewall rule for each of the ports in the table below. For more information, see "[gcloud compute firewall-rules](https://cloud.google.com/sdk/gcloud/reference/compute/firewall-rules/)" in the Google documentation. + ```shell $ gcloud compute firewall-rules create RULE-NAME \ --network NETWORK-NAME \ --allow tcp:22,tcp:25,tcp:80,tcp:122,udp:161,tcp:443,udp:1194,tcp:8080,tcp:8443,tcp:9418,icmp ``` + This table identifies the required ports and what each port is used for. {% data reusables.enterprise_installation.necessary_ports %} @@ -71,11 +76,13 @@ In production High Availability configurations, both primary and replica applian To create the {% data variables.product.prodname_ghe_server %} instance, you'll need to create a GCE instance with your {% data variables.product.prodname_ghe_server %} image and attach an additional storage volume for your instance data. 
For more information, see "[Hardware considerations](#hardware-considerations)." 1. Using the gcloud compute command-line tool, create a data disk to use as an attached storage volume for your instance data, and configure the size based on your user license count. For more information, see "[gcloud compute disks create](https://cloud.google.com/sdk/gcloud/reference/compute/disks/create)" in the Google documentation. + ```shell gcloud compute disks create DATA-DISK-NAME --size DATA-DISK-SIZE --type DATA-DISK-TYPE --zone ZONE ``` 2. Then create an instance using the name of the {% data variables.product.prodname_ghe_server %} image you selected, and attach the data disk. For more information, see "[gcloud compute instances create](https://cloud.google.com/sdk/gcloud/reference/compute/instances/create)" in the Google documentation. + ```shell $ gcloud compute instances create INSTANCE-NAME \ --machine-type n1-standard-8 \ diff --git a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-hyper-v.md b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-hyper-v.md index 0a72a2ab80..29bf2e0835 100644 --- a/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-hyper-v.md +++ b/content/admin/installation/setting-up-a-github-enterprise-server-instance/installing-github-enterprise-server-on-hyper-v.md @@ -37,25 +37,35 @@ shortTitle: Install on Hyper-V {% data reusables.enterprise_installation.create-ghe-instance %} 1. In PowerShell, create a new Generation 1 virtual machine, configure the size based on your user license count, and attach the {% data variables.product.prodname_ghe_server %} image you downloaded. For more information, see "[New-VM](https://docs.microsoft.com/powershell/module/hyper-v/new-vm?view=win10-ps)" in the Microsoft documentation. + ```shell PS C:\> New-VM -Generation 1 -Name VM_NAME -MemoryStartupBytes MEMORY_SIZE -BootDevice VHD -VHDPath PATH_TO_VHD ``` + {% data reusables.enterprise_installation.create-attached-storage-volume %} Replace `PATH_TO_DATA_DISK` with the path to the location where you create the disk. For more information, see "[New-VHD](https://docs.microsoft.com/powershell/module/hyper-v/new-vhd?view=win10-ps)" in the Microsoft documentation. + ```shell PS C:\> New-VHD -Path PATH_TO_DATA_DISK -SizeBytes DISK_SIZE ``` + 3. Attach the data disk to your instance. For more information, see "[Add-VMHardDiskDrive](https://docs.microsoft.com/powershell/module/hyper-v/add-vmharddiskdrive?view=win10-ps)" in the Microsoft documentation. + ```shell PS C:\> Add-VMHardDiskDrive -VMName VM_NAME -Path PATH_TO_DATA_DISK ``` + 4. Start the VM. For more information, see "[Start-VM](https://docs.microsoft.com/powershell/module/hyper-v/start-vm?view=win10-ps)" in the Microsoft documentation. + ```shell PS C:\> Start-VM -Name VM_NAME ``` + 5. Get the IP address of your VM. For more information, see "[Get-VMNetworkAdapter](https://docs.microsoft.com/powershell/module/hyper-v/get-vmnetworkadapter?view=win10-ps)" in the Microsoft documentation. + ```shell PS C:\> (Get-VMNetworkAdapter -VMName VM_NAME).IpAddresses ``` + 6. Copy the VM's IP address and paste it into a web browser. 
## Configuring the {% data variables.product.prodname_ghe_server %} instance diff --git a/content/admin/installation/setting-up-a-github-enterprise-server-instance/setting-up-a-staging-instance.md b/content/admin/installation/setting-up-a-github-enterprise-server-instance/setting-up-a-staging-instance.md index d4cb8f6722..8a535875b8 100644 --- a/content/admin/installation/setting-up-a-github-enterprise-server-instance/setting-up-a-staging-instance.md +++ b/content/admin/installation/setting-up-a-github-enterprise-server-instance/setting-up-a-staging-instance.md @@ -108,6 +108,7 @@ Optionally, if you use {% data variables.product.prodname_registry %} on your pr ghe-config secrets.packages.azure-container-name "AZURE CONTAINER NAME" ghe-config secrets.packages.azure-connection-string "CONNECTION STRING" ``` + - Amazon S3: ```shell copy @@ -117,6 +118,7 @@ Optionally, if you use {% data variables.product.prodname_registry %} on your pr ghe-config secrets.packages.aws-access-key "S3 ACCESS KEY ID" ghe-config secrets.packages.aws-secret-key "S3 ACCESS SECRET" ``` + 1. To prepare to enable {% data variables.product.prodname_registry %} on the staging instance, enter the following command. ```shell copy diff --git a/content/admin/monitoring-activity-in-your-enterprise/exploring-user-activity/viewing-push-logs.md b/content/admin/monitoring-activity-in-your-enterprise/exploring-user-activity/viewing-push-logs.md index dae49c04bc..cc6492c257 100644 --- a/content/admin/monitoring-activity-in-your-enterprise/exploring-user-activity/viewing-push-logs.md +++ b/content/admin/monitoring-activity-in-your-enterprise/exploring-user-activity/viewing-push-logs.md @@ -45,7 +45,9 @@ For more information, see "[Using the activity view to see changes to your repos {% data reusables.enterprise_installation.ssh-into-instance %} 1. In the appropriate Git repository, open the audit log file: + ```shell ghe-repo OWNER/REPOSITORY -c "cat audit_log" ``` + {% endif %} diff --git a/content/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise.md b/content/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise.md index b9f5b92d37..8d30445215 100644 --- a/content/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise.md +++ b/content/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise.md @@ -101,6 +101,7 @@ For information on creating or accessing your access key ID and secret key, see - Add the permissions policy you created above to allow writes to the bucket. - Edit the trust relationship to add the `sub` field to the validation conditions, replacing `ENTERPRISE` with the name of your enterprise. + ``` "Condition": { "StringEquals": { @@ -109,6 +110,7 @@ For information on creating or accessing your access key ID and secret key, see } } ``` + - Make note of the Amazon Resource Name (ARN) of the created role. 
{% data reusables.enterprise.navigate-to-log-streaming-tab %} {% data reusables.audit_log.streaming-choose-s3 %} diff --git a/content/admin/packages/quickstart-for-configuring-your-minio-storage-bucket-for-github-packages.md b/content/admin/packages/quickstart-for-configuring-your-minio-storage-bucket-for-github-packages.md index c7342dc594..b7116349a2 100644 --- a/content/admin/packages/quickstart-for-configuring-your-minio-storage-bucket-for-github-packages.md +++ b/content/admin/packages/quickstart-for-configuring-your-minio-storage-bucket-for-github-packages.md @@ -33,6 +33,7 @@ For more information about your options, see the official [MinIO docs](https://d 1. Set up your preferred environment variables for MinIO. These examples use `MINIO_DIR`: + ```shell export MINIO_DIR=$(pwd)/minio mkdir -p $MINIO_DIR @@ -43,24 +44,29 @@ For more information about your options, see the official [MinIO docs](https://d ```shell docker pull minio/minio ``` + For more information, see the official "[MinIO Quickstart Guide](https://docs.min.io/docs/minio-quickstart-guide)." 3. Sign in to MinIO using your MinIO access key and secret. {% linux %} + ```shell $ export MINIO_ACCESS_KEY=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) # this one is actually a secret, so careful $ export MINIO_SECRET_KEY=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) ``` + {% endlinux %} {% mac %} + ```shell $ export MINIO_ACCESS_KEY=$(cat /dev/urandom | LC_CTYPE=C tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) # this one is actually a secret, so careful $ export MINIO_SECRET_KEY=$(cat /dev/urandom | LC_CTYPE=C tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) ``` + {% endmac %} You can access your MinIO keys using the environment variables: diff --git a/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-environment.md b/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-environment.md index 97fc342ba0..bb18f9357c 100644 --- a/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-environment.md +++ b/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-environment.md @@ -31,6 +31,7 @@ You can use a Linux container management tool to build a pre-receive hook enviro FROM gliderlabs/alpine:3.3 RUN apk add --no-cache git bash ``` + 3. From the working directory that contains `Dockerfile.alpine-3.3`, build an image: ```shell @@ -43,11 +44,13 @@ You can use a Linux container management tool to build a pre-receive hook enviro > ---> 0250ab3be9c5 > Successfully built 0250ab3be9c5 ``` + 4. Create a container: ```shell docker create --name pre-receive.alpine-3.3 pre-receive.alpine-3.3 /bin/true ``` + 5. Export the Docker container to a `gzip` compressed `tar` file: ```shell @@ -60,6 +63,7 @@ You can use a Linux container management tool to build a pre-receive hook enviro 1. Create a Linux `chroot` environment. 2. Create a `gzip` compressed `tar` file of the `chroot` directory. + ```shell cd /path/to/chroot tar -czf /path/to/pre-receive-environment.tar.gz . 
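# A hedged sketch of one way to perform step 1 above ("Create a Linux chroot environment")
# before packaging it with the tar command shown here. Assumes debootstrap is available on
# the build host; the suite name "stable" and the target path are placeholder values.
sudo debootstrap --variant=minbase stable /path/to/chroot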
diff --git a/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-script.md b/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-script.md index 48b772311c..6d5869f741 100644 --- a/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-script.md +++ b/content/admin/policies/enforcing-policy-with-pre-receive-hooks/creating-a-pre-receive-hook-script.md @@ -132,6 +132,7 @@ We recommend consolidating hooks to a single repository. If the consolidated hoo ```shell sudo chmod +x SCRIPT_FILE.sh ``` + For Windows users, ensure the scripts have execute permissions: ```shell diff --git a/content/admin/user-management/managing-repositories-in-your-enterprise/configuring-git-large-file-storage-for-your-enterprise.md b/content/admin/user-management/managing-repositories-in-your-enterprise/configuring-git-large-file-storage-for-your-enterprise.md index 9f75d02ccc..34956d9f8c 100644 --- a/content/admin/user-management/managing-repositories-in-your-enterprise/configuring-git-large-file-storage-for-your-enterprise.md +++ b/content/admin/user-management/managing-repositories-in-your-enterprise/configuring-git-large-file-storage-for-your-enterprise.md @@ -75,6 +75,7 @@ For more information, see "[AUTOTITLE](/repositories/working-with-files/managing 1. Disable {% data variables.large_files.product_name_short %} on {% data variables.location.product_location %}. For more information, see "[Configuring {% data variables.large_files.product_name_long %} for your enterprise](#configuring-git-large-file-storage-for-your-enterprise)." 2. Create a {% data variables.large_files.product_name_short %} configuration file that points to the third party server. + ```shell # Show default configuration $ git lfs env @@ -98,10 +99,12 @@ For more information, see "[AUTOTITLE](/repositories/working-with-files/managing ``` 3. To keep the same {% data variables.large_files.product_name_short %} configuration for each user, commit a custom `.lfsconfig` file to the repository. + ```shell git add .lfsconfig git commit -m "Adding LFS config file" ``` + 3. Migrate any existing {% data variables.large_files.product_name_short %} assets. For more information, see "[Migrating to a different {% data variables.large_files.product_name_long %} server](#migrating-to-a-different-git-large-file-storage-server)." ## Migrating to a different Git Large File Storage server @@ -109,6 +112,7 @@ For more information, see "[AUTOTITLE](/repositories/working-with-files/managing Before migrating to a different {% data variables.large_files.product_name_long %} server, you must configure {% data variables.large_files.product_name_short %} to use a third party server. For more information, see "[Configuring {% data variables.large_files.product_name_long %} to use a third party server](#configuring-git-large-file-storage-to-use-a-third-party-server)." 1. Configure the repository with a second remote. + ```shell $ git remote add NEW-REMOTE https://NEW-REMOTE-HOSTNAME/path/to/repo   @@ -121,6 +125,7 @@ Before migrating to a different {% data variables.large_files.product_name_long ``` 2. Fetch all objects from the old remote. + ```shell $ git lfs fetch origin --all > Scanning for all objects ever referenced... @@ -130,6 +135,7 @@ Before migrating to a different {% data variables.large_files.product_name_long ``` 3. Push all objects to the new remote. + ```shell $ git lfs push NEW-REMOTE --all > Scanning for all objects ever referenced... 
@@ -137,6 +143,7 @@ Before migrating to a different {% data variables.large_files.product_name_long > Pushing objects... > Git LFS: (16 of 16 files) 48.00 MB / 48.85 MB, 879.10 KB skipped ``` + {% endif %} ## Further reading diff --git a/content/admin/user-management/managing-repositories-in-your-enterprise/migrating-to-internal-repositories.md b/content/admin/user-management/managing-repositories-in-your-enterprise/migrating-to-internal-repositories.md index d79f51cca6..dec423f19a 100644 --- a/content/admin/user-management/managing-repositories-in-your-enterprise/migrating-to-internal-repositories.md +++ b/content/admin/user-management/managing-repositories-in-your-enterprise/migrating-to-internal-repositories.md @@ -48,13 +48,17 @@ If you don't have private mode enabled, the migration script will have no effect {% else %} 2. Navigate to the `/data/github/current` directory. + ```shell cd /data/github/current ``` + 3. Run the migration command. + ```shell sudo bin/safe-ruby lib/github/transitions/20191210220630_convert_public_ghes_repos_to_internal.rb --verbose -w | tee -a /tmp/convert_public_ghes_repos_to_internal.log ``` + {% endif %} Log output will appear in the terminal and `/tmp/convert_public_ghes_repos_to_internal.log`. diff --git a/content/admin/user-management/managing-users-in-your-enterprise/promoting-or-demoting-a-site-administrator.md b/content/admin/user-management/managing-users-in-your-enterprise/promoting-or-demoting-a-site-administrator.md index 77e067e34a..a871eeca4a 100644 --- a/content/admin/user-management/managing-users-in-your-enterprise/promoting-or-demoting-a-site-administrator.md +++ b/content/admin/user-management/managing-users-in-your-enterprise/promoting-or-demoting-a-site-administrator.md @@ -50,6 +50,7 @@ For information about promoting a user to an organization owner, see the `ghe-or 1. [SSH](/admin/configuration/configuring-your-enterprise/accessing-the-administrative-shell-ssh) into your appliance. 2. Run [ghe-user-promote](/admin/configuration/configuring-your-enterprise/command-line-utilities#ghe-user-promote) with the username to promote. + ```shell ghe-user-promote USERNAME ``` @@ -58,6 +59,7 @@ For information about promoting a user to an organization owner, see the `ghe-or 1. [SSH](/admin/configuration/configuring-your-enterprise/accessing-the-administrative-shell-ssh) into your appliance. 2. Run [ghe-user-demote](/admin/configuration/configuring-your-enterprise/command-line-utilities#ghe-user-demote) with the username to demote. + ```shell ghe-user-demote USERNAME ``` diff --git a/content/admin/user-management/managing-users-in-your-enterprise/suspending-and-unsuspending-users.md b/content/admin/user-management/managing-users-in-your-enterprise/suspending-and-unsuspending-users.md index b50c369582..871bad7420 100644 --- a/content/admin/user-management/managing-users-in-your-enterprise/suspending-and-unsuspending-users.md +++ b/content/admin/user-management/managing-users-in-your-enterprise/suspending-and-unsuspending-users.md @@ -64,6 +64,7 @@ As when suspending a user, unsuspending a user takes effect immediately. The use {% data reusables.enterprise_installation.ssh-into-instance %} 2. Run [ghe-user-suspend](/admin/configuration/configuring-your-enterprise/command-line-utilities#ghe-user-suspend) with the username to suspend. + ```shell ghe-user-suspend USERNAME ``` @@ -86,6 +87,7 @@ You can create a custom message that suspended users will see when attempting to {% data reusables.enterprise_installation.ssh-into-instance %} 2. 
Run [ghe-user-unsuspend](/admin/configuration/configuring-your-enterprise/command-line-utilities#ghe-user-unsuspend) with the username to unsuspend. + ```shell ghe-user-unsuspend USERNAME ``` diff --git a/content/apps/creating-github-apps/authenticating-with-a-github-app/authenticating-as-a-github-app.md b/content/apps/creating-github-apps/authenticating-with-a-github-app/authenticating-as-a-github-app.md index 542ff5f67d..643a03cd86 100644 --- a/content/apps/creating-github-apps/authenticating-with-a-github-app/authenticating-as-a-github-app.md +++ b/content/apps/creating-github-apps/authenticating-with-a-github-app/authenticating-as-a-github-app.md @@ -47,6 +47,7 @@ You can use {% data variables.product.company_short %}'s Octokit.js SDK to authe ```javascript copy import { App } from "octokit"; ``` + 1. Create a new instance of `App`. In the following example, replace `APP_ID` with a reference to your app's ID. Replace `PRIVATE_KEY` with a reference to the value of your app's private key. ```javascript copy diff --git a/content/apps/creating-github-apps/authenticating-with-a-github-app/managing-private-keys-for-github-apps.md b/content/apps/creating-github-apps/authenticating-with-a-github-app/managing-private-keys-for-github-apps.md index 9f9572486f..7c33e62c12 100644 --- a/content/apps/creating-github-apps/authenticating-with-a-github-app/managing-private-keys-for-github-apps.md +++ b/content/apps/creating-github-apps/authenticating-with-a-github-app/managing-private-keys-for-github-apps.md @@ -46,9 +46,11 @@ To verify a private key: ![Screenshot of a private key in a {% data variables.product.prodname_github_app %} settings page. The fingerprint, the part of the private key after the colon, is outlined in dark orange.](/assets/images/github-apps/github-apps-private-key-fingerprint.png) 1. Generate the fingerprint of your private key (PEM) locally by using the following command: + ```shell openssl rsa -in PATH_TO_PEM_FILE -pubout -outform DER | openssl sha256 -binary | openssl base64 ``` + 1. Compare the results of the locally generated fingerprint to the fingerprint you see in {% data variables.product.product_name %}. ## Deleting private keys diff --git a/content/apps/creating-github-apps/writing-code-for-a-github-app/creating-ci-tests-with-the-checks-api.md b/content/apps/creating-github-apps/writing-code-for-a-github-app/creating-ci-tests-with-the-checks-api.md index 26cf2880af..e688df3649 100644 --- a/content/apps/creating-github-apps/writing-code-for-a-github-app/creating-ci-tests-with-the-checks-api.md +++ b/content/apps/creating-github-apps/writing-code-for-a-github-app/creating-ci-tests-with-the-checks-api.md @@ -57,6 +57,7 @@ You'll use the [Ruby programming language](https://www.ruby-lang.org/en/), the [ You don't need to be an expert in any of these tools or concepts to complete this project. This guide will walk you through all the required steps. Before you begin creating CI tests with the Checks API, you'll need to do the following: 1. Clone the [Creating CI tests with the Checks API](https://github.com/github-developer/creating-ci-tests-with-the-checks-api) repository. 
+ ```shell git clone https://github.com/github-developer/creating-ci-tests-with-the-checks-api.git ``` diff --git a/content/apps/creating-github-apps/writing-code-for-a-github-app/setting-up-your-development-environment-to-create-a-github-app.md b/content/apps/creating-github-apps/writing-code-for-a-github-app/setting-up-your-development-environment-to-create-a-github-app.md index a429c9e1eb..078e4399f2 100644 --- a/content/apps/creating-github-apps/writing-code-for-a-github-app/setting-up-your-development-environment-to-create-a-github-app.md +++ b/content/apps/creating-github-apps/writing-code-for-a-github-app/setting-up-your-development-environment-to-create-a-github-app.md @@ -407,6 +407,7 @@ Here are a few common problems and some suggested solutions. If you run into any **A:** You may not be running the Smee client, running the Smee command with the wrong parameters or you may not have the correct Smee domain in your GitHub App settings. First check to make sure the Smee client is running in a Terminal tab. If that's not the problem, visit your [app settings page](https://github.com/settings/apps) and check the fields shown in "[Step 2. Register a new GitHub App](#step-2-register-a-new-github-app)." Make sure the domain in those fields matches the domain you used in your `smee -u ` command in "[Step 1. Start a new Smee channel](#step-1-start-a-new-smee-channel)." If none of the above work, check that you are running the full Smee command including the `--path` and `--port` options, for example: `smee --url https://smee.io/qrfeVRbFbffd6vD --path /event_handler --port 3000` (replacing `https://smee.io/qrfeVRbFbffd6vD` with your own Smee domain). - **Q:** I'm getting an `Octokit::NotFound` 404 error in my debug output: + ``` 2018-12-06 15:00:56 - Octokit::NotFound - POST {% data variables.product.api_url_code %}/app/installations/500991/access_tokens: 404 - Not Found // See: /v3/apps/#create-a-new-installation-token: ``` diff --git a/content/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent.md b/content/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent.md index 292553ec7a..5de66b3558 100644 --- a/content/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent.md +++ b/content/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent.md @@ -48,13 +48,17 @@ If you are a site administrator for {% data variables.location.product_location ssh-keygen -t rsa -b 4096 -C "your_email@example.com" ``` + {%- else %} + ```shell ssh-keygen -t ed25519 -C "your_email@example.com" ``` + {% note %} **Note:** If you are using a legacy system that doesn't support the Ed25519 algorithm, use: + ```shell ssh-keygen -t rsa -b 4096 -C "your_email@example.com" ``` @@ -63,9 +67,11 @@ If you are a site administrator for {% data variables.location.product_location {%- endif %} This creates a new SSH key, using the provided email as a label. + ```shell > Generating public/private ALGORITHM key pair. ``` + When you're prompted to "Enter a file in which to save the key", you can press **Enter** to accept the default file location. Please note that if you created SSH keys previously, ssh-keygen may ask you to rewrite another key, in which case we recommend creating a custom-named SSH key. To do so, type the default file location and replace id_ssh_keyname with your custom key name. 
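As an illustration of the custom-named key mentioned above, the key can also be generated non-interactively by passing a file path with `-f`. This is a sketch only; the file name `id_ed25519_github` and the email address are placeholders rather than values from this guide:

```shell
# Generate an Ed25519 key at a custom path instead of the default location
ssh-keygen -t ed25519 -C "your_email@example.com" -f ~/.ssh/id_ed25519_github
```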
{% mac %} @@ -93,6 +99,7 @@ When you're prompted to "Enter a file in which to save the key", you can press * {% endlinux %} 1. At the prompt, type a secure passphrase. For more information, see "[AUTOTITLE](/authentication/connecting-to-github-with-ssh/working-with-ssh-key-passphrases)." + ```shell > Enter passphrase (empty for no passphrase): [Type a passphrase] > Enter same passphrase again: [Type passphrase again] @@ -142,12 +149,15 @@ Before adding a new SSH key to the ssh-agent to manage your keys, you should hav Host {% ifversion ghes or ghae %}HOSTNAME{% else %}github.com{% endif %} IgnoreUnknown UseKeychain ``` + {% endnote %} 1. Add your SSH private key to the ssh-agent and store your passphrase in the keychain. {% data reusables.ssh.add-ssh-key-to-ssh-agent %} + ```shell ssh-add --apple-use-keychain ~/.ssh/id_{% ifversion ghae %}rsa{% else %}ed25519{% endif %} ``` + {% note %} **Note:** The `--apple-use-keychain` option stores the passphrase in your keychain for you when you add an SSH key to the ssh-agent. If you chose not to add a passphrase to your key, run the command without the `--apple-use-keychain` option. @@ -169,6 +179,7 @@ Before adding a new SSH key to the ssh-agent to manage your keys, you should hav {% data reusables.desktop.windows_git_bash %} 1. Ensure the ssh-agent is running. You can use the "Auto-launching the ssh-agent" instructions in "[Working with SSH key passphrases](/articles/working-with-ssh-key-passphrases)", or start it manually: + ```shell # start the ssh-agent in the background $ eval "$(ssh-agent -s)" @@ -200,6 +211,7 @@ If you are using macOS or Linux, you may need to update your SSH client or insta 1. Insert your hardware security key into your computer. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Paste the text below, substituting in the email address for your account on {% data variables.product.product_name %}. + ```shell ssh-keygen -t {% ifversion ghae %}ecdsa{% else %}ed25519{% endif %}-sk -C "YOUR_EMAIL" ``` @@ -208,6 +220,7 @@ If you are using macOS or Linux, you may need to update your SSH client or insta {% note %} **Note:** If the command fails and you receive the error `invalid format` or `feature not supported,` you may be using a hardware security key that does not support the Ed25519 algorithm. Enter the following command instead. + ```shell ssh-keygen -t ecdsa-sk -C "your_email@example.com" ``` @@ -242,8 +255,10 @@ If you are using macOS or Linux, you may need to update your SSH client or insta {% endlinux %} 1. When you are prompted to type a passphrase, press **Enter**. + ```shell > Enter passphrase (empty for no passphrase): [Type a passphrase] > Enter same passphrase again: [Type passphrase again] ``` + {% data reusables.ssh.add-public-key-to-github %} diff --git a/content/authentication/connecting-to-github-with-ssh/testing-your-ssh-connection.md b/content/authentication/connecting-to-github-with-ssh/testing-your-ssh-connection.md index 3f996480d2..986c2299b5 100644 --- a/content/authentication/connecting-to-github-with-ssh/testing-your-ssh-connection.md +++ b/content/authentication/connecting-to-github-with-ssh/testing-your-ssh-connection.md @@ -23,6 +23,7 @@ When you test your connection, you'll need to authenticate this action using you {% data reusables.command_line.open_the_multi_os_terminal %} 2. 
Enter the following: + ```shell $ ssh -T git@{% data variables.command_line.codeblock %} # Attempts to ssh to {% data variables.product.product_name %} @@ -37,6 +38,7 @@ When you test your connection, you'll need to authenticate this action using you ``` 3. Verify that the fingerprint in the message you see matches {% ifversion fpt or ghec %}[{% data variables.product.prodname_dotcom %}'s public key fingerprint](/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints){% else %} your enterprise's public key fingerprint{% endif %}. If it does, then type `yes`: + ```shell > Hi USERNAME! You've successfully authenticated, but GitHub does not > provide shell access. @@ -45,6 +47,7 @@ When you test your connection, you'll need to authenticate this action using you {% linux %} You may see this error message: + ```shell ... Agent admitted failure to sign using the key. diff --git a/content/authentication/keeping-your-account-and-data-secure/about-anonymized-urls.md b/content/authentication/keeping-your-account-and-data-secure/about-anonymized-urls.md index 110dcfde6f..602ad3ed5e 100644 --- a/content/authentication/keeping-your-account-and-data-secure/about-anonymized-urls.md +++ b/content/authentication/keeping-your-account-and-data-secure/about-anonymized-urls.md @@ -40,6 +40,7 @@ If an image is showing up in your browser but not on {% data variables.product.p {% data reusables.command_line.open_the_multi_os_terminal %} 1. Request the image headers using `curl`. + ```shell $ curl -I https://www.my-server.com/images/some-image.png > HTTP/2 200 @@ -49,6 +50,7 @@ If an image is showing up in your browser but not on {% data variables.product.p > Server: Google Frontend > Content-Length: 6507 ``` + 3. Check the value of `Content-Type`. In this case, it's `image/x-png`. 4. Check that content type against [the list of types supported by Camo](https://github.com/atmos/camo/blob/master/mime-types.json). @@ -63,6 +65,7 @@ If you changed an image recently and it's showing up in your browser but not {% {% data reusables.command_line.open_the_multi_os_terminal %} 1. Request the image headers using `curl`. + ```shell $ curl -I https://www.my-server.com/images/some-image.png > HTTP/2 200 @@ -84,6 +87,7 @@ Purging the cache forces every {% data variables.product.prodname_dotcom %} user {% data reusables.command_line.open_the_multi_os_terminal %} 1. Purge the image using `curl -X PURGE` on the Camo URL. + ```shell $ curl -X PURGE https://camo.githubusercontent.com/4d04abe0044d94fefcf9af2133223.... > {"status": "ok", "id": "216-8675309-1008701"} diff --git a/content/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository.md b/content/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository.md index 46d0aac3ec..a869c73029 100644 --- a/content/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository.md +++ b/content/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository.md @@ -79,12 +79,15 @@ See the [BFG Repo-Cleaner](https://rtyley.github.io/bfg-repo-cleaner/)'s documen To illustrate how `git filter-repo` works, we'll show you how to remove your file with sensitive data from the history of your repository and add it to `.gitignore` to ensure that it is not accidentally re-committed. 1. Install the latest release of the [git filter-repo](https://github.com/newren/git-filter-repo) tool. 
You can install `git-filter-repo` manually or by using a package manager. For example, to install the tool with HomeBrew, use the `brew install` command. + ``` brew install git-filter-repo ``` + For more information, see [_INSTALL.md_](https://github.com/newren/git-filter-repo/blob/main/INSTALL.md) in the `newren/git-filter-repo` repository. 2. If you don't already have a local copy of your repository with sensitive data in its history, [clone the repository](/repositories/creating-and-managing-repositories/cloning-a-repository) to your local computer. + ```shell $ git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/YOUR-REPOSITORY > Initialized empty Git repository in /Users/YOUR-FILE-PATH/YOUR-REPOSITORY/.git/ @@ -94,15 +97,19 @@ To illustrate how `git filter-repo` works, we'll show you how to remove your fil > Receiving objects: 100% (1301/1301), 164.39 KiB, done. > Resolving deltas: 100% (724/724), done. ``` + 3. Navigate into the repository's working directory. + ```shell cd YOUR-REPOSITORY ``` + 4. Run the following command, replacing `PATH-TO-YOUR-FILE-WITH-SENSITIVE-DATA` with the **path to the file you want to remove, not just its filename**. These arguments will: - Force Git to process, but not check out, the entire history of every branch and tag - Remove the specified file, as well as any empty commits generated as a result - Remove some configurations, such as the remote URL, stored in the _.git/config_ file. You may want to back up this file in advance for restoration later. - **Overwrite your existing tags** + ```shell $ git filter-repo --invert-paths --path PATH-TO-YOUR-FILE-WITH-SENSITIVE-DATA Parsed 197 commits @@ -133,8 +140,10 @@ To illustrate how `git filter-repo` works, we'll show you how to remove your fil > [main 051452f] Add YOUR-FILE-WITH-SENSITIVE-DATA to .gitignore > 1 files changed, 1 insertions(+), 0 deletions(-) ``` + 6. Double-check that you've removed everything you wanted to from your repository's history, and that all of your branches are checked out. 7. Once you're happy with the state of your repository, force-push your local changes to overwrite your repository on {% ifversion ghae %}{% data variables.product.product_name %}{% else %}{% data variables.location.product_location %}{% endif %}, as well as all the branches you've pushed up. A force push is required to remove sensitive data from your commit history. + ```shell $ git push origin --force --all > Counting objects: 1074, done. @@ -145,7 +154,9 @@ To illustrate how `git filter-repo` works, we'll show you how to remove your fil > To https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/YOUR-REPOSITORY.git > + 48dc599...051452f main -> main (forced update) ``` + 8. In order to remove the sensitive file from [your tagged releases](/repositories/releasing-projects-on-github/about-releases), you'll also need to force-push against your Git tags: + ```shell $ git push origin --force --tags > Counting objects: 321, done. @@ -166,6 +177,7 @@ After using either the BFG tool or `git filter-repo` to remove the sensitive dat 2. Tell your collaborators to [rebase](https://git-scm.com/book/en/Git-Branching-Rebasing), _not_ merge, any branches they created off of your old (tainted) repository history. One merge commit could reintroduce some or all of the tainted history that you just went to the trouble of purging. 3. 
After some time has passed and you're confident that the BFG tool / `git filter-repo` had no unintended side effects, you can force all objects in your local repository to be dereferenced and garbage collected with the following commands (using Git 1.8.5 or newer): + ```shell $ git for-each-ref --format="delete %(refname)" refs/original | git update-ref --stdin $ git reflog expire --expire=now --all @@ -176,6 +188,7 @@ After using either the BFG tool or `git filter-repo` to remove the sensitive dat > Writing objects: 100% (2437/2437), done. > Total 2437 (delta 1461), reused 1802 (delta 1048) ``` + {% note %} **Note:** You can also achieve this by pushing your filtered history to a new or empty repository and then making a fresh clone from {% data variables.product.product_name %}. diff --git a/content/authentication/keeping-your-account-and-data-secure/reviewing-your-ssh-keys.md b/content/authentication/keeping-your-account-and-data-secure/reviewing-your-ssh-keys.md index 457e5bcf1e..d008ba13ff 100644 --- a/content/authentication/keeping-your-account-and-data-secure/reviewing-your-ssh-keys.md +++ b/content/authentication/keeping-your-account-and-data-secure/reviewing-your-ssh-keys.md @@ -35,6 +35,7 @@ You can delete unauthorized (or possibly compromised) SSH keys to ensure that an {% data reusables.command_line.start_ssh_agent %} 6. Find and take a note of your public key fingerprint. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) @@ -63,6 +64,7 @@ You can delete unauthorized (or possibly compromised) SSH keys to ensure that an {% data reusables.desktop.windows_git_for_windows_turn_on_ssh_agent %} 6. Find and take a note of your public key fingerprint. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) @@ -89,6 +91,7 @@ You can delete unauthorized (or possibly compromised) SSH keys to ensure that an {% data reusables.command_line.start_ssh_agent %} 6. Find and take a note of your public key fingerprint. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) diff --git a/content/authentication/managing-commit-signature-verification/associating-an-email-with-your-gpg-key.md b/content/authentication/managing-commit-signature-verification/associating-an-email-with-your-gpg-key.md index 15a209f6a2..abb516beaf 100644 --- a/content/authentication/managing-commit-signature-verification/associating-an-email-with-your-gpg-key.md +++ b/content/authentication/managing-commit-signature-verification/associating-an-email-with-your-gpg-key.md @@ -25,31 +25,41 @@ If you're using a GPG key that matches your committer identity and your verified {% data reusables.gpg.list-keys-with-note %} {% data reusables.gpg.copy-gpg-key-id %} 4. Enter `gpg --edit-key GPG key ID`, substituting in the GPG key ID you'd like to use. In the following example, the GPG key ID is `3AA5C34371567BD2`: + ```shell gpg --edit-key 3AA5C34371567BD2 ``` + 5. Enter `gpg> adduid` to add the user ID details. + ```shell gpg> adduid ``` + 6. Follow the prompts to supply your real name, email address, and any comments. You can modify your entries by choosing `N`, `C`, or `E`. 
{% data reusables.gpg.private-email %} {% ifversion fpt or ghec %} For more information, see "[AUTOTITLE](/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/setting-your-commit-email-address)."{% endif %} + ```shell Real Name: OCTOCAT Email address: "octocat@github.com" Comment: GITHUB-KEY Change (N)ame, (C)omment, (E)mail or (O)kay/(Q)uit? ``` + 7. Enter `O` to confirm your selections. 8. Enter your key's passphrase. 9. Enter `gpg> save` to save the changes + ```shell gpg> save ``` + 10. Enter `gpg --armor --export GPG key ID`, substituting in the GPG key ID you'd like to use. In the following example, the GPG key ID is `3AA5C34371567BD2`: + ```shell $ gpg --armor --export 3AA5C34371567BD2 # Prints the GPG key, in ASCII armor format ``` + 11. Upload the GPG key by [adding it to your GitHub account](/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account). ## Further reading diff --git a/content/authentication/managing-commit-signature-verification/generating-a-new-gpg-key.md b/content/authentication/managing-commit-signature-verification/generating-a-new-gpg-key.md index 278a0ead66..b2a02a8799 100644 --- a/content/authentication/managing-commit-signature-verification/generating-a-new-gpg-key.md +++ b/content/authentication/managing-commit-signature-verification/generating-a-new-gpg-key.md @@ -28,13 +28,17 @@ topics: {% data reusables.command_line.open_the_multi_os_terminal %} 3. Generate a GPG key pair. Since there are multiple versions of GPG, you may need to consult the relevant [_man page_](https://en.wikipedia.org/wiki/Man_page) to find the appropriate key generation command. - If you are on version 2.1.17 or greater, paste the text below to generate a GPG key pair. + ```shell copy gpg --full-generate-key ``` + - If you are not on version 2.1.17 or greater, the `gpg --full-generate-key` command doesn't work. Paste the text below and skip to step 6. + ```shell copy gpg --default-new-key-algo rsa4096 --gen-key ``` + 4. At the prompt, specify the kind of key you want, or press `Enter` to accept the default. 5. At the prompt, specify the key size you want, or press `Enter` to accept the default. 6. Enter the length of time the key should be valid. Press `Enter` to specify the default selection, indicating that the key doesn't expire. Unless you require an expiration date, we recommend accepting this default. @@ -51,10 +55,12 @@ topics: {% data reusables.gpg.list-keys-with-note %} {% data reusables.gpg.copy-gpg-key-id %} 10. Paste the text below, substituting in the GPG key ID you'd like to use. In this example, the GPG key ID is `3AA5C34371567BD2`: + ```shell gpg --armor --export 3AA5C34371567BD2 # Prints the GPG key ID, in ASCII armor format ``` + 11. Copy your GPG key, beginning with `-----BEGIN PGP PUBLIC KEY BLOCK-----` and ending with `-----END PGP PUBLIC KEY BLOCK-----`. 12. [Add the GPG key to your GitHub account](/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account). 
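The two GPG procedures above (adding a user ID to a key, then exporting the key) can be checked end to end from the command line. This is a minimal sketch, not part of the documented steps: it assumes GnuPG 2.1.17 or later, the same example key ID `3AA5C34371567BD2` used above, and macOS `pbcopy` for the clipboard (on Linux, `xclip -selection clipboard` is a common substitute).

```shell
# List secret keys in long format to confirm the key ID and its user IDs (uid lines)
gpg --list-secret-keys --keyid-format=long

# Export the public key in ASCII armor and copy it,
# ready to paste into the "SSH and GPG keys" section of your GitHub settings
gpg --armor --export 3AA5C34371567BD2 | pbcopy
```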
diff --git a/content/authentication/managing-commit-signature-verification/signing-commits.md b/content/authentication/managing-commit-signature-verification/signing-commits.md index 762ff68337..f95937df71 100644 --- a/content/authentication/managing-commit-signature-verification/signing-commits.md +++ b/content/authentication/managing-commit-signature-verification/signing-commits.md @@ -35,16 +35,20 @@ You can also manually configure [gpg-agent](http://linux.die.net/man/1/gpg-agent If you have multiple keys or are attempting to sign commits or tags with a key that doesn't match your committer identity, you should [tell Git about your signing key](/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key). 1. When committing changes in your local branch, add the -S flag to the git commit command: + ```shell $ git commit -S -m "YOUR_COMMIT_MESSAGE" # Creates a signed commit ``` + 2. If you're using GPG, after you create your commit, provide the passphrase you set up when you [generated your GPG key](/authentication/managing-commit-signature-verification/generating-a-new-gpg-key). 3. When you've finished creating commits locally, push them to your remote repository on {% data variables.product.product_name %}: + ```shell $ git push # Pushes your local commits to the remote repository ``` + 4. On {% data variables.product.product_name %}, navigate to your pull request. {% data reusables.repositories.review-pr-commits %} 5. To view more detailed information about the verified signature, click **Verified.** diff --git a/content/authentication/managing-commit-signature-verification/signing-tags.md b/content/authentication/managing-commit-signature-verification/signing-tags.md index c537db7256..b55d90bdba 100644 --- a/content/authentication/managing-commit-signature-verification/signing-tags.md +++ b/content/authentication/managing-commit-signature-verification/signing-tags.md @@ -18,11 +18,14 @@ topics: {% data reusables.gpg.desktop-support-for-commit-signing %} 1. To sign a tag, add `-s` to your `git tag` command. + ```shell $ git tag -s MYTAG # Creates a signed tag ``` + 2. Verify your signed tag by running `git tag -v [tag-name]`. + ```shell $ git tag -v MYTAG # Verifies the signed tag diff --git a/content/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key.md b/content/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key.md index 7f0270f6d7..2d610e3a5d 100644 --- a/content/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key.md +++ b/content/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key.md @@ -38,16 +38,21 @@ If you have multiple GPG keys, you need to tell Git which one to use. {% data reusables.gpg.paste-gpg-key-id %} {% data reusables.gpg.set-auto-sign %} 1. If you aren't using the GPG suite, run the following command in the `zsh` shell to add the GPG key to your `.zshrc` file, if it exists, or your `.zprofile` file: + ```shell $ if [ -r ~/.zshrc ]; then echo -e '\nexport GPG_TTY=\$(tty)' >> ~/.zshrc; \ else echo -e '\nexport GPG_TTY=\$(tty)' >> ~/.zprofile; fi ``` + Alternatively, if you use the `bash` shell, run this command: + ```shell $ if [ -r ~/.bash_profile ]; then echo -e '\nexport GPG_TTY=\$(tty)' >> ~/.bash_profile; \ else echo -e '\nexport GPG_TTY=\$(tty)' >> ~/.profile; fi ``` + 1. Optionally, to prompt you to enter a PIN or passphrase when required, install `pinentry-mac`. 
For example, using [Homebrew](https://brew.sh/): + ```shell brew install pinentry-mac echo "pinentry-program $(which pinentry-mac)" >> ~/.gnupg/gpg-agent.conf @@ -96,9 +101,11 @@ If you have multiple GPG keys, you need to tell Git which one to use. {% data reusables.gpg.paste-gpg-key-id %} {% data reusables.gpg.set-auto-sign %} 1. To add your GPG key to your `.bashrc` startup file, run the following command: + ```bash [ -f ~/.bashrc ] && echo -e '\nexport GPG_TTY=\$(tty)' >> ~/.bashrc ``` + {% endlinux %} {% ifversion ssh-commit-verification %} diff --git a/content/authentication/troubleshooting-ssh/error-permission-denied-publickey.md b/content/authentication/troubleshooting-ssh/error-permission-denied-publickey.md index aa387390e5..3776a54b76 100644 --- a/content/authentication/troubleshooting-ssh/error-permission-denied-publickey.md +++ b/content/authentication/troubleshooting-ssh/error-permission-denied-publickey.md @@ -43,6 +43,7 @@ All connections, including those for remote URLs, must be made as the "git" user $ ssh -T GITHUB-USERNAME@{% data variables.command_line.codeblock %} > Permission denied (publickey). ``` + If your connection failed and you're using a remote URL with your {% data variables.product.product_name %} username, you can [change the remote URL to use the "git" user](/get-started/getting-started-with-git/managing-remote-repositories). You should verify your connection by typing: @@ -57,7 +58,9 @@ $ ssh -T git@{% data variables.command_line.codeblock %} {% mac %} {% data reusables.command_line.open_the_multi_os_terminal %} + 2. Verify that you have a private key generated and loaded into SSH. + ```shell # start the ssh-agent in the background $ eval "$(ssh-agent -s)" @@ -75,7 +78,9 @@ $ ssh -T git@{% data variables.command_line.codeblock %} 1. {% data reusables.desktop.windows_git_bash_turn_on_ssh_agent %} {% data reusables.desktop.windows_git_for_windows_turn_on_ssh_agent %} + 2. Verify that you have a private key generated and loaded into SSH. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) @@ -86,7 +91,9 @@ $ ssh -T git@{% data variables.command_line.codeblock %} {% linux %} {% data reusables.command_line.open_the_multi_os_terminal %} + 2. Verify that you have a private key generated and loaded into SSH. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) @@ -142,11 +149,14 @@ You must provide your public key to {% data variables.product.product_name %} to 1. Open Terminal. 2. Start SSH agent in the background. + ```shell $ eval "$(ssh-agent -s)" > Agent pid 59566 ``` + 3. Find and take a note of your public key fingerprint. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) @@ -162,11 +172,14 @@ You must provide your public key to {% data variables.product.product_name %} to 1. Open the command line. 2. Start SSH agent in the background. + ```shell $ ssh-agent -s > Agent pid 59566 ``` + 3. Find and take a note of your public key fingerprint. + ```shell $ ssh-add -l -E sha256 > 2048 SHA256:274ffWxgaxq/tSINAykStUL7XWyRNcRTlcST1Ei7gBQ /Users/USERNAME/.ssh/id_rsa (RSA) @@ -182,17 +195,21 @@ You must provide your public key to {% data variables.product.product_name %} to 1. Open Terminal. 2. Start SSH agent in the background. + ```shell $ eval "$(ssh-agent -s)" > Agent pid 59566 ``` + 3. Find and take a note of your public key fingerprint. 
If you're using OpenSSH 6.7 or older: + ```shell $ ssh-add -l > 2048 a0:dd:42:3c:5a:9d:e4:2a:21:52:4e:78:07:6e:c8:4d /Users/USERNAME/.ssh/id_rsa (RSA) ``` If you're using OpenSSH 6.8 or newer: + ```shell $ ssh-add -l -E md5 > 2048 MD5:a0:dd:42:3c:5a:9d:e4:2a:21:52:4e:78:07:6e:c8:4d /Users/USERNAME/.ssh/id_rsa (RSA) diff --git a/content/authentication/troubleshooting-ssh/error-were-doing-an-ssh-key-audit.md b/content/authentication/troubleshooting-ssh/error-were-doing-an-ssh-key-audit.md index 9710adc62c..24f701feca 100644 --- a/content/authentication/troubleshooting-ssh/error-were-doing-an-ssh-key-audit.md +++ b/content/authentication/troubleshooting-ssh/error-were-doing-an-ssh-key-audit.md @@ -25,6 +25,7 @@ to approve this key so we know it's safe. Fingerprint: ab:08:46:83:ff:f6:c4:f8:a9:4e:68:6b:94:17:f2:46 fatal: could not read from remote repository ``` + ## Solving the issue To fix this, you need to [review your SSH keys](/authentication/keeping-your-account-and-data-secure/reviewing-your-ssh-keys) and either reject or approve the unverified key. Clicking the URL link in the error message brings you to the SSH Settings page, where the unverified SSH key is highlighted in the SSH key list. diff --git a/content/billing/managing-billing-for-github-advanced-security/viewing-committer-information-for-github-advanced-security.md b/content/billing/managing-billing-for-github-advanced-security/viewing-committer-information-for-github-advanced-security.md index 87231c2468..493a108927 100644 --- a/content/billing/managing-billing-for-github-advanced-security/viewing-committer-information-for-github-advanced-security.md +++ b/content/billing/managing-billing-for-github-advanced-security/viewing-committer-information-for-github-advanced-security.md @@ -40,4 +40,5 @@ Under "Calculate Additional Advanced Committers", you can calculate how many mor example-org octo-org/octo-repo ``` + 1. Click **Recalculate**. diff --git a/content/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning.md b/content/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning.md index 92f6a61dc0..7409991488 100644 --- a/content/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning.md +++ b/content/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning.md @@ -231,6 +231,7 @@ If your workflow does not contain a matrix called `language`, then {% data varia with: languages: cpp, csharp, python ``` + {% ifversion fpt or ghec %} ## Analyzing Python dependencies @@ -274,6 +275,7 @@ jobs: # to auto-install Python dependencies setup-python-dependencies: false ``` + {% endif %} ## Defining the alert severities that give a check failure for a pull request @@ -461,6 +463,7 @@ The settings in the configuration file are written in YAML format. You specify {% data variables.product.prodname_codeql %} query packs in an array. Note that the format is different from the format used by the workflow file. {% raw %} + ``` yaml copy packs: # Use the latest version of 'pack1' published by 'scope' @@ -476,6 +479,7 @@ packs: # Use pack6 and restrict it to the query suite 'path/to/suite.qls' - scope/pack6:path/to/suite.qls ``` + {% endraw %} The full format for specifying a query pack is `scope/name[@version][:path]`. Both `version` and `path` are optional. `version` is semver version range. 
If it is missing, the latest version is used. For more information about semver ranges, see the [semver docs on npm](https://docs.npmjs.com/cli/v6/using-npm/semver#ranges). @@ -483,6 +487,7 @@ The full format for specifying a query pack is `scope/name[@version][:path]`. Bo If you have a workflow that generates more than one {% data variables.product.prodname_codeql %} database, you can specify any {% data variables.product.prodname_codeql %} query packs to run in a custom configuration file using a nested map of packs. {% raw %} + ``` yaml copy packs: # Use these packs for JavaScript and TypeScript analysis @@ -494,6 +499,7 @@ packs: - scope/java-pack1 - scope/java-pack2@v1.0.0 ``` + {% endraw %} {% endif %} @@ -533,6 +539,7 @@ query-filters: - exclude: id: js/useless-assignment-to-local ``` + To find the id of a query, you can click the alert in the list of alerts in the **Security** tab. This opens the alert details page. The `Rule ID` field contains the query id. For more information about the alert details page, see "[AUTOTITLE](/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/about-code-scanning-alerts#about-alert-details)." {% tip %} diff --git a/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/configuring-codeql-cli-in-your-ci-system.md b/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/configuring-codeql-cli-in-your-ci-system.md index 01430c23b1..bd3a9d42d0 100644 --- a/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/configuring-codeql-cli-in-your-ci-system.md +++ b/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/configuring-codeql-cli-in-your-ci-system.md @@ -132,6 +132,7 @@ $ 1. Create a {% data variables.product.prodname_codeql %} database (see above). 2. Run `codeql database analyze` on the database and specify which {% ifversion codeql-packs %}packs and/or {% endif %}queries to use. + ```shell codeql database analyze <database> --format=<format> \ --output=<output> {% ifversion codeql-packs %}--download <packs,queries>{% else %}<queries>{% endif %} @@ -146,6 +147,7 @@ codeql database analyze <database> --format=<format> \ --sarif-category=<language-specifier> --output=<output> \ {% ifversion codeql-packs %}<packs,queries>{% else %}<queries>{% endif %} ``` + {% endnote %} | Option | Required | Usage | diff --git a/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/installing-codeql-cli-in-your-ci-system.md b/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/installing-codeql-cli-in-your-ci-system.md index 60852a89b1..13fd4e7b10 100644 --- a/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/installing-codeql-cli-in-your-ci-system.md +++ b/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/installing-codeql-cli-in-your-ci-system.md @@ -85,6 +85,7 @@ After you extract the {% data variables.product.prodname_codeql_cli %} bundle, y - `//codeql/codeql resolve qlpacks` otherwise. 
**Extract from successful output:** + ``` codeql/cpp-all (//qlpacks/codeql/cpp-all/) codeql/cpp-examples (//qlpacks/codeql/cpp-examples/) diff --git a/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/migrating-from-the-codeql-runner-to-codeql-cli.md b/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/migrating-from-the-codeql-runner-to-codeql-cli.md index 4822400209..48d95a156c 100644 --- a/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/migrating-from-the-codeql-runner-to-codeql-cli.md +++ b/content/code-security/code-scanning/using-codeql-code-scanning-with-your-existing-ci-system/migrating-from-the-codeql-runner-to-codeql-cli.md @@ -62,6 +62,7 @@ The ref name and commit SHA being checked out and analyzed in these examples are ### Single non-compiled language (JavaScript) Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages javascript \ @@ -71,7 +72,9 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo --github-url https://github.com --github-auth-stdin --commit deb275d2d5fe9a522a0b7bd8b6b6a1c939552718 --ref refs/heads/main ``` + CLI: + ```bash codeql database create /codeql-dbs/example-repo --language=javascript \ --source-root=. @@ -91,6 +94,7 @@ echo "$TOKEN" | codeql github upload-results --repository=my-org/example-repo \ A similar approach can be taken for compiled languages, or multiple languages. Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages javascript \ @@ -101,7 +105,9 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo \ --github-url https://github.com --github-auth-stdin \ --commit deb275d2d5fe9a522a0b7bd8b6b6a1c939552718 --ref refs/heads/main ``` + CLI: + ```bash codeql database create /codeql-dbs/example-repo --language=javascript \ --source-root=. @@ -121,6 +127,7 @@ echo "$TOKEN" | codeql github upload-results --repository=my-org/example-repo \ A similar approach can be taken for compiled languages, or multiple languages. Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages javascript \ @@ -131,7 +138,9 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo \ --github-url https://github.com --github-auth-stdin \ --commit deb275d2d5fe9a522a0b7bd8b6b6a1c939552718 --ref refs/heads/main ``` + CLI: + ```bash # Use `--codescanning-config` with the path to the YAML configuration file. codeql database create /codeql-dbs/example-repo --language=javascript \ @@ -150,6 +159,7 @@ echo "$TOKEN" | codeql github upload-results --repository=my-org/example-repo \ ### Single compiled language using autobuild (Java) Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages java \ @@ -165,7 +175,9 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo --github-url https://github.com --github-auth-stdin --commit deb275d2d5fe9a522a0b7bd8b6b6a1c939552718 --ref refs/heads/main ``` + CLI: + ```bash # Run `codeql database create` without `--command`. # This will run the autobuilder for the given language. 
@@ -184,6 +196,7 @@ echo "$TOKEN" | codeql github upload-results --repository=my-org/example-repo \ ### Single compiled language using a custom build command (Java) Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages java \ @@ -199,7 +212,9 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo --github-url https://github.com --github-auth-stdin --commit deb275d2d5fe9a522a0b7bd8b6b6a1c939552718 --ref refs/heads/main ``` + CLI: + ```bash # Provide an explicit build command using `--command`. codeql database create /codeql-dbs/example-repo --language=java \ @@ -219,6 +234,7 @@ echo "$TOKEN" | codeql github upload-results --repository=my-org/example-repo \ Indirect build tracing for a compiled language enables {% data variables.product.prodname_codeql %} to detect all build steps between the `init` and `analyze` steps, when the code cannot be built using the autobuilder or an explicit build command line. This is useful when using preconfigured build steps from your CI system, such as the `VSBuild` and `MSBuild` tasks in Azure DevOps. Runner: + ```yaml - task: CmdLine@1 displayName: CodeQL Initialization @@ -257,6 +273,7 @@ Runner: ``` CLI: + ```yaml # Run any pre-build tasks, for example, restore NuGet dependencies... @@ -342,6 +359,7 @@ This example is not strictly possible with the {% data variables.code-scanning.c Only one language (the compiled language with the most files) will be analyzed. Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages cpp,python \ @@ -359,6 +377,7 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo ``` CLI: + ```bash # Create multiple databases using `--db-cluster`. # Run autobuild by omitting `--command`. @@ -382,6 +401,7 @@ done ### Multiple languages using a custom build command (C++, Python) Runner: + ```bash echo "$TOKEN" | codeql-runner-linux init --repository my-org/example-repo \ --languages cpp,python \ @@ -399,6 +419,7 @@ echo "$TOKEN" | codeql-runner-linux analyze --repository my-org/example-repo ``` CLI: + ```bash # Create multiple databases using `--db-cluster`. codeql database create /codeql-dbs/example-repo-multi \ diff --git a/content/code-security/codeql-cli/codeql-cli-reference/about-codeql-packs.md b/content/code-security/codeql-cli/codeql-cli-reference/about-codeql-packs.md index 5587080c2e..808453391e 100644 --- a/content/code-security/codeql-cli/codeql-cli-reference/about-codeql-packs.md +++ b/content/code-security/codeql-cli/codeql-cli-reference/about-codeql-packs.md @@ -77,6 +77,7 @@ The following properties are supported in `qlpack.yml` files. - Required by all packs. - Defines the scope of the pack, where the {% data variables.product.prodname_codeql %} pack is published, and the name of the pack defined using alphanumeric characters and hyphens. It must be unique as {% data variables.product.prodname_codeql %} cannot differentiate between {% data variables.product.prodname_codeql %} packs with identical names. Use the pack name to specify queries to run using `database analyze` and to define dependencies between {% data variables.product.prodname_codeql %} packs (see examples below). For example: + ```yaml name: octo-org/security-queries ``` @@ -85,6 +86,7 @@ The following properties are supported in `qlpack.yml` files. - Required by all packs that are published. 
- Defines a semantic version for this {% data variables.product.prodname_codeql %} pack that must adhere to the [SemVer v2.0.0 specification](https://semver.org/spec/v2.0.0.html). For example: + ```yaml version: 0.0.0 ``` @@ -93,6 +95,7 @@ The following properties are supported in `qlpack.yml` files. - Required by packs that define {% data variables.product.prodname_codeql %} package dependencies on other packs. - Defines a map from pack references to the semantic version range that is compatible with this pack. Supported for {% data variables.product.prodname_codeql_cli %} versions v2.6.0 and later. For example: + ```yaml dependencies: codeql/cpp-all: ^0.0.2 @@ -102,6 +105,7 @@ The following properties are supported in `qlpack.yml` files. - Required by packs that export a set of default queries to run. - Defines the path to a query suite file relative to the package root, containing all of the queries that are run by default when this pack is passed to the `codeql database analyze` command. Supported from CLI version v2.6.0 and onwards. Only one of `defaultSuiteFile` or `defaultSuite` can be defined. For example: + ```yaml defaultSuiteFile: cpp-code-scanning.qls ``` @@ -110,6 +114,7 @@ The following properties are supported in `qlpack.yml` files. - Required by packs that export a set of default queries to run. - Defines an inlined query suite containing all of the queries that are run by default when this pack is passed to the `codeql database analyze` command. Supported from CLI version v2.6.0 and onwards. Only one of `defaultSuiteFile` or `defaultSuite` can be defined. For example: + ```yaml defaultSuite: queries: . @@ -121,6 +126,7 @@ The following properties are supported in `qlpack.yml` files. - Required by library packs. - Defines a boolean value that indicates whether or not this pack is a library pack. Library packs do not contain queries and are not compiled. Query packs can ignore this field or explicitly set it to `false`. For example: + ```yaml library: true ``` @@ -129,6 +135,7 @@ The following properties are supported in `qlpack.yml` files. - Optional for packs that define query suites. - Defines the path to a directory in the pack that contains the query suites you want to make known to the {% data variables.product.prodname_codeql_cli %}, defined relative to the pack directory. {% data variables.product.prodname_codeql %} pack users can run "well-known" suites stored in this directory by specifying the pack name, without providing their full path. This is not supported for {% data variables.product.prodname_codeql %} packs downloaded from the Container registry. For more information about query suites, see "[Creating {% data variables.product.prodname_codeql %} query suites](/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-query-suites)." For example: + ```yaml suites: octo-org-query-suites ``` @@ -136,6 +143,7 @@ The following properties are supported in `qlpack.yml` files. #### `tests` - Optional for packs containing {% data variables.product.prodname_codeql %} tests. Ignored for packs without tests. - Defines the path to a directory within the pack that contains tests, defined relative to the pack directory. Use `.` to specify the whole pack. Any queries in this directory are run as tests when `test run` is run with the `--strict-test-discovery` option. These queries are ignored by query suite definitions that use `queries` or `qlpack` instructions to ask for all queries in a particular pack. If this property is missing, then `.` is assumed. 
For example: + ```yaml tests: . ``` @@ -143,6 +151,7 @@ The following properties are supported in `qlpack.yml` files. #### `extractor` - Required by all packs containing {% data variables.product.prodname_codeql %} tests. - Defines the {% data variables.product.prodname_codeql %} language extractor to use when running the {% data variables.product.prodname_codeql %} tests in the pack. For more information about testing queries, see "[Testing custom queries](/code-security/codeql-cli/using-the-codeql-cli/testing-custom-queries)." For example: + ```yaml extractor: javascript ``` @@ -150,6 +159,7 @@ The following properties are supported in `qlpack.yml` files. #### `authors` - Optional. - Defines metadata that will be displayed on the packaging search page in the packages section of the account that the {% data variables.product.prodname_codeql %} pack is published to. For example: + ```yaml authors: author1@github.com,author2@github.com ``` @@ -157,6 +167,7 @@ The following properties are supported in `qlpack.yml` files. #### `license` - Optional. - Defines metadata that will be displayed on the packaging search page in the packages section of the account that the {% data variables.product.prodname_codeql %} pack is published to. For a list of allowed licenses, see [SPDX License List](https://spdx.org/licenses/) in the SPDX Specification. For example: + ```yaml license: MIT ``` @@ -164,6 +175,7 @@ The following properties are supported in `qlpack.yml` files. #### `description` - Optional. - Defines metadata that will be displayed on the packaging search page in the packages section of the account that the {% data variables.product.prodname_codeql %} pack is published to. For example: + ```yaml description: Human-readable description of the contents of the {% data variables.product.prodname_codeql %} pack. ``` @@ -171,6 +183,7 @@ The following properties are supported in `qlpack.yml` files. #### `libraryPathDependencies` - Optional, deprecated. Use the `dependencies` property instead. - Previously used to define the names of any {% data variables.product.prodname_codeql %} packs that this {% data variables.product.prodname_codeql %} pack depends on, as an array. This gives the pack access to any libraries, database schema, and query suites defined in the dependency. For example: + ```yaml libraryPathDependencies: codeql/javascript-all ``` @@ -178,12 +191,15 @@ The following properties are supported in `qlpack.yml` files. #### `dbscheme` - Required by core language packs only. - Defines the path to the [database schema](https://codeql.github.com/docs/codeql-overview/codeql-glossary/#codeql-database-schema) for all libraries and queries written for this {% data variables.product.prodname_codeql %} language (see example below). For example: + ```yaml dbscheme: semmlecode.python.dbscheme ``` + #### `upgrades` - Required by core language packs only. - Defines the path to a directory within the pack that contains database upgrade scripts, defined relative to the pack directory. Database upgrades are used internally to ensure that a database created with a different version of the {% data variables.product.prodname_codeql_cli %} is compatible with the current version of the CLI. For example: + ```yaml upgrades: . ``` @@ -191,6 +207,7 @@ The following properties are supported in `qlpack.yml` files. #### `warnOnImplicitThis` - Optional. Defaults to `false` if the `warnOnImplicitThis` property is not defined. 
- Defines a boolean that specifies whether or not the compiler should emit warnings about member predicate calls with implicit `this` call receivers, that is, without an explicit receiver. Supported from {% data variables.product.prodname_codeql_cli %} version 2.13.2 and onwards. For example: + ```yaml warnOnImplicitThis: true ``` diff --git a/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-databases.md b/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-databases.md index eb378a743d..1d140e1913 100644 --- a/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-databases.md +++ b/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-databases.md @@ -240,6 +240,7 @@ The following examples are designed to give you an idea of some of the build com ``` codeql database create -l swift -c "./scripts/build.sh" swift-database ``` + {% endif %} - Project built using Bazel: diff --git a/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-query-suites.md b/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-query-suites.md index 70c3527641..a5af2ccf87 100644 --- a/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-query-suites.md +++ b/content/code-security/codeql-cli/using-the-codeql-cli/creating-codeql-query-suites.md @@ -51,6 +51,7 @@ files: ``` - query: ``` + The argument must be one or more file paths, relative to the {% data variables.product.prodname_codeql %} pack containing the suite definition. diff --git a/content/code-security/codeql-cli/using-the-codeql-cli/publishing-and-using-codeql-packs.md b/content/code-security/codeql-cli/using-the-codeql-cli/publishing-and-using-codeql-packs.md index cb6d4f6d03..cebcb867ab 100644 --- a/content/code-security/codeql-cli/using-the-codeql-cli/publishing-and-using-codeql-packs.md +++ b/content/code-security/codeql-cli/using-the-codeql-cli/publishing-and-using-codeql-packs.md @@ -79,6 +79,7 @@ To analyze a {% data variables.product.prodname_codeql %} database with a {% dat ``` codeql database analyze /@x.x.x: ``` + - ``: the {% data variables.product.prodname_codeql %} database to be analyzed. - ``: the name of the {% data variables.product.prodname_dotcom %} organization that the pack is published to. - ``: the name for the pack that you are using. @@ -110,14 +111,18 @@ It can also be assumed that a pack published by the _latest_ public release of { As an exception to the above, packs published with versions of {% data variables.product.prodname_codeql %} _earlier than 2.12.0_ are not compatible with any earlier or later versions. These old versions did not write pre-compiled queries in a format that supported compatibility between releases. Packs published by these versions can still be _used_ by newer versions, but the analysis will be slower because the queries have to be recompiled first. As a user of a published query pack, you can check that the {% data variables.product.prodname_codeql %} makes use of the precompiled queries in it by inspecting the terminal output from an analysis runs that uses the query pack. If it contains lines looking like the following, then the precompiled queries were used successfully: + ``` [42/108] Loaded /long/path/to/query/Filename.qlx. ``` + However, if they instead look like the following, then usage of the precompiled queries failed: + ``` Compiling query plan for /long/path/to/query/Filename.ql. [42/108 comp 25s] Compiled /long/path/to/query/Filename.ql. 
``` + The results of the analysis will still be good in this case, but to get optimal performance you may need to upgrade to a newer version of the {% data variables.product.prodname_codeql %} CLI and/or of the query pack. If you publish query packs on the {% data variables.product.prodname_container_registry %} on {% data variables.product.prodname_dotcom_the_website %} for others to use, we recommend that you use a recent release of {% data variables.product.prodname_codeql %} to run `codeql pack publish`, and that you publish a fresh version of your pack with an updated {% data variables.product.prodname_codeql %} version before the version you used turns 6 months old. That way you can ensure that users of your pack who keep _their_ {% data variables.product.prodname_codeql %} up to date will benefit from the pre-compiled queries in your pack. diff --git a/content/code-security/codeql-cli/using-the-codeql-cli/testing-custom-queries.md b/content/code-security/codeql-cli/using-the-codeql-cli/testing-custom-queries.md index 9caf2b8110..29ec7ad81a 100644 --- a/content/code-security/codeql-cli/using-the-codeql-cli/testing-custom-queries.md +++ b/content/code-security/codeql-cli/using-the-codeql-cli/testing-custom-queries.md @@ -158,6 +158,7 @@ which is declared as a dependency for `my-query-tests`. Therefore, `EmptyThen.ql 8. Create a code snippet to test. The following Java code contains an empty `if` statement on the third line. Save it in `custom-queries/java/tests/EmptyThen/Test.java`. ```java + class Test { public void problem(String arg) { if (arg.isEmpty()) @@ -173,6 +174,7 @@ class Test { } } } + ``` ### Execute the test @@ -211,7 +213,9 @@ query. In this case, the failure was expected and is easy to fix. If you open the `EmptyThen.actual` file, you can see the results of the test: ``` + | Test.java:3:5:3:22 | stmt | This if statement has an empty then. | + ``` This file contains a table, with a column for the location of the result, diff --git a/content/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file.md b/content/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file.md index 496cf59110..fd7adc1a26 100644 --- a/content/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file.md +++ b/content/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file.md @@ -81,6 +81,7 @@ In general, security updates use any configuration options that affect pull requ {% data reusables.dependabot.supported-package-managers %} #### Example of a basic setup for three package managers + ```yaml # Basic set up for three package managers @@ -320,6 +321,7 @@ updates: prefix-development: "pip dev" include: "scope" ``` + If you use the same configuration as in the example above, bumping the `requests` library in the `pip` development dependency group will generate a commit message of: `pip dev: bump requests from 1.0.0 to 1.0.1` @@ -390,6 +392,7 @@ updates: Package managers with the `package-ecosystem` values `bundler`, `mix`, and `pip` may execute external code in the manifest as part of the version update process. This might allow a compromised package to steal credentials or gain access to configured registries. 
When you add a [`registries`](#registries) setting within an `updates` configuration, {% data variables.product.prodname_dependabot %} automatically prevents external code execution, in which case the version update may fail. You can choose to override this behavior and allow external code execution for `bundler`, `mix`, and `pip` package managers by setting `insecure-external-code-execution` to `allow`. {% raw %} + ```yaml # Allow external code execution when updating dependencies from private registries @@ -407,6 +410,7 @@ updates: schedule: interval: "monthly" ``` + {% endraw %} If you define a `registries` setting to allow {% data variables.product.prodname_dependabot %} to access a private package registry, and you set `insecure-external-code-execution` to `allow` in the same `updates` configuration, external code execution that occurs will only have access to the package managers in the registries associated with that `updates`setting. There is no access allowed to any of the registries defined in the top level `registries` configuration. @@ -414,6 +418,7 @@ If you define a `registries` setting to allow {% data variables.product.prodname In this example, the configuration file allows {% data variables.product.prodname_dependabot %} to access the `ruby-github` private package registry. In the same `updates`setting, `insecure-external-code-execution`is set to `allow`, which means that the code executed by dependencies will only access the `ruby-github` registry, and not the `dockerhub` registry. {% raw %} + ```yaml # Using `registries` in conjunction with `insecure-external-code-execution:allow` # in the same `updates` setting @@ -439,6 +444,7 @@ updates: interval: "monthly" ``` + {% endraw %} You can explicitly deny external code execution, regardless of whether there is a `registries` setting for this update configuration, by setting `insecure-external-code-execution` to `deny`. @@ -845,6 +851,7 @@ The top-level `registries` key is optional. It allows you to specify authenticat The value of the `registries` key is an associative array, each element of which consists of a key that identifies a particular registry and a value which is an associative array that specifies the settings required to access that registry. The following _dependabot.yml_ file configures a registry identified as `dockerhub` in the `registries` section of the file and then references this in the `updates` section of the file. {% raw %} + ```yaml # Minimal settings to update dependencies in one private registry @@ -863,6 +870,7 @@ updates: schedule: interval: "monthly" ``` + {% endraw %} {% data reusables.dependabot.dependabot-updates-registries-options %} @@ -874,6 +882,7 @@ You must provide the required settings for each configuration `type` that you sp The `composer-repository` type supports username and password. {% raw %} + ```yaml registries: composer: @@ -882,6 +891,7 @@ registries: username: octocat password: ${{secrets.MY_PACKAGIST_PASSWORD}} ``` + {% endraw %} ### `docker-registry` @@ -891,6 +901,7 @@ registries: The `docker-registry` type supports username and password. 
{% ifversion dependabot-private-registries %} {% raw %} + ```yaml registries: dockerhub: @@ -900,9 +911,11 @@ registries: password: ${{secrets.MY_DOCKERHUB_PASSWORD}} replaces-base: true ``` + {% endraw %} {% else %} {% raw %} + ```yaml registries: dockerhub: @@ -911,12 +924,14 @@ registries: username: octocat password: ${{secrets.MY_DOCKERHUB_PASSWORD}} ``` + {% endraw %} {% endif %} The `docker-registry` type can also be used to pull from private Amazon ECR using static AWS credentials. {% ifversion dependabot-private-registries %} {% raw %} + ```yaml registries: ecr-docker: @@ -926,9 +941,11 @@ registries: password: ${{secrets.ECR_AWS_SECRET_ACCESS_KEY}} replaces-base: true ``` + {% endraw %} {% else %} {% raw %} + ```yaml registries: ecr-docker: @@ -937,6 +954,7 @@ registries: username: ${{secrets.ECR_AWS_ACCESS_KEY_ID}} password: ${{secrets.ECR_AWS_SECRET_ACCESS_KEY}} ``` + {% endraw %} {% endif %} @@ -945,6 +963,7 @@ registries: The `git` type supports username and password. {% raw %} + ```yaml registries: github-octocat: @@ -953,6 +972,7 @@ registries: username: x-access-token password: ${{secrets.MY_GITHUB_PERSONAL_TOKEN}} ``` + {% endraw %} ### `hex-organization` @@ -960,6 +980,7 @@ registries: The `hex-organization` type supports organization and key. {% raw %} + ```yaml registries: github-hex-org: @@ -967,6 +988,7 @@ registries: organization: github key: ${{secrets.MY_HEX_ORGANIZATION_KEY}} ``` + {% endraw %} {% ifversion dependabot-hex-self-hosted-support %} @@ -979,6 +1001,7 @@ The `hex-repository` type supports an authentication key. The `public-key-fingerprint` is an optional configuration field, representing the fingerprint of the public key for the Hex repository. `public-key-fingerprint` is used by Hex to establish trust with the private repository. The `public-key-fingerprint` field can be either listed in plaintext or stored as a {% data variables.product.prodname_dependabot %} secret. {% raw %} + ```yaml registries: github-hex-repository: @@ -988,6 +1011,7 @@ registries: auth-key: ${{secrets.MY_AUTH_KEY}} public-key-fingerprint: ${{secrets.MY_PUBLIC_KEY_FINGERPRINT}} ``` + {% endraw %}{% endif %} ### `maven-repository` @@ -995,6 +1019,7 @@ registries: The `maven-repository` type supports username and password. 
{% ifversion dependabot-private-registries %} {% raw %} + ```yaml registries: maven-artifactory: @@ -1004,9 +1029,11 @@ registries: password: ${{secrets.MY_ARTIFACTORY_PASSWORD}} replaces-base: true ``` + {% endraw %} {% else %} {% raw %} + ```yaml registries: maven-artifactory: @@ -1015,6 +1042,7 @@ registries: username: octocat password: ${{secrets.MY_ARTIFACTORY_PASSWORD}} ``` + {% endraw %}{% endif %} ### `npm-registry` @@ -1031,6 +1059,7 @@ When using username and password, your `.npmrc`'s auth token may contain a `base {% ifversion dependabot-private-registries %} {% raw %} + ```yaml registries: npm-npmjs: @@ -1040,9 +1069,11 @@ registries: password: ${{secrets.MY_NPM_PASSWORD}} # Must be an unencoded password replaces-base: true ``` + {% endraw %} {% raw %} + ```yaml registries: npm-github: @@ -1051,9 +1082,11 @@ registries: token: ${{secrets.MY_GITHUB_PERSONAL_TOKEN}} replaces-base: true ``` + {% endraw %} {% else %} {% raw %} + ```yaml registries: npm-npmjs: @@ -1062,9 +1095,11 @@ registries: username: octocat password: ${{secrets.MY_NPM_PASSWORD}} # Must be an unencoded password ``` + {% endraw %} {% raw %} + ```yaml registries: npm-github: @@ -1072,6 +1107,7 @@ registries: url: https://npm.pkg.github.com token: ${{secrets.MY_GITHUB_PERSONAL_TOKEN}} ``` + {% endraw %} {% endif %} {% ifversion dependabot-yarn-v3-update %} For security reasons, {% data variables.product.prodname_dependabot %} does not set environment variables. Yarn (v2 and later) requires that any accessed environment variables are set. When accessing environment variables in your `.yarnrc.yml` file, you should provide a fallback value such as {% raw %}`${ENV_VAR-fallback}`{% endraw %} or {% raw %}`${ENV_VAR:-fallback}`{% endraw %}. For more information, see [Yarnrc files](https://yarnpkg.com/configuration/yarnrc) in the Yarn documentation.{% endif %} @@ -1081,6 +1117,7 @@ For security reasons, {% data variables.product.prodname_dependabot %} does not The `nuget-feed` type supports username and password, or token. {% raw %} + ```yaml registries: nuget-example: @@ -1089,9 +1126,11 @@ registries: username: octocat@example.com password: ${{secrets.MY_NUGET_PASSWORD}} ``` + {% endraw %} {% raw %} + ```yaml registries: nuget-azure-devops: @@ -1100,6 +1139,7 @@ registries: username: octocat@example.com password: ${{secrets.MY_AZURE_DEVOPS_TOKEN}} ``` + {% endraw %} ### `python-index` @@ -1107,6 +1147,7 @@ registries: The `python-index` type supports username and password, or token. {% raw %} + ```yaml registries: python-example: @@ -1116,9 +1157,11 @@ registries: password: ${{secrets.MY_BASIC_AUTH_PASSWORD}} replaces-base: true ``` + {% endraw %} {% raw %} + ```yaml registries: python-azure: @@ -1128,6 +1171,7 @@ registries: password: ${{secrets.MY_AZURE_DEVOPS_TOKEN}} replaces-base: true ``` + {% endraw %} ### `rubygems-server` @@ -1136,6 +1180,7 @@ The `rubygems-server` type supports username and password, or token. 
{% ifversion dependabot-private-registries %} {% raw %} + ```yaml registries: ruby-example: @@ -1145,9 +1190,11 @@ registries: password: ${{secrets.MY_RUBYGEMS_PASSWORD}} replaces-base: true ``` + {% endraw %} {% raw %} + ```yaml registries: ruby-github: @@ -1156,9 +1203,11 @@ registries: token: ${{secrets.MY_GITHUB_PERSONAL_TOKEN}} replaces-base: true ``` + {% endraw %} {% else %} {% raw %} + ```yaml registries: ruby-example: @@ -1167,9 +1216,11 @@ registries: username: octocat@example.com password: ${{secrets.MY_RUBYGEMS_PASSWORD}} ``` + {% endraw %} {% raw %} + ```yaml registries: ruby-github: @@ -1177,6 +1228,7 @@ registries: url: https://rubygems.pkg.github.com/octocat/github_api token: ${{secrets.MY_GITHUB_PERSONAL_TOKEN}} ``` + {% endraw %}{% endif %} ### `terraform-registry` @@ -1184,6 +1236,7 @@ registries: The `terraform-registry` type supports a token. {% raw %} + ```yaml registries: terraform-example: @@ -1191,6 +1244,7 @@ registries: url: https://terraform.example.com token: ${{secrets.MY_TERRAFORM_API_TOKEN}} ``` + {% endraw %} {% ifversion fpt or ghec or ghes > 3.4 %} @@ -1215,4 +1269,5 @@ updates:{% ifversion fpt or ghec or ghes > 3.5 %} schedule: interval: "weekly" ``` + {% endif %} diff --git a/content/code-security/dependabot/working-with-dependabot/configuring-access-to-private-registries-for-dependabot.md b/content/code-security/dependabot/working-with-dependabot/configuring-access-to-private-registries-for-dependabot.md index 563e0cc6bb..73d3486421 100644 --- a/content/code-security/dependabot/working-with-dependabot/configuring-access-to-private-registries-for-dependabot.md +++ b/content/code-security/dependabot/working-with-dependabot/configuring-access-to-private-registries-for-dependabot.md @@ -55,9 +55,11 @@ When you add a secret at the organization level, you can specify which repositor After you add a {% data variables.product.prodname_dependabot %} secret, you can reference it in the _dependabot.yml_ configuration file like this: {% raw %}`${{secrets.NAME}}`{% endraw %}, where "NAME" is the name you chose for the secret. For example: {% raw %} + ```yaml password: ${{secrets.MY_ARTIFACTORY_PASSWORD}} ``` + {% endraw %} For more information, see "[AUTOTITLE](/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#configuration-options-for-private-registries)." diff --git a/content/code-security/dependabot/working-with-dependabot/removing-dependabot-access-to-public-registries.md b/content/code-security/dependabot/working-with-dependabot/removing-dependabot-access-to-public-registries.md index fb57939f06..c036726cd8 100644 --- a/content/code-security/dependabot/working-with-dependabot/removing-dependabot-access-to-public-registries.md +++ b/content/code-security/dependabot/working-with-dependabot/removing-dependabot-access-to-public-registries.md @@ -110,6 +110,7 @@ Set `replaces-base` as `true` in the `dependabot.yml` file. For more information **Option 2** Use only the private registry URL in the `pom.xml` file. + ```xml ... @@ -189,6 +190,7 @@ encoding@^0.1.11: If the `yarn.lock` file doesn't list the private registry as the dependency source, you can set up Yarn Classic according to the normal package manager instructions: 1. Define the private registry configuration in a `dependabot.yml` file 2. Add the registry to a `.yarnrc` file in the project root with the key registry. Alternatively run `yarn config set registry `. 
+ ```yaml registry https://private_registry_url ``` @@ -220,6 +222,7 @@ Define the private registry configuration in a `dependabot.yml` file. For more i To ensure the private registry is listed as the dependency source in the project's `yarn.lock` file, run `yarn install` on a machine with private registry access. Yarn should update the `resolved` field to include the private registry URL. {% raw %} + ```yaml encoding@^0.1.11: version "0.1.13" @@ -228,6 +231,7 @@ encoding@^0.1.11: dependencies: iconv-lite "^0.6.2" ``` + {% endraw %} **Option 2** diff --git a/content/code-security/dependabot/working-with-dependabot/troubleshooting-dependabot-errors.md b/content/code-security/dependabot/working-with-dependabot/troubleshooting-dependabot-errors.md index f41ce013a5..1a5a5f35a0 100644 --- a/content/code-security/dependabot/working-with-dependabot/troubleshooting-dependabot-errors.md +++ b/content/code-security/dependabot/working-with-dependabot/troubleshooting-dependabot-errors.md @@ -91,6 +91,7 @@ The best way to avoid this problem is to stay up to date with the most recently **Security updates only.** {% data variables.product.prodname_dependabot %} updates explicitly defined transitive dependencies that are vulnerable for all ecosystems. For npm, {% data variables.product.prodname_dependabot %} will raise a pull request that also updates the parent dependency if it's the only way to fix the transitive dependency. For example, a project with a dependency on `A` version `~2.0.0` which has a transitive dependency on `B` version `~1.0.0` which has resolved to `1.0.1`. + ``` my project | @@ -98,6 +99,7 @@ my project | --> B (1.0.1) [~1.0.0] ``` + If a security vulnerability is released for `B` versions `<2.0.0` and a patch is available at `2.0.0` then {% data variables.product.prodname_dependabot %} will attempt to update `B` but will find that it's not possible due to the restriction in place by `A` which only allows lower vulnerable versions. To fix the vulnerability, {% data variables.product.prodname_dependabot %} will look for updates to dependency `A` which allow the fixed version of `B` to be used. {% data variables.product.prodname_dependabot %} automatically generates a pull request that upgrades both the locked parent and child transitive dependencies.{% endif %} diff --git a/content/code-security/secret-scanning/secret-scanning-partner-program.md b/content/code-security/secret-scanning/secret-scanning-partner-program.md index 9f2412657f..f25b98e9ea 100644 --- a/content/code-security/secret-scanning/secret-scanning-partner-program.md +++ b/content/code-security/secret-scanning/secret-scanning-partner-program.md @@ -151,6 +151,7 @@ The following code snippets demonstrate how you could perform signature validati The code examples assume you've set an environment variable called `GITHUB_PRODUCTION_TOKEN` with a generated [{% data variables.product.pat_generic %}](https://github.com/settings/tokens) to avoid hitting rate limits. The {% data variables.product.pat_generic %} does not need any scopes/permissions. 
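Before running any of the validation samples below, the `GITHUB_PRODUCTION_TOKEN` environment variable mentioned above has to exist in the shell session. A minimal sketch with a placeholder value (substitute a personal access token you generate yourself):

```shell
# Placeholder only: replace with a personal access token generated at
# https://github.com/settings/tokens (no scopes or permissions are required)
export GITHUB_PRODUCTION_TOKEN="YOUR-TOKEN"
```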
**Validation sample in Go** + ```golang package main @@ -281,6 +282,7 @@ type asn1Signature struct { ``` **Validation sample in Ruby** + ```ruby require 'openssl' require 'net/http' @@ -321,6 +323,7 @@ puts openssl_key.verify(OpenSSL::Digest::SHA256.new, Base64.decode64(signature), ``` **Validation sample in JavaScript** + ```js const crypto = require("crypto"); const axios = require("axios"); @@ -383,6 +386,7 @@ You can send us the raw token: } ] ``` + You may also provide the token in hashed form after performing a one way cryptographic hash of the raw token using SHA-256: ``` @@ -394,6 +398,7 @@ You may also provide the token in hashed form after performing a one way cryptog } ] ``` + A few important points: - You should only send us either the raw form of the token ("token_raw"), or the hashed form ("token_hash"), but not both. - For the hashed form of the raw token, you can only use SHA-256 to hash the token, not any other hashing algorithm. diff --git a/content/code-security/security-advisories/global-security-advisories/about-the-github-advisory-database.md b/content/code-security/security-advisories/global-security-advisories/about-the-github-advisory-database.md index af4cbe64ca..4dc9dc8c91 100644 --- a/content/code-security/security-advisories/global-security-advisories/about-the-github-advisory-database.md +++ b/content/code-security/security-advisories/global-security-advisories/about-the-github-advisory-database.md @@ -82,6 +82,7 @@ The syntax of GHSA IDs follows this format: `GHSA-xxxx-xxxx-xxxx` where: - All letters are lowercase. You can validate a GHSA ID using a regular expression. + ```bash copy /GHSA(-[23456789cfghjmpqrvwx]{4}){3}/ ``` diff --git a/content/code-security/supply-chain-security/understanding-your-software-supply-chain/configuring-dependency-review.md b/content/code-security/supply-chain-security/understanding-your-software-supply-chain/configuring-dependency-review.md index ee9f7ae68e..8ab4233306 100644 --- a/content/code-security/supply-chain-security/understanding-your-software-supply-chain/configuring-dependency-review.md +++ b/content/code-security/supply-chain-security/understanding-your-software-supply-chain/configuring-dependency-review.md @@ -89,6 +89,7 @@ Notice that all of the examples use a short version number for the action (`v3`) 1. Add a new YAML workflow to your `.github/workflows` folder. {% ifversion ghes %}For `runs-on`, the default label is `self-hosted`. You can replace the default label with the label of any of your runners.{% endif %} + ```yaml copy name: 'Dependency Review' on: [pull_request] @@ -106,9 +107,11 @@ Notice that all of the examples use a short version number for the action (`v3`) - name: Dependency Review uses: actions/dependency-review-action@v3 ``` + 1. Specify your settings. This {% data variables.dependency-review.action_name %} example file illustrates how you can use the available configuration options. + ```yaml copy name: 'Dependency Review' on: [pull_request] @@ -146,6 +149,7 @@ Notice that all of the examples use a short version number for the action (`v3`) # Possible values: "development", "runtime", "unknown" fail-on-scopes: development, runtime {% endif %} + ``` ### Using a configuration file to set up {% data variables.dependency-review.action_name %} @@ -183,6 +187,7 @@ Notice that all of the examples use a short version number for the action (`v3`) 1. Create the configuration file in the path you have specified. This YAML example file illustrates how you can use the available configuration options. 
+ ```yaml copy # Possible values: "critical", "high", "moderate", "low" fail-on-severity: critical @@ -213,5 +218,6 @@ Notice that all of the examples use a short version number for the action (`v3`) - runtime {% endif %} ``` + For further details about the configuration options, see [`dependency-review-action`](https://github.com/actions/dependency-review-action#readme). {% endif %} diff --git a/content/codespaces/customizing-your-codespace/changing-the-machine-type-for-your-codespace.md b/content/codespaces/customizing-your-codespace/changing-the-machine-type-for-your-codespace.md index e1a7e33fbd..909f2746e7 100644 --- a/content/codespaces/customizing-your-codespace/changing-the-machine-type-for-your-codespace.md +++ b/content/codespaces/customizing-your-codespace/changing-the-machine-type-for-your-codespace.md @@ -64,6 +64,7 @@ You can use the `gh codespace edit --machine MACHINE-TYPE-NAME` {% data variable ``` gh codespace list ``` + 1. Optionally, to find the current machine type for a codespace, enter the following command. ``` diff --git a/content/codespaces/customizing-your-codespace/changing-the-shell-in-a-codespace.md b/content/codespaces/customizing-your-codespace/changing-the-shell-in-a-codespace.md index 8834f2522a..f7ff6b27b7 100644 --- a/content/codespaces/customizing-your-codespace/changing-the-shell-in-a-codespace.md +++ b/content/codespaces/customizing-your-codespace/changing-the-shell-in-a-codespace.md @@ -90,6 +90,7 @@ You can set a default terminal profile to choose the default shell used for all "terminal.integrated.defaultProfile.windows": "PowerShell" } ``` + 1. Save the `settings.json` file. {% data reusables.codespaces.settings-sync-link %} diff --git a/content/codespaces/setting-up-your-project-for-codespaces/configuring-dev-containers/adding-features-to-a-devcontainer-file.md b/content/codespaces/setting-up-your-project-for-codespaces/configuring-dev-containers/adding-features-to-a-devcontainer-file.md index 806244123f..99c935accf 100644 --- a/content/codespaces/setting-up-your-project-for-codespaces/configuring-dev-containers/adding-features-to-a-devcontainer-file.md +++ b/content/codespaces/setting-up-your-project-for-codespaces/configuring-dev-containers/adding-features-to-a-devcontainer-file.md @@ -38,6 +38,7 @@ redirect_from: ... } ``` + 1. By default, the latest version of the feature will be used. To choose a different version, or configure other options for the feature, expand the properties listed under "Options" to view the available values, then add the options by manually editing the object in your `devcontainer.json` file. ![Screenshot of the "Options" section of the "Marketplace" tab, with the "version" and "tflint" properties expanded.](/assets/images/help/codespaces/feature-options.png) @@ -52,6 +53,7 @@ redirect_from: ... } ``` + 1. Commit the changes to your `devcontainer.json` file. The configuration changes will take effect in new codespaces created from the repository. To make the changes take effect in existing codespaces, you will need to pull the updates to the `devcontainer.json` file into your codespace, then rebuild the container for the codespace. For more information, see "[AUTOTITLE](/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers#applying-configuration-changes-to-a-codespace)." 
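After committing a configuration change such as the feature addition above, an existing codespace needs the updated `devcontainer.json` pulled in and its container rebuilt. A minimal sketch of that flow from a terminal, assuming a recent GitHub CLI that provides the `gh codespace rebuild` subcommand and using a placeholder codespace name:

```shell
# Inside the codespace, pull the updated dev container configuration
git pull

# Trigger a rebuild of the container so the new feature takes effect;
# CODESPACE-NAME is a placeholder for a name shown by `gh codespace list`
gh codespace rebuild --codespace CODESPACE-NAME
```

Rebuilding from the {% data variables.product.prodname_vscode_shortname %} Command Palette, as described in the linked dev containers introduction, achieves the same result.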
diff --git a/content/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/setting-up-a-template-repository-for-github-codespaces.md b/content/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/setting-up-a-template-repository-for-github-codespaces.md index ded3384bd8..77ef5f0641 100644 --- a/content/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/setting-up-a-template-repository-for-github-codespaces.md +++ b/content/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/setting-up-a-template-repository-for-github-codespaces.md @@ -80,4 +80,5 @@ The following configuration settings for a React template will open the `app.js` } } ``` + For more information, see "[AUTOTITLE](/codespaces/setting-up-your-project-for-codespaces/configuring-dev-containers/automatically-opening-files-in-the-codespaces-for-a-repository)" and the [dev containers specification](https://containers.dev/implementors/json_reference/#general-properties) on the Development Containers website. diff --git a/content/codespaces/troubleshooting/troubleshooting-creation-and-deletion-of-codespaces.md b/content/codespaces/troubleshooting/troubleshooting-creation-and-deletion-of-codespaces.md index 29964558a6..331422d7c8 100644 --- a/content/codespaces/troubleshooting/troubleshooting-creation-and-deletion-of-codespaces.md +++ b/content/codespaces/troubleshooting/troubleshooting-creation-and-deletion-of-codespaces.md @@ -102,6 +102,7 @@ Some more destructive options: ``` This codespace is currently running in recovery mode due to a container error. ``` + Review the creation logs and update the dev container configuration as needed. For more information, see "[AUTOTITLE](/codespaces/troubleshooting/github-codespaces-logs)." You can then try restarting the codespace, or rebuilding the container. For more information on rebuilding the container, see "[AUTOTITLE](/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers#applying-configuration-changes-to-a-codespace)." diff --git a/content/codespaces/troubleshooting/troubleshooting-gpg-verification-for-github-codespaces.md b/content/codespaces/troubleshooting/troubleshooting-gpg-verification-for-github-codespaces.md index 19ee6482f0..57272eb5b6 100644 --- a/content/codespaces/troubleshooting/troubleshooting-gpg-verification-for-github-codespaces.md +++ b/content/codespaces/troubleshooting/troubleshooting-gpg-verification-for-github-codespaces.md @@ -90,6 +90,7 @@ For example, if the global `.gitconfig` file on your local machine contains a `g ```Shell git config --global --unset gpg.program ``` + 3. Push the change to your dotfiles repository on {% data variables.product.prodname_dotcom %}. 4. Optionally, to keep your local configuration, set the value again in a Git configuration file that you do not push to your dotfiles repository. diff --git a/content/codespaces/troubleshooting/troubleshooting-included-usage.md b/content/codespaces/troubleshooting/troubleshooting-included-usage.md index 93d2af053d..802ddcde92 100644 --- a/content/codespaces/troubleshooting/troubleshooting-included-usage.md +++ b/content/codespaces/troubleshooting/troubleshooting-included-usage.md @@ -88,9 +88,11 @@ If the dev container for the current codespace was built from the default image, {% endnote %} - Configure your retention period to ensure codespaces you forget to delete are deleted automatically. The default retention period is 30 days. 
For more information, see "[AUTOTITLE](/codespaces/customizing-your-codespace/configuring-automatic-deletion-of-your-codespaces)." - {% data variables.product.prodname_vscode %} extensions consume storage. Make sure you are only installing extensions that you need. You can find out how much space is being used by extensions by running this command in your codespace. + ```shell copy du -h -s ~/.vscode-remote/extensions ``` + - Monitor your compute and storage usage by going to your billing page on {% data variables.product.prodname_dotcom_the_website %}, https://github.com/settings/billing, and reviewing the figures in the "{% data variables.product.prodname_codespaces %}" section. {% note %} diff --git a/content/communities/using-templates-to-encourage-useful-issues-and-pull-requests/manually-creating-a-single-issue-template-for-your-repository.md b/content/communities/using-templates-to-encourage-useful-issues-and-pull-requests/manually-creating-a-single-issue-template-for-your-repository.md index b9b0175882..389e3919ec 100644 --- a/content/communities/using-templates-to-encourage-useful-issues-and-pull-requests/manually-creating-a-single-issue-template-for-your-repository.md +++ b/content/communities/using-templates-to-encourage-useful-issues-and-pull-requests/manually-creating-a-single-issue-template-for-your-repository.md @@ -32,6 +32,7 @@ labels: tracking issue, needs triage assignees: octocat --- ``` + {% note %} **Note:** If a front matter value includes a YAML-reserved character such as `:` , you must put the whole value in quotes. For example, `":bug: Bug"` or `":new: triage needed, :bug: bug"`. diff --git a/content/copilot/getting-started-with-github-copilot.md b/content/copilot/getting-started-with-github-copilot.md index 791ec2ffc2..b06107f7c6 100644 --- a/content/copilot/getting-started-with-github-copilot.md +++ b/content/copilot/getting-started-with-github-copilot.md @@ -122,6 +122,7 @@ To use {% data variables.product.prodname_copilot %} in a JetBrains IDE, you mus {% data reusables.copilot.create-java-file %} 1. To prompt {% data variables.product.prodname_copilot %} to suggest an implementation of a function in the Java file, type the following lines. + ```java copy // find all images without alternate text // and give them a red border @@ -195,6 +196,7 @@ To use {% data variables.product.prodname_copilot %}, you must first install the {% data reusables.copilot.create-c-file %} 1. In the C# file, type the following function signature. {% data variables.product.prodname_copilot %} will automatically suggest an entire function body in grayed text, as shown below. The exact suggestion may vary. + ```csharp copy int CalculateDaysBetweenDates( ``` @@ -209,6 +211,7 @@ To use {% data variables.product.prodname_copilot %}, you must first install the ```csharp copy int CalculateDaysBetweenDates( ``` + 1. If alternative suggestions are available, you can see these alternatives by pressing Alt+. (or Alt+,). 1. Optionally, you can hover over the suggestion to see the {% data variables.product.prodname_copilot %} command palette for choosing suggestions. {% data reusables.copilot.accept-or-reject-suggestion %} @@ -219,6 +222,7 @@ To use {% data variables.product.prodname_copilot %}, you must first install the {% data reusables.copilot.create-c-file %} 1. In the C# file, type the following comment. {% data variables.product.prodname_copilot %} will suggest an implementation of the function. 
+ ```csharp copy using System.Xml.Linq; @@ -226,6 +230,7 @@ To use {% data variables.product.prodname_copilot %}, you must first install the // find all images ``` + {% data reusables.copilot.accept-suggestion %} {% data reusables.copilot.enabling-or-disabling-vs %} @@ -272,6 +277,7 @@ To use {% data variables.product.prodname_copilot %}, you must first install the {% data reusables.copilot.create-js-file %} 1. In the JavaScript file, type the following function header. {% data variables.product.prodname_copilot %} will automatically suggest an entire function body in grayed text, as shown below. The exact suggestion may vary. + ```javascript copy function calculateDaysBetweenDates(begin, end) { ``` @@ -284,9 +290,11 @@ To use {% data variables.product.prodname_copilot %}, you must first install the {% data reusables.copilot.create-js-file %} 1. In the JavaScript file, type the following function header. {% data variables.product.prodname_copilot %} will show you a suggestion. + ```javascript copy function calculateDaysBetweenDates(begin, end) { ``` + {% data reusables.copilot.see-alternative-suggestions %} | OS | See next suggestion | See previous suggestion | @@ -303,9 +311,11 @@ To use {% data variables.product.prodname_copilot %}, you must first install the {% data reusables.copilot.create-js-file %} 1. In the JavaScript file, type the following function header. {% data variables.product.prodname_copilot %} will show you a suggestion. + ```javascript copy function calculateDaysBetweenDates(begin, end) { ``` + 1. To open a new tab with multiple additional options, press Ctrl+Enter. 1. To accept a suggestion, above the suggestion, click **Accept Solution**. To reject all suggestions, close the tab. @@ -315,6 +325,7 @@ To use {% data variables.product.prodname_copilot %}, you must first install the {% data reusables.copilot.create-js-file %} 1. In the JavaScript file, type the following comment. {% data variables.product.prodname_copilot %} will suggest an implementation of the function. + ```javascript copy // find all images without alternate text // and give them a red border @@ -327,13 +338,16 @@ You can also use {% data variables.product.prodname_copilot %} to generate sugge {% data reusables.copilot.create-js-file %} 1. In the JavaScript file, type the following comment and then press Enter. {% data variables.product.prodname_copilot %} will suggest an implementation of the Express app. + ```javascript copy // Express server on port 3000 1. To accept each line, press Tab, then Enter. 1. Type the following comment and then press Enter. {% data variables.product.prodname_copilot %} will suggest an implementation for the default handler. + ```javascript copy // Return the current time ``` + 1. To accept each line, press Tab. {% data reusables.copilot.enabling-or-disabling-in-vsc %} diff --git a/content/get-started/getting-started-with-git/associating-text-editors-with-git.md b/content/get-started/getting-started-with-git/associating-text-editors-with-git.md index 32cd7e3e7c..faf7ccf7cd 100644 --- a/content/get-started/getting-started-with-git/associating-text-editors-with-git.md +++ b/content/get-started/getting-started-with-git/associating-text-editors-with-git.md @@ -24,6 +24,7 @@ shortTitle: Associate text editors 1. Install [{% data variables.product.prodname_vscode %}](https://code.visualstudio.com/) ({% data variables.product.prodname_vscode_shortname %}). 
For more information, see "[Setting up {% data variables.product.prodname_vscode_shortname %}](https://code.visualstudio.com/Docs/setup/setup-overview)" in the {% data variables.product.prodname_vscode_shortname %} documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "code --wait" ``` @@ -35,6 +36,7 @@ shortTitle: Associate text editors 1. Install [{% data variables.product.prodname_vscode %}](https://code.visualstudio.com/) ({% data variables.product.prodname_vscode_shortname %}). For more information, see "[Setting up {% data variables.product.prodname_vscode_shortname %}](https://code.visualstudio.com/Docs/setup/setup-overview)" in the {% data variables.product.prodname_vscode_shortname %} documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "code --wait" ``` @@ -46,6 +48,7 @@ shortTitle: Associate text editors 1. Install [{% data variables.product.prodname_vscode %}](https://code.visualstudio.com/) ({% data variables.product.prodname_vscode_shortname %}). For more information, see "[Setting up {% data variables.product.prodname_vscode_shortname %}](https://code.visualstudio.com/Docs/setup/setup-overview)" in the {% data variables.product.prodname_vscode_shortname %} documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "code --wait" ``` @@ -59,6 +62,7 @@ shortTitle: Associate text editors 1. Install [Sublime Text](https://www.sublimetext.com/). For more information, see "[Installation](https://docs.sublimetext.io/guide/getting-started/installation.html)" in the Sublime Text documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "subl -n -w" ``` @@ -70,6 +74,7 @@ shortTitle: Associate text editors 1. Install [Sublime Text](https://www.sublimetext.com/). For more information, see "[Installation](https://docs.sublimetext.io/guide/getting-started/installation.html)" in the Sublime Text documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "'C:/Program Files (x86)/sublime text 3/subl.exe' -w" ``` @@ -81,6 +86,7 @@ shortTitle: Associate text editors 1. Install [Sublime Text](https://www.sublimetext.com/). For more information, see "[Installation](https://docs.sublimetext.io/guide/getting-started/installation.html)" in the Sublime Text documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "subl -n -w" ``` @@ -95,6 +101,7 @@ shortTitle: Associate text editors 1. Install TextMate's `mate` shell utility. For more information, see "[`mate` and `rmate`](https://macromates.com/blog/2011/mate-and-rmate/)" in the TextMate documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. Type this command: + ```shell git config --global core.editor "mate -w" ``` @@ -108,6 +115,7 @@ shortTitle: Associate text editors 1. Install Notepad++ from https://notepad-plus-plus.org/. For more information, see "[Getting started](https://npp-user-manual.org/docs/getting-started/)" in the Notepad++ documentation. {% data reusables.command_line.open_the_multi_os_terminal %} 1. 
Type this command: + ```shell git config --global core.editor "'C:/Program Files (x86)/Notepad++/notepad++.exe' -multiInst -notabbar -nosession -noPlugin" ``` diff --git a/content/get-started/getting-started-with-git/caching-your-github-credentials-in-git.md b/content/get-started/getting-started-with-git/caching-your-github-credentials-in-git.md index e0d100c7a4..cb3546479c 100644 --- a/content/get-started/getting-started-with-git/caching-your-github-credentials-in-git.md +++ b/content/get-started/getting-started-with-git/caching-your-github-credentials-in-git.md @@ -40,15 +40,18 @@ For more information about authenticating with {% data variables.product.prodnam {% mac %} 1. Install Git using [Homebrew](https://brew.sh/): + ```shell brew install git ``` 2. Install GCM using Homebrew: + ```shell brew tap microsoft/git brew install --cask git-credential-manager-core ``` + For MacOS, you don't need to run `git config` because GCM automatically configures Git for you. {% data reusables.gcm-core.next-time-you-clone %} diff --git a/content/get-started/getting-started-with-git/configuring-git-to-handle-line-endings.md b/content/get-started/getting-started-with-git/configuring-git-to-handle-line-endings.md index ed20f37594..e422be68a3 100644 --- a/content/get-started/getting-started-with-git/configuring-git-to-handle-line-endings.md +++ b/content/get-started/getting-started-with-git/configuring-git-to-handle-line-endings.md @@ -116,17 +116,20 @@ To ensure that all the line endings in your repository match your new configurat git add . -u git commit -m "Saving files before refreshing line endings" ``` + 1. To update all files on the current branch to reflect the new configuration, run the following commands. ```shell copy git rm -rf --cached . git reset --hard HEAD ``` + 1. To display the rewritten, normalized files, run the following command. ```shell copy git status ``` + 1. Optionally, to commit any outstanding changes in your repository, run the following command. ```shell copy diff --git a/content/get-started/getting-started-with-git/ignoring-files.md b/content/get-started/getting-started-with-git/ignoring-files.md index a108ca54dd..2081272fe0 100644 --- a/content/get-started/getting-started-with-git/ignoring-files.md +++ b/content/get-started/getting-started-with-git/ignoring-files.md @@ -23,7 +23,9 @@ GitHub maintains an official list of recommended _.gitignore_ files for many pop {% data reusables.command_line.open_the_multi_os_terminal %} 2. Navigate to the location of your Git repository. + 3. Create a _.gitignore_ file for your repository. + ```shell touch .gitignore ``` @@ -43,7 +45,9 @@ git rm --cached FILENAME You can also create a global _.gitignore_ file to define a list of rules for ignoring files in every Git repository on your computer. For example, you might create the file at _~/.gitignore_global_ and add some rules to it. {% data reusables.command_line.open_the_multi_os_terminal %} + 2. Configure Git to use the exclude file _~/.gitignore_global_ for all Git repositories. 
+ ```shell git config --global core.excludesfile ~/.gitignore_global ``` diff --git a/content/get-started/getting-started-with-git/managing-remote-repositories.md b/content/get-started/getting-started-with-git/managing-remote-repositories.md index 24d875a946..97b45aba24 100644 --- a/content/get-started/getting-started-with-git/managing-remote-repositories.md +++ b/content/get-started/getting-started-with-git/managing-remote-repositories.md @@ -76,10 +76,13 @@ The `git remote set-url` command takes two arguments: - An existing remote name. For example, `origin` or `upstream` are two common choices. - A new URL for the remote. For example: - If you're updating to use HTTPS, your URL might look like: + ```shell https://{% data variables.command_line.backticks %}/OWNER/REPOSITORY.git ``` + - If you're updating to use SSH, your URL might look like: + ```shell git@{% data variables.command_line.codeblock %}:OWNER/REPOSITORY.git ``` @@ -89,16 +92,21 @@ The `git remote set-url` command takes two arguments: {% data reusables.command_line.open_the_multi_os_terminal %} 2. Change the current working directory to your local project. 3. List your existing remotes in order to get the name of the remote you want to change. + ```shell $ git remote -v > origin git@{% data variables.command_line.codeblock %}:OWNER/REPOSITORY.git (fetch) > origin git@{% data variables.command_line.codeblock %}:OWNER/REPOSITORY.git (push) ``` + 4. Change your remote's URL from SSH to HTTPS with the `git remote set-url` command. + ```shell git remote set-url origin https://{% data variables.command_line.codeblock %}/OWNER/REPOSITORY.git ``` + 5. Verify that the remote URL has changed. + ```shell $ git remote -v # Verify new remote URL @@ -115,16 +123,21 @@ You can [use a credential helper](/get-started/getting-started-with-git/caching- {% data reusables.command_line.open_the_multi_os_terminal %} 2. Change the current working directory to your local project. 3. List your existing remotes in order to get the name of the remote you want to change. + ```shell $ git remote -v > origin https://{% data variables.command_line.codeblock %}/OWNER/REPOSITORY.git (fetch) > origin https://{% data variables.command_line.codeblock %}/OWNER/REPOSITORY.git (push) ``` + 4. Change your remote's URL from HTTPS to SSH with the `git remote set-url` command. + ```shell git remote set-url origin git@{% data variables.command_line.codeblock %}:OWNER/REPOSITORY.git ``` + 5. Verify that the remote URL has changed. + ```shell $ git remote -v # Verify new remote URL diff --git a/content/get-started/getting-started-with-git/setting-your-username-in-git.md b/content/get-started/getting-started-with-git/setting-your-username-in-git.md index 88c67b0d20..245a71c5ec 100644 --- a/content/get-started/getting-started-with-git/setting-your-username-in-git.md +++ b/content/get-started/getting-started-with-git/setting-your-username-in-git.md @@ -23,11 +23,13 @@ Changing the name associated with your Git commits using `git config` will only {% data reusables.command_line.open_the_multi_os_terminal %} 2. {% data reusables.user-settings.set_your_git_username %} + ```shell git config --global user.name "Mona Lisa" ``` 3. {% data reusables.user-settings.confirm_git_username_correct %} + ```shell $ git config --global user.name > Mona Lisa @@ -40,11 +42,13 @@ Changing the name associated with your Git commits using `git config` will only 2. 
Change the current working directory to the local repository where you want to configure the name that is associated with your Git commits. 3. {% data reusables.user-settings.set_your_git_username %} + ```shell git config user.name "Mona Lisa" ``` 3. {% data reusables.user-settings.confirm_git_username_correct %} + ```shell $ git config user.name > Mona Lisa diff --git a/content/get-started/quickstart/contributing-to-projects.md b/content/get-started/quickstart/contributing-to-projects.md index ddb0f82970..03650b53f6 100644 --- a/content/get-started/quickstart/contributing-to-projects.md +++ b/content/get-started/quickstart/contributing-to-projects.md @@ -59,11 +59,13 @@ You can clone your fork with the command line, {% data variables.product.prodnam {% data reusables.command_line.open_the_multi_os_terminal %} {% data reusables.command_line.change-current-directory-clone %} 4. Type `git clone`, and then paste the URL you copied earlier. It will look like this, with your {% data variables.product.product_name %} username instead of `YOUR-USERNAME`: + ```shell git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/Spoon-Knife ``` 5. Press **Enter**. Your local clone will be created. + ```shell $ git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/Spoon-Knife > Cloning into `Spoon-Knife`... diff --git a/content/get-started/quickstart/fork-a-repo.md b/content/get-started/quickstart/fork-a-repo.md index 5b7ede521e..059369e26a 100644 --- a/content/get-started/quickstart/fork-a-repo.md +++ b/content/get-started/quickstart/fork-a-repo.md @@ -113,11 +113,13 @@ Right now, you have a fork of the Spoon-Knife repository, but you do not have th {% data reusables.command_line.open_the_multi_os_terminal %} {% data reusables.command_line.change-current-directory-clone %} 4. Type `git clone`, and then paste the URL you copied earlier. It will look like this, with your {% data variables.product.product_name %} username instead of `YOUR-USERNAME`: + ```shell git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/Spoon-Knife ``` 5. Press **Enter**. Your local clone will be created. + ```shell $ git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/Spoon-Knife > Cloning into `Spoon-Knife`... @@ -166,6 +168,7 @@ When you fork a project in order to propose changes to the upstream repository, - To go into one of your listed directories, type `cd your_listed_directory`. - To go up one directory, type `cd ..`. 5. Type `git remote -v` and press **Enter**. You will see the current configured remote repository for your fork. + ```shell $ git remote -v > origin https://{% data variables.command_line.codeblock %}/YOUR_USERNAME/YOUR_FORK.git (fetch) @@ -173,11 +176,13 @@ When you fork a project in order to propose changes to the upstream repository, ``` 6. Type `git remote add upstream`, and then paste the URL you copied in Step 3 and press **Enter**. It will look like this: + ```shell git remote add upstream https://{% data variables.command_line.codeblock %}/ORIGINAL_OWNER/Spoon-Knife.git ``` 7. To verify the new upstream repository you have specified for your fork, type `git remote -v` again. You should see the URL for your fork as `origin`, and the URL for the upstream repository as `upstream`. 
+ ```shell $ git remote -v > origin https://{% data variables.command_line.codeblock %}/YOUR_USERNAME/YOUR_FORK.git (fetch) diff --git a/content/get-started/using-git/about-git-subtree-merges.md b/content/get-started/using-git/about-git-subtree-merges.md index 14ca92fd01..22a6781b14 100644 --- a/content/get-started/using-git/about-git-subtree-merges.md +++ b/content/get-started/using-git/about-git-subtree-merges.md @@ -29,16 +29,21 @@ The best way to explain subtree merges is to show by example. We will: {% data reusables.command_line.open_the_multi_os_terminal %} 2. Create a new directory and navigate to it. + ```shell mkdir test cd test ``` + 3. Initialize a new Git repository. + ```shell $ git init > Initialized empty Git repository in /Users/octocat/tmp/test/.git/ ``` + 4. Create and commit a new file. + ```shell $ touch .gitignore $ git add .gitignore @@ -51,6 +56,7 @@ The best way to explain subtree merges is to show by example. We will: ## Adding a new repository as a subtree 1. Add a new remote URL pointing to the separate project that we're interested in. + ```shell $ git remote add -f spoon-knife https://github.com/octocat/Spoon-Knife.git > Updating spoon-knife @@ -63,25 +69,32 @@ The best way to explain subtree merges is to show by example. We will: > From https://github.com/octocat/Spoon-Knife > * [new branch] main -> Spoon-Knife/main ``` + 2. Merge the `Spoon-Knife` project into the local Git project. This doesn't change any of your files locally, but it does prepare Git for the next step. If you're using Git 2.9 or above: + ```shell $ git merge -s ours --no-commit --allow-unrelated-histories spoon-knife/main > Automatic merge went well; stopped before committing as requested ``` If you're using Git 2.8 or below: + ```shell $ git merge -s ours --no-commit spoon-knife/main > Automatic merge went well; stopped before committing as requested ``` + 3. Create a new directory called **spoon-knife**, and copy the Git history of the `Spoon-Knife` project into it. + ```shell $ git read-tree --prefix=spoon-knife/ -u spoon-knife/main > fatal: refusing to merge unrelated histories ``` + 4. Commit the changes to keep them safe. + ```shell $ git commit -m "Subtree merged in spoon-knife" > [main fe0ca25] Subtree merged in spoon-knife diff --git a/content/get-started/using-git/splitting-a-subfolder-out-into-a-new-repository.md b/content/get-started/using-git/splitting-a-subfolder-out-into-a-new-repository.md index 9114b1cf28..31054a22a9 100644 --- a/content/get-started/using-git/splitting-a-subfolder-out-into-a-new-repository.md +++ b/content/get-started/using-git/splitting-a-subfolder-out-into-a-new-repository.md @@ -21,11 +21,13 @@ If you create a new clone of the repository, you won't lose any of your Git hist 2. Change the current working directory to the location where you want to create your new repository. 4. Clone the repository that contains the subfolder. + ```shell git clone https://{% data variables.command_line.codeblock %}/USERNAME/REPOSITORY-NAME ``` 4. Change the current working directory to your cloned repository. + ```shell cd REPOSITORY-NAME ``` @@ -65,11 +67,13 @@ If you create a new clone of the repository, you won't lose any of your Git hist {% endtip %} 8. Add a new remote name with the URL you copied for your repository. For example, `origin` or `upstream` are two common choices. + ```shell git remote add origin https://{% data variables.command_line.codeblock %}/USERNAME/REPOSITORY-NAME.git ``` 9. Verify that the remote URL was added with your new repository name. 
+ ```shell $ git remote -v # Verify new remote URL @@ -78,6 +82,7 @@ If you create a new clone of the repository, you won't lose any of your Git hist ``` 10. Push your changes to the new repository on {% data variables.product.product_name %}. + ```shell git push -u origin BRANCH-NAME ``` diff --git a/content/get-started/working-with-subversion-on-github/support-for-subversion-clients.md b/content/get-started/working-with-subversion-on-github/support-for-subversion-clients.md index 4da0621cb1..a23a88570f 100644 --- a/content/get-started/working-with-subversion-on-github/support-for-subversion-clients.md +++ b/content/get-started/working-with-subversion-on-github/support-for-subversion-clients.md @@ -28,6 +28,7 @@ Subversion checkouts are different: they mix the repository data in the working {% data reusables.repositories.copy-clone-url %} 3. Make an empty checkout of the repository: + ```shell $ svn co --depth empty https://github.com/USER/REPO > Checked out revision 1. @@ -35,6 +36,7 @@ Subversion checkouts are different: they mix the repository data in the working ``` 4. Get the `trunk` branch. The Subversion bridge maps trunk to the Git HEAD branch. + ```shell $ svn up trunk > A trunk @@ -44,6 +46,7 @@ Subversion checkouts are different: they mix the repository data in the working ``` 5. Get an empty checkout of the `branches` directory. This is where all of the non-`HEAD` branches live, and where you'll be making feature branches. + ```shell $ svn up --depth empty branches Updated to revision 1. @@ -54,12 +57,14 @@ Subversion checkouts are different: they mix the repository data in the working You can also create branches using the Subversion bridge to GitHub. From your svn client, make sure the default branch is current by updating `trunk`: + ```shell $ svn up trunk > At revision 1. ``` Next, you can use `svn copy` to create a new branch: + ```shell $ svn copy trunk branches/more_awesome > A branches/more_awesome diff --git a/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax.md b/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax.md index 6ad1069b1f..27c1aea4c4 100644 --- a/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax.md +++ b/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax.md @@ -51,6 +51,7 @@ Text that is not a quote > Text that is a quote ``` + Quoted text is indented, with a different type color. ![Screenshot of rendered GitHub Markdown showing sample quoted text. 
The quote is indented with a vertical line on the left, and its text is dark gray rather than black.](/assets/images/help/writing/quoted-text-rendered.png) diff --git a/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/quickstart-for-writing-on-github.md b/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/quickstart-for-writing-on-github.md index 07a761f704..29dc9cd995 100644 --- a/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/quickstart-for-writing-on-github.md +++ b/content/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/quickstart-for-writing-on-github.md @@ -68,6 +68,7 @@ By using the HTML `` element with the `prefers-color-scheme` media feat YOUR-ALT-TEXT ``` + 1. Replace the placeholders in the markup with the URLs of your chosen images. Alternatively, to try the feature first, you can copy the URLs from our example below. - Replace `YOUR-DARKMODE-IMAGE` with the URL of an image to display for visitors using dark mode. @@ -98,6 +99,7 @@ You can use Markdown tables to organize information. Here, you'll use a table to Hi, I'm Mona. You might recognize me as {% data variables.product.prodname_dotcom %}'s mascot. ``` + 1. Two lines below this paragraph, insert a table by copying and pasting the following markup. ```Markdown copy @@ -107,6 +109,7 @@ You can use Markdown tables to organize information. Here, you'll use a table to | 2| | | 3| | ``` + 1. In the column on the right, replace `THING-TO-RANK` with "Languages," "Hobbies," or anything else, and fill in the column with your list of things. 1. To check the table has rendered correctly, click the **Preview** tab. @@ -144,12 +147,14 @@ To keep your content tidy, you can use the `
<details>` tag to create an expandib
``` + 1. Between the `<summary>` tags, replace `THINGS-TO-RANK` with whatever you ranked in your table. 1. Optionally, to make the section display as open by default, add the `open` attribute to the `
<details>` tag. ```HTML <details open>
``` + 1. To check the collapsed section has rendered correctly, click the **Preview** tab. ### Example of a collapsed section @@ -180,6 +185,7 @@ Markdown has many other options for formatting your content. Here, you'll add a ```Markdown --- ``` + 1. Below the `---` line, add a quote by typing markup like the following. ```Markdown diff --git a/content/get-started/writing-on-github/working-with-advanced-formatting/autolinked-references-and-urls.md b/content/get-started/writing-on-github/working-with-advanced-formatting/autolinked-references-and-urls.md index 68fc696352..1bebf6ffb4 100644 --- a/content/get-started/writing-on-github/working-with-advanced-formatting/autolinked-references-and-urls.md +++ b/content/get-started/writing-on-github/working-with-advanced-formatting/autolinked-references-and-urls.md @@ -52,6 +52,7 @@ The URL of a label can be found by navigating to the labels page and clicking on ```md https://github.com/github/docs/labels/enhancement ``` + {% note %} **Note:** If the label name contains a period (`.`), the label will not automatically render from the label URL. diff --git a/content/get-started/writing-on-github/working-with-advanced-formatting/writing-mathematical-expressions.md b/content/get-started/writing-on-github/working-with-advanced-formatting/writing-mathematical-expressions.md index 9773b92cfa..07a7349f19 100644 --- a/content/get-started/writing-on-github/working-with-advanced-formatting/writing-mathematical-expressions.md +++ b/content/get-started/writing-on-github/working-with-advanced-formatting/writing-mathematical-expressions.md @@ -29,6 +29,7 @@ This sentence uses `$` delimiters to show math inline: $\sqrt{3x-1}+(1+x)^2$ ![Screenshot of rendered Markdown showing how a mathematical expression displays on {% data variables.product.prodname_dotcom %}. The equation is the square root of 3 x minus 1 plus open paren 1 plus x close paren squared.](/assets/images/help/writing/inline-math-markdown-rendering.png) {% ifversion math-backtick-syntax %} + ```` This sentence uses $\` and \`$ delimiters to show math inline: $`\sqrt{3x-1}+(1+x)^2`$ ```` @@ -58,6 +59,7 @@ Alternatively, you can use the \`\`\`math code block syntax to disp \left( \sum_{k=1}^n a_k b_k \right)^2 \leq \left( \sum_{k=1}^n a_k^2 \right) \left( \sum_{k=1}^n b_k^2 \right) ``` ```` + {% endif %} ## Writing dollar signs in line with and within mathematical expressions diff --git a/content/github-cli/github-cli/quickstart.md b/content/github-cli/github-cli/quickstart.md index 968ca2ec29..e07d1927fa 100644 --- a/content/github-cli/github-cli/quickstart.md +++ b/content/github-cli/github-cli/quickstart.md @@ -23,13 +23,17 @@ shortTitle: Quickstart 1. In the command line, authenticate to {% data variables.product.company_short %}.{% ifversion not fpt or ghec %} Replace `HOSTNAME` with the name of {% data variables.location.product_location %}. For example, `octo-inc.ghe.com`.{% endif %} {% ifversion fpt or ghec %} + ```shell gh auth login ``` + {% else %} + ```shell gh auth login --hostname HOSTNAME ``` + {% endif %} 1. Start working with {% data variables.product.company_short %} in the command line. For example, find an issue to work on with `gh issue status` or `gh issue list --assignee @me`. Create a pull request with `gh pr create`. Review a pull request with `gh pr checkout`, `gh pr diff` and `gh pr review`. 
diff --git a/content/graphql/guides/managing-enterprise-accounts.md b/content/graphql/guides/managing-enterprise-accounts.md index 00d1a8522a..5fa0b104b5 100644 --- a/content/graphql/guides/managing-enterprise-accounts.md +++ b/content/graphql/guides/managing-enterprise-accounts.md @@ -161,11 +161,13 @@ variables { "slug": "" } ``` + {% endif %} The next GraphQL query example shows how challenging it is to retrieve the number of {% ifversion not ghae %}`public`{% else %}`private`{% endif %} repositories in each organization without using the Enterprise Account API. Notice that the GraphQL Enterprise Accounts API has made this task simpler for enterprises since you only need to customize a single variable. To customize this query, replace `` and ``, etc. with the organization names on your instance. {% ifversion not ghae %} + ```graphql # Each organization is queried separately { @@ -187,7 +189,9 @@ fragment repositories on Organization { } } ``` + {% else %} + ```graphql # Each organization is queried separately { @@ -209,6 +213,7 @@ fragment repositories on Organization { } } ``` + {% endif %} ## Query each organization separately diff --git a/content/graphql/guides/migrating-from-rest-to-graphql.md b/content/graphql/guides/migrating-from-rest-to-graphql.md index 753b5eca89..742ff213b8 100644 --- a/content/graphql/guides/migrating-from-rest-to-graphql.md +++ b/content/graphql/guides/migrating-from-rest-to-graphql.md @@ -38,6 +38,7 @@ Here are examples of each. ## Example: Getting the data you need and nothing more A single REST API call retrieves a list of your organization's members: + ```shell curl -v {% data variables.product.api_url_pre %}/orgs/:org/members ``` @@ -60,11 +61,13 @@ query { ``` Consider another example: retrieving a list of pull requests and checking if each one is mergeable. A call to the REST API retrieves a list of pull requests and their [summary representations](/rest#summary-representations): + ```shell curl -v {% data variables.product.api_url_pre %}/repos/:owner/:repo/pulls ``` Determining if a pull request is mergeable requires retrieving each pull request individually for its [detailed representation](/rest#detailed-representations) (a large payload) and checking whether its `mergeable` attribute is true or false: + ```shell curl -v {% data variables.product.api_url_pre %}/repos/:owner/:repo/pulls/:number ``` @@ -89,6 +92,7 @@ query { ## Example: Nesting Querying with nested fields lets you replace multiple REST calls with fewer GraphQL queries. For example, retrieving a pull request along with its commits, non-review comments, and reviews using the **REST API** requires four separate calls: + ```shell curl -v {% data variables.product.api_url_pre %}/repos/:owner/:repo/pulls/:number curl -v {% data variables.product.api_url_pre %}/repos/:owner/:repo/pulls/:number/commits diff --git a/content/graphql/guides/migrating-graphql-global-node-ids.md b/content/graphql/guides/migrating-graphql-global-node-ids.md index 7ad0df822a..831bdd913f 100644 --- a/content/graphql/guides/migrating-graphql-global-node-ids.md +++ b/content/graphql/guides/migrating-graphql-global-node-ids.md @@ -38,13 +38,16 @@ $ curl \ ``` Even though the legacy ID `MDQ6VXNlcjM0MDczMDM=` was used in the query, the response will contain the new ID format: + ``` {"data":{"node":{"id":"U_kgDOADP9xw"}}} ``` + With the `X-Github-Next-Global-ID` header, you can find the new ID format for legacy IDs that you reference in your application. 
You can then update those references with the ID received in the response. You should update all references to legacy IDs and use the new ID format for any subsequent requests to the API. To perform bulk operations, you can use aliases to submit multiple node queries in one API call. For more information, see "[the GraphQL docs](https://graphql.org/learn/queries/#aliases)." You can also get the new ID for a collection of items. For example, if you wanted to get the new ID for the last 10 repositories in your organization, you could use a query like this: + ``` { organization(login: "github") { diff --git a/content/issues/planning-and-tracking-with-projects/automating-your-project/using-the-api-to-manage-projects.md b/content/issues/planning-and-tracking-with-projects/automating-your-project/using-the-api-to-manage-projects.md index e9607078ed..bc2cea5cd9 100644 --- a/content/issues/planning-and-tracking-with-projects/automating-your-project/using-the-api-to-manage-projects.md +++ b/content/issues/planning-and-tracking-with-projects/automating-your-project/using-the-api-to-manage-projects.md @@ -64,15 +64,18 @@ To update your project through the API, you will need to know the node ID of the You can find the node ID of an organization project if you know the organization name and project number. Replace `ORGANIZATION` with the name of your organization. For example, `octo-org`. Replace `NUMBER` with the project number. To find the project number, look at the project URL. For example, `https://github.com/orgs/octo-org/projects/5` has a project number of 5. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"query{organization(login: \"ORGANIZATION\") {projectV2(number: NUMBER){id}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -83,20 +86,24 @@ gh api graphql -f query=' } }' ``` + {% endcli %} You can also find the node ID of all projects in your organization. The following example will return the node ID and title of the first 20 projects in an organization. Replace `ORGANIZATION` with the name of your organization. For example, `octo-org`. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"{organization(login: \"ORGANIZATION\") {projectsV2(first: 20) {nodes {id title}}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -110,6 +117,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} ### Finding the node ID of a user project @@ -119,15 +127,18 @@ To update your project through the API, you will need to know the node ID of the You can find the node ID of a user project if you know the project number. Replace `USER` with your user name. For example, `octocat`. Replace `NUMBER` with your project number. To find the project number, look at the project URL. For example, `https://github.com/users/octocat/projects/5` has a project number of 5. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"query{user(login: \"USER\") {projectV2(number: NUMBER){id}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -138,20 +149,24 @@ gh api graphql -f query=' } }' ``` + {% endcli %} You can also find the node ID for all of your projects. The following example will return the node ID and title of your first 20 projects. 
Replace `USER` with your username. For example, `octocat`. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"{user(login: \"USER\") {projectsV2(first: 20) {nodes {id title}}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -165,6 +180,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} ### Finding the node ID of a field @@ -174,15 +190,18 @@ To update the value of a field, you will need to know the node ID of the field. The following example will return the ID, name, settings, and configuration for the first 20 fields in a project. Replace `PROJECT_ID` with the node ID of your project. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"query{ node(id: \"PROJECT_ID\") { ... on ProjectV2 { fields(first: 20) { nodes { ... on ProjectV2Field { id name } ... on ProjectV2IterationField { id name configuration { iterations { startDate id }}} ... on ProjectV2SingleSelectField { id name options { id name }}}}}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -218,6 +237,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} The response will look similar to the following example: @@ -278,15 +298,18 @@ Each field has an ID and name. Single select fields are returned as a `ProjectV2 If you just need the name and ID of a field, and do not need information about iterations or a single select field's options, you can make use of the `ProjectV2FieldCommon` object. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"query{ node(id: \"PROJECT_ID\") { ... on ProjectV2 { fields(first: 20) { nodes { ... on ProjectV2FieldCommon { id name }}}}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -304,6 +327,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} The response when using the `ProjectV2FieldCommon` object will look similar to the following example: @@ -348,15 +372,18 @@ You can query the API to find information about items in your project. The following example will return the first 20 issues, pull requests, and draft issues in a project. For issues and pull requests, it will also return title and the first 10 assignees. For draft issue, it will return the title and body. The example will also return the field name and value for any text, date, or single select fields in the first 8 fields of the project. Replace `PROJECT_ID` with the node ID of your project. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"query{ node(id: \"PROJECT_ID\") { ... on ProjectV2 { items(first: 20) { nodes{ id fieldValues(first: 8) { nodes{ ... on ProjectV2ItemFieldTextValue { text field { ... on ProjectV2FieldCommon { name }}} ... on ProjectV2ItemFieldDateValue { date field { ... on ProjectV2FieldCommon { name } } } ... on ProjectV2ItemFieldSingleSelectValue { name field { ... on ProjectV2FieldCommon { name }}}}} content{ ... 
on DraftIssue { title body } ...on Issue { title assignees(first: 10) { nodes{ login }}} ...on PullRequest { title assignees(first: 10) { nodes{ login }}}}}}}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' query{ @@ -421,6 +448,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} A project may contain items that a user does not have permission to view. In this case, the item type will be returned as `REDACTED`. @@ -440,15 +468,18 @@ Use mutations to update projects. For more information, see "[AUTOTITLE](/graphq The following example will add an issue or pull request to your project. Replace `PROJECT_ID` with the node ID of your project. Replace `CONTENT_ID` with the node ID of the issue or pull request that you want to add. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation {addProjectV2ItemById(input: {projectId: \"PROJECT_ID\" contentId: \"CONTENT_ID\"}) {item {id}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -459,6 +490,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} The response will contain the node ID of the newly created item. @@ -482,15 +514,18 @@ If you try to add an item that already exists, the existing item ID is returned The following example will add a draft issue to your project. Replace `PROJECT_ID` with the node ID of your project. Replace `TITLE` and `BODY` with the content you want for the new draft issue. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation {addProjectV2DraftIssue(input: {projectId: \"PROJECT_ID\" title: \"TITLE\" body: \"BODY\"}) {projectItem {id}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -501,6 +536,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} The response will contain the node ID of the newly created draft issue. @@ -522,15 +558,18 @@ The response will contain the node ID of the newly created draft issue. The following example will update your project's settings. Replace `PROJECT_ID` with the node ID of your project. Set `public` to `true` to make your project public on {% data variables.product.product_name %}. Modify `readme` to make changes to your project's README. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation { updateProjectV2(input: { projectId: \"PROJECT_ID\", title: \"Project title\", public: false, readme: \"# Project README\n\nA long description\", shortDescription: \"A short description\"}) { projectV2 { id, title, readme, shortDescription }}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -552,6 +591,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} ### Updating a custom text, number, or date field @@ -559,15 +599,18 @@ gh api graphql -f query=' The following example will update the value of a text field for an item. Replace `PROJECT_ID` with the node ID of your project. Replace `ITEM_ID` with the node ID of the item you want to update. Replace `FIELD_ID` with the ID of the field that you want to update. 
{% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation {updateProjectV2ItemFieldValue( input: { projectId: \"PROJECT_ID\" itemId: \"ITEM_ID\" fieldId: \"FIELD_ID\" value: { text: \"Updated text\" }}) { projectV2Item { id }}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -587,6 +630,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} {% note %} @@ -613,15 +657,18 @@ The following example will update the value of a single select field for an item - `OPTION_ID` - Replace this with the ID of the desired single select option. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation {updateProjectV2ItemFieldValue( input: { projectId: \"PROJECT_ID\" itemId: \"ITEM_ID\" fieldId: \"FIELD_ID\" value: { singleSelectOptionId: \"OPTION_ID\" }}) { projectV2Item { id }}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -641,6 +688,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} ### Updating an iteration field @@ -653,15 +701,18 @@ The following example will update the value of an iteration field for an item. - `ITERATION_ID` - Replace this with the ID of the desired iteration. This can be either an active or completed iteration. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation {updateProjectV2ItemFieldValue( input: { projectId: \"PROJECT_ID\" itemId: \"ITEM_ID\" fieldId: \"FIELD_ID\" value: { iterationId: \"ITERATION_ID\" }}) { projectV2Item { id }}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -681,6 +732,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} ### Deleting an item from a project @@ -688,15 +740,18 @@ gh api graphql -f query=' The following example will delete an item from a project. Replace `PROJECT_ID` with the node ID of your project. Replace `ITEM_ID` with the node ID of the item you want to delete. {% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: Bearer TOKEN' \ --data '{"query":"mutation {deleteProjectV2Item(input: {projectId: \"PROJECT_ID\" itemId: \"ITEM_ID\"}) {deletedItemId}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation { @@ -710,6 +765,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} ## Managing projects @@ -723,32 +779,39 @@ To create a new project using the API, you'll need to provide a name for the pro You can find the node ID of a {% data variables.product.product_name %} user or organization if you know the username. Replace GITHUB_OWNER with the {% data variables.product.product_name %} username of the new project owner. {% curl %} + ```shell curl --request GET \ --url https://api.github.com/users/GITHUB_OWNER \ --header 'Authorization: token TOKEN' \ --header 'Accept: application/vnd.github+json' ``` + {% endcurl %} {% cli %} + ```shell gh api -H "Accept: application/vnd.github+json" /users/GITHUB_OWNER ``` + {% endcli %} To create the project, replace `OWNER_ID` with the node ID of the new project owner and replace `PROJECT_NAME` with a name for the project. 
{% curl %} + ```shell curl --request POST \ --url https://api.github.com/graphql \ --header 'Authorization: token TOKEN' \ --data '{"query":"mutation {createProjectV2(input: {ownerId: \"OWNER_ID\" title: \"PROJECT_NAME\"}) {projectV2 {id}}}"}' ``` + {% endcurl %} {% cli %} + ```shell gh api graphql -f query=' mutation{ @@ -764,6 +827,7 @@ gh api graphql -f query=' } }' ``` + {% endcli %} {% ifversion projects-v2-webhooks %} diff --git a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/adding-locally-hosted-code-to-github.md b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/adding-locally-hosted-code-to-github.md index 872e60fb0a..e7aa1f15fb 100644 --- a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/adding-locally-hosted-code-to-github.md +++ b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/adding-locally-hosted-code-to-github.md @@ -55,13 +55,16 @@ If your locally-hosted code isn't tracked by any VCS, the first step is to initi ``` shell git init && git symbolic-ref HEAD refs/heads/main ``` + 1. Add the files in your new local repository. This stages them for the first commit. ```shell $ git add . # Adds the files in the local repository and stages them for commit. {% data reusables.git.unstage-codeblock %} ``` + 1. Commit the files that you've staged in your local repository. + ```shell $ git commit -m "First commit" # Commits the tracked changes and prepares them to be pushed to a remote repository. {% data reusables.git.reset-head-to-previous-commit-codeblock %} @@ -93,13 +96,16 @@ After you've initialized a Git repository, you can push the repository to {% dat {% data reusables.command_line.open_the_multi_os_terminal %} 1. Change the current working directory to your local project. 1. [Add the URL for the remote repository](/get-started/getting-started-with-git/managing-remote-repositories) where your local repository will be pushed. + ```shell $ git remote add origin <REMOTE_URL> # Sets the new remote $ git remote -v # Verifies the new remote URL ``` + 1. [Push the changes](/get-started/using-git/pushing-commits-to-a-remote-repository) in your local repository to {% data variables.location.product_location %}. + ```shell $ git push -u origin main # Pushes the changes in your local repository up to the remote repository you specified as the origin @@ -116,13 +122,16 @@ After you've initialized a Git repository, you can push the repository to {% dat {% data reusables.command_line.open_the_multi_os_terminal %} 1. Change the current working directory to your local project. 1. In the Command prompt, [add the URL for the remote repository](/get-started/getting-started-with-git/managing-remote-repositories) where your local repository will be pushed. + ```shell $ git remote add origin <REMOTE_URL> # Sets the new remote $ git remote -v # Verifies the new remote URL ``` + 1. [Push the changes](/get-started/using-git/pushing-commits-to-a-remote-repository) in your local repository to {% data variables.location.product_location %}. + ```shell $ git push origin main # Pushes the changes in your local repository up to the remote repository you specified as the origin @@ -139,13 +148,16 @@ After you've initialized a Git repository, you can push the repository to {% dat {% data reusables.command_line.open_the_multi_os_terminal %} 1. Change the current working directory to your local project. 1. 
[Add the URL for the remote repository](/get-started/getting-started-with-git/managing-remote-repositories) where your local repository will be pushed. + ```shell $ git remote add origin <REMOTE_URL> # Sets the new remote $ git remote -v # Verifies the new remote URL ``` + 1. [Push the changes](/get-started/using-git/pushing-commits-to-a-remote-repository) in your local repository to {% data variables.location.product_location %}. + ```shell $ git push origin main # Pushes the changes in your local repository up to the remote repository you specified as the origin diff --git a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-mercurial-repository.md b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-mercurial-repository.md index cfb38c8778..95042e3e1e 100644 --- a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-mercurial-repository.md +++ b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-mercurial-repository.md @@ -47,11 +47,13 @@ To follow these steps, you must use a macOS or Linux system and have the followi ```shell copy hg log --template "{author}\n" | sort | uniq > committers.txt ``` + 1. Update your `committers.txt` file, mapping the committer name used in the Mercurial repository to the name you want to use in your Git repository, with the following format: ``` “The Octocat ”=”Octocat ” ``` + 1. In your initialized Git repository, run `hg-fast-export.sh`, passing in the path to your Mercurial repository and the path to your `committers.txt` file as arguments. For example, `../fast-export-221024/hg-fast-export.sh -r ../mercurial-repo -A ../mercurial-repo/committers.txt -M main`. diff --git a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-subversion-repository.md b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-subversion-repository.md index 45163a6fa9..bbcf320aa3 100644 --- a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-subversion-repository.md +++ b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-a-subversion-repository.md @@ -36,11 +36,13 @@ To follow these steps, you must use a macOS or Linux system and have the followi ```shell copy svn log -q | grep -e '^r' | awk 'BEGIN { FS = "|" } ; { print $2" = "$2 }' | sed 's/^[ \t]*//' | sort | uniq > authors.txt ``` + 1. Update your `authors.txt` file, mapping the author name used in the Subversion repository to the name you want to use in your Git repository, with the following format: ``` octocat = The Octocat ``` + 1. To convert your Subversion repository to a Git repository, use `git svn`. - If your Subversion repository has a standard format, with “trunk”, “branches”, and “tags” folders, run `git svn clone -s URL PATH/TO/DESTINATION --authors-file PATH/TO/AUTHORS.TXT`, replacing `URL` with the URL of the Subversion repository, `PATH/TO/DESTINATION` with the path to the directory you want to clone the repository into, and `PATH/TO/AUTHORS.TXT` with the path to your `authors.txt` file. 
diff --git a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-an-external-git-repository-using-the-command-line.md b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-an-external-git-repository-using-the-command-line.md index 725209f7d7..5119f5a42f 100644 --- a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-an-external-git-repository-using-the-command-line.md +++ b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-an-external-git-repository-using-the-command-line.md @@ -32,17 +32,22 @@ For purposes of demonstration, we'll use: 1. [Create a new repository on {% data variables.product.product_name %}](/repositories/creating-and-managing-repositories/creating-a-new-repository). You'll import your external Git repository to this new repository. 2. On the command line, make a "bare" clone of the external repository using the external clone URL. This creates a full copy of the data, but without a working directory for editing files, and ensures a clean, fresh export of all the old data. + ```shell $ git clone --bare https://external-host.com/EXTUSER/REPO.git # Makes a bare clone of the external repository in a local directory ``` + 3. Push the locally cloned repository to {% data variables.product.product_name %} using the "mirror" option, which ensures that all references, such as branches and tags, are copied to the imported repository. + ```shell $ cd REPO.git $ git push --mirror https://{% data variables.command_line.codeblock %}/USER/REPO.git # Pushes the mirror to the new repository on {% ifversion ghae %}{% data variables.product.product_name %}{% else %}{% data variables.location.product_location %}{% endif %} ``` + 4. Remove the temporary local repository. + ```shell cd .. rm -rf REPO.git diff --git a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell.md b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell.md index 93b4b847cd..d15eaab85c 100644 --- a/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell.md +++ b/content/migrations/importing-source-code/using-the-command-line-to-import-source-code/importing-from-other-version-control-systems-with-the-administrative-shell.md @@ -20,18 +20,23 @@ permissions: Site administrators can use the administrative shell to import data {% data reusables.enterprise_installation.ssh-into-instance %} 2. Make a raw clone of the project using the command below, specifying the URL of the source project, and a path to a temporary repository: + ```shell $ git-import-hg-raw HG-CLONE-URL/PATH/REPO-NAME.git # Creates a new repository with one or more Git refs in "refs/import/" in the specified path. ``` + {% data reusables.enterprise_migrations.review-the-import-csv %} 4. Rewrite the authors and branches using the CSV file: + ```shell git-import-rewrite --flavor hg --authors /PATH/AUTHORS-MAP-FILE.csv /PATH/REPO-NAME.git ``` + 5. If you haven't yet, [create a new empty repository on {% data variables.product.prodname_ghe_server %}](/repositories/creating-and-managing-repositories/creating-a-new-repository). 
{% data reusables.command_line.switching_directories_procedural %} 7. Push the imported repository to {% data variables.product.prodname_ghe_server %}: + ```shell git push --mirror PUSH-URL-ON-GITHUB-ENTERPRISE ``` @@ -40,18 +45,23 @@ permissions: Site administrators can use the administrative shell to import data {% data reusables.enterprise_installation.ssh-into-instance %} 2. Make a raw clone of the project using the command below, specifying the URL of the source project, and a path to a temporary repository: + ```shell $ git-import-svn-raw SVN-CLONE-URL /PATH/REPO-NAME.git # Creates a new repository with one or more Git refs in "refs/import/" in the specified path. ``` + {% data reusables.enterprise_migrations.review-the-import-csv %} 4. Rewrite the authors and branches using the CSV file: + ```shell git-import-rewrite --flavor svn --authors /PATH/AUTHORS-MAP-FILE.csv /PATH/REPO-NAME.git ``` + 5. If you haven't yet, [create a new empty repository on {% data variables.product.prodname_ghe_server %}](/repositories/creating-and-managing-repositories/creating-a-new-repository). {% data reusables.command_line.switching_directories_procedural %} 7. Push the imported repository to {% data variables.product.prodname_ghe_server %}: + ```shell git push --mirror PUSH-URL-ON-GITHUB-ENTERPRISE ``` @@ -60,18 +70,23 @@ permissions: Site administrators can use the administrative shell to import data {% data reusables.enterprise_installation.ssh-into-instance %} 2. Make a raw clone of the project using the command below, specifying the URL of the source project, and a path to a temporary repository: + ```shell $ git-import-tfs-raw TEAM-FOUNDATION-CLONE-URL /PATH/REPO-NAME.git # Creates a new repository with one or more Git refs in "refs/import/" in the specified path. ``` + {% data reusables.enterprise_migrations.review-the-import-csv %} 4. Rewrite the authors and branches using the CSV file: + ```shell git-import-rewrite --flavor tfs --authors /PATH/AUTHORS-MAP-FILE.csv /PATH/REPO_NAME.git ``` + 5. If you haven't yet, [create a new empty repository on {% data variables.product.prodname_ghe_server %}](/repositories/creating-and-managing-repositories/creating-a-new-repository). {% data reusables.command_line.switching_directories_procedural %} 7. Push the imported repository to {% data variables.product.prodname_ghe_server %}: + ```shell git push --mirror PUSH-URL-ON-GITHUB-ENTERPRISE ``` diff --git a/content/migrations/using-ghe-migrator/exporting-migration-data-from-github-enterprise-server.md b/content/migrations/using-ghe-migrator/exporting-migration-data-from-github-enterprise-server.md index 69d11ca1cd..5c90dc4688 100644 --- a/content/migrations/using-ghe-migrator/exporting-migration-data-from-github-enterprise-server.md +++ b/content/migrations/using-ghe-migrator/exporting-migration-data-from-github-enterprise-server.md @@ -48,14 +48,19 @@ shortTitle: Export from GHES ``` 3. When prompted, enter your {% data variables.product.prodname_ghe_server %} username: + ```shell Enter username authorized for migration: admin ``` + 4. When prompted for a {% data variables.product.pat_generic %}, enter the access token you created in "[Preparing the {% data variables.product.prodname_ghe_server %} source instance](#preparing-the-github-enterprise-server-source-instance)": + ```shell Enter {% data variables.product.pat_generic %}: ************** ``` + 5. 
When `ghe-migrator add` has finished it will print the unique "Migration GUID" that it generated to identify this export as well as a list of the resources that were added to the export. You will use the Migration GUID that it generated in subsequent `ghe-migrator add` and `ghe-migrator export` steps to tell `ghe-migrator` to continue operating on the same export. + ```shell > 101 models added to export > Migration GUID: EXAMPLE-MIGRATION-GUID @@ -77,27 +82,36 @@ shortTitle: Export from GHES > attachments | 4 > projects | 2 ``` + Each time you add a new repository with an existing Migration GUID it will update the existing export. If you run `ghe-migrator add` again without a Migration GUID it will start a new export and generate a new Migration GUID. **Do not re-use the Migration GUID generated during an export when you start preparing your migration for import**. 6. To add more repositories to the same export, use the `ghe-migrator add` command with the `-g` flag. You'll pass in the new repository URL and the Migration GUID from Step 5: + ```shell ghe-migrator add https://HOSTNAME/USERNAME/OTHER-REPO-NAME -g MIGRATION-GUID --lock ``` + 7. When you've finished adding repositories, generate the migration archive using the `ghe-migrator export` command with the `-g` flag and the Migration GUID from Step 5: + ```shell $ ghe-migrator export -g MIGRATION-GUID > Archive saved to: /data/github/current/tmp/MIGRATION-GUID.tar.gz ``` + - {% data reusables.enterprise_migrations.specify-staging-path %} 8. Close the connection to {% data variables.location.product_location %}: + ```shell $ exit > logout > Connection to HOSTNAME closed. ``` + 9. Copy the migration archive to your computer using the [`scp`](https://acloudguru.com/blog/engineering/ssh-and-scp-howto-tips-tricks#scp) command. The archive file will be named with the Migration GUID: + ```shell scp -P 122 admin@HOSTNAME:/data/github/current/tmp/MIGRATION-GUID.tar.gz ~/Desktop ``` + {% data reusables.enterprise_migrations.ready-to-import-migrations %} diff --git a/content/migrations/using-ghe-migrator/exporting-migration-data-from-githubcom.md b/content/migrations/using-ghe-migrator/exporting-migration-data-from-githubcom.md index 258c9f14cc..682d0eec54 100644 --- a/content/migrations/using-ghe-migrator/exporting-migration-data-from-githubcom.md +++ b/content/migrations/using-ghe-migrator/exporting-migration-data-from-githubcom.md @@ -46,6 +46,7 @@ The Migrations API is currently in a preview period, which means that the endpoi 2. Start a migration by sending a `POST` request to [the migration endpoint](/free-pro-team@latest/rest/migrations#start-an-organization-migration). You'll need: - Your access token for authentication. - A [list of the repositories](/free-pro-team@latest/rest/repos#list-organization-repositories) you want to migrate: + ```shell curl -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" \ -X POST \ @@ -53,6 +54,7 @@ The Migrations API is currently in a preview period, which means that the endpoi -d'{"lock_repositories":true,"repositories":["ORG_NAME/REPO_NAME", "ORG_NAME/REPO_NAME"]}' \ https://api.github.com/orgs/ORG_NAME/migrations ``` + - If you want to lock the repositories before migrating them, make sure `lock_repositories` is set to `true`. This is highly recommended. - You can exclude file attachments by passing `exclude_attachments: true` to the endpoint. {% data reusables.enterprise_migrations.exclude-file-attachments %} The final archive size must be less than 20 GB. 
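Because the later steps all require the migration's unique `id`, it can help to capture it directly from the response to this `POST` request. The sketch below assumes `jq` is installed; `GITHUB_ACCESS_TOKEN`, `ORG_NAME`, and `REPO_NAME` are the same placeholders used above.

```shell
# Start the migration and capture the "id" field from the JSON response.
# Assumes jq is installed; replace the placeholders before running.
MIGRATION_ID=$(curl -s \
  -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" \
  -X POST \
  -H "Accept: application/vnd.github+json" \
  -d '{"lock_repositories":true,"repositories":["ORG_NAME/REPO_NAME"]}' \
  https://api.github.com/orgs/ORG_NAME/migrations | jq -r '.id')

echo "Started migration $MIGRATION_ID"
```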
@@ -61,6 +63,7 @@ The Migrations API is currently in a preview period, which means that the endpoi 3. Send a `GET` request to [the migration status endpoint](/free-pro-team@latest/rest/migrations#get-an-organization-migration-status) to fetch the status of a migration. You'll need: - Your access token for authentication. - The unique `id` of the migration: + ```shell curl -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" \ -H "Accept: application/vnd.github+json" \ @@ -76,6 +79,7 @@ The Migrations API is currently in a preview period, which means that the endpoi 4. After your migration has exported, download the migration archive by sending a `GET` request to [the migration download endpoint](/free-pro-team@latest/rest/migrations#download-an-organization-migration-archive). You'll need: - Your access token for authentication. - The unique `id` of the migration: + ```shell curl -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" \ -H "Accept: application/vnd.github+json" \ @@ -86,10 +90,12 @@ The Migrations API is currently in a preview period, which means that the endpoi 5. The migration archive is automatically deleted after seven days. If you would prefer to delete it sooner, you can send a `DELETE` request to [the migration archive delete endpoint](/free-pro-team@latest/rest/migrations#delete-an-organization-migration-archive). You'll need: - Your access token for authentication. - The unique `id` of the migration: + ```shell curl -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" \ -X DELETE \ -H "Accept: application/vnd.github+json" \ https://api.github.com/orgs/ORG_NAME/migrations/ID/archive ``` + {% data reusables.enterprise_migrations.ready-to-import-migrations %} diff --git a/content/migrations/using-ghe-migrator/migrating-data-to-github-enterprise-server.md b/content/migrations/using-ghe-migrator/migrating-data-to-github-enterprise-server.md index 5041372aef..28b3c8d93d 100644 --- a/content/migrations/using-ghe-migrator/migrating-data-to-github-enterprise-server.md +++ b/content/migrations/using-ghe-migrator/migrating-data-to-github-enterprise-server.md @@ -103,6 +103,7 @@ ghe-migrator audit -m RECORD_TYPE -s STATE -g MIGRATION-GUID ``` For example, to view every successfully imported organization and team, you would enter: + ```shell $ ghe-migrator audit -m organization,team -s mapped,renamed -g MIGRATION-GUID > model_name,source_url,target_url,state @@ -110,6 +111,7 @@ $ ghe-migrator audit -m organization,team -s mapped,renamed -g MIGRATION-GUID ``` **We strongly recommend auditing every import that failed.** To do that, you will enter: + ```shell $ ghe-migrator audit -s failed_import,failed_map,failed_rename,failed_merge -g MIGRATION-GUID > model_name,source_url,target_url,state @@ -146,6 +148,7 @@ curl -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" -X DELETE \ ### Deleting repositories from an organization on {% data variables.product.prodname_dotcom_the_website %} After unlocking the {% data variables.product.prodname_dotcom_the_website %} organization's repositories, you should delete every repository you previously migrated using [the repository delete endpoint](/rest/repos#delete-a-repository). 
You'll need your access token for authentication: + ```shell curl -H "Authorization: Bearer GITHUB_ACCESS_TOKEN" -X DELETE \ https://api.github.com/repos/ORG-NAME/REPO_NAME diff --git a/content/migrations/using-ghe-migrator/preparing-to-migrate-data-to-github-enterprise-server.md b/content/migrations/using-ghe-migrator/preparing-to-migrate-data-to-github-enterprise-server.md index 291f4d5f8a..da61391c06 100644 --- a/content/migrations/using-ghe-migrator/preparing-to-migrate-data-to-github-enterprise-server.md +++ b/content/migrations/using-ghe-migrator/preparing-to-migrate-data-to-github-enterprise-server.md @@ -43,14 +43,19 @@ shortTitle: Prepare to migrate data ## Generating a list of migration conflicts 1. Using the `ghe-migrator conflicts` command with the Migration GUID, generate a _conflicts.csv_ file: + ```shell ghe-migrator conflicts -g MIGRATION-GUID > conflicts.csv ``` + - If no conflicts are reported, you can safely import the data by following the steps in "[AUTOTITLE](/migrations/using-ghe-migrator/migrating-data-to-github-enterprise-server)". + 2. If there are conflicts, using the [`scp`](https://acloudguru.com/blog/engineering/ssh-and-scp-howto-tips-tricks#scp) command, copy _conflicts.csv_ to your local computer: + ```shell scp -P 122 admin@HOSTNAME:conflicts.csv ~/Desktop ``` + 3. Continue to "[Resolving migration conflicts or setting up custom mappings](#resolving-migration-conflicts-or-setting-up-custom-mappings)". ## Reviewing migration conflicts diff --git a/content/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/reclaiming-mannequins-for-github-enterprise-importer.md b/content/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/reclaiming-mannequins-for-github-enterprise-importer.md index d1394d2504..866bf6ee88 100644 --- a/content/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/reclaiming-mannequins-for-github-enterprise-importer.md +++ b/content/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/reclaiming-mannequins-for-github-enterprise-importer.md @@ -83,6 +83,7 @@ If your migration source is a {% data variables.product.prodname_dotcom %} produ ```shell copy gh gei reclaim-mannequin --github-target-org DESTINATION --mannequin-user MANNEQUIN --target-user USERNAME ``` + {% data reusables.enterprise-migration-tool.mannequin-reclaim-must-accept %} #### Reclaiming mannequins with the {% data variables.product.prodname_ado2gh_cli_short %} @@ -114,6 +115,7 @@ If your migration source is Azure DevOps, you can reclaim mannequins with the {% ```shell copy gh ado2gh reclaim-mannequin --github-org DESTINATION --mannequin-user MANNEQUIN --target-user USERNAME ``` + {% data reusables.enterprise-migration-tool.mannequin-reclaim-must-accept %} ### Reclaiming mannequins in your browser diff --git a/content/migrations/using-github-enterprise-importer/migrating-repositories-with-github-enterprise-importer/migrating-repositories-from-githubcom-to-github-enterprise-cloud.md b/content/migrations/using-github-enterprise-importer/migrating-repositories-with-github-enterprise-importer/migrating-repositories-from-githubcom-to-github-enterprise-cloud.md index 58813f94fd..38c788dbe4 100644 --- a/content/migrations/using-github-enterprise-importer/migrating-repositories-with-github-enterprise-importer/migrating-repositories-from-githubcom-to-github-enterprise-cloud.md +++ 
b/content/migrations/using-github-enterprise-importer/migrating-repositories-with-github-enterprise-importer/migrating-repositories-from-githubcom-to-github-enterprise-cloud.md @@ -198,6 +198,7 @@ gh gei generate-script --github-source-org SOURCE --github-target-org DESTINATIO ```shell copy gh gei migrate-repo --github-source-org SOURCE --source-repo CURRENT-NAME --github-target-org DESTINATION --target-repo NEW-NAME ``` + {% data reusables.enterprise-migration-tool.skip-releases %} {% data reusables.enterprise-migration-tool.migrate-repo-table-ec %} diff --git a/content/organizations/managing-organization-settings/verifying-or-approving-a-domain-for-your-organization.md b/content/organizations/managing-organization-settings/verifying-or-approving-a-domain-for-your-organization.md index 4907774b14..032ab7d840 100644 --- a/content/organizations/managing-organization-settings/verifying-or-approving-a-domain-for-your-organization.md +++ b/content/organizations/managing-organization-settings/verifying-or-approving-a-domain-for-your-organization.md @@ -69,9 +69,11 @@ To verify a domain, you must have access to modify domain records with your doma {% data reusables.organizations.add-domain %} {% data reusables.organizations.add-dns-txt-record %} 1. Wait for your DNS configuration to change, which may take up to 72 hours. You can confirm your DNS configuration has changed by running the `dig` command on the command line, replacing `ORGANIZATION` with the name of your organization and `example.com` with the domain you'd like to verify. You should see your new TXT record listed in the command output. + ```shell dig _github-challenge-ORGANIZATION-org.example.com +nostats +nocomments +nocmd TXT ``` + 1. After confirming your TXT record is added to your DNS, follow steps one through three above to navigate to your organization's approved and verified domains. {% data reusables.organizations.continue-verifying-domain %} 1. Optionally, once the "Verified" badge is visible on your organization's profile page, you can delete the TXT entry from the DNS record at your domain hosting service. diff --git a/content/packages/learn-github-packages/connecting-a-repository-to-a-package.md b/content/packages/learn-github-packages/connecting-a-repository-to-a-package.md index abc3ffd37f..f2dcff11df 100644 --- a/content/packages/learn-github-packages/connecting-a-repository-to-a-package.md +++ b/content/packages/learn-github-packages/connecting-a-repository-to-a-package.md @@ -40,10 +40,13 @@ When you publish a package that is scoped to a personal account or an organizati ```shell LABEL org.opencontainers.image.source=https://{% ifversion fpt or ghec %}github.com{% else %}HOSTNAME{% endif %}/OWNER/REPO ``` + For example, if you're the user `octocat` and own `my-repo`{% ifversion ghes > 3.4 %}, and your {% data variables.location.product_location %} hostname is `github.companyname.com`,{% endif %} you would add this line to your Dockerfile: + ```shell LABEL org.opencontainers.image.source=https://{% ifversion fpt or ghec %}github.com{% else %}{% data reusables.package_registry.container-registry-example-hostname %}{% endif %}/octocat/my-repo ``` + For more information, see "[LABEL](https://docs.docker.com/engine/reference/builder/#label)" in the official Docker documentation and "[Pre-defined Annotation Keys](https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys)" in the `opencontainers/image-spec` repository. 2. Build your container image. 
This example builds an image from the Dockerfile in the current directory and assigns the image name `hello_docker`. @@ -53,6 +56,7 @@ When you publish a package that is scoped to a personal account or an organizati ``` 3. Optionally, review the details of the Docker image you just created. + ```shell $ docker images > REPOSITORY TAG IMAGE ID CREATED SIZE @@ -62,6 +66,7 @@ When you publish a package that is scoped to a personal account or an organizati ``` 4. Assign a name and hosting destination to your Docker image. + ```shell docker tag IMAGE_NAME {% data reusables.package_registry.container-registry-hostname %}/NAMESPACE/NEW_IMAGE_NAME:TAG ``` @@ -69,23 +74,30 @@ When you publish a package that is scoped to a personal account or an organizati Replace `NAMESPACE` with the name of the personal account or organization to which you want the package to be scoped. For example: + ```shell docker tag 38f737a91f39 {% ifversion fpt or ghec %}ghcr.io{% elsif ghes > 3.4 %}{% data reusables.package_registry.container-registry-example-hostname %}{% endif %}/octocat/hello_docker:latest ``` 5. If you haven't already, authenticate to the {% data variables.product.prodname_container_registry %}. For more information, see "[AUTOTITLE](/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry)." {% raw %} + ```shell $ echo $CR_PAT | docker login {% endraw %}{% data reusables.package_registry.container-registry-hostname %}{% raw %} -u USERNAME --password-stdin > Login Succeeded ``` + {% endraw %} 6. Push your container image to the {% data variables.product.prodname_container_registry %}. + ```shell docker push {% data reusables.package_registry.container-registry-hostname %}/NAMESPACE/IMAGE-NAME:TAG ``` + For example: + ```shell docker push {% ifversion fpt or ghec %}ghcr.io{% elsif ghes > 3.4 %}{% data reusables.package_registry.container-registry-example-hostname %}{% endif %}/octocat/hello_docker:latest ``` + {% endif %} diff --git a/content/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions.md b/content/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions.md index 55c2322328..4b2f57982d 100644 --- a/content/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions.md +++ b/content/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions.md @@ -166,6 +166,7 @@ jobs: tags: | {% ifversion ghae %}docker.YOUR-HOSTNAME.com{% else %}docker.pkg.github.com{% endif %}/{% raw %}${{ github.repository }}/octo-image:${{ github.sha }}{% endraw %} ``` + {% endif %} The relevant settings are explained in the following table. For full details about each element in a workflow, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions)." diff --git a/content/packages/quickstart.md b/content/packages/quickstart.md index 8ed723af4b..c78e5f33f8 100644 --- a/content/packages/quickstart.md +++ b/content/packages/quickstart.md @@ -20,18 +20,23 @@ In this guide, you'll create a {% data variables.product.prodname_actions %} wor 1. Create a new repository on {% data variables.product.prodname_dotcom %}, adding the `.gitignore` for Node. For more information, see "[AUTOTITLE](/repositories/creating-and-managing-repositories/creating-a-new-repository)." 2. 
Clone the repository to your local machine. + ```shell git clone https://{% ifversion ghes or ghae %}YOUR-HOSTNAME{% else %}github.com{% endif %}/YOUR-USERNAME/YOUR-REPOSITORY.git cd YOUR-REPOSITORY ``` + 3. Create an `index.js` file and add a basic alert to say "Hello world!" {% raw %} + ```javascript copy console.log("Hello, World!"); ``` + {% endraw %} 4. Initialize an npm package with `npm init`. In the package initialization wizard, enter your package with the name: _`@YOUR-USERNAME/YOUR-REPOSITORY`_, and set the test script to `exit 0`. This will generate a `package.json` file with information about your package. {% raw %} + ```shell $ npm init ... @@ -40,16 +45,20 @@ In this guide, you'll create a {% data variables.product.prodname_actions %} wor test command: exit 0 ... ``` + {% endraw %} 5. Run `npm install` to generate the `package-lock.json` file, then commit and push your changes to {% data variables.product.prodname_dotcom %}. + ```shell npm install git add index.js package.json package-lock.json git commit -m "initialize npm package" git push ``` + 6. Create a `.github/workflows` directory. In that directory, create a file named `release-package.yml`. 7. Copy the following YAML content into the `release-package.yml` file{% ifversion ghes or ghae %}, replacing `YOUR-HOSTNAME` with the name of your enterprise{% endif %}. + ```yaml copy name: Node.js Package @@ -85,22 +94,28 @@ In this guide, you'll create a {% data variables.product.prodname_actions %} wor env: NODE_AUTH_TOKEN: ${% raw %}{{secrets.GITHUB_TOKEN}}{% endraw %} ``` + 8. Tell npm which scope and registry to publish packages to using one of the following methods: - Add an npm configuration file for the repository by creating a `.npmrc` file in the root directory with the contents: {% raw %} + ```shell @YOUR-USERNAME:registry=https://npm.pkg.github.com ``` + {% endraw %} - Edit the `package.json` file and specify the `publishConfig` key: {% raw %} + ```shell "publishConfig": { "@YOUR-USERNAME:registry": "https://npm.pkg.github.com" } ``` + {% endraw %} 9. Commit and push your changes to {% data variables.product.prodname_dotcom %}. + ```shell $ git add .github/workflows/release-package.yml # Also add the file you created or edited in the previous step. @@ -108,6 +123,7 @@ In this guide, you'll create a {% data variables.product.prodname_actions %} wor $ git commit -m "workflow to publish package" $ git push ``` + 10. The workflow that you created will run whenever a new release is created in your repository. If the tests pass, then the package will be published to {% data variables.product.prodname_registry %}. To test this out, navigate to the **Code** tab in your repository and create a new release. For more information, see "[Managing releases in a repository](/github/administering-a-repository/managing-releases-in-a-repository#creating-a-release)." diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry.md index eaede9d292..261daabfb4 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry.md @@ -125,6 +125,7 @@ If your instance has subdomain isolation disabled: ``` + {% endif %} ## Publishing a package @@ -138,6 +139,7 @@ For more information on creating a package, see the [maven.apache.org documentat 1. 
Edit the `distributionManagement` element of the _pom.xml_ file located in your package directory, replacing {% ifversion ghes or ghae %}HOSTNAME with the host name of {% data variables.location.product_location %}, {% endif %}`OWNER` with the name of the personal account or organization that owns the repository and `REPOSITORY` with the name of the repository containing your project.{% ifversion ghes %} If your instance has subdomain isolation enabled:{% endif %} + ```xml @@ -157,8 +159,10 @@ For more information on creating a package, see the [maven.apache.org documentat ```{% endif %} + {% data reusables.package_registry.checksum-maven-plugin %} 1. Publish the package. + ```shell mvn deploy ``` @@ -181,6 +185,7 @@ To install an Apache Maven package from {% data variables.product.prodname_regis ``` + {% data reusables.package_registry.checksum-maven-plugin %} 3. Install the package. diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-container-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-container-registry.md index 909c901b57..d39791d9c4 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-container-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-container-registry.md @@ -90,17 +90,20 @@ To connect a repository when publishing an image from the command line, and to e To ensure you're always using the same image, you can specify the exact container image version you want to pull by the `digest` SHA value. 1. To find the digest SHA value, use `docker inspect` or `docker pull` and copy the SHA value after `Digest:` + ```shell docker inspect {% data reusables.package_registry.container-registry-hostname %}/NAMESPACE/IMAGE_NAME ``` Replace `NAMESPACE` with the name of the personal account or organization to which the image is scoped. 2. Remove image locally as needed. + ```shell docker rmi {% data reusables.package_registry.container-registry-hostname %}/NAMESPACE/IMAGE_NAME:latest ``` 3. Pull the container image with `@YOUR_SHA_VALUE` after the image name. + ```shell docker pull {% data reusables.package_registry.container-registry-hostname %}/NAMESPACE/IMAGE_NAME@sha256:82jf9a84u29hiasldj289498uhois8498hjs29hkuhs ``` @@ -153,6 +156,7 @@ docker build -t hello_docker . ## Tagging container images 1. Find the ID for the Docker image you want to tag. + ```shell $ docker images > REPOSITORY TAG IMAGE ID CREATED SIZE @@ -161,6 +165,7 @@ docker build -t hello_docker . ``` 2. Tag your Docker image using the image ID and your desired image name and hosting destination. 
+ ```shell docker tag 38f737a91f39 {% data reusables.package_registry.container-registry-hostname %}/NAMESPACE/NEW_IMAGE_NAME:latest ``` diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-docker-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-docker-registry.md index 67af930db3..d873a560a1 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-docker-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-docker-registry.md @@ -52,9 +52,11 @@ To keep your credentials secure, we recommend you save your {% data variables.pr {% ifversion fpt or ghec %} {% raw %} + ```shell cat ~/TOKEN.txt | docker login https://docker.pkg.github.com -u USERNAME --password-stdin ``` + {% endraw %} {% endif %} @@ -63,17 +65,21 @@ cat ~/TOKEN.txt | docker login https://docker.pkg.github.com -u USERNAME < > > REPOSITORY TAG IMAGE ID CREATED SIZE > IMAGE_NAME VERSION IMAGE_ID 4 weeks ago 1.11MB ``` + 2. Using the Docker image ID, tag the docker image, replacing OWNER with the name of the personal account or organization that owns the repository, REPOSITORY with the name of the repository containing your project, IMAGE_NAME with name of the package or image,{% ifversion ghes or ghae %} HOSTNAME with the hostname of {% data variables.location.product_location %},{% endif %} and VERSION with package version at build time. {% ifversion fpt or ghec %} + ```shell docker tag IMAGE_ID docker.pkg.github.com/OWNER/REPOSITORY/IMAGE_NAME:VERSION ``` + {% else %} {% ifversion ghes %} If your instance has subdomain isolation enabled: {% endif %} + ```shell docker tag IMAGE_ID docker.HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION ``` + {% ifversion ghes %} If your instance has subdomain isolation disabled: + ```shell docker tag IMAGE_ID HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION ``` + {% endif %} {% endif %} 3. If you haven't already built a docker image for the package, build the image, replacing OWNER with the name of the personal account or organization that owns the repository, REPOSITORY with the name of the repository containing your project, IMAGE_NAME with name of the package or image, VERSION with package version at build time,{% ifversion ghes or ghae %} HOSTNAME with the hostname of {% data variables.location.product_location %},{% endif %} and PATH to the image if it isn't in the current working directory. {% ifversion fpt or ghec %} + ```shell docker build -t docker.pkg.github.com/OWNER/REPOSITORY/IMAGE_NAME:VERSION PATH ``` + {% else %} {% ifversion ghes %} If your instance has subdomain isolation enabled: {% endif %} + ```shell docker build -t docker.HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION PATH ``` + {% ifversion ghes %} If your instance has subdomain isolation disabled: + ```shell docker build -t HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION PATH ``` + {% endif %} {% endif %} 4. Publish the image to {% data variables.product.prodname_registry %}. 
{% ifversion fpt or ghec %} + ```shell docker push docker.pkg.github.com/OWNER/REPOSITORY/IMAGE_NAME:VERSION ``` + {% else %} {% ifversion ghes %} If your instance has subdomain isolation enabled: {% endif %} + ```shell docker push docker.HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION ``` + {% ifversion ghes %} If your instance has subdomain isolation disabled: + ```shell docker push HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION ``` + {% endif %} {% endif %} {% note %} @@ -176,6 +202,7 @@ These examples assume your instance has subdomain isolation enabled. You can publish version 1.0 of the `monalisa` image to the `octocat/octo-app` repository using an image ID. {% ifversion fpt or ghec %} + ```shell $ docker images @@ -209,6 +236,7 @@ $ docker push docker.HOSTNAME/octocat/octo-app/monalisa:1.0 You can publish a new Docker image for the first time and name it `monalisa`. {% ifversion fpt or ghec %} + ```shell # Build the image with docker.pkg.github.com/OWNER/REPOSITORY/IMAGE_NAME:VERSION # Assumes Dockerfile resides in the current working directory (.) @@ -219,6 +247,7 @@ $ docker push docker.pkg.github.com/octocat/octo-app/monalisa:1.0 ``` {% else %} + ```shell # Build the image with docker.HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:VERSION # Assumes Dockerfile resides in the current working directory (.) @@ -227,6 +256,7 @@ $ docker build -t docker.HOSTNAME/octocat/octo-app/monalisa:1.0 . # Push the image to {% data variables.product.prodname_registry %} $ docker push docker.HOSTNAME/octocat/octo-app/monalisa:1.0 ``` + {% endif %} ## Downloading an image @@ -236,22 +266,28 @@ $ docker push docker.HOSTNAME/octocat/octo-app/monalisa:1.0 You can use the `docker pull` command to install a docker image from {% data variables.product.prodname_registry %}, replacing OWNER with the name of the personal account or organization that owns the repository, REPOSITORY with the name of the repository containing your project, IMAGE_NAME with name of the package or image,{% ifversion ghes or ghae %} HOSTNAME with the host name of {% data variables.location.product_location %}, {% endif %} and TAG_NAME with tag for the image you want to install. {% ifversion fpt or ghec %} + ```shell docker pull docker.pkg.github.com/OWNER/REPOSITORY/IMAGE_NAME:TAG_NAME ``` + {% else %} {% ifversion ghes %} If your instance has subdomain isolation enabled: {% endif %} + ```shell docker pull docker.HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:TAG_NAME ``` + {% ifversion ghes %} If your instance has subdomain isolation disabled: + ```shell docker pull HOSTNAME/OWNER/REPOSITORY/IMAGE_NAME:TAG_NAME ``` + {% endif %} {% endif %} diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-gradle-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-gradle-registry.md index a2f9fbafea..ec014902a2 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-gradle-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-gradle-registry.md @@ -174,12 +174,15 @@ To use a published package from {% data variables.product.prodname_registry %}, 2. Add the package dependencies to your _build.gradle_ file (Gradle Groovy) or _build.gradle.kts_ file (Kotlin DSL) file. 
Example using Gradle Groovy: + ```shell dependencies { implementation 'com.example:package' } ``` + Example using Kotlin DSL: + ```shell dependencies { implementation("com.example:package") @@ -189,6 +192,7 @@ To use a published package from {% data variables.product.prodname_registry %}, 3. Add the repository to your _build.gradle_ file (Gradle Groovy) or _build.gradle.kts_ file (Kotlin DSL) file. Example using Gradle Groovy: + ```shell repositories { maven { @@ -200,7 +204,9 @@ To use a published package from {% data variables.product.prodname_registry %}, } } ``` + Example using Kotlin DSL: + ```shell repositories { maven { diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-npm-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-npm-registry.md index 3c196f3167..e69c005d85 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-npm-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-npm-registry.md @@ -66,6 +66,7 @@ If your instance has subdomain isolation disabled: ```shell //HOSTNAME/_registry/npm/:_authToken=TOKEN ``` + {% endif %} To authenticate by logging in to npm, use the `npm login` command, replacing USERNAME with your {% data variables.product.prodname_dotcom %} username, TOKEN with your {% data variables.product.pat_v1 %}, and PUBLIC-EMAIL-ADDRESS with your email address. @@ -93,6 +94,7 @@ $ npm login --scope=@NAMESPACE --auth-type=legacy --registry=https://HOSTNAME/_r > Username: USERNAME > Password: TOKEN ``` + {% endif %} ## Publishing a package @@ -150,18 +152,22 @@ You can use `publishConfig` element in the _package.json_ file to specify the re {% ifversion ghes %} If your instance has subdomain isolation enabled: {% endif %} + ```shell "publishConfig": { "registry": "https://{% ifversion fpt or ghec %}npm.pkg.github.com{% else %}npm. HOSTNAME/{% endif %}" }, ``` + {% ifversion ghes %} If your instance has subdomain isolation disabled: + ```shell "publishConfig": { "registry": "https://HOSTNAME/_registry/npm/" }, ``` + {% endif %} {% data reusables.package_registry.verify_repository_field %} {% data reusables.package_registry.publish_package %} @@ -208,6 +214,7 @@ By default, you can only use npm packages hosted on your enterprise, and you wil } } ``` + 5. Install the package. 
```shell @@ -234,6 +241,7 @@ If your instance has subdomain isolation disabled: @NAMESPACE:registry=https://HOSTNAME/_registry/npm @NAMESPACE:registry=https://HOSTNAME/_registry/npm ``` + {% endif %} {% ifversion ghes %} diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-nuget-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-nuget-registry.md index 2700608fdb..bd6f05a170 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-nuget-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-nuget-registry.md @@ -98,6 +98,7 @@ If your instance has subdomain isolation disabled: ``` + {% endif %} ## Publishing a package @@ -119,15 +120,19 @@ If you specify a `RepositoryURL` in your `nuget.config` file, the published pack If you don't already have a {% data variables.product.pat_generic %} to use for your account on {% ifversion ghae %}{% data variables.product.product_name %}{% else %}{% data variables.location.product_location %}{% endif %}, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." 1. Create a new project. Replace `PROJECT_NAME` with the name you'd like to give the project. + ```shell dotnet new console --name PROJECT_NAME ``` + 2. Package the project. + ```shell dotnet pack --configuration Release ``` 3. Publish the package using your {% data variables.product.pat_generic %} as the API key. Replace `PROJECT_NAME` with the name of the project, `1.0.0` with the version number of the package, and `YOUR_GITHUB_PAT` with your {% data variables.product.pat_generic %}. + ```shell dotnet nuget push "bin/Release/PROJECT_NAME.1.0.0.nupkg" --api-key YOUR_GITHUB_PAT --source "github" ``` @@ -142,14 +147,18 @@ When publishing, {% ifversion packages-nuget-v2 %}if you are linking your packag {% data reusables.package_registry.authenticate-step %} 2. Create a new project. Replace `PROJECT_NAME` with the name you'd like to give the project. + ```shell dotnet new console --name PROJECT_NAME ``` + 3. Add your project's specific information to your project's file, which ends in _.csproj_. Make sure to replace: + - `1.0.0` with the version number of the package. - `OWNER` with the name of the personal account or organization that owns the repository to which you want to {% ifversion packages-nuget-v2 %}link your package{% else %}publish your package{% endif %}. - `REPOSITORY` with the name of the repository to which you want to connect your package.{% ifversion ghes or ghae %} - `HOSTNAME` with the host name for {% data variables.location.product_location %}.{% endif %} + ``` xml @@ -166,12 +175,15 @@ When publishing, {% ifversion packages-nuget-v2 %}if you are linking your packag ``` + 4. Package the project. + ```shell dotnet pack --configuration Release ``` 5. Publish the package using the `key` you specified in the _nuget.config_ file. Replace `PROJECT_NAME` with the name of the project, and replace `1.0.0` with the version number of the package. + ```shell dotnet nuget push "bin/Release/PROJECT_NAME.1.0.0.nupkg" --source "github" ``` @@ -225,6 +237,7 @@ Using packages from {% data variables.product.prodname_dotcom %} in your project {% data reusables.package_registry.authenticate-step %} 2. To use a package, add `ItemGroup` and configure the `PackageReference` field in the _.csproj_ project file. 
Replace the `PACKAGE_NAME` value in `Include="PACKAGE_NAME"` with your package dependency, and replace the `X.X.X` value in `Version="X.X.X"` with the version of the package you want to use: + ``` xml @@ -247,6 +260,7 @@ Using packages from {% data variables.product.prodname_dotcom %} in your project ``` 3. Install the packages with the `restore` command. + ```shell dotnet restore ``` diff --git a/content/packages/working-with-a-github-packages-registry/working-with-the-rubygems-registry.md b/content/packages/working-with-a-github-packages-registry/working-with-the-rubygems-registry.md index 85157f292a..a66a3822e8 100644 --- a/content/packages/working-with-a-github-packages-registry/working-with-the-rubygems-registry.md +++ b/content/packages/working-with-a-github-packages-registry/working-with-the-rubygems-registry.md @@ -75,6 +75,7 @@ To install gems, you need to authenticate to {% data variables.product.prodname_ {% endif %} If you would like your package to be available globally, you can run the following command to add your registry as a source. + ```shell gem sources --add https://USERNAME:TOKEN@{% ifversion fpt or ghec %}rubygems.pkg.github.com{% else %}REGISTRY_URL{% endif %}/NAMESPACE/ ``` @@ -92,10 +93,13 @@ bundle config https://{% ifversion fpt or ghec %}rubygems.pkg.github.com{% else {% data reusables.package_registry.auto-inherit-permissions-note %} {% data reusables.package_registry.authenticate-step %} + 1. Build the package from the _gemspec_ to create the _.gem_ package. Replace `GEM_NAME` with the name of your gem. + ``` gem build GEM_NAME.gemspec ``` + 1. Publish a package to {% data variables.product.prodname_registry %}, replacing `NAMESPACE` with the name of the personal account or organization {% ifversion packages-rubygems-v2 %}to which the package will be scoped{% else %}that owns the repository containing your project{% endif %} and `GEM_NAME` with the name of your gem package.{% ifversion ghes %} Replace `REGISTRY_URL` with the URL for your instance's Rubygems registry. If your instance has subdomain isolation enabled, use `rubygems.HOSTNAME`. If your instance has subdomain isolation disabled, use `HOSTNAME/_registry/rubygems`. In either case, replace `HOSTNAME` with the host name of your {% data variables.product.prodname_ghe_server %} instance.{% elsif ghae %} Replace `REGISTRY_URL` with the URL for your instance's Rubygems registry, `rubygems.HOSTNAME`. Replace `HOSTNAME` with the hostname of {% data variables.location.product_location %}.{% endif %} {% note %} @@ -164,6 +168,7 @@ You can use gems from {% data variables.product.prodname_registry %} much like y ``` 4. Install the package: + ``` gem install GEM_NAME --version "0.1.1" ``` diff --git a/content/pages/configuring-a-custom-domain-for-your-github-pages-site/managing-a-custom-domain-for-your-github-pages-site.md b/content/pages/configuring-a-custom-domain-for-your-github-pages-site/managing-a-custom-domain-for-your-github-pages-site.md index f4913496e7..1517f1a40e 100644 --- a/content/pages/configuring-a-custom-domain-for-your-github-pages-site/managing-a-custom-domain-for-your-github-pages-site.md +++ b/content/pages/configuring-a-custom-domain-for-your-github-pages-site/managing-a-custom-domain-for-your-github-pages-site.md @@ -60,6 +60,7 @@ To set up a `www` or custom subdomain, such as `www.example.com` or `blog.exampl {% indented_data_reference reusables.pages.wildcard-dns-warning spaces=3 %} {% data reusables.command_line.open_the_multi_os_terminal %} 6. 
To confirm that your DNS record is configured correctly, use the `dig` command, replacing _WWW.EXAMPLE.COM_ with your subdomain. + ```shell $ dig WWW.EXAMPLE.COM +nostats +nocomments +nocmd > ;WWW.EXAMPLE.COM. IN A > YOUR-USERNAME.github.io. 43192 IN CNAME GITHUB-PAGES-SERVER . > GITHUB-PAGES-SERVER . 22 IN A 192.0.2.1 ``` + {% data reusables.pages.build-locally-download-cname %} {% data reusables.pages.enforce-https-custom-domain %} @@ -131,6 +133,7 @@ After you configure the apex domain, you must configure a CNAME record with your 1. Navigate to your DNS provider and create a `CNAME` record that points `www.example.com` to the default domain for your site: `<user>.github.io` or `<organization>.github.io`. Do not include the repository name. {% data reusables.pages.contact-dns-provider %} {% data reusables.pages.default-domain-information %} 2. To confirm that your DNS record is configured correctly, use the `dig` command, replacing _WWW.EXAMPLE.COM_ with your `www` subdomain variant. + ```shell $ dig WWW.EXAMPLE.COM +nostats +nocomments +nocmd > ;WWW.EXAMPLE.COM IN A diff --git a/content/pages/configuring-a-custom-domain-for-your-github-pages-site/verifying-your-custom-domain-for-github-pages.md index 43af01840d..e544bd21e5 100644 --- a/content/pages/configuring-a-custom-domain-for-your-github-pages-site/verifying-your-custom-domain-for-github-pages.md +++ b/content/pages/configuring-a-custom-domain-for-your-github-pages-site/verifying-your-custom-domain-for-github-pages.md @@ -28,9 +28,11 @@ It's also possible to verify a domain for your organization{% ifversion ghec %} 1. In the "Code, planning, and automation" section of the sidebar, click **{% octicon "browser" aria-hidden="true" %} Pages**. {% data reusables.pages.settings-verify-domain-setup %} 1. Wait for your DNS configuration to change. This may be immediate or take up to 24 hours. You can confirm the change to your DNS configuration by running the `dig` command on the command line. In the command below, replace `USERNAME` with your username and `example.com` with the domain you're verifying. If your DNS configuration has updated, you should see your new TXT record in the output. + ``` dig _github-pages-challenge-USERNAME.example.com +nostats +nocomments +nocmd TXT ``` + {% data reusables.pages.settings-verify-domain-confirm %} ## Verifying a domain for your organization site @@ -42,7 +44,9 @@ Organization owners can verify custom domains for their organization. 1. In the "Code, planning, and automation" section of the sidebar, click **{% octicon "browser" aria-hidden="true" %} Pages**. {% data reusables.pages.settings-verify-domain-setup %} 1. Wait for your DNS configuration to change. This may be immediate or take up to 24 hours. You can confirm the change to your DNS configuration by running the `dig` command on the command line. In the command below, replace `ORGANIZATION` with the name of your organization and `example.com` with the domain you're verifying. If your DNS configuration has updated, you should see your new TXT record in the output.
+ ``` dig _github-pages-challenge-ORGANIZATION.example.com +nostats +nocomments +nocmd TXT ``` + {% data reusables.pages.settings-verify-domain-confirm %} diff --git a/content/pages/getting-started-with-github-pages/creating-a-custom-404-page-for-your-github-pages-site.md b/content/pages/getting-started-with-github-pages/creating-a-custom-404-page-for-your-github-pages-site.md index 0575fdc8c7..b7bcca528b 100644 --- a/content/pages/getting-started-with-github-pages/creating-a-custom-404-page-for-your-github-pages-site.md +++ b/content/pages/getting-started-with-github-pages/creating-a-custom-404-page-for-your-github-pages-site.md @@ -21,11 +21,13 @@ shortTitle: Create custom 404 page {% data reusables.files.add-file %} 3. In the file name field, type `404.html` or `404.md`. 4. If you named your file `404.md`, add the following YAML front matter to the beginning of the file: + ```yaml --- permalink: /404.html --- ``` + 5. Below the YAML front matter, if present, add the content you want to display on your 404 page. {% data reusables.files.write_commit_message %} {% data reusables.files.choose-commit-email %} diff --git a/content/pages/setting-up-a-github-pages-site-with-jekyll/about-jekyll-build-errors-for-github-pages-sites.md b/content/pages/setting-up-a-github-pages-site-with-jekyll/about-jekyll-build-errors-for-github-pages-sites.md index 1c806342ce..e69391766c 100644 --- a/content/pages/setting-up-a-github-pages-site-with-jekyll/about-jekyll-build-errors-for-github-pages-sites.md +++ b/content/pages/setting-up-a-github-pages-site-with-jekyll/about-jekyll-build-errors-for-github-pages-sites.md @@ -73,16 +73,19 @@ We recommend testing your site locally, which allows you to see build error mess You can configure a third-party service, such as [Travis CI](https://travis-ci.com/), to display error messages after each commit. 1. If you haven't already, add a file called _Gemfile_ in the root of your publishing source, with the following content: + ```ruby source `https://rubygems.org` gem `github-pages` ``` 2. Configure your site's repository for the testing service of your choice. For example, to use [Travis CI](https://travis-ci.com/), add a file named _.travis.yml_ in the root of your publishing source, with the following content: + ```yaml language: ruby rvm: - 2.3 script: "bundle exec jekyll build" ``` + 3. You may need to activate your repository with the third-party testing service. For more information, see your testing service's documentation. diff --git a/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-a-theme-to-your-github-pages-site-using-jekyll.md b/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-a-theme-to-your-github-pages-site-using-jekyll.md index d0868c263a..fcf3302e97 100644 --- a/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-a-theme-to-your-github-pages-site-using-jekyll.md +++ b/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-a-theme-to-your-github-pages-site-using-jekyll.md @@ -46,12 +46,14 @@ People with write permissions for a repository can add a theme to a {% data vari {% data reusables.pages.navigate-publishing-source %} 1. Create a new file called _/assets/css/style.scss_. 2. Add the following content to the top of the file: + ```scss --- --- @import "{% raw %}{{ site.theme }}{% endraw %}"; ``` + 3. Add any custom CSS or Sass (including imports) you'd like immediately after the `@import` line. 
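As a concrete illustration of the previous step, the sketch below appends a small override after the `@import` line in _/assets/css/style.scss_, run from the root of the publishing source. The `.site-header` selector and color value are hypothetical and depend on the theme you use.

```shell
# Illustrative only: run from the root of the publishing source. Appends a
# hypothetical override (.site-header) after the @import line added earlier.
cat >> assets/css/style.scss << 'EOF'

.site-header {
  background-color: #24292f;
}
EOF
```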
## Customizing your theme's HTML layout diff --git a/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-content-to-your-github-pages-site-using-jekyll.md b/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-content-to-your-github-pages-site-using-jekyll.md index 837744dc3f..a0b1356001 100644 --- a/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-content-to-your-github-pages-site-using-jekyll.md +++ b/content/pages/setting-up-a-github-pages-site-with-jekyll/adding-content-to-your-github-pages-site-using-jekyll.md @@ -37,11 +37,13 @@ Your theme includes default layouts, includes, and stylesheets that will automat {% data reusables.pages.navigate-publishing-source %} 3. In the root of your publishing source, create a new file for your page called `PAGE-NAME.md`, replacing PAGE-NAME with a meaningful filename for the page. 4. Add the following YAML frontmatter to the top of the file, replacing PAGE-TITLE with the page's title and URL-PATH with a path you want for the page's URL. For example, if the base URL of your site is `https://octocat.github.io` and your URL-PATH is `/about/contact/`, your page will be located at `https://octocat.github.io/about/contact`. + ```shell layout: page title: "PAGE-TITLE" permalink: /URL-PATH ``` + 5. Below the frontmatter, add content for your page. {% data reusables.files.write_commit_message %} {% data reusables.files.choose-commit-email %} @@ -60,12 +62,14 @@ Your theme includes default layouts, includes, and stylesheets that will automat 3. Navigate to the `_posts` directory. 4. Create a new file called `YYYY-MM-DD-NAME-OF-POST.md`, replacing YYYY-MM-DD with the date of your post and NAME-OF-POST with the name of your post. 5. Add the following YAML frontmatter to the top of the file, including the post's title enclosed in quotation marks, the date and time for the post in YYYY-MM-DD hh:mm:ss -0000 format, and as many categories as you want for your post. + ```shell layout: post title: "POST-TITLE" date: YYYY-MM-DD hh:mm:ss -0000 categories: CATEGORY-1 CATEGORY-2 ``` + 5. Below the frontmatter, add content for your post. {% data reusables.files.write_commit_message %} {% data reusables.files.choose-commit-email %} diff --git a/content/pages/setting-up-a-github-pages-site-with-jekyll/creating-a-github-pages-site-with-jekyll.md b/content/pages/setting-up-a-github-pages-site-with-jekyll/creating-a-github-pages-site-with-jekyll.md index c8dfaf3422..9a39c0720d 100644 --- a/content/pages/setting-up-a-github-pages-site-with-jekyll/creating-a-github-pages-site-with-jekyll.md +++ b/content/pages/setting-up-a-github-pages-site-with-jekyll/creating-a-github-pages-site-with-jekyll.md @@ -44,40 +44,52 @@ Before you can use Jekyll to create a {% data variables.product.prodname_pages % {% data reusables.command_line.open_the_multi_os_terminal %} 1. If you don't already have a local copy of your repository, navigate to the location where you want to store your site's source files, replacing PARENT-FOLDER with the folder you want to contain the folder for your repository. + ```shell cd PARENT-FOLDER ``` + 1. If you haven't already, initialize a local Git repository, replacing REPOSITORY-NAME with the name of your repository. + ```shell $ git init REPOSITORY-NAME > Initialized empty Git repository in /Users/octocat/my-site/.git/ # Creates a new folder on your computer, initialized as a Git repository ``` + 1. Change directories to the repository. 
+ ```shell $ cd REPOSITORY-NAME # Changes the working directory ``` + {% data reusables.pages.decide-publishing-source %} {% data reusables.pages.navigate-publishing-source %} For example, if you chose to publish your site from the `docs` folder on the default branch, create and change directories to the `docs` folder. + ```shell $ mkdir docs # Creates a new folder called docs $ cd docs ``` + If you chose to publish your site from the `gh-pages` branch, create and checkout the `gh-pages` branch. + ```shell $ git checkout --orphan gh-pages # Creates a new branch, with no history or contents, called gh-pages, and switches to the gh-pages branch $ git rm -rf . # Removes the contents from your default branch from the working directory ``` + 1. To create a new Jekyll site, use the `jekyll new` command: + ```shell $ jekyll new --skip-bundle . # Creates a Jekyll site in the current directory ``` + 1. Open the Gemfile that Jekyll created. 1. Add "#" to the beginning of the line that starts with `gem "jekyll"` to comment out this line. 1. Add the `github-pages` gem by editing the line starting with `# gem "github-pages"`. Change this line to: @@ -92,18 +104,23 @@ Before you can use Jekyll to create a {% data variables.product.prodname_pages % 1. Save and close the Gemfile. 1. From the command line, run `bundle install`. 1. Optionally, make any necessary edits to the `_config.yml` file. This is required for relative paths when the repository is hosted in a subdirectory. For more information, see "[AUTOTITLE](/get-started/using-git/splitting-a-subfolder-out-into-a-new-repository)." + ```yml domain: my-site.github.io # if you want to force HTTPS, specify the domain without the http at the start, e.g. example.com url: https://my-site.github.io # the base hostname and protocol for your site, e.g. http://example.com baseurl: /REPOSITORY-NAME/ # place folder name if the site is served in a subfolder ``` + 1. Optionally, test your site locally. For more information, see "[Testing your {% data variables.product.prodname_pages %} site locally with Jekyll](/articles/testing-your-github-pages-site-locally-with-jekyll)." 1. Add and commit your work. + ```shell git add . git commit -m 'Initial GitHub pages site with Jekyll' ``` + 1. Add your repository on {% ifversion ghae %}{% data variables.product.product_name %}{% else %}{% data variables.location.product_location %}{% endif %} as a remote, replacing {% ifversion ghes or ghae %}HOSTNAME with your enterprise's hostname,{% endif %} USER with the account that owns the repository{% ifversion ghes or ghae %},{% endif %} and REPOSITORY with the name of the repository. + ```shell {% ifversion fpt or ghec %} $ git remote add origin https://github.com/USER/REPOSITORY.git @@ -111,10 +128,13 @@ Before you can use Jekyll to create a {% data variables.product.prodname_pages % $ git remote add origin https://HOSTNAME/USER/REPOSITORY.git {% endif %} ``` + 1. Push the repository to {% data variables.product.product_name %}, replacing BRANCH with the name of the branch you're working on. 
+ ```shell git push -u origin BRANCH ``` + {% data reusables.pages.configure-publishing-source %} {% data reusables.pages.navigate-site-repo %} {% data reusables.repositories.sidebar-settings %} diff --git a/content/pages/setting-up-a-github-pages-site-with-jekyll/testing-your-github-pages-site-locally-with-jekyll.md b/content/pages/setting-up-a-github-pages-site-with-jekyll/testing-your-github-pages-site-locally-with-jekyll.md index 56d24e642b..db5e6a6f24 100644 --- a/content/pages/setting-up-a-github-pages-site-with-jekyll/testing-your-github-pages-site-locally-with-jekyll.md +++ b/content/pages/setting-up-a-github-pages-site-with-jekyll/testing-your-github-pages-site-locally-with-jekyll.md @@ -35,6 +35,7 @@ Before you can use Jekyll to test a site, you must: {% data reusables.pages.navigate-publishing-source %} 3. Run `bundle install`. 3. Run your Jekyll site locally. + ```shell $ bundle exec jekyll serve > Configuration file: /Users/octocat/my-site/_config.yml @@ -48,6 +49,7 @@ Before you can use Jekyll to test a site, you must: > Server address: http://127.0.0.1:4000/ > Server running... press ctrl-c to stop. ``` + {% note %} **Note:** If you've installed Ruby 3.0 or later (which you may have if you installed the default version via Homebrew), you might get an error at this step. That's because these versions of Ruby no longer come with `webrick` installed. diff --git a/content/pages/setting-up-a-github-pages-site-with-jekyll/troubleshooting-jekyll-build-errors-for-github-pages-sites.md b/content/pages/setting-up-a-github-pages-site-with-jekyll/troubleshooting-jekyll-build-errors-for-github-pages-sites.md index 57b37a2e52..a4d1c90a02 100644 --- a/content/pages/setting-up-a-github-pages-site-with-jekyll/troubleshooting-jekyll-build-errors-for-github-pages-sites.md +++ b/content/pages/setting-up-a-github-pages-site-with-jekyll/troubleshooting-jekyll-build-errors-for-github-pages-sites.md @@ -91,6 +91,7 @@ This error means that your code references a symlinked file that does not exist This error means that you used non-Latin characters, like `日本語`, without telling the computer to expect these symbols. To troubleshoot, force UTF-8 encoding by adding the following line to your __config.yml_ file: + ```yaml encoding: UTF-8 ``` diff --git a/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts.md b/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts.md index f22bf243e8..9ef9681510 100644 --- a/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts.md +++ b/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts.md @@ -27,6 +27,7 @@ To resolve a merge conflict, you must manually edit the conflicted file to selec - For all other types of merge conflicts, you must resolve the merge conflict in a local clone of the repository and push the change to your branch on {% data variables.product.product_name %}. You can use the command line or a tool like [{% data variables.product.prodname_desktop %}](https://desktop.github.com/) to push the change. For more information, see "[AUTOTITLE](/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-using-the-command-line)." If you have a merge conflict on the command line, you cannot push your local changes to {% data variables.product.product_name %} until you resolve the merge conflict locally on your computer. 
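A minimal sketch of that local resolution flow, using placeholder repository, file, and branch names (the linked article walks through each step in detail):

```shell
# Sketch only: REPOSITORY-NAME, styleguide.md, and BRANCH-NAME are placeholders.
cd REPOSITORY-NAME
git status                    # lists the files with merge conflicts
# Edit each conflicted file and remove the <<<<<<<, =======, and >>>>>>> markers.
git add styleguide.md         # stage the resolved file
git commit -m "Resolve merge conflict in styleguide.md"
git push origin BRANCH-NAME   # the push now succeeds
```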
If you try merging branches on the command line that have a merge conflict, you'll get an error message. For more information, see "[AUTOTITLE](/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-using-the-command-line)." + ```shell $ git merge BRANCH-NAME > Auto-merging styleguide.md diff --git a/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-using-the-command-line.md b/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-using-the-command-line.md index 4e4915081a..b072d77f5c 100644 --- a/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-using-the-command-line.md +++ b/content/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-using-the-command-line.md @@ -32,10 +32,13 @@ For example, if you and another person both edited the file _styleguide.md_ on t {% data reusables.command_line.open_the_multi_os_terminal %} 2. Navigate into the local Git repository that has the merge conflict. + ```shell cd REPOSITORY-NAME ``` + 3. Generate a list of the files affected by the merge conflict. In this example, the file _styleguide.md_ has a merge conflict. + ```shell $ git status > # On branch branch-b @@ -49,6 +52,7 @@ For example, if you and another person both edited the file _styleguide.md_ on t > # > no changes added to commit (use "git add" and/or "git commit -a") ``` + 4. Open your favorite text editor, such as [{% data variables.product.prodname_vscode %}](https://code.visualstudio.com/), and navigate to the file that has merge conflicts. 5. To see the beginning of the merge conflict in your file, search the file for the conflict marker `<<<<<<<`. When you open the file in your text editor, you'll see the changes from the HEAD or base branch after the line `<<<<<<< HEAD`. Next, you'll see `=======`, which divides your changes from the changes in the other branch, followed by `>>>>>>> BRANCH-NAME`. In this example, one person wrote "open an issue" in the base or HEAD branch and another person wrote "ask your question in IRC" in the compare branch or `branch-a`. @@ -60,16 +64,21 @@ For example, if you and another person both edited the file _styleguide.md_ on t ask your question in IRC. >>>>>>> branch-a ``` + {% data reusables.pull_requests.decide-how-to-resolve-competing-line-change-merge-conflict %} In this example, both changes are incorporated into the final merge: ```shell If you have questions, please open an issue or ask in our IRC channel if it's more urgent. ``` + 7. Add or stage your changes. + ```shell git add . ``` + 8. Commit your changes with a comment. + ```shell git commit -m "Resolved merge conflict by incorporating both suggestions." ``` @@ -84,10 +93,13 @@ For example, if you edited a file, such as _README.md_, and another person remov {% data reusables.command_line.open_the_multi_os_terminal %} 2. Navigate into the local Git repository that has the merge conflict. + ```shell cd REPOSITORY-NAME ``` + 2. Generate a list of the files affected by the merge conflict. In this example, the file _README.md_ has a merge conflict. + ```shell $ git status > # On branch main @@ -104,20 +116,26 @@ For example, if you edited a file, such as _README.md_, and another person remov > # > # no changes added to commit (use "git add" and/or "git commit -a") ``` + 3. 
Open your favorite text editor, such as [{% data variables.product.prodname_vscode %}](https://code.visualstudio.com/), and navigate to the file that has merge conflicts. 6. Decide if you want to keep the removed file. You may want to view the latest changes made to the removed file in your text editor. To add the removed file back to your repository: + ```shell git add README.md ``` + To remove this file from your repository: + ```shell $ git rm README.md > README.md: needs merge > rm 'README.md' ``` + 7. Commit your changes with a comment. + ```shell $ git commit -m "Resolved merge conflict by keeping README.md file." > [branch-d 6f89e49] Merge branch 'branch-c' into branch-d diff --git a/content/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks.md b/content/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks.md index c0713c6faf..978f93a24d 100644 --- a/content/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks.md +++ b/content/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks.md @@ -61,13 +61,16 @@ You can also skip workflow runs triggered by the `push` and `pull_request` event Alternatively, to skip or request _all_ checks for your commit, add one of the following trailer lines to the end of your commit message: - To _skip checks_ for a commit, type your commit message and a short, meaningful description of your changes. After your commit description, before the closing quotation, add two empty lines followed by `skip-checks: true`: + ```shell $ git commit -m "Update README > > skip-checks: true" ``` + - To _request_ checks for a commit, type your commit message and a short, meaningful description of your changes. After your commit description, before the closing quotation, add two empty lines followed by `request-checks: true`: + ```shell $ git commit -m "Refactor usability tests > diff --git a/content/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/committing-changes-to-a-pull-request-branch-created-from-a-fork.md b/content/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/committing-changes-to-a-pull-request-branch-created-from-a-fork.md index 4ccfeedb1e..5b67df8780 100644 --- a/content/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/committing-changes-to-a-pull-request-branch-created-from-a-fork.md +++ b/content/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/committing-changes-to-a-pull-request-branch-created-from-a-fork.md @@ -38,14 +38,19 @@ Only the user who created the pull request can give you permission to push commi {% endtip %} 4. Change the current working directory to the location where you want to download the cloned directory. + ```shell cd open-source-projects ``` + 5. Type `git clone`, and then paste the URL you copied in Step 3. + ```shell git clone https://{% data variables.command_line.codeblock %}/USERNAME/FORK-OF-THE-REPOSITORY ``` + 6. Press **Enter**. Your local clone will be created. + ```shell $ git clone https://{% data variables.command_line.codeblock %}/USERNAME/FORK-OF-THE-REPOSITORY > Cloning into `FORK-OF-THE-REPOSITORY`... 
@@ -54,18 +59,22 @@ Only the user who created the pull request can give you permission to push commi > remove: Total 10 (delta 1), reused 10 (delta 1) > Unpacking objects: 100% (10/10), done. ``` + {% tip %} **Tip:** The error message "fatal: destination path 'REPOSITORY-NAME' already exists and is not an empty directory" means that your current working directory already contains a repository with the same name. To resolve the error, you must clone the fork in a different directory. {% endtip %} 7. Navigate into your new cloned repository. + ```shell cd FORK-OF-THE-REPOSITORY ``` + 7. Switch branches to the compare branch of the pull request where the original changes were made. If you navigate to the original pull request, you'll see the compare branch at the top of the pull request. In this example, the compare branch is `test-branch`: + ```shell git checkout TEST-BRANCH ``` @@ -77,6 +86,7 @@ Only the user who created the pull request can give you permission to push commi {% endtip %} 8. At this point, you can do anything you want with this branch. You can push new commits to it, run some local tests, or merge other branches into the branch. Make modifications as you like. 9. After you commit your changes to the head branch of the pull request you can push your changes up to the original pull request directly. In this example, the head branch is `test-branch`: + ```shell $ git push origin test-branch > Counting objects: 32, done. diff --git a/content/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/checking-out-pull-requests-locally.md b/content/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/checking-out-pull-requests-locally.md index 38c0ee261e..ca80546337 100644 --- a/content/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/checking-out-pull-requests-locally.md +++ b/content/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/checking-out-pull-requests-locally.md @@ -70,16 +70,21 @@ Anyone can work with a previously opened pull request to continue working on it, {% data reusables.command_line.open_the_multi_os_terminal %} 5. Fetch the reference to the pull request based on its ID number, creating a new branch in the process. + ```shell git fetch origin pull/ID/head:BRANCH_NAME ``` + 6. Switch to the new branch that's based on this pull request: + ```shell [main] $ git checkout BRANCH_NAME > Switched to a new branch 'BRANCH_NAME' ``` + 7. At this point, you can do anything you want with this branch. You can run some local tests, or merge other branches into the branch. 8. When you're ready, you can push the new branch up: + ```shell [pull-inactive-pull-request] $ git push origin BRANCH_NAME > Counting objects: 32, done. @@ -90,11 +95,13 @@ Anyone can work with a previously opened pull request to continue working on it, > To https://{% data variables.command_line.codeblock %}/USERNAME/REPOSITORY.git > * [new branch] BRANCH_NAME -> BRANCH_NAME ``` + 9. [Create a new pull request](/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) with your new branch. ## Error: Failed to push some refs The remote `refs/pull/` namespace is _read-only_. If you try to push any commits there, you'll see this error: + ```shell ! 
[remote rejected] HEAD -> refs/pull/1/head (deny updating a hidden ref) error: failed to push some refs to 'git@github.local:USERNAME/REPOSITORY.git' diff --git a/content/pull-requests/collaborating-with-pull-requests/working-with-forks/configuring-a-remote-repository-for-a-fork.md b/content/pull-requests/collaborating-with-pull-requests/working-with-forks/configuring-a-remote-repository-for-a-fork.md index 1921bc4d78..c9639198e7 100644 --- a/content/pull-requests/collaborating-with-pull-requests/working-with-forks/configuring-a-remote-repository-for-a-fork.md +++ b/content/pull-requests/collaborating-with-pull-requests/working-with-forks/configuring-a-remote-repository-for-a-fork.md @@ -18,16 +18,21 @@ shortTitle: Configure a remote repository --- {% data reusables.command_line.open_the_multi_os_terminal %} 2. List the current configured remote repository for your fork. + ```shell $ git remote -v > origin https://{% data variables.command_line.codeblock %}/YOUR_USERNAME/YOUR_FORK.git (fetch) > origin https://{% data variables.command_line.codeblock %}/YOUR_USERNAME/YOUR_FORK.git (push) ``` + 3. Specify a new remote _upstream_ repository that will be synced with the fork. + ```shell git remote add upstream https://{% data variables.command_line.codeblock %}/ORIGINAL_OWNER/ORIGINAL_REPOSITORY.git ``` + 4. Verify the new upstream repository you've specified for your fork. + ```shell $ git remote -v > origin https://{% data variables.command_line.codeblock %}/YOUR_USERNAME/YOUR_FORK.git (fetch) diff --git a/content/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork.md b/content/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork.md index 08074ff3a9..d3a9c93765 100644 --- a/content/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork.md +++ b/content/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork.md @@ -90,6 +90,7 @@ Before you can sync your fork with an upstream repository, you must configure a ``` If your local branch didn't have any unique commits, Git will perform a fast-forward. For more information, see [Basic Branching and Merging](https://git-scm.com/book/en/v2/Git-Branching-Basic-Branching-and-Merging) in the Git documentation. + ```shell $ git merge upstream/main > Updating 34e91da..16c56ad @@ -97,6 +98,7 @@ Before you can sync your fork with an upstream repository, you must configure a > README.md | 5 +++-- > 1 file changed, 3 insertions(+), 2 deletions(-) ``` + If your local branch had unique commits, you may need to resolve conflicts. For more information, see "[AUTOTITLE](/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts)." {% tip %} diff --git a/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/changing-a-commit-message.md b/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/changing-a-commit-message.md index b2d3887ac7..515abb67fe 100644 --- a/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/changing-a-commit-message.md +++ b/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/changing-a-commit-message.md @@ -52,6 +52,7 @@ We strongly discourage force pushing, since this changes the history of your rep 1. Follow the [steps above](/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/changing-a-commit-message#commit-has-not-been-pushed-online) to amend the commit message. 
2. Use the `push --force-with-lease` command to force push over the old commit. + ```shell git push --force-with-lease origin EXAMPLE-BRANCH ``` @@ -67,6 +68,7 @@ If you need to amend the message for multiple commits or an older commit, you ca # Displays a list of the last 3 commits on the current branch $ git rebase -i HEAD~3 ``` + The list will look similar to the following: ```shell @@ -92,15 +94,19 @@ If you need to amend the message for multiple commits or an older commit, you ca # # Note that empty commits are commented out ``` + 3. Replace `pick` with `reword` before each commit message you want to change. + ```shell pick e499d89 Delete CNAME reword 0c39034 Better README reword f7fde4a Change the commit message but push the same commit. ``` + 4. Save and close the commit list file. 5. In each resulting commit file, type the new commit message, save the file, and close it. 6. When you're ready to push your changes to GitHub, use the push --force command to force push over the old commit. + ```shell git push --force origin EXAMPLE-BRANCH ``` diff --git a/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-on-behalf-of-an-organization.md b/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-on-behalf-of-an-organization.md index 0222493731..b344fc4e39 100644 --- a/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-on-behalf-of-an-organization.md +++ b/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-on-behalf-of-an-organization.md @@ -30,11 +30,13 @@ Organizations can use the `name@organization.com` email as a public point of con ## Creating commits with an `on-behalf-of` badge on the command line 1. Type your commit message and a short, meaningful description of your changes. After your commit description, instead of a closing quotation, add two empty lines. + ```shell $ git commit -m "Refactor usability tests. > > ``` + {% tip %} **Tip:** If you're using a text editor on the command line to type your commit message, ensure there are two newlines between the end of your commit description and the `on-behalf-of:` commit trailer. diff --git a/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors.md b/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors.md index 4091ee1dbb..b4c225e2f6 100644 --- a/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors.md +++ b/content/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors.md @@ -41,11 +41,13 @@ You can use {% data variables.product.prodname_desktop %} to create a commit wit {% data reusables.pull_requests.collect-co-author-commit-git-config-info %} 1. Type your commit message and a short, meaningful description of your changes. After your commit description, instead of a closing quotation, add two empty lines. + ``` $ git commit -m "Refactor usability tests. > > ``` + {% tip %} **Tip:** If you're using a text editor on the command line to type your commit message, ensure there are two newlines between the end of your commit description and the `Co-authored-by:` commit trailer. 
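Putting those pieces together, a complete command might look like the following sketch. The names and email addresses are placeholders, and the two blank lines separate the commit description from the `Co-authored-by:` trailers:

```
$ git commit -m "Refactor usability tests.
>
>
Co-authored-by: NAME <NAME@EXAMPLE.COM>
Co-authored-by: ANOTHER-NAME <ANOTHER-NAME@EXAMPLE.COM>"
```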
@@ -55,6 +57,7 @@ You can use {% data variables.product.prodname_desktop %} to create a commit wit 3. On the next line of the commit message, type `Co-authored-by: name ` with specific information for each co-author. After the co-author information, add a closing quotation mark. If you're adding multiple co-authors, give each co-author their own line and `Co-authored-by:` commit trailer. + ``` $ git commit -m "Refactor usability tests. > diff --git a/content/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch.md b/content/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch.md index 3d1634ab1c..70bfa22ab3 100644 --- a/content/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch.md +++ b/content/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch.md @@ -48,6 +48,7 @@ git remote set-head origin -a ``` Optionally, run the following command to remove tracking references to the old branch name. + ``` git remote prune origin ``` diff --git a/content/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/troubleshooting-required-status-checks.md b/content/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/troubleshooting-required-status-checks.md index b9106b9e9a..c0cc9ef014 100644 --- a/content/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/troubleshooting-required-status-checks.md +++ b/content/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/troubleshooting-required-status-checks.md @@ -31,6 +31,7 @@ You won't be able to push local changes to a protected branch until all required remote: error: GH006: Protected branch update failed for refs/heads/main. remote: error: Required status check "ci-build" is failing ``` + {% note %} **Note:** Pull requests that are up-to-date and pass required status checks can be merged locally and pushed to the protected branch. This can be done without status checks running on the merge commit itself. @@ -97,6 +98,7 @@ jobs: steps: - run: 'echo "No build required"' ``` + Now the checks will always pass whenever someone sends a pull request that doesn't change the files listed under `paths` in the first workflow. {% note %} diff --git a/content/repositories/creating-and-managing-repositories/duplicating-a-repository.md b/content/repositories/creating-and-managing-repositories/duplicating-a-repository.md index 7c749e392d..18fcf0cd62 100644 --- a/content/repositories/creating-and-managing-repositories/duplicating-a-repository.md +++ b/content/repositories/creating-and-managing-repositories/duplicating-a-repository.md @@ -30,15 +30,20 @@ Before you can push the original repository to your new copy, or _mirror_, of th {% data reusables.command_line.open_the_multi_os_terminal %} 2. Create a bare clone of the repository. + ```shell git clone --bare https://{% data variables.command_line.codeblock %}/EXAMPLE-USER/OLD-REPOSITORY.git ``` + 3. Mirror-push to the new repository. + ```shell cd OLD-REPOSITORY.git git push --mirror https://{% data variables.command_line.codeblock %}/EXAMPLE-USER/NEW-REPOSITORY.git ``` + 4. Remove the temporary local repository you created earlier. + ```shell cd .. 
rm -rf OLD-REPOSITORY.git @@ -48,26 +53,37 @@ Before you can push the original repository to your new copy, or _mirror_, of th {% data reusables.command_line.open_the_multi_os_terminal %} 2. Create a bare clone of the repository. Replace the example username with the name of the person or organization who owns the repository, and replace the example repository name with the name of the repository you'd like to duplicate. + ```shell git clone --bare https://{% data variables.command_line.codeblock %}/EXAMPLE-USER/OLD-REPOSITORY.git ``` + 3. Navigate to the repository you just cloned. + ```shell cd OLD-REPOSITORY.git ``` + 4. Pull in the repository's {% data variables.large_files.product_name_long %} objects. + ```shell git lfs fetch --all ``` + 5. Mirror-push to the new repository. + ```shell git push --mirror https://{% data variables.command_line.codeblock %}/EXAMPLE-USER/NEW-REPOSITORY.git ``` + 6. Push the repository's {% data variables.large_files.product_name_long %} objects to your mirror. + ```shell git lfs push --all https://github.com/EXAMPLE-USER/NEW-REPOSITORY.git ``` + 7. Remove the temporary local repository you created earlier. + ```shell cd .. rm -rf OLD-REPOSITORY.git @@ -79,21 +95,27 @@ If you want to mirror a repository in another location, including getting update {% data reusables.command_line.open_the_multi_os_terminal %} 2. Create a bare mirrored clone of the repository. + ```shell git clone --mirror https://{% data variables.command_line.codeblock %}/EXAMPLE-USER/REPOSITORY-TO-MIRROR.git ``` + 3. Set the push location to your mirror. + ```shell cd REPOSITORY-TO-MIRROR git remote set-url --push origin https://{% data variables.command_line.codeblock %}/EXAMPLE-USER/MIRRORED ``` + As with a bare clone, a mirrored clone includes all remote branches and tags, but all local references will be overwritten each time you fetch, so it will always be the same as the original repository. Setting the URL for pushes simplifies pushing to your mirror. 4. To update your mirror, fetch updates and push. + ```shell git fetch -p origin git push --mirror ``` + {% ifversion fpt or ghec %} ## Further reading diff --git a/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-citation-files.md b/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-citation-files.md index 3711316ae9..85553c6dea 100644 --- a/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-citation-files.md +++ b/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-citation-files.md @@ -45,6 +45,7 @@ Lisa, M., & Bot, H. (2017). My Research Software (Version 2.0.4) [Computer softw **BibTeX** {% raw %} + ``` @software{Lisa_My_Research_Software_2017, author = {Lisa, Mona and Bot, Hew}, @@ -56,6 +57,7 @@ Lisa, M., & Bot, H. (2017). My Research Software (Version 2.0.4) [Computer softw year = {2017} } ``` + {% endraw %} Note the example above produces a _software_ citation (that is, `@software` type in BibTeX rather than `@article`). @@ -140,6 +142,7 @@ Lisa, M., & Bot, H. (2021). My awesome research software. Journal Title, 1(1), 1 **BibTeX** {% raw %} + ``` @article{Lisa_My_awesome_research_2021, author = {Lisa, Mona and Bot, Hew}, @@ -153,6 +156,7 @@ Lisa, M., & Bot, H. (2021). My awesome research software. 
Journal Title, 1(1), 1 year = {2021} } ``` + {% endraw %} ## Citing a dataset diff --git a/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners.md b/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners.md index 5a64bdd329..66e729cb52 100644 --- a/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners.md +++ b/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners.md @@ -72,6 +72,7 @@ If any line in your CODEOWNERS file contains invalid syntax, the file will not b {% endif %} ### Example of a CODEOWNERS file + ``` # This is a comment. # Each line is a file pattern followed by one or more owners. diff --git a/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository.md b/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository.md index cc48b828d0..09e38ba135 100644 --- a/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository.md +++ b/content/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository.md @@ -44,6 +44,7 @@ PHP | `packagist` C# | `nuget` Here's an example `FUNDING.yml` file: + ``` github: [octocat, surftocat] patreon: octocat diff --git a/content/repositories/releasing-projects-on-github/automatically-generated-release-notes.md b/content/repositories/releasing-projects-on-github/automatically-generated-release-notes.md index a2a4eb547e..daf44f14f5 100644 --- a/content/repositories/releasing-projects-on-github/automatically-generated-release-notes.md +++ b/content/repositories/releasing-projects-on-github/automatically-generated-release-notes.md @@ -55,6 +55,7 @@ You can also customize your automated release notes, using labels to create cust A configuration for a repository that labels semver releases {% raw %} + ```yaml copy # .github/release.yml @@ -77,11 +78,13 @@ changelog: labels: - "*" ``` + {% endraw %} A configuration for a repository that doesn't tag pull requests but where we want to separate out {% data variables.product.prodname_dependabot %} automated pull requests in release notes (`labels: '*'` is required to display a catchall category) {% raw %} + ```yaml copy # .github/release.yml @@ -97,6 +100,7 @@ changelog: labels: - dependencies ``` + {% endraw %} ## Further reading diff --git a/content/repositories/working-with-files/managing-files/adding-a-file-to-a-repository.md b/content/repositories/working-with-files/managing-files/adding-a-file-to-a-repository.md index fc78876c15..681cb74ad2 100644 --- a/content/repositories/working-with-files/managing-files/adding-a-file-to-a-repository.md +++ b/content/repositories/working-with-files/managing-files/adding-a-file-to-a-repository.md @@ -59,15 +59,19 @@ You can upload an existing file to a repository on {% ifversion ghae %}{% data v {% data reusables.command_line.open_the_multi_os_terminal %} {% data reusables.command_line.switching_directories_procedural %} {% data reusables.git.stage_for_commit %} + ```shell $ git add . # Adds the file to your local repository and stages it for commit. 
{% data reusables.git.unstage-codeblock %} ``` + {% data reusables.git.commit-file %} + ```shell $ git commit -m "Add existing file" # Commits the tracked changes and prepares them to be pushed to a remote repository. {% data reusables.git.reset-head-to-previous-commit-codeblock %} ``` + {% data reusables.git.git-push %} {% ifversion fpt or ghec %} diff --git a/content/repositories/working-with-files/managing-files/moving-a-file-to-a-new-location.md b/content/repositories/working-with-files/managing-files/moving-a-file-to-a-new-location.md index 71b642e176..1e1f7fc1e5 100644 --- a/content/repositories/working-with-files/managing-files/moving-a-file-to-a-new-location.md +++ b/content/repositories/working-with-files/managing-files/moving-a-file-to-a-new-location.md @@ -51,6 +51,7 @@ Many files can be [moved directly on {% data variables.product.product_name %}]( 1. On your computer, move the file to a new location within the directory that was created locally on your computer when you cloned the repository. {% data reusables.command_line.open_the_multi_os_terminal %} 3. Use `git status` to check the old and new file locations. + ```shell $ git status > # On branch YOUR-BRANCH @@ -67,13 +68,17 @@ Many files can be [moved directly on {% data variables.product.product_name %}]( > # > # no changes added to commit (use "git add" and/or "git commit -a") ``` + {% data reusables.git.stage_for_commit %} This will delete, or `git rm`, the file from the old location and add, or `git add`, the file to the new location. + ```shell $ git add . # Adds the file to your local repository and stages it for commit. # {% data reusables.git.unstage-codeblock %} ``` + 5. Use `git status` to check the changes staged for commit. + ```shell $ git status > # On branch YOUR-BRANCH @@ -83,10 +88,13 @@ Many files can be [moved directly on {% data variables.product.product_name %}]( > # renamed: /old-folder/image.png -> /new-folder/image.png # Displays the changes staged for commit ``` + {% data reusables.git.commit-file %} + ```shell $ git commit -m "Move file to new directory" # Commits the tracked changes and prepares them to be pushed to a remote repository. # {% data reusables.git.reset-head-to-previous-commit-codeblock %} ``` + {% data reusables.git.git-push %} diff --git a/content/repositories/working-with-files/managing-files/renaming-a-file.md b/content/repositories/working-with-files/managing-files/renaming-a-file.md index aea769da0e..ba3685999e 100644 --- a/content/repositories/working-with-files/managing-files/renaming-a-file.md +++ b/content/repositories/working-with-files/managing-files/renaming-a-file.md @@ -51,10 +51,13 @@ Many files can be [renamed directly on {% data variables.product.product_name %} {% data reusables.command_line.open_the_multi_os_terminal %} {% data reusables.command_line.switching_directories_procedural %} 3. Rename the file, specifying the old file name and the new name you'd like to give the file. This will stage your change for commit. + ```shell git mv OLD-FILENAME NEW-FILENAME ``` + 4. Use `git status` to check the old and new file names. + ```shell $ git status > # On branch YOUR-BRANCH @@ -64,10 +67,13 @@ Many files can be [renamed directly on {% data variables.product.product_name %} > # renamed: OLD-FILENAME -> NEW-FILENAME > # ``` + {% data reusables.git.commit-file %} + ```shell $ git commit -m "Rename file" # Commits the tracked changes and prepares them to be pushed to a remote repository. 
# {% data reusables.git.reset-head-to-previous-commit-codeblock %} ``` + {% data reusables.git.git-push %} diff --git a/content/repositories/working-with-files/managing-large-files/about-large-files-on-github.md b/content/repositories/working-with-files/managing-large-files/about-large-files-on-github.md index 3f93b0decc..c5d2f1b81b 100644 --- a/content/repositories/working-with-files/managing-large-files/about-large-files-on-github.md +++ b/content/repositories/working-with-files/managing-large-files/about-large-files-on-github.md @@ -79,18 +79,23 @@ If the file was added with your most recent commit, and you have not pushed to { {% data reusables.command_line.open_the_multi_os_terminal %} {% data reusables.command_line.switching_directories_procedural %} 3. To remove the file, enter `git rm --cached`: + ```shell $ git rm --cached GIANT_FILE # Stage our giant file for removal, but leave it on disk ``` + 4. Commit this change using `--amend -CHEAD`: + ```shell $ git commit --amend -CHEAD # Amend the previous commit with your change # Simply making a new commit won't work, as you need # to remove the file from the unpushed history as well ``` + 5. Push your commits to {% data variables.location.product_location %}: + ```shell $ git push # Push our rewritten, smaller commit diff --git a/content/repositories/working-with-files/managing-large-files/configuring-git-large-file-storage.md b/content/repositories/working-with-files/managing-large-files/configuring-git-large-file-storage.md index 8a74cf6ae5..997b00a051 100644 --- a/content/repositories/working-with-files/managing-large-files/configuring-git-large-file-storage.md +++ b/content/repositories/working-with-files/managing-large-files/configuring-git-large-file-storage.md @@ -32,10 +32,12 @@ If there are existing files in your repository that you'd like to use {% data va 3. To associate a file type in your repository with {% data variables.large_files.product_name_short %}, enter `git {% data variables.large_files.command_name %} track` followed by the name of the file extension you want to automatically upload to {% data variables.large_files.product_name_short %}. For example, to associate a _.psd_ file, enter the following command: + ```shell $ git {% data variables.large_files.command_name %} track "*.psd" > Adding path *.psd ``` + Every file type you want to associate with {% data variables.large_files.product_name_short %} will need to be added with `git {% data variables.large_files.command_name %} track`. This command amends your repository's _.gitattributes_ file and associates large files with {% data variables.large_files.product_name_short %}. {% note %} @@ -49,15 +51,20 @@ If there are existing files in your repository that you'd like to use {% data va {% endnote %} 4. Add a file to the repository matching the extension you've associated: + ```shell git add path/to/file.psd ``` + 5. 
Commit the file and push it to {% data variables.product.product_name %}: + ```shell git commit -m "add file.psd" git push ``` + You should see some diagnostic information about your file upload: + ```shell > Sending file.psd > 44.74 MB / 81.04 MB 55.21 % 14s diff --git a/content/repositories/working-with-files/managing-large-files/installing-git-large-file-storage.md b/content/repositories/working-with-files/managing-large-files/installing-git-large-file-storage.md index d91a9a7e2c..e62f576216 100644 --- a/content/repositories/working-with-files/managing-large-files/installing-git-large-file-storage.md +++ b/content/repositories/working-with-files/managing-large-files/installing-git-large-file-storage.md @@ -24,29 +24,35 @@ shortTitle: Install Git LFS 2. On your computer, locate and unzip the downloaded file. {% data reusables.command_line.open_the_multi_os_terminal %} 3. Change the current working directory into the folder you downloaded and unzipped. + ```shell cd ~/Downloads/git-lfs-1.X.X ``` + {% note %} **Note:** The file path you use after `cd` depends on your operating system, Git LFS version you downloaded, and where you saved the {% data variables.large_files.product_name_short %} download. {% endnote %} 4. To install the file, run this command: + ```shell $ ./install.sh > {% data variables.large_files.product_name_short %} initialized. ``` + {% note %} **Note:** You may have to use `sudo ./install.sh` to install the file. {% endnote %} 5. Verify that the installation was successful: + ```shell $ git {% data variables.large_files.command_name %} install > {% data variables.large_files.product_name_short %} initialized. ``` + 6. If you don't see a message indicating that `git {% data variables.large_files.command_name %} install` was successful, please contact {% data variables.contact.contact_support %}. Be sure to include the name of your operating system. {% endmac %} @@ -64,10 +70,12 @@ shortTitle: Install Git LFS 3. Double click on the file called _git-lfs-windows-1.X.X.exe_, where 1.X.X is replaced with the Git LFS version you downloaded. When you open this file Windows will run a setup wizard to install {% data variables.large_files.product_name_short %}. {% data reusables.command_line.open_the_multi_os_terminal %} 5. Verify that the installation was successful: + ```shell $ git {% data variables.large_files.command_name %} install > {% data variables.large_files.product_name_short %} initialized. ``` + 6. If you don't see a message indicating that `git {% data variables.large_files.command_name %} install` was successful, please contact {% data variables.contact.contact_support %}. Be sure to include the name of your operating system. {% endwindows %} @@ -84,29 +92,35 @@ shortTitle: Install Git LFS 2. On your computer, locate and unzip the downloaded file. {% data reusables.command_line.open_the_multi_os_terminal %} 3. Change the current working directory into the folder you downloaded and unzipped. + ```shell cd ~/Downloads/git-lfs-1.X.X ``` + {% note %} **Note:** The file path you use after `cd` depends on your operating system, Git LFS version you downloaded, and where you saved the {% data variables.large_files.product_name_short %} download. {% endnote %} 4. To install the file, run this command: + ```shell $ ./install.sh > {% data variables.large_files.product_name_short %} initialized. ``` + {% note %} **Note:** You may have to use `sudo ./install.sh` to install the file. {% endnote %} 5. 
Verify that the installation was successful: + ```shell $ git {% data variables.large_files.command_name %} install > {% data variables.large_files.product_name_short %} initialized. ``` + 6. If you don't see a message indicating that `git {% data variables.large_files.command_name %} install` was successful, please contact {% data variables.contact.contact_support %}. Be sure to include the name of your operating system. {% endlinux %} diff --git a/content/repositories/working-with-files/managing-large-files/removing-files-from-git-large-file-storage.md b/content/repositories/working-with-files/managing-large-files/removing-files-from-git-large-file-storage.md index 43d2a2656e..c0c989e2e1 100644 --- a/content/repositories/working-with-files/managing-large-files/removing-files-from-git-large-file-storage.md +++ b/content/repositories/working-with-files/managing-large-files/removing-files-from-git-large-file-storage.md @@ -29,10 +29,13 @@ shortTitle: Remove files 1. Remove the files from the repository's Git history using either the `filter-repo` command or BFG Repo-Cleaner. For detailed information on using these, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository)." 2. Optionally, to uninstall {% data variables.large_files.product_name_short %} in the repository, run: + ```shell git lfs uninstall ``` + For {% data variables.large_files.product_name_short %} versions below 1.1.0, run: + ```shell git lfs uninit ``` diff --git a/content/repositories/working-with-files/managing-large-files/resolving-git-large-file-storage-upload-failures.md b/content/repositories/working-with-files/managing-large-files/resolving-git-large-file-storage-upload-failures.md index c19e6b3fec..0f418a7cc6 100644 --- a/content/repositories/working-with-files/managing-large-files/resolving-git-large-file-storage-upload-failures.md +++ b/content/repositories/working-with-files/managing-large-files/resolving-git-large-file-storage-upload-failures.md @@ -18,10 +18,13 @@ To resolve the error message, you must reinstall your local {% data variables.la 1. Open Terminal. 2. Reinstall {% data variables.large_files.product_name_short %}. + ```shell git lfs install ``` + 3. Push all referenced {% data variables.large_files.product_name_short %} files. + ```shell git lfs push --all origin ``` diff --git a/content/rest/enterprise-admin/index.md b/content/rest/enterprise-admin/index.md index ee03672f8f..5ab9685897 100644 --- a/content/rest/enterprise-admin/index.md +++ b/content/rest/enterprise-admin/index.md @@ -64,6 +64,7 @@ When endpoints include `{enterprise}`, replace `{enterprise}` with the handle fo ```shell http(s)://HOSTNAME/ ``` + {% endif %} {% ifversion ghae or ghes %} ## Authentication diff --git a/content/rest/quickstart.md b/content/rest/quickstart.md index 4b70ab2ede..5677aebf97 100644 --- a/content/rest/quickstart.md +++ b/content/rest/quickstart.md @@ -82,6 +82,7 @@ If you are authenticating with a {% data variables.product.prodname_github_app % 1. Add a step to generate a token, and use that token instead of `GITHUB_TOKEN`. Note that this token will expire after 60 minutes. 
For example: ```yaml + {% indented_data_reference reusables.actions.actions-not-certified-by-github-comment spaces=1 %} on: @@ -102,6 +103,7 @@ If you are authenticating with a {% data variables.product.prodname_github_app % GH_TOKEN: {% raw %}${{ steps.generate_token.outputs.token }}{% endraw %} run: | gh api repos/octocat/Spoon-Knife/issues + ``` {% endcli %} @@ -227,6 +229,7 @@ If you are authenticating with a {% data variables.product.prodname_github_app % 1. Add a step to generate a token, and use that token instead of `GITHUB_TOKEN`. Note that this token will expire after 60 minutes. For example: ```yaml + {% indented_data_reference reusables.actions.actions-not-certified-by-github-comment spaces=1 %} on: @@ -259,6 +262,7 @@ If you are authenticating with a {% data variables.product.prodname_github_app % node .github/actions-scripts/use-the-api.mjs env: TOKEN: {% raw %}${{ steps.generate_token.outputs.token }}{% endraw %} + ``` {% endjavascript %} @@ -354,6 +358,7 @@ If you are authenticating with a {% data variables.product.prodname_github_app % 1. Add a step to generate a token, and use that token instead of `GITHUB_TOKEN`. Note that this token will expire after 60 minutes. For example: ```yaml + {% indented_data_reference reusables.actions.actions-not-certified-by-github-comment spaces=1 %} on: @@ -377,6 +382,7 @@ If you are authenticating with a {% data variables.product.prodname_github_app % --url "https://api.github.com/repos/octocat/Spoon-Knife/issues" \ --header "Accept: application/vnd.github+json" \ --header "Authorization: Bearer $GH_TOKEN" + ``` {% endcurl %} diff --git a/content/rest/search.md b/content/rest/search.md index bb14aa37fb..569dddc5a5 100644 --- a/content/rest/search.md +++ b/content/rest/search.md @@ -54,6 +54,7 @@ GitHub Octocat in:readme user:defunkt ``` **Note:** Be sure to use your language's preferred HTML-encoder to construct your query strings. For example: + ```javascript // JavaScript const queryString = 'q=' + encodeURIComponent('GitHub Octocat in:readme user:defunkt'); diff --git a/content/search-github/github-code-search/understanding-github-code-search-syntax.md b/content/search-github/github-code-search/understanding-github-code-search-syntax.md index 6e3a8fb432..471bfb7800 100644 --- a/content/search-github/github-code-search/understanding-github-code-search-syntax.md +++ b/content/search-github/github-code-search/understanding-github-code-search-syntax.md @@ -175,6 +175,7 @@ For example, to search for files with the extension `txt`, you can use: ``` path:*.txt ``` +
To search for JavaScript files within a `src` directory, you could use: @@ -187,11 +188,13 @@ path:src/*.js ``` path:/src/*.js ``` + - Note that `*` doesn't match the `/` character, so for the above example, all results will be direct descendants of the `src` directory. To match within subdirectories, so that results include deeply nested files such as `/src/app/testing/utils/example.js`, you can use `**`. For example: ``` path:/src/**/*.js ``` +
You can also use the `?` global character. For example, to match the path `file.aac` or `file.abc`, you can use: @@ -199,6 +202,7 @@ You can also use the `?` global character. For example, to match the path `file. ``` path:*.a?c ``` +
To search for a filename which contains a special character like `*` or `?`, just use a quoted string: diff --git a/content/support/contacting-github-support/providing-data-to-github-support.md b/content/support/contacting-github-support/providing-data-to-github-support.md index 42529d9b7b..7d77fd1b24 100644 --- a/content/support/contacting-github-support/providing-data-to-github-support.md +++ b/content/support/contacting-github-support/providing-data-to-github-support.md @@ -123,9 +123,11 @@ You can use these steps to create and share a support bundle if you have SSH acc {% data reusables.enterprise_enterprise_support.use_ghe_cluster_support_bundle %} 1. Download the support bundle via SSH: + ```shell ssh -p 122 admin@HOSTNAME -- 'ghe-support-bundle -o' > support-bundle.tgz ``` + For more information about the `ghe-support-bundle` command, see "[AUTOTITLE](/admin/configuration/configuring-your-enterprise/command-line-utilities#ghe-support-bundle)". {% data reusables.enterprise_enterprise_support.sign-in-to-support %} {% data reusables.enterprise_enterprise_support.upload-support-bundle %} @@ -149,6 +151,7 @@ You can directly upload a support bundle to our server if: - Outbound HTTPS connections over TCP port 443 are allowed from {% data variables.location.product_location %} to _enterprise-bundles.github.com_ and _esbtoolsproduction.blob.core.windows.net_. 1. Upload the bundle to our support bundle server: + ```shell ssh -p122 admin@HOSTNAME -- 'ghe-support-bundle -u' ``` @@ -164,9 +167,11 @@ To prevent bundles from becoming too large, bundles only contain logs that haven You can use these steps to create and share an extended support bundle if you have SSH access to {% data variables.location.product_location %} and you have outbound internet access. 1. Download the extended support bundle via SSH by adding the `-x` flag to the `ghe-support-bundle` command: + ```shell ssh -p 122 admin@HOSTNAME -- 'ghe-support-bundle -o -x' > support-bundle.tgz ``` + {% data reusables.enterprise_enterprise_support.sign-in-to-support %} {% data reusables.enterprise_enterprise_support.upload-support-bundle %} @@ -177,6 +182,7 @@ You can directly upload a support bundle to our server if: - Outbound HTTPS connections over TCP port 443 are allowed from {% data variables.location.product_location %} to _enterprise-bundles.github.com_ and _esbtoolsproduction.blob.core.windows.net_. 1. 
Upload the bundle to our support bundle server: + ```shell ssh -p122 admin@HOSTNAME -- 'ghe-support-bundle -u -x' ``` diff --git a/content/webhooks-and-events/events/github-event-types.md b/content/webhooks-and-events/events/github-event-types.md index cba1e7f4fb..348ff8460c 100644 --- a/content/webhooks-and-events/events/github-event-types.md +++ b/content/webhooks-and-events/events/github-event-types.md @@ -57,6 +57,7 @@ HTTP/2 200 Link: ; rel="next", ; rel="last" ``` + ```json [ { diff --git a/data/reusables/actions/actions-group-concurrency.md b/data/reusables/actions/actions-group-concurrency.md index 09ed49d39d..e4282d3ef9 100644 --- a/data/reusables/actions/actions-group-concurrency.md +++ b/data/reusables/actions/actions-group-concurrency.md @@ -3,25 +3,31 @@ When a concurrent job or workflow is queued, if another job or workflow using th ### Examples: Using concurrency and the default behavior {% raw %} + ```yaml concurrency: staging_environment ``` + {% endraw %} {% raw %} + ```yaml concurrency: ci-${{ github.ref }} ``` + {% endraw %} ### Example: Using concurrency to cancel any in-progress job or run {% raw %} + ```yaml concurrency: group: ${{ github.ref }} cancel-in-progress: true ``` + {% endraw %} ### Example: Using a fallback value @@ -29,11 +35,13 @@ concurrency: If you build the group name with a property that is only defined for specific events, you can use a fallback value. For example, `github.head_ref` is only defined on `pull_request` events. If your workflow responds to other events in addition to `pull_request` events, you will need to provide a fallback to avoid a syntax error. The following concurrency group cancels in-progress jobs or runs on `pull_request` events only; if `github.head_ref` is undefined, the concurrency group will fallback to the run ID, which is guaranteed to be both unique and defined for the run. {% raw %} + ```yaml concurrency: group: ${{ github.head_ref || github.run_id }} cancel-in-progress: true ``` + {% endraw %} ### Example: Only cancel in-progress jobs or runs for the current workflow @@ -43,9 +51,11 @@ concurrency: To only cancel in-progress runs of the same workflow, you can use the `github.workflow` property to build the concurrency group: {% raw %} + ```yaml concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true ``` + {% endraw %} diff --git a/data/reusables/actions/actions-vars-context-example-usage.md b/data/reusables/actions/actions-vars-context-example-usage.md index 6b81757ce9..5e207d89ab 100644 --- a/data/reusables/actions/actions-vars-context-example-usage.md +++ b/data/reusables/actions/actions-vars-context-example-usage.md @@ -3,6 +3,7 @@ If a configuration variable has not been set, the return value of a context refe The following example shows using configuration variables with the `vars` context across a workflow. Each of the following configuration variables have been defined at the repository, organization, or environment levels. 
{% raw %} + ```yaml copy on: workflow_dispatch: @@ -36,4 +37,5 @@ jobs: who-to-greet: ${{ vars.GREET_NAME }} ``` + {% endraw %} diff --git a/data/reusables/actions/configure-storage-provider-platform-commands.md b/data/reusables/actions/configure-storage-provider-platform-commands.md index 30077916a5..cfbbf10dd3 100644 --- a/data/reusables/actions/configure-storage-provider-platform-commands.md +++ b/data/reusables/actions/configure-storage-provider-platform-commands.md @@ -3,15 +3,18 @@ ```shell copy ghe-config secrets.actions.storage.blob-provider "azure" ``` + - Amazon S3: ```shell copy ghe-config secrets.actions.storage.blob-provider "s3" ``` + {%- ifversion actions-ghes-gcp-storage %} - Google Cloud Storage: ```shell copy ghe-config secrets.actions.storage.blob-provider "gcs" ``` + {%- endif %} diff --git a/data/reusables/actions/configure-storage-provider.md b/data/reusables/actions/configure-storage-provider.md index 0aabd04132..b319babb1c 100644 --- a/data/reusables/actions/configure-storage-provider.md +++ b/data/reusables/actions/configure-storage-provider.md @@ -5,6 +5,7 @@ ```shell copy ghe-config secrets.actions.storage.azure.connection-string "CONNECTION STRING" ``` + - Amazon S3: ```shell copy @@ -19,6 +20,7 @@ ```shell copy ghe-config secrets.actions.storage.s3.force-path-style true ``` + {%- ifversion actions-ghes-gcp-storage %} - Google Cloud Storage: @@ -28,4 +30,5 @@ ghe-config secrets.actions.storage.gcs.access-key-id "HMAC ACCESS ID" ghe-config secrets.actions.storage.gcs.access-secret "HMAC SECRET" ``` + {%- endif %} diff --git a/data/reusables/actions/example-github-runner.md b/data/reusables/actions/example-github-runner.md index bddef7ed5a..29c292285c 100644 --- a/data/reusables/actions/example-github-runner.md +++ b/data/reusables/actions/example-github-runner.md @@ -3,17 +3,21 @@ The starter workflow configures jobs to run on Linux, using the {% data variables.product.prodname_dotcom %}-hosted `ubuntu-latest` runners. You can change the `runs-on` key to run your jobs on a different operating system. For example, you can use the {% data variables.product.prodname_dotcom %}-hosted Windows runners. {% raw %} + ```yaml runs-on: windows-latest ``` + {% endraw %} Or, you can run on the {% data variables.product.prodname_dotcom %}-hosted macOS runners. {% raw %} + ```yaml runs-on: macos-latest ``` + {% endraw %} You can also run jobs in Docker containers, or you can provide a self-hosted runner that runs on your own infrastructure. For more information, see "[AUTOTITLE](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on)." diff --git a/data/reusables/actions/installing-actions-importer.md b/data/reusables/actions/installing-actions-importer.md index 949a779afc..d4c87cb64d 100644 --- a/data/reusables/actions/installing-actions-importer.md +++ b/data/reusables/actions/installing-actions-importer.md @@ -3,6 +3,7 @@ ```bash copy gh extension install github/gh-actions-importer ``` + 1. Verify that the extension is installed: ```bash diff --git a/data/reusables/actions/jobs/section-choosing-the-runner-for-a-job.md b/data/reusables/actions/jobs/section-choosing-the-runner-for-a-job.md index 4e6bf7790e..aa8baf5c77 100644 --- a/data/reusables/actions/jobs/section-choosing-the-runner-for-a-job.md +++ b/data/reusables/actions/jobs/section-choosing-the-runner-for-a-job.md @@ -9,13 +9,16 @@ Use `jobs..runs-on` to define the type of machine to run the job on. 
- a single variable containing a string - an array of strings, variables containing strings, or a combination of both - If you specify an array of strings or variables, your workflow will execute on any runner that matches all of the specified `runs-on` values. For example, here the job will only run on a self-hosted runner that has the labels `linux`, `x64`, and `gpu`: + ```yaml runs-on: [self-hosted, linux, x64, gpu] ``` + For more information, see "[Choosing self-hosted runners](#choosing-self-hosted-runners)." - You can mix strings and variables in an array. For example: {% raw %} + ```yaml on: workflow_dispatch: @@ -33,6 +36,7 @@ Use `jobs..runs-on` to define the type of machine to run the job on. steps: - run: echo Hello world! ``` + {% endraw %} - If you would like to run your workflow on multiple machines, use [`jobs..strategy`](/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategy). diff --git a/data/reusables/actions/jobs/section-defining-outputs-for-jobs.md b/data/reusables/actions/jobs/section-defining-outputs-for-jobs.md index 94857bb44b..c71df3e28f 100644 --- a/data/reusables/actions/jobs/section-defining-outputs-for-jobs.md +++ b/data/reusables/actions/jobs/section-defining-outputs-for-jobs.md @@ -9,6 +9,7 @@ To use job outputs in a dependent job, you can use the `needs` context. For more ### Example: Defining outputs for a job {% raw %} + ```yaml jobs: job1: @@ -39,4 +40,5 @@ jobs: OUTPUT2: ${{needs.job1.outputs.output2}} run: echo "$OUTPUT1 $OUTPUT2" ``` + {% endraw %} diff --git a/data/reusables/actions/jobs/section-running-jobs-in-a-container-credentials.md b/data/reusables/actions/jobs/section-running-jobs-in-a-container-credentials.md index d645231024..d7a5d9703e 100644 --- a/data/reusables/actions/jobs/section-running-jobs-in-a-container-credentials.md +++ b/data/reusables/actions/jobs/section-running-jobs-in-a-container-credentials.md @@ -3,6 +3,7 @@ #### Example: Defining credentials for a container registry {% raw %} + ```yaml container: image: ghcr.io/owner/image @@ -10,4 +11,5 @@ container: username: ${{ github.actor }} password: ${{ secrets.github_token }} ``` + {% endraw %} diff --git a/data/reusables/actions/jobs/section-using-environments-for-jobs.md b/data/reusables/actions/jobs/section-using-environments-for-jobs.md index 2a3dd5fc4c..bf4e4d24d4 100644 --- a/data/reusables/actions/jobs/section-using-environments-for-jobs.md +++ b/data/reusables/actions/jobs/section-using-environments-for-jobs.md @@ -4,9 +4,11 @@ You can provide the environment as only the environment `name`, or as an environ ### Example: Using a single environment name {% raw %} + ```yaml environment: staging_environment ``` + {% endraw %} ### Example: Using environment name and URL @@ -21,19 +23,23 @@ The value of `url` can be an expression. Allowed expression contexts: [`github`] ### Example: Using output as URL {% raw %} + ```yaml environment: name: production_environment url: ${{ steps.step_id.outputs.url_output }} ``` + {% endraw %} The value of `name` can be an expression. Allowed expression contexts: [`github`](/actions/learn-github-actions/contexts#github-context), [`inputs`](/actions/learn-github-actions/contexts#inputs-context), [`vars`](/actions/learn-github-actions/contexts#vars-context), [`needs`](/actions/learn-github-actions/contexts#needs-context), [`strategy`](/actions/learn-github-actions/contexts#strategy-context), and [`matrix`](/actions/learn-github-actions/contexts#matrix-context). 
For more information about expressions, see "[AUTOTITLE](/actions/learn-github-actions/expressions)." ### Example: Using an expression as environment name {% raw %} + ```yaml environment: name: ${{ github.ref_name }} ``` + {% endraw %} diff --git a/data/reusables/actions/matrix-variable-example.md b/data/reusables/actions/matrix-variable-example.md index ce6e9d6d24..6ba10ca367 100644 --- a/data/reusables/actions/matrix-variable-example.md +++ b/data/reusables/actions/matrix-variable-example.md @@ -1,6 +1,7 @@ In this example, the matrix entries for `node-version` are each configured to use different values for the `site` and `datacenter` environment variables. The `Echo site details` step then uses {% raw %}`env: ${{ matrix.env }}`{% endraw %} to refer to the custom variables: {% raw %} + ```yaml name: Node.js CI on: [push] @@ -23,4 +24,5 @@ jobs: DATACENTER: ${{ matrix.datacenter }} run: echo $SITE $DATACENTER ``` + {% endraw %} diff --git a/data/reusables/actions/pass-inputs-to-reusable-workflows.md b/data/reusables/actions/pass-inputs-to-reusable-workflows.md index fff33080de..3df1c74e4c 100644 --- a/data/reusables/actions/pass-inputs-to-reusable-workflows.md +++ b/data/reusables/actions/pass-inputs-to-reusable-workflows.md @@ -1,6 +1,7 @@ To pass named inputs to a called workflow, use the `with` keyword in a job. Use the `secrets` keyword to pass named secrets. For inputs, the data type of the input value must match the type specified in the called workflow (either boolean, number, or string). {% raw %} + ```yaml jobs: call-workflow-passing-data: @@ -10,12 +11,14 @@ jobs: secrets: envPAT: ${{ secrets.envPAT }} ``` + {% endraw %} {% ifversion actions-inherit-secrets-reusable-workflows %} Workflows that call reusable workflows in the same organization or enterprise can use the `inherit` keyword to implicitly pass the secrets. {% raw %} + ```yaml jobs: call-workflow-passing-data: @@ -24,6 +27,7 @@ jobs: config-path: .github/labeler.yml secrets: inherit ``` + {% endraw %} {%endif%} diff --git a/data/reusables/actions/using-context-or-environment-variables.md b/data/reusables/actions/using-context-or-environment-variables.md index cf07a9899a..a192c0ad6c 100644 --- a/data/reusables/actions/using-context-or-environment-variables.md +++ b/data/reusables/actions/using-context-or-environment-variables.md @@ -6,6 +6,7 @@ The following example demonstrates how these different types of variables can be used together in a job: {% raw %} + ```yaml name: CI on: push @@ -16,6 +17,7 @@ jobs: steps: - run: echo "Deploying to production server on branch $GITHUB_REF" ``` + {% endraw %} In this example, the `if` statement checks the [`github.ref`](/actions/learn-github-actions/contexts#github-context) context to determine the current branch name; if the name is `refs/heads/main`, then the subsequent steps are executed. The `if` check is processed by {% data variables.product.prodname_actions %}, and the job is only sent to the runner if the result is `true`. Once the job is sent to the runner, the step is executed and refers to the [`$GITHUB_REF`](/actions/learn-github-actions/variables#default-environment-variables) variable from the runner. 
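For comparison, here is a minimal runner-side sketch of the same branch check, written inside the step's script instead of in a workflow-level `if`. This is only an illustration and assumes the step runs with a bash shell; `GITHUB_REF` is the default environment variable referred to above.

```shell
# This runs on the runner after the job has already been dispatched, so it reads
# the default environment variable rather than the github.ref context.
if [ "$GITHUB_REF" = "refs/heads/main" ]; then
  echo "Deploying to production server on branch $GITHUB_REF"
else
  echo "Skipping deployment for $GITHUB_REF"
fi
```

Unlike the workflow-level `if` condition, a check like this does not stop the job from being sent to a runner; it only changes what the step does once it is running.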
diff --git a/data/reusables/actions/workflow-basic-example-and-explanation.md b/data/reusables/actions/workflow-basic-example-and-explanation.md index c73581233b..db6dcef89b 100644 --- a/data/reusables/actions/workflow-basic-example-and-explanation.md +++ b/data/reusables/actions/workflow-basic-example-and-explanation.md @@ -24,6 +24,7 @@ You can create an example workflow in your repository that automatically trigger - run: npm install -g bats - run: bats -v ``` + 1. Commit these changes and push them to your {% data variables.product.prodname_dotcom %} repository. Your new {% data variables.product.prodname_actions %} workflow file is now installed in your repository and will run automatically each time someone pushes a change to the repository. To see the details about a workflow's execution history, see "[Viewing the activity for a workflow run](#viewing-the-activity-for-a-workflow-run)." @@ -43,6 +44,7 @@ To help you understand how YAML syntax is used to create a workflow file, this s ```yaml name: learn-github-actions ``` + Optional - The name of the workflow as it will appear in the "Actions" tab of the {% data variables.product.prodname_dotcom %} repository. @@ -55,6 +57,7 @@ To help you understand how YAML syntax is used to create a workflow file, this s ```yaml run-name: {% raw %}${{ github.actor }}{% endraw %} is learning GitHub Actions ``` + @@ -68,6 +71,7 @@ To help you understand how YAML syntax is used to create a workflow file, this s ```yaml on: [push] ``` + Specifies the trigger for this workflow. This example uses the push event, so a workflow run is triggered every time someone pushes a change to the repository or merges a pull request. This is triggered by a push to every branch; for examples of syntax that runs only on pushes to specific branches, paths, or tags, see "Workflow syntax for {% data variables.product.prodname_actions %}." @@ -79,6 +83,7 @@ Specifies the trigger for this workflow. This example uses the push ```yaml jobs: ``` + Groups together all the jobs that run in the learn-github-actions workflow. @@ -90,6 +95,7 @@ Specifies the trigger for this workflow. This example uses the push ```yaml check-bats-version: ``` + Defines a job named check-bats-version. The child keys will define properties of the job. @@ -101,6 +107,7 @@ Defines a job named check-bats-version. The child keys will define ```yaml runs-on: ubuntu-latest ``` + Configures the job to run on the latest version of an Ubuntu Linux runner. This means that the job will execute on a fresh virtual machine hosted by GitHub. For syntax examples using other runners, see "Workflow syntax for {% data variables.product.prodname_actions %}." @@ -112,6 +119,7 @@ Defines a job named check-bats-version. The child keys will define ```yaml steps: ``` + Groups together all the steps that run in the check-bats-version job. Each item nested under this section is a separate action or shell script. @@ -123,6 +131,7 @@ Defines a job named check-bats-version. The child keys will define ```yaml - uses: {% data reusables.actions.action-checkout %} ``` + The uses keyword specifies that this step will run v3 of the actions/checkout action. This is an action that checks out your repository onto the runner, allowing you to run scripts or other actions against your code (such as build and test tools). You should use the checkout action any time your workflow will run against the repository's code. 
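As a rough sketch of what that makes possible (assuming a Linux runner with a bash shell), a later `run` step can work with the checked-out files in the default workspace directory:

```shell
# After the checkout step, the repository contents live in the runner's workspace,
# so subsequent run steps can build, test, or inspect the code directly.
cd "$GITHUB_WORKSPACE"
git log -1 --oneline   # the commit that triggered the workflow
ls                     # the files placed there by the checkout action
```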
@@ -136,6 +145,7 @@ The uses keyword specifies that this step will run v3 with: node-version: '14' ``` + This step uses the {% data reusables.actions.action-setup-node %} action to install the specified version of the Node.js (this example uses v14). This puts both the node and npm commands in your PATH. @@ -147,6 +157,7 @@ The uses keyword specifies that this step will run v3 ```yaml - run: npm install -g bats ``` + The run keyword tells the job to execute a command on the runner. In this case, you are using npm to install the bats software testing package. @@ -158,6 +169,7 @@ The uses keyword specifies that this step will run v3 ```yaml - run: bats -v ``` + Finally, you'll run the bats command with a parameter that outputs the software version. diff --git a/data/reusables/audit_log/create-s3-policy.md b/data/reusables/audit_log/create-s3-policy.md index 4d58a6fc38..785046b509 100644 --- a/data/reusables/audit_log/create-s3-policy.md +++ b/data/reusables/audit_log/create-s3-policy.md @@ -15,4 +15,5 @@ ] } ``` + For more information, see [Creating IAM policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_create.html) in the AWS documentation. diff --git a/data/reusables/codespaces/troubleshooting-simple-browser.md b/data/reusables/codespaces/troubleshooting-simple-browser.md index af02515335..489349815e 100644 --- a/data/reusables/codespaces/troubleshooting-simple-browser.md +++ b/data/reusables/codespaces/troubleshooting-simple-browser.md @@ -29,6 +29,7 @@ To implement the port configuration specified in `devcontainer.json`, {% data va ```bash copy cat ~/.vscode-remote/data/Machine/settings.json ``` + 1. Verify that the `settings.json` file contains lines like the following. ``` diff --git a/data/reusables/command_line/git-clone-url.md b/data/reusables/command_line/git-clone-url.md index 0c968e7204..9cb67bbe57 100644 --- a/data/reusables/command_line/git-clone-url.md +++ b/data/reusables/command_line/git-clone-url.md @@ -1,4 +1,5 @@ 5. Type `git clone`, and then paste the URL you copied earlier. + ```shell git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/YOUR-REPOSITORY ``` diff --git a/data/reusables/command_line/local-clone-created.md b/data/reusables/command_line/local-clone-created.md index 8e41089152..056fc0be1a 100644 --- a/data/reusables/command_line/local-clone-created.md +++ b/data/reusables/command_line/local-clone-created.md @@ -1,4 +1,5 @@ 6. Press **Enter** to create your local clone. + ```shell $ git clone https://{% data variables.command_line.codeblock %}/YOUR-USERNAME/YOUR-REPOSITORY > Cloning into `Spoon-Knife`... 
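As an optional sanity check (a sketch, assuming the clone completed as shown), you can change into the new directory and confirm that `origin` points at the URL you pasted; `HOSTNAME`, `YOUR-USERNAME`, and `YOUR-REPOSITORY` are placeholders:

```shell
cd YOUR-REPOSITORY
git remote -v
# > origin  https://HOSTNAME/YOUR-USERNAME/YOUR-REPOSITORY (fetch)
# > origin  https://HOSTNAME/YOUR-USERNAME/YOUR-REPOSITORY (push)
```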
diff --git a/data/reusables/desktop/windows_git_bash_turn_on_ssh_agent.md b/data/reusables/desktop/windows_git_bash_turn_on_ssh_agent.md index 189bafa780..447c363fe1 100644 --- a/data/reusables/desktop/windows_git_bash_turn_on_ssh_agent.md +++ b/data/reusables/desktop/windows_git_bash_turn_on_ssh_agent.md @@ -1,4 +1,5 @@ **If you are using Git Bash**, turn on ssh-agent: + ```shell # start the ssh-agent in the background $ eval "$(ssh-agent -s)" diff --git a/data/reusables/desktop/windows_git_for_windows_turn_on_ssh_agent.md b/data/reusables/desktop/windows_git_for_windows_turn_on_ssh_agent.md index 649a5edc2d..c6675c0296 100644 --- a/data/reusables/desktop/windows_git_for_windows_turn_on_ssh_agent.md +++ b/data/reusables/desktop/windows_git_for_windows_turn_on_ssh_agent.md @@ -1,4 +1,5 @@ **If you are using another terminal prompt**, such as [Git for Windows](https://git-for-windows.github.io/), turn on ssh-agent: + ```shell # start the ssh-agent in the background $ eval $(ssh-agent -s) diff --git a/data/reusables/enterprise-migration-tool/grant-migrator-role-pat.md b/data/reusables/enterprise-migration-tool/grant-migrator-role-pat.md index 06d2ecbb18..5a5bbf6f37 100644 --- a/data/reusables/enterprise-migration-tool/grant-migrator-role-pat.md +++ b/data/reusables/enterprise-migration-tool/grant-migrator-role-pat.md @@ -2,10 +2,13 @@ 1. Set the {% data variables.product.pat_generic %} as an environment variable, replacing TOKEN in the commands below with the {% data variables.product.pat_generic %} you recorded above. - If you're using Terminal, use the `export` command. + ```shell copy export GH_PAT="TOKEN" ``` + - If you're using PowerShell, use the `$env` command. + ```shell copy $env:GH_PAT="TOKEN" ``` diff --git a/data/reusables/enterprise-migration-tool/migrate-multiple-repos.md b/data/reusables/enterprise-migration-tool/migrate-multiple-repos.md index 796463886f..bc3a4063e5 100644 --- a/data/reusables/enterprise-migration-tool/migrate-multiple-repos.md +++ b/data/reusables/enterprise-migration-tool/migrate-multiple-repos.md @@ -1,10 +1,13 @@ To migrate multiple repositories, run the script you generated above. Replace FILENAME in the commands below with the filename you provided when generating the script. - If you're using Terminal, use `./`. + ```shell copy ./FILENAME ``` + - If you're using PowerShell, use `.\`. + ```shell copy .\FILENAME ``` diff --git a/data/reusables/enterprise/apply-configuration.md b/data/reusables/enterprise/apply-configuration.md index 91fb45f364..118bf086e7 100644 --- a/data/reusables/enterprise/apply-configuration.md +++ b/data/reusables/enterprise/apply-configuration.md @@ -9,4 +9,5 @@ ```shell copy ghe-config-apply ``` + 1. Wait for the configuration run to complete. diff --git a/data/reusables/enterprise_backup_utilities/enterprise-backup-utils-verify-upgrade.md b/data/reusables/enterprise_backup_utilities/enterprise-backup-utils-verify-upgrade.md index d6daca2a35..be74118607 100644 --- a/data/reusables/enterprise_backup_utilities/enterprise-backup-utils-verify-upgrade.md +++ b/data/reusables/enterprise_backup_utilities/enterprise-backup-utils-verify-upgrade.md @@ -1,9 +1,11 @@ 1. To verify you have successfully upgraded, run the following command. + ```shell ./bin/ghe-backup --version ``` 1. To verify SSH connectivity between your configured {% data variables.product.prodname_ghe_server %}, run the following command. 
+ ```shell ./bin/ghe-host-check ``` diff --git a/data/reusables/enterprise_installation/configuration-recognized.md b/data/reusables/enterprise_installation/configuration-recognized.md index 0806d8af1d..cdb63ea2d5 100644 --- a/data/reusables/enterprise_installation/configuration-recognized.md +++ b/data/reusables/enterprise_installation/configuration-recognized.md @@ -1,4 +1,5 @@ 1. Once the instance has fully restarted and you can reach it, use the SSH administrative shell to verify that the new resource configuration is recognized: + ```shell ssh -p 122 admin@HOSTNAME ghe-system-info diff --git a/data/reusables/enterprise_installation/download-package.md b/data/reusables/enterprise_installation/download-package.md index 0dc61379f6..951e9f8eb5 100644 --- a/data/reusables/enterprise_installation/download-package.md +++ b/data/reusables/enterprise_installation/download-package.md @@ -1,4 +1,5 @@ 1. Download the upgrade package to {% data variables.location.product_location %} using `curl`: + ```shell admin@HOSTNAME:~$ curl -L -O UPGRADE-PKG-URL ``` diff --git a/data/reusables/enterprise_installation/generate-replication-key-pair.md b/data/reusables/enterprise_installation/generate-replication-key-pair.md index 12de418f33..d65035b194 100644 --- a/data/reusables/enterprise_installation/generate-replication-key-pair.md +++ b/data/reusables/enterprise_installation/generate-replication-key-pair.md @@ -1,4 +1,5 @@ 7. To generate a key pair for replication, use the `ghe-repl-setup` command with the primary appliance's IP address and copy the public key that it returns. + ```shell ghe-repl-setup PRIMARY_IP ``` diff --git a/data/reusables/enterprise_installation/replica-ssh.md b/data/reusables/enterprise_installation/replica-ssh.md index 6c53cf974a..558689d887 100644 --- a/data/reusables/enterprise_installation/replica-ssh.md +++ b/data/reusables/enterprise_installation/replica-ssh.md @@ -1,4 +1,5 @@ 1. Connect to the replica node over SSH as the `admin` user on port 122: + ```shell ssh -p 122 admin@REPLICA_HOST ``` diff --git a/data/reusables/enterprise_installation/replication-command.md b/data/reusables/enterprise_installation/replication-command.md index a7f150befe..ad0e1b94ee 100644 --- a/data/reusables/enterprise_installation/replication-command.md +++ b/data/reusables/enterprise_installation/replication-command.md @@ -1,7 +1,9 @@ 1. To start replication of the datastores, use the `ghe-repl-start` command. + ```shell ghe-repl-start ``` + {% warning %} **Warning:** `ghe-repl-start` causes a brief outage on the primary server, during which users may see internal server errors. To provide a friendlier message, run `ghe-maintenance -s` on the primary node before running `ghe-repl-start` on the replica node to put the appliance in maintenance mode. Once replication starts, disable maintenance mode with `ghe-maintenance -u`. Git replication will not progress while the primary node is in maintenance mode. diff --git a/data/reusables/enterprise_installation/ssh-into-target-instance.md b/data/reusables/enterprise_installation/ssh-into-target-instance.md index 0220014c39..b0db048b5e 100644 --- a/data/reusables/enterprise_installation/ssh-into-target-instance.md +++ b/data/reusables/enterprise_installation/ssh-into-target-instance.md @@ -1,4 +1,5 @@ 1. As a site admin, [SSH into your target {% data variables.product.prodname_ghe_server %} instance]({% ifversion not ghes %}/enterprise-server@latest{% endif %}/admin/configuration/configuring-your-enterprise/accessing-the-administrative-shell-ssh). 
+ ```shell ssh -p 122 admin@HOSTNAME ``` diff --git a/data/reusables/enterprise_installation/verify-replication-channel.md b/data/reusables/enterprise_installation/verify-replication-channel.md index 9141c6d703..83a9e26a4f 100644 --- a/data/reusables/enterprise_installation/verify-replication-channel.md +++ b/data/reusables/enterprise_installation/verify-replication-channel.md @@ -1,4 +1,5 @@ 11. To verify the status of each datastore's replication channel, use the `ghe-repl-status` command. + ```shell ghe-repl-status ``` diff --git a/data/reusables/enterprise_migrations/unlocking-on-instances.md b/data/reusables/enterprise_migrations/unlocking-on-instances.md index 29f0034a47..5951aa3a1b 100644 --- a/data/reusables/enterprise_migrations/unlocking-on-instances.md +++ b/data/reusables/enterprise_migrations/unlocking-on-instances.md @@ -1,4 +1,5 @@ 1. Unlock all the imported repositories with the `ghe-migrator unlock` command. You'll need your Migration GUID: + ```shell $ ghe-migrator unlock -g MIGRATION-GUID > Unlocked octo-org/octo-project diff --git a/data/reusables/enterprise_site_admin_settings/add-key-to-web-flow-user.md b/data/reusables/enterprise_site_admin_settings/add-key-to-web-flow-user.md index e8bec44b77..0d502cb382 100644 --- a/data/reusables/enterprise_site_admin_settings/add-key-to-web-flow-user.md +++ b/data/reusables/enterprise_site_admin_settings/add-key-to-web-flow-user.md @@ -3,6 +3,7 @@ ```bash copy gpg --armor --export KEY-ID ``` + 1. Copy your PGP key, beginning with `-----BEGIN PGP PUBLIC KEY BLOCK-----` and ending with `-----END PGP PUBLIC KEY BLOCK-----`. 1. Sign into {% data variables.product.prodname_ghe_server %} as the user created for web commit signing, for example, `web-flow`. 1. Add the public PGP key to the user's profile. For more information, see "[AUTOTITLE](/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account)." diff --git a/data/reusables/git/git-push.md b/data/reusables/git/git-push.md index 53ed89a5e0..fa10c82af3 100644 --- a/data/reusables/git/git-push.md +++ b/data/reusables/git/git-push.md @@ -1,4 +1,5 @@ 1. [Push the changes](/get-started/using-git/pushing-commits-to-a-remote-repository) in your local repository to {% data variables.location.product_location %}. + ```shell $ git push origin YOUR_BRANCH # Pushes the changes in your local repository up to the remote repository you specified as the origin diff --git a/data/reusables/gpg/configure-ssh-signing.md b/data/reusables/gpg/configure-ssh-signing.md index ba3de44cb7..bc2f2d3059 100644 --- a/data/reusables/gpg/configure-ssh-signing.md +++ b/data/reusables/gpg/configure-ssh-signing.md @@ -1,4 +1,5 @@ 1. Configure Git to use SSH to sign commits and tags: + ```bash git config --global gpg.format ssh ``` diff --git a/data/reusables/gpg/copy-gpg-key-id.md b/data/reusables/gpg/copy-gpg-key-id.md index 7569abbb14..a0a8d4ab64 100644 --- a/data/reusables/gpg/copy-gpg-key-id.md +++ b/data/reusables/gpg/copy-gpg-key-id.md @@ -1,5 +1,6 @@ 1. From the list of GPG keys, copy the long form of the GPG key ID you'd like to use. In this example, the GPG key ID is `3AA5C34371567BD2`: ```shell copy + $ gpg --list-secret-keys --keyid-format=long /Users/hubot/.gnupg/secring.gpg ------------------------------------ diff --git a/data/reusables/gpg/paste-gpg-key-id.md b/data/reusables/gpg/paste-gpg-key-id.md index adff1c7438..75a4f71bc3 100644 --- a/data/reusables/gpg/paste-gpg-key-id.md +++ b/data/reusables/gpg/paste-gpg-key-id.md @@ -1,9 +1,11 @@ 1. 
To set your primary GPG signing key in Git, paste the text below, substituting in the GPG primary key ID you'd like to use. In this example, the GPG key ID is `3AA5C34371567BD2`: + ```shell git config --global user.signingkey 3AA5C34371567BD2 ``` Alternatively, when setting a subkey include the `!` suffix. In this example, the GPG subkey ID is `4BB6D45482678BE3`: + ```shell git config --global user.signingkey 4BB6D45482678BE3! ``` diff --git a/data/reusables/gpg/paste-ssh-public-key.md b/data/reusables/gpg/paste-ssh-public-key.md index 667e22f668..01a73cdcaa 100644 --- a/data/reusables/gpg/paste-ssh-public-key.md +++ b/data/reusables/gpg/paste-ssh-public-key.md @@ -1,3 +1,4 @@ 1. To set your SSH signing key in Git, paste the text below, substituting **/PATH/TO/KEY.PUB** with the path to the public key you'd like to use. + ```bash $ git config --global user.signingkey /PATH/TO/.SSH/KEY.PUB diff --git a/data/reusables/gpg/x-509-key.md b/data/reusables/gpg/x-509-key.md index 1a594264f2..4727f49852 100644 --- a/data/reusables/gpg/x-509-key.md +++ b/data/reusables/gpg/x-509-key.md @@ -9,32 +9,43 @@ You can use [smimesign](https://github.com/github/smimesign) to sign commits and {% data reusables.command_line.open_the_multi_os_terminal %} 3. Configure Git to use S/MIME to sign commits and tags. In Git 2.19 or later, use the `git config gpg.x509.program` and `git config gpg.format` commands: - To use S/MIME to sign for all repositories: + ```shell git config --global gpg.x509.program smimesign git config --global gpg.format x509 ``` + - To use S/MIME to sign for a single repository: + ```shell cd PATH-TO-REPOSITORY git config --local gpg.x509.program smimesign git config --local gpg.format x509 ``` + In Git 2.18 or earlier, use the `git config gpg.program` command: - To use S/MIME to sign for all repositories: + ```shell git config --global gpg.program smimesign ``` + - To use S/MIME to sign for a single repository: + ```shell cd PATH-TO-REPOSITORY git config --local gpg.program smimesign ``` + If you're using an X.509 key that matches your committer identity, you can begin signing commits and tags. 4. If you're not using an X.509 key that matches your committer identity, list X.509 keys for which you have both a certificate and private key using the `smimesign --list-keys` command. + ```shell smimesign --list-keys ``` + 5. From the list of X.509 keys, copy the certificate ID of the X.509 key you'd like to use. In this example, the certificate ID is `0ff455a2708394633e4bb2f88002e3cd80cbd76f`: + ```shell $ smimesign --list-keys ID: 0ff455a2708394633e4bb2f88002e3cd80cbd76f @@ -45,12 +56,16 @@ You can use [smimesign](https://github.com/github/smimesign) to sign commits and Subject: CN=Octocat,O=GitHub\, Inc.,L=San Francisco,ST=California,C=US Emails: octocat@github.com ``` + 6. To set your X.509 signing key in Git, paste the text below, substituting in the certificate ID you copied earlier. 
- To use your X.509 key to sign for all repositories: + ```shell git config --global user.signingkey 0ff455a2708394633e4bb2f88002e3cd80cbd76f ``` + - To use your X.509 key to sign for a single repository: + ```shell cd PATH-TO-REPOSITORY git config --local user.signingkey 0ff455a2708394633e4bb2f88002e3cd80cbd76f diff --git a/data/reusables/package_registry/authenticate-to-container-registry-steps.md b/data/reusables/package_registry/authenticate-to-container-registry-steps.md index 6ec5230fe0..f326d82fe2 100644 --- a/data/reusables/package_registry/authenticate-to-container-registry-steps.md +++ b/data/reusables/package_registry/authenticate-to-container-registry-steps.md @@ -14,13 +14,17 @@ For more information, see "[AUTOTITLE](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." 2. Save your {% data variables.product.pat_v1 %}. We recommend saving your token as an environment variable. + ```shell export CR_PAT=YOUR_TOKEN ``` + 3. Using the CLI for your container type, sign in to the {% data variables.product.prodname_container_registry %} service at `{% data reusables.package_registry.container-registry-hostname %}`. {% raw %} + ```shell $ echo $CR_PAT | docker login {% endraw %}{% data reusables.package_registry.container-registry-hostname %}{% raw %} -u USERNAME --password-stdin > Login Succeeded ``` + {% endraw %} diff --git a/data/reusables/package_registry/checksum-maven-plugin.md b/data/reusables/package_registry/checksum-maven-plugin.md index 312be929f4..47c66dd99b 100644 --- a/data/reusables/package_registry/checksum-maven-plugin.md +++ b/data/reusables/package_registry/checksum-maven-plugin.md @@ -1,5 +1,7 @@ {%- ifversion ghae %} + 1. In the `plugins` element of the _pom.xml_ file, add the [checksum-maven-plugin](https://search.maven.org/artifact/net.nicoulaj.maven.plugins/checksum-maven-plugin) plugin, and configure the plugin to send at least SHA-256 checksums. + ```xml @@ -21,4 +23,5 @@ ``` + {%- endif %} diff --git a/data/reusables/package_registry/create-npmrc-owner-step.md b/data/reusables/package_registry/create-npmrc-owner-step.md index a80a1001b0..1acb12c063 100644 --- a/data/reusables/package_registry/create-npmrc-owner-step.md +++ b/data/reusables/package_registry/create-npmrc-owner-step.md @@ -1,16 +1,22 @@ 2. In the same directory as your `package.json` file, create or edit an `.npmrc` file to include a line specifying {% data variables.product.prodname_registry %} URL and the namespace where the package is hosted. Replace `NAMESPACE` with the name of the user or organization account {% ifversion packages-npm-v2 %}to which the package will be scoped{% else %}that owns the repository containing your project{% endif %}. {% ifversion fpt or ghec %} + ```shell @NAMESPACE:registry=https://npm.pkg.github.com ``` + {% else %} If subdomain isolation is enabled: + ```shell @NAMESPACE:registry=https://npm.HOSTNAME ``` + If subdomain isolation is disabled: + ```shell @NAMESPACE:registry=https://HOSTNAME/_registry/npm ``` + {% endif %} diff --git a/data/reusables/pages/remove-submodule.md b/data/reusables/pages/remove-submodule.md index 8d0f9658b6..b7a72b9b58 100644 --- a/data/reusables/pages/remove-submodule.md +++ b/data/reusables/pages/remove-submodule.md @@ -1,6 +1,7 @@ To troubleshoot, first decide if you actually want to use a submodule, which is a Git project inside a Git project; submodules are sometimes created accidentally. 
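One way to check (a sketch, run from the root of your repository, with PATH-TO-SUBMODULE standing in for the same path used in the commands below) is to look at how Git has recorded the path:

```shell
# Mode 160000 in the index is a gitlink, which is how Git records a submodule.
git ls-files --stage PATH-TO-SUBMODULE

# A deliberately added submodule is also listed in .gitmodules; an accidentally
# created one usually is not.
cat .gitmodules
```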
If you don't want to use a submodule, remove the submodule, replacing PATH-TO-SUBMODULE with the path to the submodule: + ```shell git submodule deinit PATH-TO-SUBMODULE git rm PATH-TO-SUBMODULE