
Merge branch 'main' into add-jr-to-address

Lee Dohm committed 2021-08-03 11:44:49 -07:00
740 changed files with 138634 additions and 136452 deletions


@@ -15,8 +15,6 @@ module.exports = {
},
rules: {
'import/no-extraneous-dependencies': ['error', { packageDir: '.' }],
- 'node/global-require': ['error'],
- 'import/no-dynamic-require': ['error'],
},
overrides: [
{
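
(For reference, the two rules removed above governed `require()` usage: `node/global-require` flagged `require()` calls outside top-level module scope, and `import/no-dynamic-require` flagged non-literal require paths. A tiny illustration of the pattern both rules would have rejected, now permitted; the function and path are hypothetical:)

```
// Illustrative only: a dynamic, non-top-level require.
function load(name) {
  // 'node/global-require' flagged require() inside a function;
  // 'import/no-dynamic-require' flagged the non-literal path.
  return require(`./translations/${name}.js`)
}
```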

.gitattributes (2 lines changed)

@@ -1,6 +1,6 @@
# Set default behaviour, in case users don't have core.autocrlf set.
* text=auto
# Explicitly declare text files we want to always be normalized and converted
# to native line endings on checkout.
- *.md text
+ *.json.br filter=lfs diff=lfs merge=lfs -text

.github/CODEOWNERS (1 line changed)

@@ -10,7 +10,6 @@
/.github/ @github/docs-engineering
/script/ @github/docs-engineering
/includes/ @github/docs-engineering
- /layouts/ @github/docs-engineering
app.json @github/docs-engineering
Dockerfile @github/docs-engineering
package-lock.json @github/docs-engineering


@@ -1,6 +1,6 @@
#!/usr/bin/env node
- import fs from 'fs'
+ import fs from 'fs/promises'
import path from 'path'
import { getOctokit } from '@actions/github'
import enterpriseDates from '../../lib/enterprise-dates.js'
@@ -74,7 +74,7 @@ async function run() {
process.exit(0)
}
- const milestoneSteps = fs.readFileSync(
+ const milestoneSteps = await fs.readFile(
path.join(
process.cwd(),
`.github/actions-scripts/enterprise-server-issue-templates/${milestone}-issue.md`


@@ -1,13 +1,13 @@
#!/usr/bin/env node
- import fs from 'fs'
+ import fs from 'fs/promises'
import { setOutput } from '@actions/core'
- const eventPayload = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, 'utf8'))
+ const eventPayload = JSON.parse(await fs.readFile(process.env.GITHUB_EVENT_PATH, 'utf8'))
// This workflow-run script does the following:
// 1. Gets an array of labels on a PR.
- // 2. Finds one with the relevant Algolia text; if none found, exits early.
+ // 2. Finds one with the relevant search text; if none found, exits early.
// 3. Gets the version substring from the label string.
const labelText = 'sync-english-index-for-'
@@ -19,18 +19,18 @@ if (!(labelsArray && labelsArray.length)) {
}
// Find the relevant label
- const algoliaLabel = labelsArray
+ const searchLabel = labelsArray
.map((label) => label.name)
.find((label) => label.startsWith(labelText))
// Exit early if no relevant label is found
- if (!algoliaLabel) {
+ if (!searchLabel) {
process.exit(0)
}
// Given: sync-english-index-for-enterprise-server@3.0
// Returns: enterprise-server@3.0
- const versionToSync = algoliaLabel.split(labelText)[1]
+ const versionToSync = searchLabel.split(labelText)[1]
// Store the version so we can access it later in the workflow
setOutput('versionToSync', versionToSync)
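
(For context: the rename above is mechanical, and the version extraction is just a prefix split on the label name. A standalone sketch of that step, using a sample label in the same format the script expects:)

```
// Sketch of the label-to-version extraction performed by the script above.
const labelText = 'sync-english-index-for-'
const sampleLabel = 'sync-english-index-for-enterprise-server@3.0' // illustrative label
// Everything after the prefix is the version to sync.
const versionToSync = sampleLabel.split(labelText)[1]
console.log(versionToSync) // -> 'enterprise-server@3.0'
```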


@@ -34,7 +34,7 @@
- PLACEHOLDER
```
**Note:** All of the content in this file will be updated when the release notes are created in the megabranch including the filename `PLACEHOLDER.yml`. You can update the date or leave it as-is and wait to update it when the release notes are finalized.
- - [ ] Create the Algolia search indices for the new release:
+ - [ ] Create the search indices for the new release:
```
npm run sync-search-ghes-release
```
@@ -51,7 +51,7 @@
```
sync-english-index-for-<PLAN@RELEASE>
```
- ☝️ This will run a workflow **on every push to the PR** that will sync **only** the English index for the new version to Algolia. This will make the GHES content searchable on staging throughout content creation, and will ensure the search updates go live at the same time the content is published. See [`contributing/search.md`](https://github.com/github/docs-internal/blob/main/contributing/search.md) for details.
+ ☝️ This will run a workflow **on every push to the PR** that will sync **only** the English index for the new version. This will make the GHES content searchable on staging throughout content creation, and will ensure the search updates go live at the same time the content is published. See [`contributing/search.md`](https://github.com/github/docs-internal/blob/main/contributing/search.md) for details.
- [ ] In `github/github`, to create a new GHES release follow these steps:
- [ ] Copy the previous release's root document to a new root document for this release `cp app/api/description/ghes-<LATEST RELEASE NUMBER>.yaml app/api/description/ghes-<NEXT RELEASE NUMBER>.yaml`.
@@ -79,7 +79,7 @@ If the `OpenAPI dev mode check / check-schema-versions` check fails with the fol
#### `Node.js tests / test content` failures
If the `Node.js tests / test content` check fails with the following message, the `lib/enterprise-dates.json` file is not up-to-date:
- > FAIL tests/content/algolia-search.js ● algolia has remote indexNames in every language for every supported GHE version
+ > FAIL tests/content/search.js ● search has remote indexNames in every language for every supported GHE version
This file should be automatically updated, but you can also run `script/update-enterprise-dates.js` to update it. **Note:** If the test is still failing after running this script, look at the dates for this release. If the date is still inaccurate, it may be an issue with the source at https://github.com/github/enterprise-releases/blob/master/docs/supported-versions.md#release-lifecycle-dates. If that is the case, manually update the dates in the `lib/enterprise-dates.json` file.


@@ -0,0 +1,366 @@
import { graphql } from '@octokit/graphql'
// Given a list of PR/issue node IDs and a project node ID,
// adds the PRs/issues to the project
// and returns the node IDs of the project items
async function addItemsToProject(items, project) {
console.log(`Adding ${items} to project ${project}`)
const mutations = items.map(
(pr, index) => `
pr_${index}: addProjectNextItem(input: {
projectId: $project
contentId: "${pr}"
}) {
projectNextItem {
id
}
}
`
)
const mutation = `
mutation($project:ID!) {
${mutations.join(' ')}
}
`
const newItems = await graphql(mutation, {
project: project,
headers: {
authorization: `token ${process.env.TOKEN}`,
'GraphQL-Features': 'projects_next_graphql',
},
})
// The output of the mutation is
// {"pr_0":{"projectNextItem":{"id":ID!}},...}
// Pull out the ID for each new item
const newItemIDs = Object.entries(newItems).map((item) => item[1].projectNextItem.id)
console.log(`New item IDs: ${newItemIDs}`)
return newItemIDs
}
// Given a list of project item node IDs and a list of corresponding authors
// generates a GraphQL mutation to populate:
// - "Status" (as "Ready for review" option)
// - "Date posted" (as today)
// - "Review due date" (as today + 2 weekdays)
// - "Feature" (as "OpenAPI schema update")
// - "Contributor type" (as "Hubber or partner" option)
// Does not populate "Review needs" or "Size"
function generateUpdateProjectNextItemFieldMutation(items, authors) {
// Formats a date object into the required format for projects
function formatDate(date) {
return date.getFullYear() + '-' + (date.getMonth() + 1) + '-' + date.getDate()
}
// Calculate 2 weekdays from now (excluding weekends; not considering holidays)
const datePosted = new Date()
let daysUntilDue
switch (datePosted.getDay()) {
case 0: // Sunday
daysUntilDue = 3
break
case 6: // Saturday
daysUntilDue = 4
break
default:
daysUntilDue = 2
}
const millisecPerDay = 24 * 60 * 60 * 1000
const dueDate = new Date(datePosted.getTime() + millisecPerDay * daysUntilDue)
// Build the mutation for a single field
function generateMutation({ index, item, fieldID, value, literal = false }) {
let parsedValue
if (literal) {
parsedValue = `value: "${value}"`
} else {
parsedValue = `value: ${value}`
}
return `
set_${fieldID.substr(1)}_item_${index}: updateProjectNextItemField(input: {
projectId: $project
itemId: "${item}"
fieldId: ${fieldID}
${parsedValue}
}) {
projectNextItem {
id
}
}
`
}
// Build the mutation for all fields for all items
const mutations = items.map(
(item, index) => `
${generateMutation({
index: index,
item: item,
fieldID: '$statusID',
value: '$readyForReviewID',
})}
${generateMutation({
index: index,
item: item,
fieldID: '$datePostedID',
value: formatDate(datePosted),
literal: true,
})}
${generateMutation({
index: index,
item: item,
fieldID: '$reviewDueDateID',
value: formatDate(dueDate),
literal: true,
})}
${generateMutation({
index: index,
item: item,
fieldID: '$contributorTypeID',
value: '$hubberTypeID',
})}
${generateMutation({
index: index,
item: item,
fieldID: '$featureID',
value: 'OpenAPI schema update',
literal: true,
})}
${generateMutation({
index: index,
item: item,
fieldID: '$authorID',
value: authors[index],
literal: true,
})}
`
)
// Build the full mutation
const mutation = `
mutation(
$project: ID!
$statusID: ID!
$readyForReviewID: String!
$datePostedID: ID!
$reviewDueDateID: ID!
$contributorTypeID: ID!
$hubberTypeID: String!
$featureID: ID!
$authorID: ID!
) {
${mutations.join(' ')}
}
`
return mutation
}
async function run() {
// Get info about the docs-content review board project
// and about open github/github PRs
const data = await graphql(
`
query ($organization: String!, $repo: String!, $projectNumber: Int!, $num_prs: Int!) {
organization(login: $organization) {
projectNext(number: $projectNumber) {
id
items(last: 100) {
nodes {
id
}
}
fields(first: 20) {
nodes {
id
name
settings
}
}
}
}
repository(name: $repo, owner: $organization) {
pullRequests(last: $num_prs, states: OPEN) {
nodes {
id
isDraft
reviewRequests(first: 10) {
nodes {
requestedReviewer {
... on Team {
name
}
}
}
}
labels(first: 5) {
nodes {
name
}
}
reviews(first: 10) {
nodes {
onBehalfOf(first: 1) {
nodes {
name
}
}
}
}
author {
login
}
}
}
}
}
`,
{
organization: process.env.ORGANIZATION,
repo: process.env.REPO,
projectNumber: parseInt(process.env.PROJECT_NUMBER),
num_prs: parseInt(process.env.NUM_PRS),
headers: {
authorization: `token ${process.env.TOKEN}`,
'GraphQL-Features': 'projects_next_graphql',
},
}
)
// Get the PRs that are:
// - not draft
// - not a train
// - are requesting a review by docs-reviewers
// - have not already been reviewed on behalf of docs-reviewers
const prs = data.repository.pullRequests.nodes.filter(
(pr) =>
!pr.isDraft &&
!pr.labels.nodes.find((label) => label.name === 'Deploy train 🚂') &&
pr.reviewRequests.nodes.find(
(requestedReviewers) => requestedReviewers.requestedReviewer.name === process.env.REVIEWER
) &&
!pr.reviews.nodes
.flatMap((review) => review.onBehalfOf.nodes)
.find((behalf) => behalf.name === process.env.REVIEWER)
)
if (prs.length === 0) {
console.log('No PRs found. Exiting.')
return
}
const prIDs = prs.map((pr) => pr.id)
const prAuthors = prs.map((pr) => pr.author.login)
console.log(`PRs found: ${prIDs}`)
// Get the project ID
const projectID = data.organization.projectNext.id
// Get the IDs of the last 100 items on the board.
// Until we have a way to check from a PR whether the PR is in a project,
// this is how we (roughly) avoid overwriting PRs that are already on the board.
// If we are overwriting items, query for more items.
const existingItemIDs = data.organization.projectNext.items.nodes.map((node) => node.id)
function findFieldID(fieldName, data) {
const field = data.organization.projectNext.fields.nodes.find(
(field) => field.name === fieldName
)
if (field && field.id) {
return field.id
} else {
throw new Error(
`A field called "${fieldName}" was not found. Check if the field was renamed.`
)
}
}
function findSingleSelectID(singleSelectName, fieldName, data) {
const field = data.organization.projectNext.fields.nodes.find(
(field) => field.name === fieldName
)
if (!field) {
throw new Error(
`A field called "${fieldName}" was not found. Check if the field was renamed.`
)
}
const singleSelect = JSON.parse(field.settings).options.find(
(field) => field.name === singleSelectName
)
if (singleSelect && singleSelect.id) {
return singleSelect.id
} else {
throw new Error(
`A single select called "${singleSelectName}" for the field "${fieldName}" was not found. Check if the single select was renamed.`
)
}
}
// Get the ID of the fields that we want to populate
const datePostedID = findFieldID('Date posted', data)
const reviewDueDateID = findFieldID('Review due date', data)
const statusID = findFieldID('Status', data)
const featureID = findFieldID('Feature', data)
const contributorTypeID = findFieldID('Contributor type', data)
const authorID = findFieldID('Author', data)
// Get the ID of the single select values that we want to set
const readyForReviewID = findSingleSelectID('Ready for review', 'Status', data)
const hubberTypeID = findSingleSelectID('Hubber or partner', 'Contributor type', data)
// Add the PRs to the project
const itemIDs = await addItemsToProject(prIDs, projectID)
// If an item already existed on the project, the existing ID will be returned.
// Exclude existing items going forward.
// Until we have a way to check from a PR whether the PR is in a project,
// this is how we (roughly) avoid overwriting PRs that are already on the board
const newItemIDs = itemIDs.filter((id) => !existingItemIDs.includes(id))
if (newItemIDs.length === 0) {
console.log('All found PRs are already on the project. Exiting.')
return
}
// Populate fields for the new project items
// Note: Since there is not a way to check if a PR is already on the board,
// this will overwrite the values of PRs that are on the board
const updateProjectNextItemMutation = generateUpdateProjectNextItemFieldMutation(
newItemIDs,
prAuthors
)
console.log(`Populating fields for these items: ${newItemIDs}`)
await graphql(updateProjectNextItemMutation, {
project: projectID,
statusID: statusID,
readyForReviewID: readyForReviewID,
datePostedID: datePostedID,
reviewDueDateID: reviewDueDateID,
contributorTypeID: contributorTypeID,
hubberTypeID: hubberTypeID,
featureID: featureID,
authorID: authorID,
headers: {
authorization: `token ${process.env.TOKEN}`,
'GraphQL-Features': 'projects_next_graphql',
},
})
console.log('Done populating fields')
return newItemIDs
}
run().catch((error) => {
console.log(`#ERROR# ${error}`)
process.exit(1)
})
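
(A note on the due-date logic in the new script above: the switch only pads postings that land on Saturday or Sunday, pushing both to a Wednesday due date; a Thursday or Friday posting still gets a weekend due date, and holidays are ignored. A minimal standalone sketch of that rule for quick verification:)

```
// Mirrors the due-date rule in the script above: two days out,
// padded only when the PR is posted on a weekend.
function dueDateFrom(datePosted) {
  let daysUntilDue
  switch (datePosted.getDay()) {
    case 0: // Sunday -> due Wednesday
      daysUntilDue = 3
      break
    case 6: // Saturday -> due Wednesday
      daysUntilDue = 4
      break
    default: // weekday -> due two calendar days later
      daysUntilDue = 2
  }
  const millisecPerDay = 24 * 60 * 60 * 1000
  return new Date(datePosted.getTime() + millisecPerDay * daysUntilDue)
}
console.log(dueDateFrom(new Date(2021, 7, 1)).getDay()) // Sunday posting -> 3 (Wednesday)
```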


@@ -1,232 +0,0 @@
# TODO: Convert to JavaScript for language consistency
import json
import logging
import os
import requests
# Constants
endpoint = 'https://api.github.com/graphql'
# ID of the github/github repo
github_repo_id = "MDEwOlJlcG9zaXRvcnkz"
# ID of the docs-reviewers team
docs_reviewers_id = "MDQ6VGVhbTQzMDMxMzk="
# ID of the "Docs content first responder" board
docs_project_id = "MDc6UHJvamVjdDQ1NzI0ODI="
# ID of the "OpenAPI review requests" column on the "Docs content first responder" board
docs_column_id = "PC_lAPNJr_OAEXFQs4A2OFq"
# 100 is an educated guess of how many PRs are opened in a day on the github/github repo
# If we are missing PRs, either increase this number or increase the frequency at which this script is run
num_prs_to_search = 100
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def find_open_prs_for_repo(repo_id: str, num_prs: int):
"""Return data about a specified number of open PRs for a specified repo
Arguments:
repo_id: The node ID of the repo to search
num_prs: The max number of PRs to return
Returns:
Returns a JSON object of this structure:
{
"data": {
"node": {
"pullRequests": {
"nodes": [
{
"id": str,
"isDraft": bool,
"reviewRequests": {
"nodes": [
{
"requestedReviewer": {
"id": str
}
}...
]
},
"projectCards": {
"nodes": [
{
"project": {
"id": str
}
}...
]
}
}...
]
}
}
}
}
"""
query = """query ($repo_id: ID!, $num_prs: Int!) {
node(id: $repo_id) {
... on Repository {
pullRequests(last: $num_prs, states: OPEN) {
nodes {
id
isDraft
reviewRequests(first: 10) {
nodes {
requestedReviewer {
... on Team {
id
}
}
}
}
projectCards(first: 10) {
nodes {
project {
id
}
}
}
}
}
}
}
}
"""
variables = {
"repo_id": github_repo_id,
"num_prs": num_prs
}
response = requests.post(
endpoint,
json={'query': query, 'variables': variables},
headers = {'Authorization': f"bearer {os.environ['TOKEN']}"}
)
response.raise_for_status()
json_response = json.loads(response.text)
if 'errors' in json_response:
raise RuntimeError(f'Error in GraphQL response: {json_response}')
return json_response
def add_prs_to_board(prs_to_add: list, column_id: str):
"""Adds PRs to a column of a project board
Arguments:
prs_to_add: A list of PR node IDs
column_id: The node ID of the column to add the PRs to
Returns:
Nothing
"""
logger.info(f"adding: {prs_to_add}")
mutation = """mutation($pr_id: ID!, $column_id: ID!) {
addProjectCard(input:{contentId: $pr_id, projectColumnId: $column_id}) {
projectColumn {
name
}
}
}"""
for pr_id in prs_to_add:
logger.info(f"Attempting to add {pr_id} to board")
variables = {
"pr_id": pr_id,
"column_id": column_id
}
response = requests.post(
endpoint,
json={'query': mutation, 'variables': variables},
headers = {'Authorization': f"bearer {os.environ['TOKEN']}"}
)
json_response = json.loads(response.text)
if 'errors' in json_response:
logger.info(f"GraphQL error when adding {pr_id}: {json_response}")
def filter_prs(data, reviewer_id: str, project_id):
"""Given data about the draft state, reviewers, and project boards for PRs,
return just the PRs that are:
- not draft
- are requesting a review for the specified team
- are not already on the specified project board
Arguments:
data: A JSON object of this structure:
{
"data": {
"node": {
"pullRequests": {
"nodes": [
{
"id": str,
"isDraft": bool,
"reviewRequests": {
"nodes": [
{
"requestedReviewer": {
"id": str
}
}...
]
},
"projectCards": {
"nodes": [
{
"project": {
"id": str
}
}...
]
}
}...
]
}
}
}
}
reviewer_id: The node ID of the reviewer to filter for
project_id: The project ID of the project to filter against
Returns:
A list of node IDs of the PRs that met the requirements
"""
pr_data = data['data']['node']['pullRequests']['nodes']
prs_to_add = []
for pr in pr_data:
if (
not pr['isDraft'] and
reviewer_id in [req_rev['requestedReviewer']['id'] for req_rev in pr['reviewRequests']['nodes'] if req_rev['requestedReviewer']] and
project_id not in [proj_card['project']['id'] for proj_card in pr['projectCards']['nodes']]
):
prs_to_add.append(pr['id'])
return prs_to_add
def main():
query_data = find_open_prs_for_repo(github_repo_id, num_prs_to_search)
prs_to_add = filter_prs(query_data, docs_reviewers_id, docs_project_id)
add_prs_to_board(prs_to_add, docs_column_id)
if __name__ == "__main__":
main()
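
(One difference worth noting between the deleted Python script above and its JavaScript replacement earlier in this diff: the Python version issued one addProjectCard request per PR, while the JavaScript version batches every add into a single GraphQL request using aliases. A minimal sketch of that aliasing pattern, with hypothetical content IDs:)

```
// Build one GraphQL document with an alias per item, as the JS script does.
const ids = ['PR_node_id_1', 'PR_node_id_2'] // hypothetical PR node IDs
const mutation = `
  mutation($project: ID!) {
    ${ids
      .map(
        (id, i) => `
    pr_${i}: addProjectNextItem(input: { projectId: $project, contentId: "${id}" }) {
      projectNextItem { id }
    }`
      )
      .join('\n')}
  }
`
console.log(mutation) // two aliased addProjectNextItem calls, one round trip
```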


@@ -8,7 +8,6 @@ export default [
'actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d', // v4.0.2
'actions/labeler@5f867a63be70efff62b767459b009290364495eb', // v2.2.0
'actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f', // v2.2.0
- 'actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6', // v2.2.2
'actions/stale@9d6f46564a515a9ea11e7762ab3957ee58ca50da', // v3.0.16
'alex-page/github-project-automation-plus@fdb7991b72040d611e1123d2b75ff10eda9372c9',
'andymckay/labeler@22d5392de2b725cea4b284df5824125054049d84',

.github/labeler.yml (8 lines changed)

@@ -1,8 +1,6 @@
engineering:
- lib/*
- lib/**/*
- - layouts/*
- - layouts/**/*
- middleware/*
- middleware/**/*
- tests/*
@@ -11,5 +9,7 @@ engineering:
- stylesheets/**/*
- script/*
- script/**/*
- - javascripts/*
- - javascripts/**/*
+ - components/*
+ - components/**/*
+ - pages/*
+ - pages/**/*


@@ -39,3 +39,4 @@ jobs:
MERGE_RETRY_SLEEP: '10000' # ten seconds
UPDATE_LABELS: 'automerge,autosquash'
UPDATE_METHOD: 'merge'
+ HUSKY: '0' # Disable pre-commit hooks


@@ -20,7 +20,13 @@ jobs:
REPORT_LABEL: broken link report
REPORT_REPOSITORY: github/docs-content
steps:
- - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Check out repo's default branch
+ uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
- name: npm ci
run: npm ci
- name: npm run build


@@ -20,6 +20,15 @@ jobs:
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
+ - name: Install dependencies
+ run: npm ci
- name: Homogenize frontmatter
run: script/i18n/homogenize-frontmatter.js
@@ -34,4 +43,4 @@ jobs:
env:
# Disable pre-commit hooks; they don't play nicely with add-and-commit
- HUSKY: 0
+ HUSKY: '0'


@@ -1,6 +1,6 @@
- name: Add docs-reviewers request to FR board
+ name: Add docs-reviewers request to the docs-content review board
- # **What it does**: Adds PRs in github/github that requested a review from docs-reviewers to the FR board
+ # **What it does**: Adds PRs in github/github that requested a review from docs-reviewers to the docs-content review board
# **Why we have it**: To catch docs-reviewers requests in github/github
# **Who does it impact**: docs-content maintainers
@@ -18,18 +18,24 @@ jobs:
- name: Check out repo content
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- - name: Set up Python 3.9
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
with:
- python-version: '3.9'
+ node-version: 16.x
+ cache: npm
- name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install requests
+ run: npm install @octokit/graphql
- name: Run script
run: |
- python .github/actions-scripts/fr-add-docs-reviewers-requests.py
+ node .github/actions-scripts/fr-add-docs-reviewers-requests.js
env:
TOKEN: ${{ secrets.DOCS_BOT }}
+ PROJECT_NUMBER: 2936
+ ORGANIZATION: 'github'
+ REPO: 'github'
+ REVIEWER: 'docs-reviewers'
+ # This is an educated guess of how many PRs are opened in a day on the github/github repo
+ # If we are missing PRs, either increase this number or increase the frequency at which this script is run
+ NUM_PRS: 100


@@ -1,6 +1,6 @@
- name: (Dry run) Algolia
+ name: (Dry run) Search indexes
- # **What it does**: On request, dry run Algolia to check for issues with search indexing.
+ # **What it does**: On request, dry run to check for issues with search indexing.
# **Why we have it**: It helps us debug issues with search indexing.
# **Who does it impact**: Docs engineering.
@@ -15,7 +15,8 @@ jobs:
steps:
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- - uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
with:
node-version: 16.x
cache: npm


@@ -3,7 +3,7 @@ name: Enterprise date updater
# **What it does**: Runs on a schedule to update lib/enterprise-dates.json.
# **Why we have it**: The lib/enterprise-dates.json file needs to be up-to-date
# for the .github/workflows/open-enterprise-issue.yml workflow and the
- # tests/content/algolia-search.js test.
+ # tests/content/search.js test.
# **Who does it impact**: Docs engineering, docs content.
on:
@@ -34,6 +34,12 @@ jobs:
- name: Checkout repository code
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
- name: Install Node.js dependencies
run: npm ci
@@ -46,6 +52,9 @@ jobs:
- name: Create pull request
id: create-pull-request
uses: peter-evans/create-pull-request@8c603dbb04b917a9fc2dd991dc54fef54b640b43
+ env:
+ # Disable pre-commit hooks; they don't play nicely here
+ HUSKY: '0'
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.GITHUB_TOKEN }}


@@ -48,7 +48,7 @@ jobs:
owner: owner,
repo: originalRepo,
issue_number: issueNo,
- body: `👋 You opened this issue in `${context.repo.repo}`. Moving forward, we're asking that folks create new issues in the following repositories instead:\n- For issues with the docs site, please submit to the [${process.env.TEAM_ENGINEERING_REPO}](/${owner}/${process.env.TEAM_ENGINEERING_REPO}) repo.\n- For all new content issues, please submit to the [${process.env.TEAM_CONTENT_REPO}](/${owner}/${process.env.TEAM_CONTENT_REPO}) repo.\n\nWe will transfer this issue for you!`
+ body: `👋 You opened this issue in '${context.repo.repo}'. Moving forward, we're asking that folks create new issues in the following repositories instead:\n- For issues with the docs site, please submit to the [${process.env.TEAM_ENGINEERING_REPO}](/${owner}/${process.env.TEAM_ENGINEERING_REPO}) repo.\n- For all new content issues, please submit to the [${process.env.TEAM_CONTENT_REPO}](/${owner}/${process.env.TEAM_CONTENT_REPO}) repo.\n\nWe will transfer this issue for you!`
})
// Transfer the issue to the correct repo


@@ -18,6 +18,12 @@ jobs:
- name: Checkout repository code
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
- name: Install dependencies
run: npm ci


@@ -44,4 +44,4 @@ jobs:
env:
# Disable pre-commit hooks; they don't play nicely with add-and-commit
- HUSKY: 0
+ HUSKY: '0'


@@ -17,6 +17,11 @@ jobs:
HEROKU_API_TOKEN: ${{ secrets.HEROKU_API_TOKEN }}
steps:
- uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
- name: npm ci
run: npm ci
- name: npm run build


@@ -19,9 +19,18 @@ jobs:
if: ${{ github.repository == 'github/docs-internal' }}
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- - name: npm ci
+ - name: Check out repo's default branch
+ uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
+ - name: Install dependencies
run: npm ci
- name: Run script
run: script/remove-stale-staging-apps.js
env:


@@ -23,6 +23,11 @@ jobs:
exit 1 # prevents further steps from running
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
- name: npm ci
run: npm ci
- name: Run scripts
@@ -38,6 +43,9 @@ jobs:
run: rm -rf ./results.md
- name: Create pull request
uses: peter-evans/create-pull-request@8c603dbb04b917a9fc2dd991dc54fef54b640b43
+ env:
+ # Disable pre-commit hooks; they don't play nicely here
+ HUSKY: '0'
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}


@@ -96,10 +96,20 @@ jobs:
- name: Check out repo
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ # Set up npm and run npm ci to run husky to get githooks for LFS
+ - name: Setup node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
+ - name: Install dependencies
+ run: npm ci
- name: Sync repo to branch
uses: repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88
env:
GITHUB_TOKEN: ${{ secrets.OCTOMERGER_PAT_WITH_REPO_AND_WORKFLOW_SCOPE }}
+ CI: true
with:
source_repo: ${{ secrets.SOURCE_REPO }} # https://${access_token}@github.com/github/the-other-repo.git
source_branch: main


@@ -1,4 +1,4 @@
- name: Algolia
+ name: Sync search indexes
# **What it does**: This updates our search indexes after each deployment.
# **Why we have it**: We want our search indexes kept up to date.
@@ -18,7 +18,8 @@ jobs:
steps:
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- - uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
with:
node-version: 16.x
cache: npm
@@ -39,4 +40,4 @@ jobs:
channel: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
bot-token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
color: failure
- text: The last Algolia workflow run for ${{github.repository}} failed. Search actions for `workflow:Algolia`
+ text: The last search index workflow run for ${{github.repository}} failed. Search actions for `workflow:search`


@@ -1,4 +1,4 @@
- name: Algolia Sync Single English Index
+ name: Sync Single English Index
# **What it does**:
# **Why we have it**:
@@ -24,15 +24,16 @@ jobs:
steps:
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- - uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
with:
node-version: 16.x
cache: npm
- - name: npm ci
+ - name: Install dependencies
run: npm ci
- - name: Get version from Algolia label if present; only continue if the label is found.
+ - name: Get version from search label if present; only continue if the label is found.
id: getVersion
- run: $GITHUB_WORKSPACE/.github/actions-scripts/enterprise-algolia-label.js
+ run: $GITHUB_WORKSPACE/.github/actions-scripts/enterprise-search-label.js
- if: ${{ steps.getVersion.outputs.versionToSync }}
name: Sync English index for single version
env:


@@ -27,7 +27,6 @@ jobs:
if: github.repository == 'github/docs' && github.event.pull_request.user.login != 'Octomerger'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
- name: Check for existing requested changes
id: requested-change
uses: actions/github-script@2b34a689ec86a68d8ab9478298f91d5401337b7d


@@ -29,6 +29,11 @@ jobs:
exit 1 # prevents further steps from running
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Setup Node
+ uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
+ with:
+ node-version: 16.x
+ cache: npm
- name: Install Node.js dependencies
run: npm ci
- name: Run updater scripts
@@ -40,6 +45,9 @@ jobs:
- name: Create pull request
id: create-pull-request
uses: peter-evans/create-pull-request@8c603dbb04b917a9fc2dd991dc54fef54b640b43
+ env:
+ # Disable pre-commit hooks; they don't play nicely here
+ HUSKY: '0'
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.GITHUB_TOKEN }}
@@ -52,6 +60,7 @@ jobs:
If CI does not pass or other problems arise, contact #docs-engineering on slack."
labels: automerge,autoupdate
branch: graphql-schema-update
+ - if: ${{ failure() && env.FREEZE != 'true'}}
name: Delete remote branch (if previous steps failed)
uses: dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911

.husky/post-checkout (new executable file, 11 lines)

@@ -0,0 +1,11 @@
#!/bin/sh
[ -n "$CI" ] && exit 0
if command -v git-lfs >/dev/null 2>&1
then
if ! (git lfs post-checkout "$@")
then
printf >&2 "\nGitHub Docs requires Git LFS but using the 'git-lfs' on your path failed.\n"
fi
else
printf >&2 "\nGitHub Docs requires Git LFS but 'git-lfs' was not found on your path.\nLearn how to install Git LFS at <https://git.io/JBCId>.\n"
fi

.husky/post-commit (new executable file, 11 lines)

@@ -0,0 +1,11 @@
#!/bin/sh
[ -n "$CI" ] && exit 0
if command -v git-lfs >/dev/null 2>&1
then
if ! (git lfs post-commit "$@")
then
printf >&2 "\nGitHub Docs requires Git LFS but using the 'git-lfs' on your path failed.\n"
fi
else
printf >&2 "\nGitHub Docs requires Git LFS but 'git-lfs' was not found on your path.\nLearn how to install Git LFS at <https://git.io/JBCId>.\n"
fi

.husky/post-merge (new executable file, 11 lines)

@@ -0,0 +1,11 @@
#!/bin/sh
[ -n "$CI" ] && exit 0
if command -v git-lfs >/dev/null 2>&1
then
if ! (git lfs post-merge "$@")
then
printf >&2 "\nGitHub Docs requires Git LFS but using the 'git-lfs' on your path failed.\n"
fi
else
printf >&2 "\nGitHub Docs requires Git LFS but 'git-lfs' was not found on your path.\nLearn how to install Git LFS at <https://git.io/JBCId>.\n"
fi


@@ -1,4 +1,5 @@
#!/bin/sh
+ [ -n "$CI" ] && exit 0
. "$(dirname "$0")/_/husky.sh"
node script/prevent-translation-commits.js


@@ -1,4 +1,15 @@
#!/bin/sh
+ [ -n "$CI" ] && exit 0
+ if command -v git-lfs >/dev/null 2>&1
+ then
+ if ! (git lfs pre-push "$@")
+ then
+ printf >&2 "\nGitHub Docs requires Git LFS but using the 'git-lfs' on your path failed.\n"
+ fi
+ else
+ printf >&2 "\nGitHub Docs requires Git LFS but 'git-lfs' was not found on your path.\nLearn how to install Git LFS at <https://git.io/JBCId>.\n"
+ fi
. "$(dirname "$0")/_/husky.sh"
npm run prevent-pushes-to-main


@@ -23,6 +23,8 @@ If you spot something new, open an issue using a [template](https://github.com/g
### Ready to make a change? Fork the repo
+ You'll want to [install Git LFS](https://docs.github.com/en/github/managing-large-files/versioning-large-files/installing-git-large-file-storage).
Fork using GitHub Desktop:
- [Getting started with GitHub Desktop](https://docs.github.com/en/desktop/installing-and-configuring-github-desktop/getting-started-with-github-desktop) will guide you through setting up Desktop.


@@ -37,7 +37,6 @@ FROM all_deps as builder
ENV NODE_ENV production
- COPY javascripts ./javascripts
COPY stylesheets ./stylesheets
COPY pages ./pages
COPY components ./components
@@ -48,6 +47,7 @@ COPY content/index.md ./content/index.md
COPY next.config.js ./next.config.js
COPY tsconfig.json ./tsconfig.json
COPY next-env.d.ts ./next-env.d.ts
+ RUN npx tsc --noEmit
@@ -77,7 +77,7 @@ COPY --chown=node:node --from=builder /usr/src/docs/.next /usr/src/docs/.next
# We should always be running in production mode
ENV NODE_ENV production
- # Use Lunr instead of Algolia
+ # Hide iframes, add warnings to external links
ENV AIRGAP true
# Copy only what's needed to run the server


@@ -58,8 +58,7 @@ In addition to the README you're reading right now, this repo includes other REA
- [data/variables/README.md](data/variables/README.md)
- [includes/liquid-tags/README.md](includes/liquid-tags/README.md)
- [includes/README.md](includes/README.md)
- - [javascripts/README.md](javascripts/README.md)
- - [layouts/README.md](layouts/README.md)
+ - [components/README.md](components/README.md)
- [lib/liquid-tags/README.md](lib/liquid-tags/README.md)
- [middleware/README.md](middleware/README.md)
- [script/README.md](script/README.md)

Binary image files not shown: dozens of screenshots were replaced, most recompressed to roughly half their previous size, and a handful of new images were added. Some files were not shown because too many files have changed in this diff.