Update src/workflows to TypeScript (#53220)
@@ -16,7 +16,7 @@ runs:
       BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
       GITHUB_TOKEN: ${{ inputs.token }}
     shell: bash
-    run: node src/early-access/scripts/what-docs-early-access-branch.js
+    run: npm run what-docs-early-access-branch

   - name: Clone
     uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
.github/actions/labeler/action.yml (vendored, 2 changes)
@@ -23,7 +23,7 @@ runs:
   using: 'composite'
   steps:
     - name: Add label to an issue or pr
-      run: node .github/actions/labeler/labeler.js
+      run: npm run labeler
       shell: bash
       env:
         GITHUB_TOKEN: ${{ inputs.token }}
@@ -1,10 +1,22 @@
 /* See function main in this file for documentation */

 import coreLib from '@actions/core'
+import { type Octokit } from '@octokit/rest'
+import { CoreInject } from '@/links/scripts/action-injections'

-import github from '#src/workflows/github.js'
-import { getActionContext } from '#src/workflows/action-context.js'
-import { boolEnvVar } from '#src/workflows/get-env-inputs.js'
+import github from '#src/workflows/github.ts'
+import { getActionContext } from '#src/workflows/action-context.ts'
+import { boolEnvVar } from '#src/workflows/get-env-inputs.ts'
+
+type Options = {
+  addLabels?: string[]
+  removeLabels?: string[]
+  ignoreIfAssigned?: boolean
+  ignoreIfLabeled?: boolean
+  issue_number?: number
+  owner?: string
+  repo?: string
+}

 // When this file is invoked directly from action as opposed to being imported
 if (import.meta.url.endsWith(process.argv[1])) {
@@ -16,28 +28,19 @@ if (import.meta.url.endsWith(process.argv[1])) {

   const octokit = github()

-  const opts = {
-    addLabels: ADD_LABELS,
-    removeLabels: REMOVE_LABELS,
+  const opts: Options = {
     ignoreIfAssigned: boolEnvVar('IGNORE_IF_ASSIGNED'),
     ignoreIfLabeled: boolEnvVar('IGNORE_IF_LABELED'),
   }

   // labels come in comma separated from actions
-  let addLabels
-
-  if (opts.addLabels) {
-    addLabels = [...opts.addLabels.split(',')]
-    opts.addLabels = addLabels.map((l) => l.trim())
+  if (typeof ADD_LABELS === 'string') {
+    opts.addLabels = [...ADD_LABELS.split(',')].map((l) => l.trim())
+  } else {
+    opts.addLabels = []
   }

-  let removeLabels
-
-  if (opts.removeLabels) {
-    removeLabels = [...opts.removeLabels.split(',')]
-    opts.removeLabels = removeLabels.map((l) => l.trim())
+  if (typeof REMOVE_LABELS === 'string') {
+    opts.removeLabels = [...REMOVE_LABELS.split(',')].map((l) => l.trim())
+  } else {
+    opts.removeLabels = []
   }
@@ -54,7 +57,7 @@ if (import.meta.url.endsWith(process.argv[1])) {
   opts.owner = owner
   opts.repo = repo

-  main(coreLib, octokit, opts, {})
+  main(coreLib, octokit, opts)
 }
 /*
@@ -69,22 +72,31 @@ if (import.meta.url.endsWith(process.argv[1])) {
  * ignoreIfAssigned {boolean} don't apply labels if there are assignees
  * ignoreIfLabeled {boolean} don't apply labels if there are already labels added
  */
-export default async function main(core, octokit, opts = {}) {
+export default async function main(
+  core: typeof coreLib | CoreInject,
+  octokit: Octokit,
+  opts: Options = {},
+) {
   if (opts.addLabels?.length === 0 && opts.removeLabels?.length === 0) {
     core.info('No labels to add or remove specified, nothing to do.')
     return
   }

+  if (!opts.issue_number || !opts.owner || !opts.repo) {
+    throw new Error(`Missing required parameters ${JSON.stringify(opts)}`)
+  }
+  const issueOpts = {
+    issue_number: opts.issue_number,
+    owner: opts.owner,
+    repo: opts.repo,
+  }
+
   if (opts.ignoreIfAssigned || opts.ignoreIfLabeled) {
     try {
-      const { data } = await octokit.issues.get({
-        issue_number: opts.issue_number,
-        owner: opts.owner,
-        repo: opts.repo,
-      })
+      const { data } = await octokit.issues.get(issueOpts)

       if (opts.ignoreIfAssigned) {
-        if (data.assignees.length > 0) {
+        if (data.assignees?.length) {
           core.info(
             `ignore-if-assigned is true: not applying labels since there's ${data.assignees.length} assignees`,
           )
@@ -105,31 +117,24 @@ export default async function main(core, octokit, opts = {}) {
     }
   }

-  if (opts.removeLabels?.length > 0) {
+  if (opts.removeLabels?.length) {
     // removing a label fails if the label isn't already applied
     let appliedLabels = []

     try {
-      const { data } = await octokit.issues.get({
-        issue_number: opts.issue_number,
-        owner: opts.owner,
-        repo: opts.repo,
-      })
-
-      appliedLabels = data.labels.map((l) => l.name)
+      const { data } = await octokit.issues.get(issueOpts)
+      appliedLabels = data.labels.map((l) => (typeof l === 'string' ? l : l.name))
     } catch (err) {
       throw new Error(`Error getting issue: ${err}`)
     }

-    opts.removeLabels = opts.removeLabels.filter((l) => appliedLabels.includes(l))
+    opts.removeLabels = opts.removeLabels?.filter((l) => appliedLabels.includes(l))

     await Promise.all(
       opts.removeLabels.map(async (label) => {
         try {
           await octokit.issues.removeLabel({
-            issue_number: opts.issue_number,
-            owner: opts.owner,
-            repo: opts.repo,
+            ...issueOpts,
             name: label,
           })
         } catch (err) {
@@ -138,17 +143,15 @@ export default async function main(core, octokit, opts = {}) {
       }),
     )

-    if (opts.removeLabels.length > 0) {
+    if (opts.removeLabels?.length) {
       core.info(`Removed labels: ${opts.removeLabels.join(', ')}`)
     }
   }

-  if (opts.addLabels?.length > 0) {
+  if (opts.addLabels?.length) {
     try {
       await octokit.issues.addLabels({
-        issue_number: opts.issue_number,
-        owner: opts.owner,
-        repo: opts.repo,
+        ...issueOpts,
         labels: opts.addLabels,
       })
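A note on the guard in the labeler script above: `import.meta.url` ends with `process.argv[1]` only when the file is executed directly rather than imported, which is what lets the action run the CLI block while tests import `main()` on its own. A minimal sketch, assuming a script run as `tsx src/example.ts`:

```ts
// import.meta.url   -> 'file:///repo/src/example.ts'
// process.argv[1]   -> '/repo/src/example.ts'
// The endsWith check is truthy only for direct execution, so importing this
// module (as the injected test variant does) skips the CLI block entirely.
if (import.meta.url.endsWith(process.argv[1])) {
  console.log('invoked as a script')
}
```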
@@ -126,7 +126,7 @@ jobs:
           CHECK_INTERVAL: 10000
           EXPECTED_SHA: ${{ github.sha }}
           CANARY_BUILD_URL: https://ghdocs-prod-canary.azurewebsites.net/_build
-        run: src/workflows/check-canary-slots.js
+        run: npm run check-canary-slots

       - name: 'Swap canary slot to production'
         run: |
@@ -114,7 +114,7 @@ jobs:
           CHECK_INTERVAL: 10000
           EXPECTED_SHA: ${{ github.sha }}
           CANARY_BUILD_URL: https://ghdocs-staging-canary.azurewebsites.net/_build
-        run: src/workflows/check-canary-slots.js
+        run: npm run check-canary-slots

       - name: 'Swap deployment slot to production'
         run: |
.github/workflows/docs-review-collect.yml (vendored, 2 changes)
@@ -33,7 +33,7 @@ jobs:

       - name: Run script for audit-log-allowlists
         run: |
-          node src/workflows/fr-add-docs-reviewers-requests.js
+          npm run fr-add-docs-reviewers-requests
         env:
           TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}
           PROJECT_NUMBER: 2936
.github/workflows/enterprise-dates.yml (vendored, 5 changes)
@@ -29,8 +29,7 @@ jobs:
       - uses: ./.github/actions/node-npm-setup

       - name: Run src/ghes-releases/scripts/update-enterprise-dates.js
-        run: |
-          src/ghes-releases/scripts/update-enterprise-dates.js
+        run: npm run update-enterprise-dates
         env:
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}
@@ -57,7 +56,7 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_READPUBLICKEY }}
           AUTOMERGE_PR_NUMBER: ${{ steps.create-pull-request.outputs.pull-request-number }}
-        run: node src/workflows/enable-automerge.js
+        run: npm run enable-automerge

       - if: ${{ failure() }}
         name: Delete remote branch (if previous steps failed)
@@ -24,14 +24,12 @@ jobs:
       - uses: ./.github/actions/node-npm-setup

       - name: Create an enterprise release issue
-        run: |
-          src/ghes-releases/scripts/create-enterprise-issue.js release
+        run: npm run create-enterprise-issue -- release
         env:
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}

       - name: Create an enterprise deprecation issue
-        run: |
-          src/ghes-releases/scripts/create-enterprise-issue.js deprecation
+        run: npm run create-enterprise-issue -- deprecation
         env:
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}
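A side note on the `--` in the two steps above: npm forwards everything after `--` to the underlying script, so `npm run create-enterprise-issue -- release` ends up running `tsx src/ghes-releases/scripts/create-enterprise-issue.js release`. A minimal sketch of reading that argument (the validation here is illustrative, not code from this commit):

```ts
// After npm strips `--`, the forwarded word is a plain positional argument.
const issueType = process.argv[2] // 'release' or 'deprecation'
if (issueType !== 'release' && issueType !== 'deprecation') {
  throw new Error(`Expected 'release' or 'deprecation', got: ${issueType}`)
}
```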
.github/workflows/index-general-search.yml (vendored, 2 changes)
@@ -207,7 +207,7 @@ jobs:
           FASTLY_TOKEN: ${{ secrets.FASTLY_TOKEN }}
           FASTLY_SERVICE_ID: ${{ secrets.FASTLY_SERVICE_ID }}
           FASTLY_SURROGATE_KEY: api-search:${{ matrix.language }}
-        run: src/workflows/purge-fastly-edge-cache.js
+        run: npm run purge-fastly-edge-cache

       - uses: ./.github/actions/slack-alert
         if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
.github/workflows/link-check-daily.yml (vendored, 2 changes)
@@ -33,7 +33,7 @@ jobs:
         env:
           BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_READPUBLICKEY }}
-        run: node src/early-access/scripts/what-docs-early-access-branch.js
+        run: npm run what-docs-early-access-branch

       - name: Check out docs-early-access too, if internal repo
         if: ${{ github.repository == 'github/docs-internal' }}
.github/workflows/manually-purge-fastly.yml (vendored, 2 changes)
@@ -27,4 +27,4 @@ jobs:
           FASTLY_TOKEN: ${{ secrets.FASTLY_TOKEN }}
           FASTLY_SERVICE_ID: ${{ secrets.FASTLY_SERVICE_ID }}
           FASTLY_SURROGATE_KEY: 'manual-purge'
-        run: src/workflows/purge-fastly-edge-cache.js
+        run: npm run purge-fastly-edge-cache
.github/workflows/orphaned-files-check.yml (vendored, 2 changes)
@@ -16,7 +16,7 @@ on:
       - 'package*.json'
       - src/assets/scripts/find-orphaned-assets.js
       - src/content-render/scripts/reusables-cli/find/unused.ts
-      - src/workflows/walk-files.js
+      - src/workflows/walk-files.ts
       - src/languages/lib/languages.js
       - .github/actions/clone-translations/action.yml
       - .github/actions/node-npm-setup/action.yml
.github/workflows/os-ready-for-review.yml (vendored, 2 changes)
@@ -58,7 +58,7 @@ jobs:

       - name: Run script
         run: |
-          node src/workflows/ready-for-docs-review.js
+          npm run ready-for-docs-review
         env:
           TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}
           PROJECT_NUMBER: 2936
.github/workflows/purge-fastly.yml (vendored, 4 changes)
@@ -45,13 +45,13 @@ jobs:

       - name: Purge Fastly edge cache independent of language
         if: ${{ inputs.nuke_all }}
-        run: src/workflows/purge-fastly-edge-cache.js
+        run: npm run purge-fastly-edge-cache

       - name: Purge Fastly edge cache per language
         if: ${{ !inputs.nuke_all }}
         env:
           LANGUAGES: ${{ inputs.languages }}
-        run: src/languages/scripts/purge-fastly-edge-cache-per-language.js
+        run: npm run purge-fastly-edge-cache-per-language

       - uses: ./.github/actions/slack-alert
         if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
@@ -29,7 +29,7 @@ jobs:
         env:
           # Necessary to be able to delete deployment environments
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WORKFLOW_READORG }}
-        run: src/workflows/purge-old-deployment-environments.js
+        run: npm run purge-old-deployment-environments

       - uses: ./.github/actions/slack-alert
         if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
@@ -26,7 +26,7 @@ jobs:
         env:
           # Necessary to be able to delete deployment environments
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WORKFLOW_READORG }}
-        run: src/workflows/purge-old-workflow-runs.js
+        run: npm run purge-old-workflow-runs

       - uses: ./.github/actions/slack-alert
         if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
.github/workflows/ready-for-doc-review.yml (vendored, 2 changes)
@@ -45,7 +45,7 @@ jobs:

       - name: Run script
         run: |
-          node src/workflows/ready-for-docs-review.js
+          npm run ready-for-docs-review
         env:
           TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}
           PROJECT_NUMBER: 2936
.github/workflows/sync-graphql.yml (vendored, 5 changes)
@@ -25,8 +25,7 @@ jobs:
         env:
           # need to use a token from a user with access to github/github for this step
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_WRITEORG_PROJECT }}
-        run: |
-          src/graphql/scripts/sync.js
+        run: npm run graphql-sync
       - name: Create pull request
         id: create-pull-request
         uses: peter-evans/create-pull-request@6cd32fd93684475c31847837f87bb135d40a2b79 # pin @v7.0.3
@@ -53,7 +52,7 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_READPUBLICKEY }}
           AUTOMERGE_PR_NUMBER: ${{ steps.create-pull-request.outputs.pull-request-number }}
-        run: node src/workflows/enable-automerge.js
+        run: npm run enable-automerge

       - if: ${{ failure() }}
         name: Delete remote branch (if previous steps failed)
package.json (44 changes)
@@ -22,73 +22,87 @@
     "archive-version": "tsx --max-old-space-size=16384 src/ghes-releases/scripts/archive-version.ts",
     "audit-log-sync": "tsx src/audit-logs/scripts/sync.ts",
     "build": "next build",
-    "check-content-type": "node src/workflows/check-content-type.js",
+    "check-canary-slots": "tsx src/workflows/check-canary-slots.ts",
+    "check-content-type": "tsx src/workflows/check-content-type.ts",
     "check-github-github-links": "tsx src/links/scripts/check-github-github-links.ts",
+    "close-dangling-prs": "tsx src/workflows/close-dangling-prs.ts",
+    "cmp-files": "tsx src/workflows/cmp-files.ts",
+    "content-changes-table-comment": "tsx src/workflows/content-changes-table-comment.ts",
-    "copy-fixture-data": "node src/tests/scripts/copy-fixture-data.js",
+    "copy-fixture-data": "tsx src/tests/scripts/copy-fixture-data.js",
     "count-translation-corruptions": "tsx src/languages/scripts/count-translation-corruptions.ts",
-    "create-acr-token": "tsx src/workflows/acr-create-token.js",
+    "create-acr-token": "tsx src/workflows/acr-create-token.ts",
     "create-enterprise-issue": "tsx src/ghes-releases/scripts/create-enterprise-issue.js",
     "debug": "cross-env NODE_ENV=development ENABLED_LANGUAGES=en nodemon --inspect src/frame/server.ts",
+    "delete-orphan-translation-files": "tsx src/workflows/delete-orphan-translation-files.ts",
     "deleted-assets-pr-comment": "tsx src/assets/scripts/deleted-assets-pr-comment.ts",
     "deleted-features-pr-comment": "tsx src/data-directory/scripts/deleted-features-pr-comment.ts",
     "dev": "cross-env npm start",
+    "enable-automerge": "tsx src/workflows/enable-automerge.ts",
     "find-orphaned-assets": "tsx src/assets/scripts/find-orphaned-assets.ts",
     "find-orphaned-features": "tsx src/data-directory/scripts/find-orphaned-features/index.ts",
+    "find-past-built-pr": "tsx src/workflows/find-past-built-pr.ts",
     "find-unused-variables": "tsx src/content-linter/scripts/find-unsed-variables.ts",
     "fixture-dev": "cross-env ROOT=src/fixtures/fixtures npm start",
     "fixture-test": "cross-env ROOT=src/fixtures/fixtures npm test -- src/fixtures/tests",
+    "fr-add-docs-reviewers-requests": "tsx src/workflows/fr-add-docs-reviewers-requests.ts",
     "general-search-scrape": "tsx src/search/scripts/scrape/scrape-cli.ts",
     "general-search-scrape-server": "cross-env NODE_ENV=production PORT=4002 MINIMAL_RENDER=true CHANGELOG_DISABLED=true tsx src/frame/server.ts",
     "ghes-release-scrape-with-server": "cross-env GHES_RELEASE=1 start-server-and-test general-search-scrape-server 4002 general-search-scrape",
     "general-search-scrape-with-server": "cross-env NODE_OPTIONS='--max_old_space_size=8192' start-server-and-test general-search-scrape-server 4002 general-search-scrape",
     "graphql-sync": "tsx src/graphql/scripts/sync.js",
     "index": "tsx src/search/scripts/index/index-cli autocomplete docs-internal-data",
     "index-ai-search-autocomplete": "tsx src/search/scripts/index/index-cli ai-search-autocomplete",
     "index-general-autocomplete": "tsx src/search/scripts/index/index-cli general-autocomplete",
     "index-general-search": "tsx src/search/scripts/index/index-cli general-search",
     "index-test-fixtures": "./src/search/scripts/index-test-fixtures.sh",
+    "labeler": "tsx .github/actions/labeler/labeler.ts",
     "lint": "eslint '**/*.{js,mjs,ts,tsx}'",
     "lint-content": "tsx src/content-linter/scripts/lint-content.js",
     "lint-translation": "vitest src/content-linter/tests/lint-files.js",
     "liquid-markdown-tables": "tsx src/tools/scripts/liquid-markdown-tables/index.ts",
     "generate-code-scanning-query-list": "tsx src/code-scanning/scripts/generate-code-scanning-query-list.ts",
     "generate-content-linter-docs": "tsx src/content-linter/scripts/generate-docs.ts",
-    "move-content": "node src/content-render/scripts/move-content.js",
-    "openapi-docs": "node src/rest/docs.js",
+    "move-content": "tsx src/content-render/scripts/move-content.js",
+    "openapi-docs": "tsx src/rest/docs.js",
     "playwright-test": "playwright test --config src/fixtures/playwright.config.ts --project=\"Google Chrome\"",
-    "post-lints": "node src/content-linter/scripts/post-lints.js",
+    "post-lints": "tsx src/content-linter/scripts/post-lints.js",
     "postinstall": "cp package-lock.json .installed.package-lock.json && echo \"Updated .installed.package-lock.json\" # see husky/post-checkout and husky/post-merge",
     "precompute-pageinfo": "tsx src/pageinfo/scripts/precompute-pageinfo.ts",
     "prepare": "husky src/workflows/husky",
     "prettier": "prettier -w \"**/*.{ts,tsx,js,mjs,scss,yml,yaml}\"",
     "prettier-check": "prettier -c \"**/*.{ts,tsx,js,mjs,scss,yml,yaml}\"",
-    "prevent-pushes-to-main": "node src/workflows/prevent-pushes-to-main.js",
-    "release-banner": "node src/ghes-releases/scripts/release-banner.js",
+    "prevent-pushes-to-main": "tsx src/workflows/prevent-pushes-to-main.ts",
+    "purge-fastly-edge-cache": "tsx src/workflows/purge-fastly-edge-cache.ts",
+    "purge-fastly-edge-cache-per-language": "tsx src/languages/scripts/purge-fastly-edge-cache-per-language.js",
+    "purge-old-deployment-environments": "tsx src/workflows/purge-old-deployment-environments.ts",
+    "purge-old-workflow-runs": "tsx src/workflows/purge-old-workflow-runs.js",
+    "ready-for-docs-review": "tsx src/workflows/ready-for-docs-review.ts",
+    "release-banner": "tsx src/ghes-releases/scripts/release-banner.js",
     "reusables": "tsx src/content-render/scripts/reusables-cli.ts",
-    "remove-version-markup": "node src/ghes-releases/scripts/remove-version-markup.js",
+    "remove-version-markup": "tsx src/ghes-releases/scripts/remove-version-markup.js",
     "rendered-content-link-checker": "tsx src/links/scripts/rendered-content-link-checker.ts",
     "rendered-content-link-checker-cli": "tsx src/links/scripts/rendered-content-link-checker-cli.ts",
-    "rest-dev": "node src/rest/scripts/update-files.js",
+    "rest-dev": "tsx src/rest/scripts/update-files.js",
     "show-action-deps": "echo 'Action Dependencies:' && rg '^[\\s|-]*(uses:.*)$' .github -I -N --no-heading -r '$1$2' | sort | uniq | cut -c 7-",
     "start": "cross-env NODE_ENV=development ENABLED_LANGUAGES=en nodemon src/frame/server.ts",
     "start-all-languages": "cross-env NODE_ENV=development tsx src/frame/server.ts",
     "start-for-playwright": "cross-env ROOT=src/fixtures/fixtures TRANSLATIONS_FIXTURE_ROOT=src/fixtures/fixtures/translations ENABLED_LANGUAGES=en,ja NODE_ENV=test tsx src/frame/server.ts",
-    "symlink-from-local-repo": "node src/early-access/scripts/symlink-from-local-repo.js",
+    "symlink-from-local-repo": "tsx src/early-access/scripts/symlink-from-local-repo.js",
     "sync-rest": "tsx src/rest/scripts/update-files.ts",
     "sync-secret-scanning": "tsx src/secret-scanning/scripts/sync.ts",
     "sync-webhooks": "npx tsx src/rest/scripts/update-files.ts -o webhooks",
     "test": "vitest",
-    "test-local-dev": "node src/workflows/test-local-dev.js",
+    "test-local-dev": "tsx src/workflows/test-local-dev.ts",
     "test-moved-content": "tsx src/content-render/scripts/test-moved-content.ts",
     "tsc": "tsc --noEmit",
-    "unallowed-contributions": "node src/workflows/unallowed-contributions.js",
-    "update-data-and-image-paths": "node src/early-access/scripts/update-data-and-image-paths.js",
+    "unallowed-contributions": "tsx src/workflows/unallowed-contributions.ts",
+    "update-data-and-image-paths": "tsx src/early-access/scripts/update-data-and-image-paths.js",
     "update-enterprise-dates": "tsx src/ghes-releases/scripts/update-enterprise-dates.js",
     "update-internal-links": "tsx src/links/scripts/update-internal-links.ts",
     "validate-asset-images": "tsx src/assets/scripts/validate-asset-images.ts",
     "validate-github-github-docs-urls": "tsx src/links/scripts/validate-github-github-docs-urls/index.ts",
-    "warmup-remotejson": "tsx src/archives/scripts/warmup-remotejson.ts"
+    "warmup-remotejson": "tsx src/archives/scripts/warmup-remotejson.ts",
+    "what-docs-early-access-branch": "tsx src/early-access/scripts/what-docs-early-access-branch.js"
   },
   "lint-staged": {
     "*.{js,mjs,ts,tsx}": "eslint --cache --fix",
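All of the workflow steps and git hooks in this diff now go through `npm run`, which resolves the name against the scripts block above with `node_modules/.bin` on the PATH, so `tsx` needs no global install. A sketch of the equivalence:

```ts
// `npm run check-canary-slots` is equivalent to running, from the repo root:
//   ./node_modules/.bin/tsx src/workflows/check-canary-slots.ts
// which is why the workflow steps could drop the explicit `node <path>` form.
import { execSync } from 'child_process'
execSync('npm run check-canary-slots', { stdio: 'inherit' })
```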
@@ -12,8 +12,8 @@ import path from 'path'
 import { program } from 'commander'
 import walk from 'walk-sync'

-import walkFiles from '#src/workflows/walk-files.js'
-import languages from '#src/languages/lib/languages.js'
+import walkFiles from '@/workflows/walk-files'
+import languages from '@/languages/lib/languages'

 const EXCEPTIONS = new Set([
   'assets/images/site/favicon.ico',
@@ -8,7 +8,7 @@ import { applyFixes } from 'markdownlint-rule-helpers'
 import boxen from 'boxen'
 import ora from 'ora'

-import walkFiles from '#src/workflows/walk-files.js'
+import walkFiles from '#src/workflows/walk-files.ts'
 import { allConfig, allRules, customRules } from '../lib/helpers/get-rules.js'
 import { customConfig, githubDocsFrontmatterConfig } from '../style/github-docs.js'
 import { defaultConfig } from '../lib/default-markdownlint-options.js'
@@ -4,8 +4,8 @@ import { program } from 'commander'
 import fs from 'fs'
 import coreLib from '@actions/core'

-import github from '#src/workflows/github.js'
-import { getEnvInputs } from '#src/workflows/get-env-inputs.js'
+import github from '#src/workflows/github.ts'
+import { getEnvInputs } from '#src/workflows/get-env-inputs.ts'
 import { createReportIssue, linkReports } from '#src/workflows/issue-report.js'

 // [start-readme]
@@ -15,7 +15,7 @@ import { execFileSync } from 'child_process'
 import frontmatter from '#src/frame/lib/read-frontmatter.js'
 import patterns from '#src/frame/lib/patterns.js'
 import addRedirectToFrontmatter from '#src/redirects/scripts/helpers/add-redirect-to-frontmatter.js'
-import walkFiles from '#src/workflows/walk-files.js'
+import walkFiles from '#src/workflows/walk-files.ts'

 const contentFiles = walkFiles('content', '.md', { includeEarlyAccess: true })
 const contentDir = path.posix.join(process.cwd(), 'content')
@@ -10,7 +10,7 @@
 import fs from 'fs'
 import path from 'path'
 import { program } from 'commander'
-import walkFiles from '#src/workflows/walk-files.js'
+import walkFiles from '#src/workflows/walk-files.ts'
 import { escapeRegExp } from 'lodash-es'
 import patterns from '#src/frame/lib/patterns.js'
@@ -7,8 +7,8 @@ import walk from 'walk-sync'
 import matter from 'gray-matter'

 import { latest, oldestSupported } from '#src/versions/lib/enterprise-server-releases.js'
-import { getContents } from '#src/workflows/git-utils.js'
-import github from '#src/workflows/github.js'
+import { getContents } from '#src/workflows/git-utils.ts'
+import github from '#src/workflows/github.ts'

 // Required by github() to authenticate
 if (!process.env.GITHUB_TOKEN) {
@@ -14,7 +14,7 @@ import frontmatter from '#src/frame/lib/read-frontmatter.js'
 import removeLiquidStatements from './remove-liquid-statements.js'
 import removeDeprecatedFrontmatter from './remove-deprecated-frontmatter.js'
 import { all, getNextReleaseNumber } from '#src/versions/lib/enterprise-server-releases.js'
-import walkFiles from '#src/workflows/walk-files.js'
+import walkFiles from '#src/workflows/walk-files.ts'

 program
   .description(
@@ -11,7 +11,7 @@ import { fileURLToPath } from 'url'
 import path from 'path'
 import fs from 'fs/promises'

-import { getContents } from '#src/workflows/git-utils.js'
+import { getContents } from '#src/workflows/git-utils.ts'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const enterpriseDatesFile = path.join(__dirname, '../lib/enterprise-dates.json')
@@ -8,7 +8,7 @@ import { slug } from 'github-slugger'
 import yaml from 'js-yaml'
 import walk from 'walk-sync'

-import { getContents, getDirectoryContents } from '#src/workflows/git-utils.js'
+import { getContents, getDirectoryContents } from '#src/workflows/git-utils.ts'
 import permissionSchema from './permission-list-schema.js'
 import enabledSchema from './enabled-list-schema.js'
 import { validateJson } from '#src/tests/lib/validate-json-schema.js'
@@ -4,7 +4,7 @@ import path from 'path'
 import { mkdirp } from 'mkdirp'
 import yaml from 'js-yaml'
 import { execSync } from 'child_process'
-import { getContents, hasMatchingRef } from '#src/workflows/git-utils.js'
+import { getContents, hasMatchingRef } from '#src/workflows/git-utils.ts'
 import { allVersions } from '#src/versions/lib/all-versions.js'
 import processPreviews from './utils/process-previews.js'
 import processUpcomingChanges from './utils/process-upcoming-changes.js'
@@ -3,7 +3,7 @@
 import { languageKeys } from '#src/languages/lib/languages.js'

 import { makeLanguageSurrogateKey } from '#src/frame/middleware/set-fastly-surrogate-key.js'
-import purgeEdgeCache from '#src/workflows/purge-edge-cache.js'
+import purgeEdgeCache from '#src/workflows/purge-edge-cache.ts'

 /**
  * In simple terms, this script sends purge commands for...
@@ -17,7 +17,7 @@ import fs from 'fs/promises'
 import got, { RequestError } from 'got'
 import { program } from 'commander'

-import { getContents, getPathsWithMatchingStrings } from 'src/workflows/git-utils.js'
+import { getContents, getPathsWithMatchingStrings } from 'src/workflows/git-utils'

 if (!process.env.GITHUB_TOKEN) {
   throw new Error('Error! You must have a GITHUB_TOKEN set in an .env file to run this script.')
@@ -18,7 +18,7 @@ import yaml from 'js-yaml'

 import { updateInternalLinks } from '#src/links/lib/update-internal-links.js'
 import frontmatter from 'src/frame/lib/read-frontmatter.js'
-import walkFiles from 'src/workflows/walk-files.js'
+import walkFiles from 'src/workflows/walk-files'

 program
   .description('Update internal links in content files')
@@ -15,7 +15,7 @@ import { allVersions, getDocsVersion } from '#src/versions/lib/all-versions.js'
 import { REST_DATA_DIR, REST_SCHEMA_FILENAME } from '../lib/index.js'
 import { nonAutomatedRestPaths } from '../lib/config.js'
 import { deprecated } from '#src/versions/lib/enterprise-server-releases.js'
-import walkFiles from '#src/workflows/walk-files.js'
+import walkFiles from '#src/workflows/walk-files.ts'

 export async function getDiffOpenAPIContentRest() {
   const contentFiles = getAutomatedMarkdownFiles('content/rest')
@@ -21,7 +21,7 @@ export function getActionContext() {
     context.owner = context.repository.owner.login
     context.repo = context.repository.name
   } else {
-    const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
+    const [owner, repo] = process.env.GITHUB_REPOSITORY?.split('/') || []
     context.owner = owner
     context.repo = repo
   }
@@ -2,14 +2,14 @@
 import { execSync } from 'child_process'
 import yaml from 'js-yaml'

-const slotName = process.env.SLOT_NAME
-const appServiceName = process.env.APP_SERVICE_NAME
-const resourceGroupName = process.env.RESOURCE_GROUP_NAME
-const expectedSHA = process.env.EXPECTED_SHA
-const waitDuration = parseInt(process.env.CHECK_INTERVAL, 10) || 10000
-const maxWaitingTimeSeconds = parseInt(process.MAX_WAITING_TIME || 10 * 60 * 1000, 10)
+const slotName = process.env.SLOT_NAME || ''
+const appServiceName = process.env.APP_SERVICE_NAME || ''
+const resourceGroupName = process.env.RESOURCE_GROUP_NAME || ''
+const expectedSHA = process.env.EXPECTED_SHA || ''
+const waitDuration = parseInt(process.env.CHECK_INTERVAL || '', 10) || 10000
+const maxWaitingTimeSeconds = parseInt(process.env.MAX_WAITING_TIME || '', 10) || 10 * 60 * 1000

-function getBuildSha(slot, appService, resourceGroup) {
+function getBuildSha(slot: string, appService: string, resourceGroup: string) {
   console.log('Getting Canary App Service Docker config')
   const t0 = Date.now()
   let config
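Worth highlighting in the hunk above: the old code read `process.MAX_WAITING_TIME` (missing `.env`, so it was always undefined) and put the fallback inside `parseInt`; the typed version fixes both. The pattern works because `parseInt('')` is `NaN`, which is falsy:

```ts
// parseInt of an empty string yields NaN, and NaN || fallback -> fallback,
// so unset or malformed env vars cleanly fall through to the defaults.
const waitDuration = parseInt(process.env.CHECK_INTERVAL || '', 10) || 10000
const maxWaitingTime = parseInt(process.env.MAX_WAITING_TIME || '', 10) || 10 * 60 * 1000
```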
@@ -20,7 +20,7 @@ function getBuildSha(slot, appService, resourceGroup) {
         { encoding: 'utf8' },
       ),
     )
-  } catch (err) {
+  } catch {
     console.log('Error getting the Canary App Service Slot config')
     return null
   }
@@ -31,13 +31,13 @@ function getBuildSha(slot, appService, resourceGroup) {
   // The value key contains the stringified YAML file, so we
   // need to parse it to JSON to extract the image sha.
   const dockerComposeYaml = config.find(
-    (obj) => obj.name === 'DOCKER_CUSTOM_IMAGE_NAME_DECODED',
+    (obj: Record<string, any>) => obj.name === 'DOCKER_CUSTOM_IMAGE_NAME_DECODED',
   ).value

   let dockerComposeConfig
   try {
-    dockerComposeConfig = yaml.load(dockerComposeYaml)
-  } catch (err) {
+    dockerComposeConfig = yaml.load(dockerComposeYaml) as Record<string, any>
+  } catch {
     console.log('Error loading the YAML configuration data from the Canary App Service Slot config')
     return null
   }
@@ -48,7 +48,7 @@ function getBuildSha(slot, appService, resourceGroup) {
   return sha
 }

-function getStatesForSlot(slot, appService, resourceGroup) {
+function getStatesForSlot(slot: string, appService: string, resourceGroup: string) {
   return JSON.parse(
     execSync(
       `az webapp list-instances --slot ${slot} --query "[].state" -n ${appService} -g ${resourceGroup}`,
@@ -67,7 +67,7 @@ async function doCheck() {
   const states = getStatesForSlot(slotName, appServiceName, resourceGroupName)
   console.log('Instance states:', states)

-  const isAllReady = states.every((s) => s === 'READY')
+  const isAllReady = states.every((s: string) => s === 'READY')

   if (buildSha === expectedSHA && isAllReady) {
     console.log('Got the expected build SHA and all slots are ready! 🚀')
@@ -2,7 +2,7 @@

 import coreLib from '@actions/core'

-import { checkContentType } from '#src/workflows/fm-utils.js'
+import { checkContentType } from '@/workflows/fm-utils'

 const { CHANGED_FILE_PATHS, CONTENT_TYPE } = process.env
@@ -12,8 +12,8 @@ async function main() {
   // CHANGED_FILE_PATHS is a string of space-separated
   // file paths. For example:
   // 'content/path/foo.md content/path/bar.md'
-  const filePaths = CHANGED_FILE_PATHS.split(' ')
-  const containsRai = checkContentType(filePaths, CONTENT_TYPE)
+  const filePaths = CHANGED_FILE_PATHS?.split(' ') || []
+  const containsRai = checkContentType(filePaths, CONTENT_TYPE || '')
   if (containsRai.length === 0) {
     coreLib.setOutput('containsContentType', false)
   } else {
@@ -10,18 +10,18 @@ import fs from 'fs'

 import { program } from 'commander'

-program.description('Compare N files').arguments('[files...]', '').parse(process.argv)
+program.description('Compare N files').arguments('[files...]').parse(process.argv)

 main(program.args)

-function main(files) {
+function main(files: string[]) {
   if (files.length < 2) throw new Error('Must be at least 2 files')
   try {
     const contents = files.map((file) => fs.readFileSync(file, 'utf-8'))
     if (new Set(contents).size > 1) {
       process.exit(1)
     }
-  } catch (error) {
+  } catch (error: any) {
     if (error.code === 'ENOENT') {
       process.exit(1)
     } else {
@@ -24,7 +24,7 @@ import fs from 'fs'
 import path from 'path'

 import { program } from 'commander'
-import walkFiles from 'src/workflows/walk-files.js'
+import walkFiles from 'src/workflows/walk-files'
 import { ROOT } from 'src/frame/lib/constants.js'

 program
@@ -2,7 +2,7 @@ import { getOctokit } from '@actions/github'

 main()
 async function main() {
-  const [org, repo] = process.env.GITHUB_REPOSITORY.split('/')
+  const [org, repo] = process.env.GITHUB_REPOSITORY?.split('/') || []
   if (!org || !repo) {
     throw new Error('GITHUB_REPOSITORY environment variable not set')
   }
@@ -36,13 +36,13 @@ async function main() {
     id: pullNodeId,
   }

-  const graph = await github.graphql(mutation, variables)
+  const graph: Record<string, any> = await github.graphql(mutation, variables)
   console.log('GraphQL mutation result:\n' + JSON.stringify(graph))

   if (graph.errors && graph.errors.length > 0) {
     console.error(
       'ERROR! Failed to enable auto-merge:\n - ' +
-        graph.errors.map((error) => error.message).join('\n - '),
+        graph.errors.map((error: any) => error.message).join('\n - '),
     )
   } else {
     console.log('Auto-merge enabled!')
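On the recurring `Record<string, any>` annotations: octokit's `graphql()` is generic over the response shape, so a stronger type can be supplied where it helps. A sketch with an assumed response shape (not from this commit):

```ts
import { getOctokit } from '@actions/github'

// Hypothetical shape for the auto-merge mutation result; the real fields
// depend on the query text, which is why the commit keeps Record<string, any>.
type AutoMergeResult = {
  enablePullRequestAutoMerge?: { pullRequest?: { id: string } }
  errors?: { message: string }[]
}

const github = getOctokit(process.env.GITHUB_TOKEN || '')
// `mutation` and `variables` stand in for the ones built in enable-automerge.ts.
declare const mutation: string
declare const variables: Record<string, unknown>

const graph = await github.graphql<AutoMergeResult>(mutation, variables)
if (graph.errors?.length) {
  console.error(graph.errors.map((e) => e.message).join('\n - '))
}
```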
@@ -4,7 +4,7 @@ import matter from 'gray-matter'
 // Filters out files from a list of filePaths
 // that have a type property in their frontmatter
 // where the type value matches the type argument
-export function checkContentType(filePaths, type) {
+export function checkContentType(filePaths: string[], type: string) {
   const unallowedChangedFiles = []
   for (const filePath of filePaths) {
     const { data } = matter(readFileSync(filePath, 'utf8'))
@@ -12,9 +12,9 @@ import {
 async function getAllOpenPRs() {
   let prsRemaining = true
   let cursor
-  let prData = []
+  let prData: any[] = []
   while (prsRemaining) {
-    const data = await graphql(
+    const data: Record<string, any> = await graphql(
       `
         query ($organization: String!, $repo: String!) {
           repository(name: $repo, owner: $organization) {
@@ -83,13 +83,14 @@ async function run() {
   const prs = prData.filter(
     (pr) =>
       !pr.isDraft &&
-      !pr.labels.nodes.find((label) => label.name === 'Deploy train 🚂') &&
+      !pr.labels.nodes.find((label: Record<string, any>) => label.name === 'Deploy train 🚂') &&
       pr.reviewRequests.nodes.find(
-        (requestedReviewers) => requestedReviewers.requestedReviewer?.name === process.env.REVIEWER,
+        (requestedReviewers: Record<string, any>) =>
+          requestedReviewers.requestedReviewer?.name === process.env.REVIEWER,
       ) &&
       !pr.reviews.nodes
-        .flatMap((review) => review.onBehalfOf.nodes)
-        .find((behalf) => behalf.name === process.env.REVIEWER),
+        .flatMap((review: Record<string, any>) => review.onBehalfOf.nodes)
+        .find((behalf: Record<string, any>) => behalf.name === process.env.REVIEWER),
   )
   if (prs.length === 0) {
     console.log('No PRs found. Exiting.')
@@ -101,7 +102,7 @@ async function run() {
   console.log(`PRs found: ${prIDs}`)

   // Get info about the docs-content review board project
-  const projectData = await graphql(
+  const projectData: Record<string, any> = await graphql(
     `
       query ($organization: String!, $projectNumber: Int!) {
         organization(login: $organization) {
@@ -134,7 +135,7 @@ async function run() {
     `,
     {
       organization: process.env.ORGANIZATION,
-      projectNumber: parseInt(process.env.PROJECT_NUMBER),
+      projectNumber: parseInt(process.env.PROJECT_NUMBER || ''),
       headers: {
         authorization: `token ${process.env.TOKEN}`,
       },
@@ -148,7 +149,9 @@ async function run() {
   // Until we have a way to check from a PR whether the PR is in a project,
   // this is how we (roughly) avoid overwriting PRs that are already on the board.
   // If we are overwriting items, query for more items.
-  const existingItemIDs = projectData.organization.projectV2.items.nodes.map((node) => node.id)
+  const existingItemIDs = projectData.organization.projectV2.items.nodes.map(
+    (node: Record<string, any>) => node.id,
+  )

   // Get the ID of the fields that we want to populate
   const datePostedID = findFieldID('Date posted', projectData)
@@ -172,8 +175,8 @@ async function run() {
   // Exclude existing items going forward.
   // Until we have a way to check from a PR whether the PR is in a project,
   // this is how we (roughly) avoid overwriting PRs that are already on the board
-  const newItemIDs = []
-  const newItemAuthors = []
+  const newItemIDs: any[] = []
+  const newItemAuthors: any[] = []
   itemIDs.forEach((id, index) => {
     if (!existingItemIDs.includes(id)) {
       newItemIDs.push(id)
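The `getAllOpenPRs` loop at the top of this file pages through pull requests with a cursor. A minimal sketch of the connection-pagination pattern it relies on; the `pageInfo` field names follow the standard GraphQL convention and are an assumption here, not copied from the query:

```ts
import { graphql } from '@octokit/graphql'

// `query` stands in for the PR query defined in this file.
declare const query: string

let cursor: string | undefined
let prData: any[] = []
let prsRemaining = true
while (prsRemaining) {
  // Fetch one page, append its nodes, then advance the cursor.
  const page: Record<string, any> = await graphql(query, {
    organization: process.env.ORGANIZATION,
    repo: process.env.REPO,
    after: cursor,
    headers: { authorization: `token ${process.env.TOKEN}` },
  })
  const connection = page.repository.pullRequests
  prData = prData.concat(connection.nodes)
  cursor = connection.pageInfo.endCursor
  prsRemaining = connection.pageInfo.hasNextPage
}
```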
@@ -5,7 +5,7 @@
  *
  * @returns {Object} - key value of expected env variables and their values
  */
-export function getEnvInputs(options) {
+export function getEnvInputs(options: string[]) {
   return Object.fromEntries(
     options.map((envVarName) => {
       const envVarValue = process.env[envVarName]
@@ -29,7 +29,7 @@ export function getEnvInputs(options) {
  *
  * @returns {boolean}
  */
-export function boolEnvVar(key) {
+export function boolEnvVar(key: string) {
   const value = process.env[key] || ''
   if (value === '' || value === 'false' || value === '0') return false
   if (value === 'true' || value === '1') return true
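Usage of these two helpers as seen across the scripts in this diff; the destructured names below are illustrative:

```ts
import { getEnvInputs, boolEnvVar } from '#src/workflows/get-env-inputs.ts'

// Collect required env vars in one call; the returned object is keyed by
// the names passed in (per the @returns doc comment above).
const { GITHUB_TOKEN, PROJECT_NUMBER } = getEnvInputs(['GITHUB_TOKEN', 'PROJECT_NUMBER'])

// Optional boolean flags with forgiving string forms:
// '' | 'false' | '0' -> false, 'true' | '1' -> true.
const ignoreIfAssigned = boolEnvVar('IGNORE_IF_ASSIGNED')
```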
@@ -8,7 +8,7 @@ import { retryingGithub } from './github.js'
 const github = retryingGithub()

 // https://docs.github.com/rest/reference/git#get-a-reference
-export async function getCommitSha(owner, repo, ref) {
+export async function getCommitSha(owner: string, repo: string, ref: string) {
   try {
     const { data } = await github.git.getRef({
       owner,
@@ -23,7 +23,7 @@ export async function getCommitSha(owner, repo, ref) {
 }

 // based on https://docs.github.com/rest/reference/git#get-a-reference
-export async function hasMatchingRef(owner, repo, ref) {
+export async function hasMatchingRef(owner: string, repo: string, ref: string) {
   try {
     await github.git.getRef({
       owner,
@@ -41,7 +41,7 @@ export async function hasMatchingRef(owner, repo, ref) {
 }

 // https://docs.github.com/rest/reference/git#get-a-commit
-export async function getTreeSha(owner, repo, commitSha) {
+export async function getTreeSha(owner: string, repo: string, commitSha: string) {
   try {
     const { data } = await github.git.getCommit({
       owner,
@@ -56,7 +56,7 @@ export async function getTreeSha(owner, repo, commitSha) {
 }

 // https://docs.github.com/rest/reference/git#get-a-tree
-export async function getTree(owner, repo, ref) {
+export async function getTree(owner: string, repo: string, ref: string) {
   const commitSha = await getCommitSha(owner, repo, ref)
   const treeSha = await getTreeSha(owner, repo, commitSha)
   try {
@@ -64,7 +64,7 @@ export async function getTree(owner, repo, ref) {
       owner,
       repo,
       tree_sha: treeSha,
-      recursive: 1,
+      recursive: 'true',
     })
     // only return files that match the patterns in allowedPaths
     // skip actions/changes files
@@ -76,7 +76,7 @@ export async function getTree(owner, repo, ref) {
 }

 // https://docs.github.com/rest/reference/git#get-a-blob
-export async function getContentsForBlob(owner, repo, sha) {
+export async function getContentsForBlob(owner: string, repo: string, sha: string) {
   const { data } = await github.git.getBlob({
     owner,
     repo,
@@ -87,7 +87,7 @@ export async function getContentsForBlob(owner, repo, sha) {
 }

 // https://docs.github.com/rest/reference/repos#get-repository-content
-export async function getContents(owner, repo, ref, path) {
+export async function getContents(owner: string, repo: string, ref: string, path: string) {
   const { data } = await getContent(owner, repo, ref, path)
   if (!data.content) {
     return await getContentsForBlob(owner, repo, data.sha)
@@ -97,7 +97,7 @@ export async function getContents(owner, repo, ref, path) {
 }

 // https://docs.github.com/rest/reference/repos#get-repository-content
-export async function getContentAndData(owner, repo, ref, path) {
+export async function getContentAndData(owner: string, repo: string, ref: string, path: string) {
   const { data } = await getContent(owner, repo, ref, path)
   const content = data.content
     ? Buffer.from(data.content, 'base64').toString()
@@ -106,7 +106,12 @@ export async function getContentAndData(owner, repo, ref, path) {
   return { content, blobSha: data.sha }
 }

-async function getContent(owner, repo, ref, path) {
+async function getContent(
+  owner: string,
+  repo: string,
+  ref: string,
+  path: string,
+): Promise<Record<string, any>> {
   try {
     return await github.repos.getContent({
       owner,
@@ -121,7 +126,7 @@ async function getContent(owner, repo, ref, path) {
 }

 // https://docs.github.com/en/rest/reference/pulls#list-pull-requests
-export async function listPulls(owner, repo) {
+export async function listPulls(owner: string, repo: string) {
   try {
     const { data } = await github.pulls.list({
       owner,
@@ -135,7 +140,12 @@ export async function listPulls(owner, repo) {
   }
 }

-export async function createIssueComment(owner, repo, pullNumber, body) {
+export async function createIssueComment(
+  owner: string,
+  repo: string,
+  pullNumber: number,
+  body: string,
+) {
   try {
     const { data } = await github.issues.createComment({
       owner,
@@ -152,9 +162,9 @@ export async function createIssueComment(owner, repo, pullNumber, body) {

 // Search for a string in a file in code and return the array of paths to files that contain string
 export async function getPathsWithMatchingStrings(
-  strArr,
-  org,
-  repo,
+  strArr: string[],
+  org: string,
+  repo: string,
   { cache = true, forceDownload = false } = {},
 ) {
   const perPage = 100
@@ -169,7 +179,7 @@ export async function getPathsWithMatchingStrings(

   do {
     const data = await searchCode(q, perPage, currentPage, cache, forceDownload)
-    data.items.map((el) => paths.add(el.path))
+    data.items.map((el: Record<string, any>) => paths.add(el.path))
     totalCount = data.total_count
     currentCount += data.items.length
     currentPage++
@@ -183,7 +193,13 @@ export async function getPathsWithMatchingStrings(
   return paths
 }

-async function searchCode(q, perPage, currentPage, cache = true, forceDownload = false) {
+async function searchCode(
+  q: string,
+  perPage: number,
+  currentPage: number,
+  cache = true,
+  forceDownload = false,
+) {
   const cacheKey = `searchCode-${q}-${perPage}-${currentPage}`
   const tempFilename = `/tmp/searchCode-${crypto
     .createHash('md5')
@@ -193,7 +209,7 @@ async function searchCode(q, perPage, currentPage, cache = true, forceDownload =
   if (!forceDownload && cache) {
     try {
       return JSON.parse(await fs.readFile(tempFilename, 'utf8'))
-    } catch (error) {
+    } catch (error: any) {
       if (error.code !== 'ENOENT') {
         throw error
       }
@@ -219,11 +235,16 @@ async function searchCode(q, perPage, currentPage, cache = true, forceDownload =
   }
 }

-async function secondaryRateLimitRetry(callable, args, maxAttempts = 10, sleepTime = 1000) {
+async function secondaryRateLimitRetry(
+  callable: Function,
+  args: Record<string, any>,
+  maxAttempts = 10,
+  sleepTime = 1000,
+) {
   try {
     const response = await callable(args)
     return response
-  } catch (err) {
+  } catch (err: any) {
     // If you get a secondary rate limit error (403) you'll get a data
     // response that includes:
     //
@@ -260,9 +281,14 @@ async function secondaryRateLimitRetry(callable, args, maxAttempts = 10, sleepTi
 // array of file contents. This function could be modified to return an array
 // of objects that include the path and the content of the file if needed
 // in the future.
-export async function getDirectoryContents(owner, repo, branch, path) {
+export async function getDirectoryContents(
+  owner: string,
+  repo: string,
+  branch: string,
+  path: string,
+) {
   const { data } = await getContent(owner, repo, branch, path)
-  const files = []
+  const files: any[] = []

   for (const blob of data) {
     if (blob.type === 'dir') {
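For a sense of how the now-typed git-utils helpers are consumed elsewhere in this diff (enterprise dates, GraphQL sync, secret scanning), a usage sketch with placeholder owner/repo/ref/path values:

```ts
import { getContents, hasMatchingRef } from '#src/workflows/git-utils.ts'

// getContents fetches a file via the REST contents API and base64-decodes it,
// falling back to a blob fetch when the payload has no inline content.
const raw = await getContents('github', 'docs-internal', 'main', 'package.json')

// hasMatchingRef resolves true when the ref exists on the remote repository.
if (await hasMatchingRef('github', 'docs-internal', 'heads/some-branch')) {
  console.log('branch exists')
}
```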
@@ -3,7 +3,7 @@

 # Same process in husky/post-merge
 echo "Checking if packages need to be installed..."
-if node src/workflows/cmp-files.js package-lock.json .installed.package-lock.json; then
+if npm run cmp-files -- package-lock.json .installed.package-lock.json; then
   echo "Packages are up-to-date"
 else
   echo "Installing packages..."
@@ -3,7 +3,7 @@

 # Same process in husky/post-checkout
 echo "Checking if packages need to be installed..."
-if node src/workflows/cmp-files.js package-lock.json .installed.package-lock.json; then
+if npm run cmp-files -- package-lock.json .installed.package-lock.json; then
   echo "Packages are up-to-date"
 else
   echo "Installing packages..."
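These hooks branch on the exit status of cmp-files, which (as converted earlier in this diff) calls `process.exit(1)` when contents differ or a file is missing and exits 0 otherwise. A TypeScript mirror of the shell check, as a sketch:

```ts
import { execFileSync } from 'child_process'

// execFileSync throws when the child exits non-zero, which maps the shell
// `if`/`else` onto try/catch.
try {
  execFileSync('npm', ['run', 'cmp-files', '--', 'package-lock.json', '.installed.package-lock.json'])
  console.log('Packages are up-to-date')
} catch {
  console.log('Installing packages...')
}
```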
@@ -1,3 +1,14 @@
+import { type Octokit } from '@octokit/rest'
+import coreLib from '@actions/core'
+
+type CRIArgs = {
+  core: typeof coreLib
+  octokit: Octokit
+  reportTitle: string
+  reportBody: string
+  reportRepository: string
+  reportLabel: string
+}
 export async function createReportIssue({
   core,
   octokit,
@@ -5,7 +16,7 @@ export async function createReportIssue({
   reportBody,
   reportRepository,
   reportLabel,
-}) {
+}: CRIArgs) {
   const [owner, repo] = reportRepository.split('/')
   // Create issue
   let newReport
@@ -19,7 +30,7 @@ export async function createReportIssue({
     })
     newReport = data
     core.info(`Created new report issue at ${newReport.html_url}\n`)
-  } catch (error) {
+  } catch (error: any) {
     core.error(error)
     core.setFailed('Error creating new issue')
     throw error
@@ -28,6 +39,14 @@ export async function createReportIssue({
   return newReport
 }

+type LRArgs = {
+  core: typeof coreLib
+  octokit: Octokit
+  newReport: any
+  reportRepository: string
+  reportAuthor: string
+  reportLabel: string
+}
 export async function linkReports({
   core,
   octokit,
@@ -35,7 +54,7 @@ export async function linkReports({
   reportRepository,
   reportAuthor,
   reportLabel,
-}) {
+}: LRArgs) {
   const [owner, repo] = reportRepository.split('/')

   core.info('Attempting to link reports...')
@@ -88,7 +107,7 @@ export async function linkReports({
   }

   // If an old report is not assigned to someone we close it
-  const shouldClose = !previousReport.assignees.length
+  const shouldClose = !previousReport.assignees?.length
   let body = `➡️ [Newer report](${newReport.html_url})`
   if (shouldClose) {
     body += '\n\nClosing in favor of newer report since there are no assignees on this issue'
@@ -4,8 +4,10 @@ import { graphql } from '@octokit/graphql'
 // Shared functions for managing projects (memex)

 // Pull out the node ID of a project field
-export function findFieldID(fieldName, data) {
-  const field = data.organization.projectV2.fields.nodes.find((field) => field.name === fieldName)
+export function findFieldID(fieldName: string, data: Record<string, any>) {
+  const field = data.organization.projectV2.fields.nodes.find(
+    (field: Record<string, any>) => field.name === fieldName,
+  )

   if (field && field.id) {
     return field.id
@@ -15,13 +17,21 @@ export function findFieldID(fieldName, data) {
 }

 // Pull out the node ID of a single select field value
-export function findSingleSelectID(singleSelectName, fieldName, data) {
-  const field = data.organization.projectV2.fields.nodes.find((field) => field.name === fieldName)
+export function findSingleSelectID(
+  singleSelectName: string,
+  fieldName: string,
+  data: Record<string, any>,
+) {
+  const field = data.organization.projectV2.fields.nodes.find(
+    (field: Record<string, any>) => field.name === fieldName,
+  )
   if (!field) {
     throw new Error(`A field called "${fieldName}" was not found. Check if the field was renamed.`)
   }

-  const singleSelect = field.options.find((field) => field.name === singleSelectName)
+  const singleSelect = field.options.find(
+    (field: Record<string, any>) => field.name === singleSelectName,
+  )

   if (singleSelect && singleSelect.id) {
     return singleSelect.id
@@ -35,7 +45,7 @@ export function findSingleSelectID(singleSelectName, fieldName, data) {
 // Given a list of PR/issue node IDs and a project node ID,
 // adds the PRs/issues to the project
 // and returns the node IDs of the project items
-export async function addItemsToProject(items, project) {
+export async function addItemsToProject(items: string[], project: string) {
   console.log(`Adding ${items} to project ${project}`)

   const mutations = items.map(
@@ -57,7 +67,7 @@ export async function addItemsToProject(items, project) {
     }
   `

-  const newItems = await graphql(mutation, {
+  const newItems: Record<string, any> = await graphql(mutation, {
     project,
     headers: {
       authorization: `token ${process.env.TOKEN}`,
@@ -73,7 +83,7 @@ export async function addItemsToProject(items, project) {
   return newItemIDs
 }

-export async function addItemToProject(item, project) {
+export async function addItemToProject(item: string, project: string) {
   const newItemIDs = await addItemsToProject([item], project)

   const newItemID = newItemIDs[0]
@@ -83,13 +93,13 @@ export async function addItemToProject(item, project) {

 // Given a GitHub login, returns a bool indicating
 // whether the login is part of the docs team
-export async function isDocsTeamMember(login) {
+export async function isDocsTeamMember(login: string) {
   // Returns true if login is docs-bot, to bypass the checks and make PRs opened by docs-bot be treated as though they were made by a docs team member
   if (login === 'docs-bot') {
     return true
   }
   // Get all members of the docs team
-  const data = await graphql(
+  const data: Record<string, any> = await graphql(
     `
       query {
         organization(login: "github") {
@@ -110,15 +120,17 @@ export async function isDocsTeamMember(login) {
     },
   )

-  const teamMembers = data.organization.team.members.nodes.map((entry) => entry.login)
+  const teamMembers = data.organization.team.members.nodes.map(
+    (entry: Record<string, any>) => entry.login,
+  )

   return teamMembers.includes(login)
 }

 // Given a GitHub login, returns a bool indicating
 // whether the login is part of the GitHub org
-export async function isGitHubOrgMember(login) {
-  const data = await graphql(
+export async function isGitHubOrgMember(login: string) {
+  const data: Record<string, any> = await graphql(
     `
       query {
         user(login: "${login}") {
@@ -139,14 +151,14 @@ export async function isGitHubOrgMember(login) {
 }

 // Formats a date object into the required format for projects
-export function formatDateForProject(date) {
+export function formatDateForProject(date: Date) {
   return date.toISOString()
 }

 // Given a date object and optional turnaround time
 // Calculate the date {turnaround} business days from now
 // (excluding weekends; not considering holidays)
-export function calculateDueDate(datePosted, turnaround = 2) {
+export function calculateDueDate(datePosted: Date, turnaround = 2) {
   let daysUntilDue
   switch (datePosted.getDay()) {
     case 4: // Thursday
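A worked example of the business-day arithmetic above, assuming the default turnaround of 2 and that the helper returns a `Date`: an item posted on a Thursday (`getDay() === 4`) must skip the weekend, so it comes due the following Monday, while one posted on a Tuesday is due that Thursday:

```ts
// Using the two helpers defined above:
// Thursday 2024-10-31 + 2 business days -> Monday 2024-11-04 (weekend skipped).
// Tuesday  2024-10-29 + 2 business days -> Thursday 2024-10-31.
const due = calculateDueDate(new Date('2024-10-31'), 2)
console.log(formatDateForProject(due)) // ISO 8601 string for the project date field
```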
@@ -179,13 +191,30 @@ export function generateUpdateProjectV2ItemFieldMutation({
  author,
  turnaround = 2,
  feature = '',
}: {
  item: string
  author: string
  turnaround?: number
  feature?: string
}) {
  const datePosted = new Date()
  const dueDate = calculateDueDate(datePosted, turnaround)

  // Build the mutation to update a single project field
  // Specify literal=true to indicate that the value should be used as a string, not a variable
  function generateMutationToUpdateField({ item, fieldID, value, fieldType, literal = false }) {
  function generateMutationToUpdateField({
    item,
    fieldID,
    value,
    fieldType,
    literal = false,
  }: {
    item: string
    fieldID: string
    value: string
    fieldType: string
    literal?: boolean
  }) {
    const parsedValue = literal ? `${fieldType}: "${value}"` : `${fieldType}: ${value}`

    // Strip all non-alphanumeric out of the item ID when creating the mutation ID to avoid a GraphQL parsing error
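To illustrate the literal flag with hypothetical inputs (these values are not from the commit):

const fieldType = 'singleSelectOptionId'
const value = 'abc123'
const asLiteral = `${fieldType}: "${value}"` // literal = true  -> singleSelectOptionId: "abc123"
const asVariable = `${fieldType}: ${value}` // literal = false -> singleSelectOptionId: abc123 (a variable/ID reference)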
@@ -274,13 +303,13 @@ export function generateUpdateProjectV2ItemFieldMutation({
}

// Guess the affected docs sets based on the files that the PR changed
export function getFeature(data) {
export function getFeature(data: Record<string, any>) {
  // For issues, just use an empty string
  if (data.item.__typename !== 'PullRequest') {
    return ''
  }

  const paths = data.item.files.nodes.map((node) => node.path)
  const paths = data.item.files.nodes.map((node: Record<string, any>) => node.path)

  // For docs and docs-internal and docs-early-access PRs,
  // determine the affected docs sets by looking at which
@@ -291,8 +320,8 @@ export function getFeature(data) {
    process.env.REPO === 'github/docs' ||
    process.env.REPO === 'github/docs-early-access'
  ) {
    const features = new Set([])
    paths.forEach((path) => {
    const features: Set<string> = new Set([])
    paths.forEach((path: string) => {
      const pathComponents = path.split('/')
      if (pathComponents[0] === 'content') {
        features.add(pathComponents[1])
@@ -305,10 +334,10 @@ export function getFeature(data) {

  // for github/github PRs, try to classify the OpenAPI files
  if (process.env.REPO === 'github/github') {
    const features = new Set([])
    if (paths.some((path) => path.startsWith('app/api/description'))) {
    const features: Set<string> = new Set([])
    if (paths.some((path: string) => path.startsWith('app/api/description'))) {
      features.add('OpenAPI')
      paths.forEach((path) => {
      paths.forEach((path: string) => {
        if (path.startsWith('app/api/description/operations')) {
          features.add(path.split('/')[4])
          features.add('rest')
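As a worked example of the path classification above, with hypothetical paths: a content change yields the top-level docs set, while an OpenAPI operations change yields the operation area alongside the fixed labels.

const contentPath = 'content/actions/quickstart.md' // hypothetical
console.log(contentPath.split('/')[1]) // 'actions'
const apiPath = 'app/api/description/operations/repos/list.yaml' // hypothetical
console.log(apiPath.split('/')[4]) // 'repos', added alongside 'OpenAPI' and 'rest'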
@@ -336,7 +365,7 @@ export function getFeature(data) {
}

// Guess the size of an item
export function getSize(data) {
export function getSize(data: Record<string, any>) {
  // We need to set something in case this is an issue, so just guesstimate small
  if (data.item.__typename !== 'PullRequest') {
    return 'S'
@@ -346,7 +375,7 @@ export function getSize(data) {
  if (process.env.REPO === 'github/github') {
    let numFiles = 0
    let numChanges = 0
    data.item.files.nodes.forEach((node) => {
    data.item.files.nodes.forEach((node: Record<string, any>) => {
      if (node.path.startsWith('app/api/description')) {
        numFiles += 1
        numChanges += node.additions
@@ -366,7 +395,7 @@ export function getSize(data) {
  // Otherwise, estimate the size based on all files
  let numFiles = 0
  let numChanges = 0
  data.item.files.nodes.forEach((node) => {
  data.item.files.nodes.forEach((node: Record<string, any>) => {
    numFiles += 1
    numChanges += node.additions
    numChanges += node.deletions
@@ -13,9 +13,17 @@ import got from 'got'
const DELAY_BEFORE_FIRST_PURGE = 0 * 1000
const DELAY_BEFORE_SECOND_PURGE = 2 * 1000

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))

async function purgeFastlyBySurrogateKey({ apiToken, serviceId, surrogateKey }) {
async function purgeFastlyBySurrogateKey({
  apiToken,
  serviceId,
  surrogateKey,
}: {
  apiToken: string
  serviceId: string
  surrogateKey: string
}) {
  const safeServiceId = encodeURIComponent(serviceId)

  const headers = {
@@ -28,7 +36,7 @@ async function purgeFastlyBySurrogateKey({ apiToken, serviceId, surrogateKey })
}

export default async function purgeEdgeCache(
  surrogateKey,
  surrogateKey: string,
  {
    purgeTwice = true,
    delayBeforeFirstPurge = DELAY_BEFORE_FIRST_PURGE,
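For context, a call site for the exported function might look like this (the surrogate key is illustrative; the option names come from the signature above):

await purgeEdgeCache('every-deployment', { purgeTwice: false }) // single purge, skip the second pass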
@@ -1,6 +1,6 @@
#!/usr/bin/env node
import { SURROGATE_ENUMS } from '#src/frame/middleware/set-fastly-surrogate-key.js'
import purgeEdgeCache from './purge-edge-cache.js'
import { SURROGATE_ENUMS } from '@/frame/middleware/set-fastly-surrogate-key'
import purgeEdgeCache from './purge-edge-cache'

// This will purge every response that *contains*
// `process.env.FASTLY_SURROGATE_KEY || SURROGATE_ENUMS.DEFAULT`.
@@ -10,7 +10,7 @@ async function main() {
  const MAX_DELETIONS = parseInt(JSON.parse(process.env.MAX_DELETIONS || '10'))
  const MIN_AGE_DAYS = parseInt(process.env.MIN_AGE_DAYS || '90', 10)

  const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
  const [owner, repo] = (process.env.GITHUB_REPOSITORY || '').split('/') || []
  if (!owner || !repo) {
    throw new Error('GITHUB_REPOSITORY environment variable not set')
  }
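The `|| ''` guard exists because `process.env` values are typed `string | undefined` under strict null checks, so calling `.split` directly no longer compiles. A minimal sketch of the pattern (the trailing `|| []` on the new line is redundant, since `split` always returns an array, but harmless):

const slug = process.env.GITHUB_REPOSITORY || '' // string, never undefined
const [owner, repo] = slug.split('/') // when unset: owner === '', repo === undefined
if (!owner || !repo) throw new Error('GITHUB_REPOSITORY environment variable not set')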
@@ -32,7 +32,7 @@ async function main() {
  const MAX_DELETIONS = parseInt(JSON.parse(process.env.MAX_DELETIONS || '500'))
  const MIN_AGE_DAYS = parseInt(process.env.MIN_AGE_DAYS || '90', 10)

  const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
  const [owner, repo] = process.env.GITHUB_REPOSITORY?.split('/') || []
  if (!owner || !repo) {
    throw new Error('GITHUB_REPOSITORY environment variable not set')
  }
@@ -53,7 +53,7 @@ async function main() {
      owner,
      repo,
    })
  } catch (error) {
  } catch (error: any) {
    console.log('Error happened when getting workflows')
    console.warn('Status: %O', error.status)
    console.warn('Message: %O', error.message)
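Annotating caught errors as `any` is the quickest way to keep reading `.status` and `.message` after the migration. A stricter sketch, not used in this commit, keeps the default `unknown` and narrows:

try {
  // ... fetch workflows ...
} catch (error) {
  // error is `unknown` under useUnknownInCatchVariables / strict
  if (error instanceof Error) console.warn('Message: %O', error.message)
}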
@@ -71,7 +71,8 @@ async function main() {

  const validWorkflows = allWorkflows.filter((w) => !w.path.startsWith('dynamic/'))

  const sortByDate = (a, b) => a.updated_at.localeCompare(b.updated_at)
  const sortByDate = (a: Record<string, any>, b: Record<string, any>) =>
    a.updated_at.localeCompare(b.updated_at)
  const workflows = [
    ...validWorkflows.filter((w) => !fs.existsSync(w.path)).sort(sortByDate),
    ...validWorkflows.filter((w) => fs.existsSync(w.path)).sort(sortByDate),
@@ -91,7 +92,7 @@ async function main() {
      minAgeDays: MIN_AGE_DAYS,
      maxDeletions: MAX_DELETIONS - deletions,
    })
  } catch (error) {
  } catch (error: any) {
    console.log("Error happened when calling 'deleteWorkflowRuns'")
    console.warn('Status: %O', error.status)
    console.warn('Message: %O', error.message)
@@ -115,7 +116,7 @@ async function main() {
  console.log(`Deleted ${deletions} runs in total`)
}

function isOperationalError(status, message) {
function isOperationalError(status: number, message: string) {
  if (status && status >= 500) {
    return true
  }
@@ -130,10 +131,10 @@ function isOperationalError(status, message) {
}

async function deleteWorkflowRuns(
  github,
  owner,
  repo,
  workflow,
  github: ReturnType<typeof getOctokit>,
  owner: string,
  repo: string,
  workflow: Record<string, any>,
  { dryRun = false, minAgeDays = 90, maxDeletions = 500 },
) {
  // https://docs.github.com/en/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates
@@ -191,7 +192,7 @@ async function deleteWorkflowRuns(
      },
    )
    assert(status === 204, `Unexpected status deleting logs for run ${run.id}: ${status}`)
  } catch (error) {
  } catch (error: any) {
    console.warn('ERROR trying to delete the logs for run', run.id, error.message)
    if (error.message && error.message.includes('API rate limit exceeded')) {
      // This cannot be recovered by continuing on to the next run.
@@ -211,7 +212,7 @@ async function deleteWorkflowRuns(
      },
    )
    assert(status === 204, `Unexpected status deleting logs for run ${run.id}: ${status}`)
  } catch (error) {
  } catch (error: any) {
    console.warn('ERROR trying to delete run', run.id, error.message)
    if (error.message && error.message.includes('API rate limit exceeded')) {
      // This cannot be recovered by continuing on to the next run.
@@ -14,7 +14,7 @@ import {

async function run() {
  // Get info about the docs-content review board project
  const data = await graphql(
  const data: Record<string, any> = await graphql(
    `
      query ($organization: String!, $projectNumber: Int!, $id: ID!) {
        organization(login: $organization) {
@@ -55,7 +55,7 @@ async function run() {
    {
      id: process.env.ITEM_NODE_ID,
      organization: process.env.ORGANIZATION,
      projectNumber: parseInt(process.env.PROJECT_NUMBER),
      projectNumber: parseInt(process.env.PROJECT_NUMBER || ''),
      headers: {
        authorization: `token ${process.env.TOKEN}`,
      },
@@ -81,7 +81,7 @@ async function run() {
  const osContributorTypeID = findSingleSelectID('OS contributor', 'Contributor type', data)

  // Add the PR to the project
  const newItemID = await addItemToProject(process.env.ITEM_NODE_ID, projectID)
  const newItemID = await addItemToProject(process.env.ITEM_NODE_ID || '', projectID)

  // Determine the feature and size
  const feature = getFeature(data)
@@ -92,7 +92,7 @@ async function run() {
  // If yes, set the author to 'first time contributor' instead of to the author login
  let firstTimeContributor
  if (process.env.REPO === 'github/docs') {
    const contributorData = await graphql(
    const contributorData: Record<string, any> = await graphql(
      `
        query ($author: String!) {
          user(login: $author) {
@@ -126,13 +126,13 @@ async function run() {
    )
    const docsPRData =
      contributorData.user.contributionsCollection.pullRequestContributionsByRepository.filter(
        (item) => item.repository.nameWithOwner === 'github/docs',
        (item: Record<string, any>) => item.repository.nameWithOwner === 'github/docs',
      )[0]
    const prCount = docsPRData ? docsPRData.contributions.totalCount : 0

    const docsIssueData =
      contributorData.user.contributionsCollection.issueContributionsByRepository.filter(
        (item) => item.repository.nameWithOwner === 'github/docs',
        (item: Record<string, any>) => item.repository.nameWithOwner === 'github/docs',
      )[0]
    const issueCount = docsIssueData ? docsIssueData.contributions.totalCount : 0

@@ -144,16 +144,16 @@ async function run() {
  // Generate a mutation to populate fields for the new project item
  const updateProjectV2ItemMutation = generateUpdateProjectV2ItemFieldMutation({
    item: newItemID,
    author: firstTimeContributor ? ':star: first time contributor' : process.env.AUTHOR_LOGIN,
    author: firstTimeContributor ? ':star: first time contributor' : process.env.AUTHOR_LOGIN || '',
    turnaround,
    feature,
  })

  // Determine which variable to use for the contributor type
  let contributorType
  if (await isDocsTeamMember(process.env.AUTHOR_LOGIN)) {
  if (await isDocsTeamMember(process.env.AUTHOR_LOGIN || '')) {
    contributorType = docsMemberTypeID
  } else if (await isGitHubOrgMember(process.env.AUTHOR_LOGIN)) {
  } else if (await isGitHubOrgMember(process.env.AUTHOR_LOGIN || '')) {
    contributorType = hubberTypeID
  } else if (process.env.REPO === 'github/docs') {
    contributorType = osContributorTypeID
@@ -17,22 +17,22 @@ import got from 'got'
 * For engineers to test this locally do the following:
 *
 * 1. Start `npm run dev` in one terminal
 * 2. Run `src/workflows/test-local-dev.js` in another terminal
 * 2. Run `src/workflows/test-local-dev.ts` in another terminal
 *
 */

main()

async function get(path, options) {
async function get(path: string, options?: Record<string, any>) {
  // By default, got() will use retries and follow redirects.
  const t0 = new Date()
  const response = await got(makeURL(path), options)
  const took = new Date() - t0
  const took = new Date().getTime() - t0.getTime()
  console.log(`GET ${path} => ${response.statusCode} (${took}ms)`)
  return response
}

function makeURL(path) {
function makeURL(path: string) {
  return `http://localhost:4000${path}`
}

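`new Date() - t0` is valid JavaScript (dates coerce to numbers) but TypeScript rejects arithmetic on `Date` operands, hence the explicit `.getTime()` calls. An equivalent sketch using numeric timestamps from the start avoids the conversions entirely:

const t0 = Date.now() // already a number
// ... perform the request ...
const took = Date.now() - t0 // plain number arithmetic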
@@ -11,18 +11,29 @@ const actionHashRegexp = /^[A-Za-z0-9-/]+@[0-9a-f]{40}$/
const checkoutRegexp = /^[actions/checkout]+@[0-9a-f]{40}$/
const permissionsRegexp = /(read|write)/

type WorkflowMeta = {
  filename: string
  fullpath: string
  data: {
    name: string
    on: Record<string, any>
    permissions: Record<string, any>
    jobs: Record<string, any>
  }
}

const __dirname = path.dirname(fileURLToPath(import.meta.url))
const workflowsDir = path.join(__dirname, '../../../.github/workflows')
const workflows = fs
const workflows: WorkflowMeta[] = fs
  .readdirSync(workflowsDir)
  .filter((filename) => filename.endsWith('.yml') || filename.endsWith('.yaml'))
  .map((filename) => {
    const fullpath = path.join(workflowsDir, filename)
    const data = yaml.load(fs.readFileSync(fullpath, 'utf8'), { fullpath })
    const data = yaml.load(fs.readFileSync(fullpath, 'utf8')) as WorkflowMeta['data']
    return { filename, fullpath, data }
  })

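`yaml.load` returns `unknown`, so the migration asserts the parsed document's shape with `as WorkflowMeta['data']`. A more defensive sketch (not what the commit does) would validate before asserting:

const raw = yaml.load(fs.readFileSync(fullpath, 'utf8'))
if (typeof raw !== 'object' || raw === null || !('jobs' in raw)) {
  throw new Error(`Unexpected workflow file shape: ${fullpath}`)
}
const data = raw as WorkflowMeta['data']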
function actionsUsedInWorkflow(workflow) {
function actionsUsedInWorkflow(workflow: WorkflowMeta) {
  return Object.keys(flatten(workflow))
    .filter((key) => key.endsWith('.uses'))
    .map((key) => get(workflow, key))
@@ -54,7 +65,7 @@ const alertWorkflows = workflows
// to generate list, console.log(new Set(workflows.map(({ data }) => Object.keys(data.on)).flat()))

const dailyWorkflows = scheduledWorkflows.filter(({ data }) =>
  data.on.schedule.find(({ cron }) => /^20 [^*]/.test(cron)),
  data.on.schedule.find(({ cron }: { cron: string }) => /^20 [^*]/.test(cron)),
)

describe('GitHub Actions workflows', () => {
@@ -90,7 +101,7 @@ describe('GitHub Actions workflows', () => {
    },
  )

  test.each(workflows)('limits repository scope $filename', ({ filename, data }) => {
  test.each(workflows)('limits repository scope $filename', ({ data }) => {
    for (const condition of Object.values(data.jobs).map((job) => job.if)) {
      expect(condition).toContain('github.repository')
    }
@@ -100,7 +111,11 @@ describe('GitHub Actions workflows', () => {
    'scheduled workflows slack alert on fail $filename',
    ({ filename, data }) => {
      for (const [name, job] of Object.entries(data.jobs)) {
        if (!job.steps.find((step) => step.uses === './.github/actions/slack-alert')) {
        if (
          !job.steps.find(
            (step: Record<string, any>) => step.uses === './.github/actions/slack-alert',
          )
        ) {
          throw new Error(`Job ${filename} # ${name} missing slack alert on fail`)
        }
      }
@@ -111,7 +126,7 @@ describe('GitHub Actions workflows', () => {
    'performs a checkout before calling composite action $filename',
    ({ filename, data }) => {
      for (const [name, job] of Object.entries(data.jobs)) {
        if (!job.steps.find((step) => checkoutRegexp.test(step.uses))) {
        if (!job.steps.find((step: Record<string, any>) => checkoutRegexp.test(step.uses))) {
          throw new Error(
            `Job ${filename} # ${name} missing a checkout before calling the composite action`,
          )
@@ -5,8 +5,8 @@ import { readFileSync } from 'fs'
import yaml from 'js-yaml'
import { difference } from 'lodash-es'

import { checkContentType } from '#src/workflows/fm-utils.js'
import github from '#src/workflows/github.js'
import { checkContentType } from '@/workflows/fm-utils'
import github from '@/workflows/github'

const core = coreLib
const octokit = github()
@@ -18,33 +18,35 @@ const {
  CHANGED_FILE_PATHS,
  ADDED_CONTENT_FILES,
} = process.env
const [owner, repo] = REPO_OWNER_AND_NAME.split('/')
const filters = yaml.load(readFileSync('src/workflows/unallowed-contribution-filters.yml', 'utf8'))
const [owner, repo] = (REPO_OWNER_AND_NAME || '').split('/') || []
const filters = yaml.load(
  readFileSync('src/workflows/unallowed-contribution-filters.yml', 'utf8'),
) as Record<string, any>

main()

async function main() {
  // Files in the diff that match specific paths we don't allow
  const unallowedChangedFiles = [...JSON.parse(FILE_PATHS_NOT_ALLOWED)]
  const unallowedChangedFiles = [...JSON.parse(FILE_PATHS_NOT_ALLOWED || '')]

  // Content files that are added in a forked repo won't be in the
  // `github/docs` repo, so we don't need to check them. They will be
  // reviewed manually by a content writer.
  const contentFilesToCheck = difference(
    JSON.parse(CHANGED_FILE_PATHS),
    JSON.parse(ADDED_CONTENT_FILES),
  const contentFilesToCheck: string[] = difference(
    JSON.parse(CHANGED_FILE_PATHS || ''),
    JSON.parse(ADDED_CONTENT_FILES || ''),
  )

  // Any modifications or deletions to a file in the content directory
  // could potentially have `type: rai` so each changed content file's
  // frontmatter needs to be checked.
  unallowedChangedFiles.push(...(await checkContentType(contentFilesToCheck, 'rai')))
  unallowedChangedFiles.push(...checkContentType(contentFilesToCheck, 'rai'))

  if (unallowedChangedFiles.length === 0) return

  // Format into Markdown bulleted list to use in the PR comment
  const listUnallowedChangedFiles = unallowedChangedFiles.map((file) => `\n - ${file}`).join('')
  const listUnallowedFiles = filters.notAllowed.map((file) => `\n - ${file}`).join('')
  const listUnallowedFiles = filters.notAllowed.map((file: string) => `\n - ${file}`).join('')

  const reviewMessage = `👋 Hey there spelunker. It looks like you've modified some files that we can't accept as contributions:${listUnallowedChangedFiles}\n\nYou'll need to revert all of the files you changed that match that list using [GitHub Desktop](https://docs.github.com/en/free-pro-team@latest/desktop/contributing-and-collaborating-using-github-desktop/managing-commits/reverting-a-commit-in-github-desktop) or \`git checkout origin/main <file name>\`. Once you get those files reverted, we can continue with the review process. :octocat:\n\nThe complete list of files we can't accept are:${listUnallowedFiles}\n\nWe also can't accept contributions to files in the content directory with frontmatter \`type: rai\`.`

@@ -56,7 +58,7 @@ async function main() {
  createdComment = await octokit.rest.issues.createComment({
    owner,
    repo,
    issue_number: PR_NUMBER,
    issue_number: Number(PR_NUMBER || ''),
    body: reviewMessage,
  })

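One subtlety in `Number(PR_NUMBER || '')`: Octokit expects `issue_number` to be a number, and env vars are strings, so the coercion satisfies the compiler; but `Number('')` evaluates to 0, so a missing PR_NUMBER silently becomes issue 0 rather than an error. A stricter alternative sketch:

const prNumber = Number(process.env.PR_NUMBER)
if (!Number.isInteger(prNumber) || prNumber <= 0) {
  throw new Error('PR_NUMBER must be set to a positive integer')
}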
@@ -8,7 +8,7 @@ const DELAY_SECONDS = 15
 * Promise resolves once url is healthy or fails if timeout has passed
 * @param {string} url - health url, e.g. docs.com/healthz
 */
export async function waitUntilUrlIsHealthy(url) {
export async function waitUntilUrlIsHealthy(url: string) {
  try {
    await got.head(url, {
      retry: {
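A usage sketch based on the JSDoc above (the URL is illustrative; per the doc comment, the promise resolves once the URL is healthy and fails once the timeout has passed):

await waitUntilUrlIsHealthy('http://localhost:4000/healthz')
console.log('Deployment is healthy; safe to continue')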
@@ -9,7 +9,11 @@
import walk from 'walk-sync'
import fs from 'fs'

export default function walkFiles(dir, ext, opts = {}) {
export default function walkFiles(
  dir: string,
  ext: string | string[],
  opts: Record<string, any> = {},
) {
  const extensions = Array.isArray(ext) ? ext : [ext]
  const walkSyncOpts = { includeBasePath: true, directories: false }

@@ -23,14 +27,14 @@ export function readFiles(dir = 'content', ext = 'md', opts = {}) {
  return paths.map((path) => [path, fs.readFileSync(path, 'utf8')])
}

export function filterFiles(files, fn) {
export function filterFiles(files: [path: string, file: string][], fn: Function) {
  return files.filter(([path, file]) => fn(path, file))
}

export function withFiles(files, fn) {
export function withFiles(files: [path: string, file: string][], fn: Function) {
  return files.map(([path, file]) => [path, fn(path, file)])
}

export function writeFiles(files) {
export function writeFiles(files: [path: string, file: string][]) {
  return files.map(([path, file]) => fs.writeFileSync(path, file))
}
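`[path: string, file: string][]` is a labeled tuple type: each element is a two-item array whose position names exist purely for readability. A small sketch combining the helpers (directory and predicate are hypothetical):

const files = readFiles('content', 'md') // [path, contents] pairs
const matching = filterFiles(files, (path: string, file: string) => file.includes('TypeScript'))
matching.forEach(([path]) => console.log(path)) // print the matching paths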