1
0
mirror of synced 2025-12-19 09:57:42 -05:00

Update js to ts references (#58029)

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
Kevin Heis
2025-10-21 08:00:13 -07:00
committed by GitHub
parent 2e1fef3b8c
commit e8be8376a5
103 changed files with 182 additions and 177 deletions

View File

@@ -15,7 +15,7 @@ For code reviews, follow guidelines, tests, and validate instructions. For creat
- Be careful fetching full HTML pages off the internet. Prefer to use gh cli whenever possible for github.com. Limit the number of tokens when grabbing HTML.
- Avoid pull requests with over 300 lines of code changed. When significantly larger, offer to split up into smaller pull requests if possible.
- All new code should be written in TypeScript and not JavaScript.
- We use absolute imports, relative to the `src` directory, using the `@` symbol. For example, `getRedirect` which lives in `src/redirects/lib/get-redirect.js` can be imported with `import getRedirect from '@/redirects/lib/get-redirect'`. The same rule applies for TypeScript (`.ts`) imports, e.g. `import type { GeneralSearchHit } from '@/search/types'`
- We use absolute imports, relative to the `src` directory, using the `@` symbol. For example, `getRedirect` which lives in `src/redirects/lib/get-redirect.ts` can be imported with `import getRedirect from '@/redirects/lib/get-redirect'`. The same rule applies for TypeScript (`.ts`) imports, e.g. `import type { GeneralSearchHit } from '@/search/types'`
## Tests

View File

@@ -14,7 +14,7 @@ Before committing content changes, always:
1. **Use the content linter** to validate content: `npm run lint-content -- --paths <file-paths>`
2. **Check for proper variable usage** in your content
3. **Verify [AUTOTITLE] links** point to existing articles
4. **Run tests** on changed content: `npm run test -- src/content-render/tests/render-changed-and-deleted-files.js`
4. **Run tests** on changed content: `npm run test -- src/content-render/tests/render-changed-and-deleted-files.ts`
## Bullet lists

View File

@@ -9,9 +9,7 @@ on:
branches:
- main
paths:
- '**/*.js'
- '**/*.ts'
- '**/*.jsx'
- '**/*.tsx'
- '.github/workflows/codeql.yml'
# This is so that when CodeQL runs on a pull request, it can compare

View File

@@ -9,7 +9,7 @@ on:
pull_request:
paths:
- src/languages/scripts/count-translation-corruptions.ts
- src/languages/lib/correct-translation-content.js
- src/languages/lib/correct-translation-content.ts
- .github/workflows/count-translation-corruptions.yml
- .github/actions/node-npm-setup/action.yml
- .github/actions/clone-translations/action.yml

View File

@@ -28,7 +28,7 @@ jobs:
- uses: ./.github/actions/node-npm-setup
- name: Run src/ghes-releases/scripts/update-enterprise-dates.js
- name: Run src/ghes-releases/scripts/update-enterprise-dates.ts
run: npm run update-enterprise-dates
env:
GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_BASE }}
@@ -42,7 +42,7 @@ jobs:
with:
# need to use a token with repo and workflow scopes for this step
token: ${{ secrets.DOCS_BOT_PAT_BASE }}
commit-message: '🤖 ran src/ghes-releases/scripts/update-enterprise-dates.js'
commit-message: '🤖 ran src/ghes-releases/scripts/update-enterprise-dates.ts'
title: 🤖 src/ghes-releases/lib/enterprise-dates.json update
body:
"Hello! The GitHub Enterprise Server release dates have changed.\n\n

View File

@@ -7,7 +7,7 @@ name: Move content script test
on:
pull_request:
paths:
- src/content-render/scripts/move-content.js
- src/content-render/scripts/move-content.ts
- src/content-render/scripts/test-move-content.ts
- 'src/frame/lib/**/*.js'
- .github/workflows/move-content.yml

View File

@@ -14,10 +14,10 @@ on:
- .github/workflows/orphaned-files-check.yml
# In case any of the dependencies affect the script
- 'package*.json'
- src/assets/scripts/find-orphaned-assets.js
- src/assets/scripts/find-orphaned-assets.ts
- src/content-render/scripts/reusables-cli/find/unused.ts
- src/workflows/walk-files.ts
- src/languages/lib/languages.js
- src/languages/lib/languages.ts
- .github/actions/clone-translations/action.yml
- .github/actions/node-npm-setup/action.yml

View File

@@ -15,7 +15,6 @@ on:
- reopened
- synchronize
paths:
- '**.js'
- '**.ts'
- '**.tsx'
- '**.scss'

View File

@@ -152,6 +152,4 @@ ARG BUILD_SHA
ENV BUILD_SHA=$BUILD_SHA
# Entrypoint to start the server
# Note: Currently we have to use tsx because
# we have a mix of `.ts` and `.js` files with multiple import patterns
CMD ["node_modules/.bin/tsx", "src/frame/server.ts"]

View File

@@ -16,4 +16,4 @@ ADD --chown=node:node data /openapi-check/data
RUN npm ci -D
ENTRYPOINT ["node", "/openapi-check/src/rest/scripts/openapi-check.js"]
ENTRYPOINT ["node", "/openapi-check/src/rest/scripts/openapi-check.ts"]

View File

@@ -51,13 +51,13 @@ It is a block of key-value content that lives at the top of every Markdown file.
The following frontmatter values have special meanings and requirements for this site.
There's also a schema that's used by the test suite to validate every page's frontmatter.
See [`lib/frontmatter.js`](/src/frame/lib/frontmatter.js).
See [`lib/frontmatter.ts`](/src/frame/lib/frontmatter.ts).
### `versions`
- Purpose: Indicates the [versions](/src/versions/lib/all-versions.js) to which a page applies.
- Purpose: Indicates the [versions](/src/versions/lib/all-versions.ts) to which a page applies.
See [Versioning](#versioning) for more info.
- Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.js`](/src/frame/lib/frontmatter.js).
- Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.ts`](/src/frame/lib/frontmatter.ts).
- This frontmatter value is currently **required** for all pages.
- The `*` is used to denote all releases for the version.
@@ -197,7 +197,7 @@ featuredLinks:
### `allowTitleToDifferFromFilename`
- Purpose: Indicates whether a page is allowed to have a title that differs from its filename. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.js`. Use this value if a file's `title` frontmatter includes Liquid or punctuation that cannot be part of the filename. For example, the article [About Enterprise Managed Users](https://docs.github.com/en/enterprise-cloud@latest/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users) uses a Liquid reusable in its title, `'About {% data variables.product.prodname_emus %}'`, which cannot be in the filename, `about-enterprise-managed-users.md`, so the `allowTitleToDifferFromFilename` frontmatter is set to `true`.
- Purpose: Indicates whether a page is allowed to have a title that differs from its filename. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.ts`. Use this value if a file's `title` frontmatter includes Liquid or punctuation that cannot be part of the filename. For example, the article [About Enterprise Managed Users](https://docs.github.com/en/enterprise-cloud@latest/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users) uses a Liquid reusable in its title, `'About {% data variables.product.prodname_emus %}'`, which cannot be in the filename, `about-enterprise-managed-users.md`, so the `allowTitleToDifferFromFilename` frontmatter is set to `true`.
- Type: `Boolean`. Default is `false`.
- Optional.
@@ -327,7 +327,7 @@ A content file can have **two** types of versioning:
- Liquid statements in content (**optional**)
- Conditionally render content depending on the current version being viewed. See [Versioning documentation](https://docs.github.com/en/contributing/writing-for-github-docs/versioning-documentation#versioning-with-liquid-conditional-operators) for more info. Note Liquid conditionals can also appear in `data` and `include` files.
**Note**: As of early 2021, the `free-pro-team@latest` version is not included URLs. A helper function called `src/versions/lib/remove-fpt-from-path.js` removes the version from URLs.
**Note**: As of early 2021, the `free-pro-team@latest` version is not included in URLs. A helper function called `src/versions/lib/remove-fpt-from-path.ts` removes the version from URLs.
## Filenames

View File

@@ -9,7 +9,7 @@ versions:
## Troubleshooting tests that fail locally but pass in CI
If you run tests locally and get failures in `tests/rendering/server.js` around static assets, stylesheets, or the client-side JavaScript bundle, but the same tests pass in CI on a PR, run the command `npm run build`. This is a one-time command that creates static assets locally.
If you run tests locally and get failures in `tests/rendering/server.ts` around static assets, stylesheets, or the client-side JavaScript bundle, but the same tests pass in CI on a PR, run the command `npm run build`. This is a one-time command that creates static assets locally.
For more information, see [AUTOTITLE](/contributing/setting-up-your-environment-to-work-on-github-docs/creating-a-local-environment).

View File

@@ -469,7 +469,7 @@ For more information about links, see [AUTOTITLE](/contributing/style-guide-and-
Because the site is dynamic, it does not build HTML files for each different version of an article. Instead it generates a "permalink" for every version of the article. It does this based on the article's [`versions` frontmatter](/contributing/syntax-and-versioning-for-github-docs/using-yaml-frontmatter#versions).
> [!NOTE]
> As of early 2021, the `free-pro-team@latest` version is not included in URLs. A helper function called `lib/remove-fpt-from-path.js` removes the version from URLs.
> As of early 2021, the `free-pro-team@latest` version is not included in URLs. A helper function called `lib/remove-fpt-from-path.ts` removes the version from URLs.
For example, an article that is available in currently supported versions will have permalink URLs like the following:

View File

@@ -19,7 +19,7 @@ It is a block of key-value content that lives at the top of every Markdown file
The following frontmatter values have special meanings and requirements for {% data variables.product.prodname_docs %}.
There's also a schema that's used by the test suite to validate every page's frontmatter.
For more information, see [`lib/frontmatter.js`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.js).
For more information, see [`lib/frontmatter.ts`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.ts).
* [`versions`](#versions)
* [`redirect_from`](#redirect_from)
@@ -49,7 +49,7 @@ For more information, see [`lib/frontmatter.js`](https://github.com/github/docs/
* Purpose: Indicates the [versions](https://github.com/github/docs/blob/main/src/versions/lib/all-versions.ts) to which a page applies.
For more information about the different types of versioning, see [Versioning documentation](/contributing/syntax-and-versioning-for-github-docs/versioning-documentation).
* Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.js`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.js).
* Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.ts`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.ts).
* This frontmatter value is currently **required** for all pages.
* The `*` is used to denote all releases for the version.
* Must be present for all `index.md` files, but actual value is computed at runtime based on the children.
@@ -191,7 +191,7 @@ featuredLinks:
### `allowTitleToDifferFromFilename`
* Purpose: Indicates whether a page is allowed to have a title that differs from its filename. For example, `content/rest/reference/orgs.md` has a title of `Organizations` instead of `Orgs`. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.js`.
* Purpose: Indicates whether a page is allowed to have a title that differs from its filename. For example, `content/rest/reference/orgs.md` has a title of `Organizations` instead of `Orgs`. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.ts`.
* Type: `Boolean`. Default is `false`.
* Optional.

View File

@@ -68,7 +68,7 @@ For {% data variables.product.prodname_ghe_cloud %}, use `enterprise-cloud@lates
### {% data variables.product.prodname_ghe_server %}
Documentation for {% data variables.product.prodname_ghe_server %} has multiple versions and can be divided into two types: documentation for _supported releases_ (we support four at any one time), and documentation for _{% data variables.release-phases.closing_down %} releases_ (we do not link to these on the Docs site but we support a "frozen" snapshot of these docs in perpetuity, so they can still be accessed if you know the URLs). See [`lib/enterprise-server-releases.js`](https://github.com/github/docs/blob/main/src/versions/lib/enterprise-server-releases.js) for a list.
Documentation for {% data variables.product.prodname_ghe_server %} has multiple versions and can be divided into two types: documentation for _supported releases_ (we support four at any one time), and documentation for _{% data variables.release-phases.closing_down %} releases_ (we do not link to these on the Docs site but we support a "frozen" snapshot of these docs in perpetuity, so they can still be accessed if you know the URLs). See [`lib/enterprise-server-releases.ts`](https://github.com/github/docs/blob/main/src/versions/lib/enterprise-server-releases.ts) for a list.
The versions are named `enterprise-server@<release>`. The short name is `ghes`. In Liquid conditionals, we can specify ranges, like `ghes > 3.0`. For more information, see [Versioning with Liquid conditional operators](#versioning-with-liquid-conditional-operators).

View File

@@ -42,7 +42,7 @@ In a matter of minutes, you will be ready to edit, review and test your changes
By default the local server won't run with all supported languages enabled. If you need to run the server with a particular language, you can temporarily edit the `start` script in `package.json` and update the `ENABLED_LANGUAGES` variable. For example, to enable Japanese and Portuguese, you can set it to `ENABLED_LANGUAGES='en,ja,pt'` and then you need to restart the server for the change to take effect.
The supported language codes are defined in [lib/languages.js](../src/languages/lib/languages.js).
The supported language codes are defined in [lib/languages.ts](../src/languages/lib/languages.ts).
## Site structure

View File

@@ -39,7 +39,7 @@ You cannot use `feature:` to specify multiple concurrent versions, as this is no
## Schema enforcement
The schema for validating the feature versioning lives in [`src/data-directory/lib/data-schemas/features.js`](../../src/data-directory/lib/data-schemas/features.js).
The schema for validating the feature versioning lives in [`src/data-directory/lib/data-schemas/features.ts`](../../src/data-directory/lib/data-schemas/features.ts).
## Script to remove feature tags

View File

@@ -25,7 +25,7 @@ Learning track data for a product is defined in two places:
## Versioning
Versioning for learning tracks is processed at page render time. The code lives in [`lib/learning-tracks.js`](lib/learning-tracks.js), which is called by `page.render()`. The processed learning tracks are then rendered by `components/guides`.
Versioning for learning tracks is processed at page render time. The code lives in [`lib/learning-tracks.ts`](lib/learning-tracks.ts), which is called by `page.render()`. The processed learning tracks are then rendered by `components/guides`.
Liquid conditionals do **not** have to be used for versioning in the YAML file for guides. Only the learning track guides that apply to the current version will be rendered automatically. If there aren't any tracks with guides that belong to the current version, the learning tracks section will not render at all.
@@ -48,4 +48,4 @@ If the `versions` property is not included, it's assumed the track is available
## Schema enforcement
The schema for validating the learning track YAML lives in [`src/content-linter/lib/learning-tracks-schema.js`](src/content-linter/lib/learning-tracks-schema.js) and is exercised by [`tests/content/lint-files.js`](tests/content/lint-files.js).
The schema for validating the learning track YAML lives in [`src/content-linter/lib/learning-tracks-schema.ts`](src/content-linter/lib/learning-tracks-schema.ts) and is exercised by [`tests/content/lint-files.ts`](tests/content/lint-files.ts).

View File

@@ -35,7 +35,7 @@ where the syntax for `versions` is the same as the [frontmatter `versions` prope
## Rendering
The product example data is added to the `context` object in `src/frame/middleware/context/product-examples.js`.
The product example data is added to the `context` object in `src/frame/middleware/context/product-examples.ts`.
The data is then rendered by `components/landing`.

View File

@@ -25,7 +25,7 @@ The directories are named by GHES release number (with a hyphen instead of a per
The YAML files in each directory are named by patch number. Some patch filenames may end with `-rc<num>.yml`, which means it's a release candidate. A release candidate file also requires `release_candidate: true` in the YAML data.
Release notes of deprecated GHES versions (see `lib/enterprise-server-releases.js`) are **not** removed from the site and will always be displayed alongside currently supported versions.
Release notes of deprecated GHES versions (see `lib/enterprise-server-releases.ts`) are **not** removed from the site and will always be displayed alongside currently supported versions.
Note that patch files can be deprecated individually (i.e., hidden on the docs site) by an optional `deprecated: true` property.
@@ -41,6 +41,6 @@ The release notes page has a custom design with CSS in `stylesheets/release-note
### Schema
The schema that validates the YAML data lives in `src/content-linter/lib/release-notes-schema.js`. See the schema file to find out the required and optional properties.
The schema that validates the YAML data lives in `src/content-linter/lib/release-notes-schema.ts`. See the schema file to find out the required and optional properties.
The schema is exercised by a test in `src/content-linter/tests/lint-files.js`. The test will fail if the data does not pass validation.
The schema is exercised by a test in `src/content-linter/tests/lint-files.ts`. The test will fail if the data does not pass validation.

View File

@@ -86,7 +86,7 @@ After creating all three files:
1. **Build the site**: Run `npm run build`
2. **Test schemas**: Run `npm test -- src/data-directory/tests`
3. **Fix any errors**: If you get failures in `src/data-directory/tests/data-schemas.js`:
3. **Fix any errors**: If you get failures in `src/data-directory/tests/data-schemas.ts`:
- Copy the error message
- In VS Code Copilot Chat, type: "When I ran the schema test, I got this error:" and paste the error
- Update your schema file based on Copilot's suggestions
@@ -96,4 +96,4 @@ After creating all three files:
Once your table is working and tests pass, create a pull request for review.
The `docs-engineering` team must review and approve your implementation.
The `docs-engineering` team must review and approve your implementation.

View File

@@ -55,7 +55,7 @@ Most subject folders have their own mention in `.github/workflows/test.yml`.
Open the file to see the beginning of it. It's manually maintained but
it's important to point out two things:
1. It's manually entered so creating a `src/foo/tests/*.js` doesn't
1. It's manually entered so creating a `src/foo/tests/*.ts` doesn't
automatically start running those tests.
1. When you add an entry to `.github/workflows/test.yml`, and it's
gone into `main`, don't forget to add it to the branch protection's

View File

@@ -9,11 +9,17 @@ This script calls the [Models API](https://docs.github.com/en/rest/models/infere
## Usage
```sh
# Direct command
tsx src/ai-tools/scripts/ai-tools.ts --refine <type> --files <file1.md>
# Or via npm script
npm run ai-tools -- --refine <type> --files <file1.md>
```
* `--files, -f`: One or more content file paths to process (required).
@@ -22,11 +28,17 @@ npm run ai-tools -- --refine <type> --files <file1.md>
**Examples:**
```sh
# Direct command
tsx src/ai-tools/scripts/ai-tools.ts --files content/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo.md --refine versioning
# Via npm script
npm run ai-tools -- --files content/copilot/tutorials/coding-agent/get-the-best-results.md --refine intro
```
## Requirements

View File

@@ -8,7 +8,7 @@ const oldVersions = ['dotcom'].concat(supported)
const newVersions = Object.keys(allVersions)
// Utility functions for converting between old version paths and new version paths.
// See lib/path-utils.js for utility functions based on new paths.
// See lib/path-utils.ts for utility functions based on new paths.
// Examples:
// OLD /github/category/article to NEW /free-pro-team@latest/github/category/article
// OLD /enterprise/2.21/user/github/category/article to NEW /enterprise-server@2.21/github/category/article

View File

@@ -14,7 +14,7 @@ import type { ExtendedRequest } from '@/types'
// we strive to make the files in the repo only files that we actually
// use and refer to in the non-archived content.
// Note that, we also have `archived-enterprise-versions-assets.js`
// Note that, we also have `archived-enterprise-versions-assets.ts`
// but that one assumes the whole path refers to a prefix which is
// considered archived. E.g. /en/enterprise-server@2.9/foo/bar.css

View File

@@ -10,7 +10,7 @@ import type { ExtendedRequest } from '@/types'
// This module handles requests for the CSS and JS assets for
// deprecated GitHub Enterprise versions by routing them to static content in
// one of the docs-ghes-<release number> repos.
// See also ./archived-enterprise-versions.js for non-CSS/JS paths
// See also ./archived-enterprise-versions.ts for non-CSS/JS paths
export default async function archivedEnterpriseVersionsAssets(
req: ExtendedRequest,

View File

@@ -72,7 +72,7 @@ const cacheAggressively = (res: Response) => {
// 3. ~4000ms
//
// ...if the limit we set is 3.
// Our own timeout, in @/frame/middleware/timeout.js defaults to 10 seconds.
// Our own timeout, in @/frame/middleware/timeout.ts defaults to 10 seconds.
// So there's no point in trying more attempts than 3 because it would
// just timeout on the 10s. (i.e. 1000 + 2000 + 4000 + 8000 > 10,000)
const retryConfiguration = { limit: 3 }

View File

@@ -83,7 +83,7 @@ export const pageValidationMiddleware = (
const redirectsContext = { pages: req.context.pages, redirects: req.context.redirects }
// Similar to how the `handle-redirects.js` middleware works, let's first
// Similar to how the `handle-redirects.ts` middleware works, let's first
// check if the URL is just having a trailing slash.
while (pathname.endsWith('/') && pathname.length > 1) {
pathname = pathname.slice(0, -1)

View File

@@ -8,7 +8,7 @@ The audit log event pipeline generates the event data for 3 audit log pages; the
flowchart TD
Start([Start])-->
RunScript["Run:
src/auditlog/scripts/sync.js"]-->
src/auditlog/scripts/sync.ts"]-->
GetContents["getContents() schema.json
from github/audit-log-allowlists repo"]-->
SchemaFiles["audit log schema file\n

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from 'vitest'
import { getAuditLogEvents, getCategorizedAuditLogEvents } from '../lib/index.js'
import { getAuditLogEvents, getCategorizedAuditLogEvents } from '../lib/index'
import type { AuditLogEventT } from '../types'
describe('Audit log fields functionality', () => {

View File

@@ -41,10 +41,10 @@ When creating a new pipeline, the source data that is being consumed may not hav
- Create a new directory in the `src` directory with the name of the pipeline. For example, `src/codeql-cli`.
- Add a README.md file that describes the pipeline and how to use it. This should include any dependencies, how to run the pipeline, and any other information that is needed to use the pipeline. It's strongly recommended to include a diagram showing the overall flow of the pipeline.
- Each pipeline typically requires a workflow to allow scheduling or manually running the pipeline. The workflow should be placed in the `.github/workflows` directory and named `sync-<pipeline-name>.js`. Each workflow typically requires adding a manual run option and an input parameter to specify the source repo's branch to use.
- Each pipeline will need a `scripts` directory with (at minimum) a `scripts/sync.js` file to run the pipeline.
- Each pipeline typically requires a workflow to allow scheduling or manually running the pipeline. The workflow should be placed in the `.github/workflows` directory and named `sync-<pipeline-name>.yml`. Each workflow typically requires adding a manual run option and an input parameter to specify the source repo's branch to use.
- Each pipeline will need a `scripts` directory with (at minimum) a `scripts/sync.ts` file to run the pipeline.
- If the pipeline will contain structured data, you will need to add a `src/<pipeline-name>/data` directory. The files inside the `data` directory are typically organized by version (e.g., `src/webhooks/data/fpt/*`).
- Pipelines typically have tests specific to the pipeline that are placed in the `src/<pipeline-name>/tests` directory. There is no need to add tests that render the page because all autogenerated pages are tested in `src/automated-pipelines/tests/rendering.js`.
- Pipelines typically have tests specific to the pipeline that are placed in the `src/<pipeline-name>/tests` directory. There is no need to add tests that render the page because all autogenerated pages are tested in `src/automated-pipelines/tests/rendering.ts`.
- If the pipeline uses a Next.js page component (e.g., `pages/**/*.tsx`), ensure there is a test that fails if that page component is moved or deleted.
## How to get help

View File

@@ -10,7 +10,7 @@ The pipeline is used to generate Markdown files that create article pages on the
A [workflow](https://github.com/github/docs-internal/blob/main/.github/workflows/sync-codeql-cli.yml) is used to trigger the automation of the CodeQL CLI documentation. The workflow is manually triggered by a member of the GitHub Docs team approximately every two weeks to align to releases of the CodeQL CLI. The workflow takes an input parameter that specifies the branch to pull the source files from in the semmle-code repo. If the branch input is omitted, the workflow will default to the `main` branch.
The workflow runs the `src/codeql-cli/scripts/sync.js` script, which generates Markdown files under `content/code-security/codeql-cli/codeql-cli-manual`.
The workflow runs the `src/codeql-cli/scripts/sync.ts` script, which generates Markdown files under `content/code-security/codeql-cli/codeql-cli-manual`.
The workflow automatically creates a new pull request with the changes and the label `codeql-cli-pipeline`.
@@ -20,17 +20,17 @@ To run the CodeQL CLI pipeline locally:
1. Clone the `semmle-code` repository inside your local `docs-internal` repository.
2. [Install Pandoc](https://pandoc.org/installing.html). You can `brew install pandoc` on macOS.
3. Run `src/codeql-cli/scripts/sync.js`.
3. Run `src/codeql-cli/scripts/sync.ts`.
## About this directory
- `src/codeql-cli/lib/config.json` - A configuration file used to specify metadata about the CodeQL CLI pipeline.
- `src/codeql-cli/scripts` - The scripts and source code used run the CodeQL CLI pipeline.
- `src/codeql-cli/scripts/sync.js` - The entrypoint script that runs the CodeQL CLI pipeline.
- `src/codeql-cli/scripts/sync.ts` - The entrypoint script that runs the CodeQL CLI pipeline.
## Content team
The content writers can manually update parts of the autogenerated Markdown files in `content/code-security/codeql-cli/codeql-cli-manual`. When new Markdown files are added they will get all of the frontmatter properties defined in the `defaultFrontmatter` property in `src/codeql-cli/lib/config.js`.
The content writers can manually update parts of the autogenerated Markdown files in `content/code-security/codeql-cli/codeql-cli-manual`. When new Markdown files are added they will get all of the frontmatter properties defined in the `defaultFrontmatter` property in `src/codeql-cli/lib/config.ts`.
When a new Markdown file is created, a writer can manually change any of the frontmatter. The pipeline will not overwrite the frontmatter on subsequent runs.

View File

@@ -9,15 +9,15 @@ This README shows you how to contribute to the content linter code by adding new
At a high-level, there are four steps to create a new rule:
1. Adding a new rule file to [`src/content-linter/lib/linting-rules`](/src/content-linter/lib/linting-rules)
1. Importing the new rule and adding it to the custom rules array in [`src/content-linter/lib/linting-rules/index.js`](/src/content-linter/lib/linting-rules/index.js)
1. Adding the config for the new rule to [`src/content-linter/style/github-docs.js`](/src/content-linter/style/github-docs.js)
1. Importing the new rule and adding it to the custom rules array in [`src/content-linter/lib/linting-rules/index.ts`](/src/content-linter/lib/linting-rules/index.ts)
1. Adding the config for the new rule to [`src/content-linter/style/github-docs.ts`](/src/content-linter/style/github-docs.ts)
1. Adding a unit test for the new rule in [`src/content-linter/tests/unit`](/src/content-linter/tests/unit)
Rules are located in the `src/content-linter/lib/linting-rules` directory. Each rule is a separate file that exports an object with metadata and a function. The function is the core logic that implements the rule. In some cases a single file contains more than one rule when colocating them makes more sense. Rules that are very specific can return more than one error type.
## Creating a new rule
Create a new file in the `src/content-linter/lib/linting-rules` directory. The file name should be the same as the rule name. For example, if the rule name is `no-whitespace`, the file name should be `no-whitespace.js`. Avoid using the rule ID name for the file name. There is more information about the ID in [names](#names).
Create a new file in the `src/content-linter/lib/linting-rules` directory. The file name should be the same as the rule name. For example, if the rule name is `no-whitespace`, the file name should be `no-whitespace.ts`. Avoid using the rule ID name for the file name. There is more information about the ID in [names](#names).
Before creating a new rule, check that the rule does not already exist in [Markdownlint](https://github.com/DavidAnson/markdownlint/#rules--aliases). There are also many [open-source plugins](https://www.npmjs.com/search?q=keywords:markdownlint-rule) that may be used.
@@ -51,12 +51,12 @@ See the [custom rules](https://github.com/DavidAnson/markdownlint/blob/main/doc/
### Helper utilities
Markdownlint provides several helper functions. Take a look at the many exports in [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js). Note, this is unsupported and may stop being published to in the future.
Markdownlint provides several helper functions. Take a look at the many exports in [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js). Note, this is an external repository file (it is JavaScript, not TypeScript), and it is unsupported and may stop being published to in the future.
We've also written a few of our own:
- [`utils`](/src/content-linter/lib/helpers/utils.js)
- [`liquid-utils`](/src/content-linter/lib/helpers/liquid-utils.js)
- [`utils`](/src/content-linter/lib/helpers/utils.ts)
- [`liquid-utils`](/src/content-linter/lib/helpers/liquid-utils.ts)
### Setting errors
@@ -66,7 +66,7 @@ When setting errors for a rule, there are a few different functions to choose fr
- `addErrorContext` - when error detail is not needed but a specific range of context (Markdown snippet being checked) is needed
- `addErrorDetailIf` - when the error detail just needs to be the expected and actual results
See [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js) for more details.
See [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js) for more details.
### Async rules
@@ -96,7 +96,7 @@ See the [Markdownlint async documentation](https://github.com/DavidAnson/markdow
### Reading the data directory
When you need to read files in the data directory, you can use the `getDataByLanguage` or `getDeepDataByLanguage` export in [`lib/get-data.js`](/lib/get-data.js). This allows you to write unit tests that read data fixtures rather than real content. For an example of using `getDataByLanguage` or `getDeepDataByLanguage`, see the [`liquid-data-tags.js`](/src/content-linter/lib/linting-rules/liquid-data-tags.js) or [`liquid-versioning.js`](/src/content-linter/lib/linting-rules/liquid-versioning.js) rules.
When you need to read files in the data directory, you can use the `getDataByLanguage` or `getDeepDataByLanguage` export in [`lib/get-data.ts`](/lib/get-data.ts). This allows you to write unit tests that read data fixtures rather than real content. For an example of using `getDataByLanguage` or `getDeepDataByLanguage`, see the [`liquid-data-tags.ts`](/src/content-linter/lib/linting-rules/liquid-data-tags.ts) or [`liquid-versioning.ts`](/src/content-linter/lib/linting-rules/liquid-versioning.ts) rules.
### `names`
@@ -137,13 +137,13 @@ Tags are used to categorize rules. Choose one or more tags from the list below.
## Adding the rule to the custom rules array
To add the new rule to the list of custom rules that are run against GitHub Docs content, import the rule and add it to the `rules` array in [`src/content-linter/lib/linting-rules/index.js`](/src/content-linter/lib/linting-rules/index.js). The `rules` array defines all the custom rules that we add to the Markdownlint configuration [`options.customRules`](https://github.com/DavidAnson/markdownlint#optionscustomrules). Custom rules include the rules we write in this project and any open-source rules we use.
To add the new rule to the list of custom rules that are run against GitHub Docs content, import the rule and add it to the `rules` array in [`src/content-linter/lib/linting-rules/index.ts`](/src/content-linter/lib/linting-rules/index.ts). The `rules` array defines all the custom rules that we add to the Markdownlint configuration [`options.customRules`](https://github.com/DavidAnson/markdownlint#optionscustomrules). Custom rules include the rules we write in this project and any open-source rules we use.
## Configuring a new rule
Each rule that we configure for GitHub Docs has a corresponding entry in either `src/content-linter/style/base.js` or `src/content-linter/style/github-docs.js`. The `base.js` file contains rules that are available in the [Markdownlint](https://github.com/DavidAnson/markdownlint) project. The `github-docs.js` file contains open-source plugins (including [markdownlint-github](https://github.com/github/markdownlint-github/tree/main)) and the custom rules that we develop that are specific to GitHub Docs.
Each rule that we configure for GitHub Docs has a corresponding entry in either `src/content-linter/style/base.ts` or `src/content-linter/style/github-docs.ts`. The `base.ts` file contains rules that are available in the [Markdownlint](https://github.com/DavidAnson/markdownlint) project. The `github-docs.ts` file contains open-source plugins (including [markdownlint-github](https://github.com/github/markdownlint-github/tree/main)) and the custom rules that we develop that are specific to GitHub Docs.
Inside [`src/content-linter/style/github-docs.js`](/src/content-linter/style/github-docs.js), there are a few different sections:
Inside [`src/content-linter/style/github-docs.ts`](/src/content-linter/style/github-docs.ts), there are a few different sections:
- `githubDocsConfig` - Primary area that new rules are added to. The rules in this section configure Markdownlint to separate frontmatter from Markdown automatically. Both the frontmatter and Markdown are available to read from the rule logic, but you cannot leave an error on a line that contains frontmatter. Frontmatter is not sent through the Markdown parser by Markdownlint.
- `githubDocsFrontmatterConfig` - Contains rules that check frontmatter properties _and_ need to leave errors on frontmatter line numbers.
@@ -182,23 +182,23 @@ Once a rule is written, added to the custom rules array, and configured, you can
npm run lint-content -- --paths <path to file relative to docs-internal root> --rules <name of your new rule>
```
Each custom rule must add a unit test in the `src/content-linter/tests/unit` directory. The unit test should be named the same as the rule file name. For example, if the rule file name is `no-whitespace.js`, the unit test file name should be `no-whitespace.js`.
Each custom rule must add a unit test in the `src/content-linter/tests/unit` directory. The unit test should be named the same as the rule file name. For example, if the rule file name is `no-whitespace.ts`, the unit test file name should be `no-whitespace.ts`.
Unit tests must test auto-fixes if the rule allows them. The unit test should also test the line number and range. Include positive and negative tests.
If the test requires checking the file path, you can provide a fixture. For an example, see [`early-access-references.js`](/src/content-linter/tests/unit/early-access-references.js). Most tests pass Markdown strings to the rule directly.
If the test requires checking the file path, you can provide a fixture. For an example, see [`early-access-references.ts`](/src/content-linter/tests/unit/early-access-references.ts). Most tests pass Markdown strings to the rule directly.
## Content linter scripts
- [`lint-content.js`](/src/content-linter/scripts/lint-content.js) - The primary script used to run rules against content. We have a fairly customized implementation of Markdownlint, which prevented us from using [Markdownlint CLI2](https://github.com/DavidAnson/markdownlint-cli2). For example, we run Markdownlint more than once to allow different configurations for the `content` directory and `data` directory. We also run Markdownlint again to allow checking frontmatter properties. To view the options of the script, run `npm run lint-content -- --help`.
- [`disable-rules.js`](/src/content-linter/scripts/disable-rules.js) - This script is used to automatically add disable comments to the end of a line that violates a rule. This allows us to have violations in the content while also setting the rule's severity to `error`.
- [`pretty-print-results.js`](/src/content-linter/scripts/pretty-print-results.js) - This script simplifies and makes the results printed to the console easier to read.
- [`lint-content.ts`](/src/content-linter/scripts/lint-content.ts) - The primary script used to run rules against content. We have a fairly customized implementation of Markdownlint, which prevented us from using [Markdownlint CLI2](https://github.com/DavidAnson/markdownlint-cli2). For example, we run Markdownlint more than once to allow different configurations for the `content` directory and `data` directory. We also run Markdownlint again to allow checking frontmatter properties. To view the options of the script, run `npm run lint-content -- --help`.
- [`disable-rules.ts`](/src/content-linter/scripts/disable-rules.ts) - This script is used to automatically add disable comments to the end of a line that violates a rule. This allows us to have violations in the content while also setting the rule's severity to `error`.
- [`pretty-print-results.ts`](/src/content-linter/scripts/pretty-print-results.ts) - This script simplifies and makes the results printed to the console easier to read.
## Updating content to adhere to a new rule
Introducing a new rule with a severity of `error` can be difficult when many violations of that rule exist in content. If the rule implements an autofix by setting the `fixInfo` property in the error object, you can use the rule to autofix content before shipping the rule.
If the new rule doesn't have a possible autofix, you can use `disable-rules.js` to automatically add disable comments to the end of each Markdown line that contains a violation. This is not always possible since some lines are within code blocks and cannot be disabled.
If the new rule doesn't have a possible autofix, you can use `disable-rules.ts` to automatically add disable comments to the end of each Markdown line that contains a violation. This is not always possible since some lines are within code blocks and cannot be disabled.
The last option is to manually fix the violations. This is the most time-consuming option, but it's the only option when the rule cannot be autofixed and the line cannot be disabled.
@@ -206,7 +206,7 @@ A rule with too many violations to fix can be set to a severity of `warning`.
## Using the search-replace plugin
Because the search-replace rule consists of many search terms, it essentially performs one or more rule checks. Each rule is defined in the [`src/content-linter/style/github-docs.js`](/src/content-linter/style/github-docs.js) config under `searchReplaceConfig`.
Because the search-replace rule consists of many search terms, it essentially performs one or more rule checks. Each rule is defined in the [`src/content-linter/style/github-docs.ts`](/src/content-linter/style/github-docs.ts) config under `searchReplaceConfig`.
You can add a new `search-replace` rule using any search term or regex by adding it to the `rules` array. This is an easy way to perform checks if the check is just looking for a string or simple regex.
@@ -222,7 +222,7 @@ docs.github.com <!-- markdownlint-disable-line search-replace -->
## Adding context to a base rule's error message
If you want to add context to a base rule's error message, go to[`base.js`](/src/content-linter/style/base.js), and add the `context` property to the base rule's object. For e.g. if you wanted to add `context` to `MD040` (the `fenced-code-language` base rule), the object would look like this:
If you want to add context to a base rule's error message, go to [`base.ts`](/src/content-linter/style/base.ts), and add the `context` property to the base rule's object. For example, if you wanted to add `context` to `MD040` (the `fenced-code-language` base rule), the object would look like this:
```javascript
'fenced-code-language': {

View File

@@ -20,7 +20,7 @@ export const liquidQuotedConditionalArg: Rule = {
tags: ['liquid', 'format'],
function: (params: RuleParams, onError: RuleErrorCallback) => {
const content = params.lines.join('\n')
// Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.js which lacks type definitions
// Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.ts which lacks type definitions
const tokens = getLiquidTokens(content)
.filter((token: any) => token.kind === TokenKind.Tag)
.filter((token: any) => conditionalTags.includes(token.name))

View File

@@ -21,7 +21,7 @@ export const octiconAriaLabels: Rule = {
parser: 'markdownit',
function: (params: RuleParams, onError: RuleErrorCallback) => {
const content = params.lines.join('\n')
// Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.js which lacks type definitions
// Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.ts which lacks type definitions
const tokens = getLiquidTokens(content)
.filter((token: any) => token.kind === TokenKind.Tag)
.filter((token: any) => token.name === 'octicon')

View File

@@ -4,7 +4,7 @@
//
// Usage:
//
// src/content-linter/scripts/disable-rules.js no-generic-link-text
// src/content-linter/scripts/disable-rules.ts no-generic-link-text
import fs from 'fs'
import { spawn } from 'child_process'

View File

@@ -223,7 +223,7 @@ describe.skip('category pages', () => {
}`
const expectedSlug = expectedSlugs.at(-1) as string
const newCategoryDirPath = path.join(path.dirname(categoryDirPath), expectedSlug)
customMessage += `\nTo resolve this consider running:\n ./src/content-render/scripts/move-content.js ${categoryDirPath} ${newCategoryDirPath}\n`
customMessage += `\nTo resolve this consider running:\n ./src/content-render/scripts/move-content.ts ${categoryDirPath} ${newCategoryDirPath}\n`
// Check if the directory name matches the expected slug
expect(expectedSlugs.includes(categoryDirName), customMessage).toBeTruthy()
})

View File

@@ -35,7 +35,7 @@ describe('front matter', () => {
// Using any type because trouble array contains objects with varying error properties
if (trouble.find((t: any) => t.redirects)) {
customErrorMessage += `\n\nNOTE! To automatically fix the redirects run this command:\n`
customErrorMessage += `\n\t./src/links/scripts/update-internal-links.js content/${page.relativePath}\n\n`
customErrorMessage += `\n\t./src/links/scripts/update-internal-links.ts content/${page.relativePath}\n\n`
}
}
return customErrorMessage

View File

@@ -232,7 +232,7 @@ describe(codeAnnotationCommentSpacing.names.join(' - '), () => {
'echo "Hello"',
'',
'// This JS-style comment is fine',
'node script.js',
'node script.ts',
'',
'--This SQL comment needs space',
'psql -c "SELECT 1;"',

View File

@@ -1,5 +1,5 @@
import { describe, test, expect, vi } from 'vitest'
import { shouldIncludeRule } from '../../scripts/lint-content.js'
import { shouldIncludeRule } from '../../scripts/lint-content'
// Mock the get-rules module to provide test data for rule definitions
vi.mock('../../lib/helpers/get-rules', () => ({

View File

@@ -85,9 +85,9 @@ Each custom tag has the following:
- a JavaScript class in `lib/liquid-tags/`
- an HTML template in `includes/liquid-tags/`
The class and the template should have corresponding names, like `lib/liquid-tags/my-tag.js` and `includes/liquid-tags/my-tag.html`
The class and the template should have corresponding names, like `lib/liquid-tags/my-tag.ts` and `includes/liquid-tags/my-tag.html`
You must also register the new tag in `src/content-render/liquid/engine.js` with a line like this:
You must also register the new tag in `src/content-render/liquid/engine.ts` with a line like this:
```
engine.registerTag('my_tag', require('./liquid-tags/my-tag'))

View File

@@ -3,7 +3,7 @@
//
// Use this script to help you move or rename a single file or a folder. The script will move or rename the file or folder for you, update relevant `children` in the index.md file(s), and add a `redirect_from` to frontmatter in the renamed file(s). Note: You will still need to manually update the `title` if necessary.
//
// By default, the `move-content.js` script will commit the changes it makes. If you don't want the script to run any git commands for you, run it with the `--no-git` flag. Note: In most cases it will be easier and safer to let the script run the git commands for you, since git can get confused when a file is both renamed and edited.
// By default, the `move-content.ts` script will commit the changes it makes. If you don't want the script to run any git commands for you, run it with the `--no-git` flag. Note: In most cases it will be easier and safer to let the script run the git commands for you, since git can get confused when a file is both renamed and edited.
//
// To learn more about the script, you can run `npm run move-content --help`.
//

View File

@@ -6,8 +6,8 @@ import { renderLiquid } from '@/content-render/liquid/index'
import shortVersionsMiddleware from '@/versions/middleware/short-versions'
import type { ExtendedRequest } from '@/types'
const { loadPages } = await import('@/frame/lib/page-data.js')
const { allVersions } = await import('@/versions/lib/all-versions.js')
const { loadPages } = await import('@/frame/lib/page-data')
const { allVersions } = await import('@/versions/lib/all-versions')
const contentCopilotDir = path.join(process.cwd(), 'content-copilot')

View File

@@ -165,7 +165,7 @@ function moveFile(result: string[], options: ScriptOptions): void {
const stdout = execFileSync(
'tsx',
[
'src/content-render/scripts/move-content.js',
'src/content-render/scripts/move-content.ts',
'--no-git',
'--verbose',
contentPath,

View File

@@ -6,7 +6,7 @@ import nonEnterpriseDefaultVersion from '@/versions/lib/non-enterprise-default-v
import { DataDirectory } from '@/tests/helpers/data-directory'
describe('data tag', () => {
// Using 'any' type as DataDirectory is from data-directory.js which lacks type definitions
// Using 'any' type as DataDirectory is from data-directory.ts which lacks type definitions
let dd: any
const enDirBefore = languages.en.dir

View File

@@ -9,7 +9,7 @@ describe('liquid helper tags', () => {
// Using 'any' type as context is a test fixture with dynamic properties set in beforeAll
const context: any = {}
// Using 'any' type as DataDirectory is from data-directory.js which lacks type definitions
// Using 'any' type as DataDirectory is from data-directory.ts which lacks type definitions
let dd: any
const enDirBefore = languages.en.dir

View File

@@ -26,7 +26,7 @@
*
* export DELETED_FILES=`git diff --name-only --diff-filter=D main...`
* export CHANGED_FILES=`git diff --name-only --diff-filter=M main...`
* npm run test -- src/content-render/tests/render-changed-and-deleted-files.js
* npm run test -- src/content-render/tests/render-changed-and-deleted-files.ts
*/
import path from 'path'

View File

@@ -24,7 +24,7 @@ Contributing rules:
- You must start the code section with a single line comment, otherwise the two will be flipped.
- For HTML style, you can include a line after your annotations such as `<!-- -->` to maintain syntax highlighting; this will not impact what renders.
`parse-info-string.js` plugin is required for this to work, and must come before `remark-rehype`.
`parse-info-string.ts` plugin is required for this to work, and must come before `remark-rehype`.
`annotate` must come before the `highlight` plugin.
*/

View File

@@ -51,7 +51,7 @@ function wrapCodeExample(node: any, tree: any): Element {
const lang: string = node.children[0].properties.className?.[0].replace('language-', '')
const code: string = node.children[0].children[0].value
const subnav = null // getSubnav() lives in annotate.js, not needed for normal code blocks
const subnav = null // getSubnav() lives in annotate.ts, not needed for normal code blocks
const prompt = getPrompt(node, tree, code) // returns null if there's no prompt
const hasCopy: boolean = Boolean(getPreMeta(node).copy) // defaults to true

View File

@@ -81,7 +81,7 @@ export default function rewriteAssetImgTags() {
*/
function injectMaxWidth(pathname: string, maxWidth: number): string {
const split = pathname.split('/')
// This prefix needs to match what's possibly expected in dynamic-assets.js
// This prefix needs to match what's possibly expected in dynamic-assets.ts
const inject = `mw-${maxWidth}`
if (split.includes(inject)) {
throw new Error(`pathname already includes '${inject}'`)

View File

@@ -274,7 +274,7 @@ Look for an internal link that starts with '${url}'.
newHref = newHref.replace('/enterprise-server@latest/', `/enterprise-server@${latest}/`)
if (newHref === url) {
// start clean with no language (TOC pages already include the lang codes via lib/liquid-tags/link.js)
// start clean with no language (TOC pages already include the lang codes via lib/liquid-tags/link.ts)
const hrefWithoutLang = getPathWithoutLanguage(url)
// normalize any legacy links so they conform to new link structure

View File

@@ -32,13 +32,13 @@ function loadTableSchemas(): DataSchemas {
// Manual schema registrations for non-table data
const manualSchemas: DataSchemas = {
'data/features': '@/data-directory/lib/data-schemas/features.js',
'data/features': '@/data-directory/lib/data-schemas/features',
'data/variables': '@/data-directory/lib/data-schemas/variables',
'data/learning-tracks': '@/data-directory/lib/data-schemas/learning-tracks.js',
'data/release-notes': '@/data-directory/lib/data-schemas/release-notes.js',
'data/learning-tracks': '@/data-directory/lib/data-schemas/learning-tracks',
'data/release-notes': '@/data-directory/lib/data-schemas/release-notes',
'data/code-languages.yml': '@/data-directory/lib/data-schemas/code-languages',
'data/glossaries/candidates.yml': '@/data-directory/lib/data-schemas/glossaries-candidates.js',
'data/glossaries/external.yml': '@/data-directory/lib/data-schemas/glossaries-external.js',
'data/glossaries/candidates.yml': '@/data-directory/lib/data-schemas/glossaries-candidates',
'data/glossaries/external.yml': '@/data-directory/lib/data-schemas/glossaries-external',
}
// Combine manual registrations with auto-discovered table schemas

View File

@@ -38,15 +38,13 @@ export const getDeepDataByLanguage = memoize(
}
// The `dir` argument is only used for testing purposes.
// For example, our unit tests that depend on using a fixtures
// root.
// For example, our unit tests that depend on using a fixtures root.
// If we don't allow those tests to override the `dir` argument,
// it'll be stuck from the first time `languages.js` was imported.
let actualDir = dir
if (actualDir === null) {
actualDir = languages[langCode].dir
// it'll be stuck from the first time `languages.ts` was imported.
if (dir === null) {
dir = languages[langCode].dir
}
return getDeepDataByDir(dottedPath, actualDir)
return getDeepDataByDir(dottedPath, dir)
},
)

View File

@@ -9,7 +9,7 @@ const fixturesDir = path.join(__dirname, 'orphaned-features', 'fixtures')
// Import the actual helper functions from the orphaned features script
const { getVariableFiles, getReusableFiles } = await import(
'@/data-directory/scripts/find-orphaned-features/find.js'
'@/data-directory/scripts/find-orphaned-features/find'
)
describe('orphaned features detection', () => {

View File

@@ -93,12 +93,12 @@ const context = {
page_document_type: {
type: 'string',
description: 'The generic page document type based on URL path.',
enum: ['homepage', 'early-access', 'product', 'category', 'subcategory', 'article'], // get-document-type.js
enum: ['homepage', 'early-access', 'product', 'category', 'subcategory', 'article'], // get-document-type.ts
},
page_type: {
type: 'string',
description: 'Optional page type from the content frontmatter.',
enum: ['overview', 'quick_start', 'tutorial', 'how_to', 'reference', 'rai'], // frontmatter.js
enum: ['overview', 'quick_start', 'tutorial', 'how_to', 'reference', 'rai'], // frontmatter.ts
},
status: {
type: 'number',

View File

@@ -46,8 +46,8 @@ action.
Feel free to create sub-directories or new files. For example, if it's
about end-to-end testing a new custom Liquid tag called
`lib/liquid-tags/snacks.js` you create a new test called
`src/fixtures/tests/snack.js`. (And equally, you might want to create
`lib/liquid-tags/snacks.ts` you create a new test called
`src/fixtures/tests/snack.ts`. (And equally, you might want to create
`src/fixtures/fixtures/content/get-started/foo/snacking.md`)
To run the tests use:
@@ -73,7 +73,7 @@ There's a script you can always run that makes sure all and any of these
files are up to do:
```shell
./src/tests/scripts/copy-fixture-data.js
./src/tests/scripts/copy-fixture-data.ts
```
It's safe to run any time. And it might be necessary to run so that

View File

@@ -36,7 +36,7 @@ condition-e
too tied to the past.
You can type "3.9" if you want, but that version is only working right
now/today and will eventually break tests as the values in
`enterprise-server-releases.js` change over time.
`enterprise-server-releases.ts` change over time.
-->
{% ifversion ghes > __GHES_DEPRECATED__[0] %}

View File

@@ -1,5 +1,5 @@
---
front: matter used in tests/unit/liquid-tags/tokens-test.js
front: matter used in tests/unit/liquid-tags/tokens-test.ts
---
- One
{% if product.title == "Awesome Shoes" %}

View File

@@ -3,5 +3,5 @@ title: This is an article
intro: I have invalid versions frontmatter
versions:
fpt: '*'
ghec: 'issue-1234' # Only semver is allowed, per lib/all-versions.js
ghec: 'issue-1234' # Only semver is allowed, per lib/all-versions.ts
---

View File

@@ -103,7 +103,7 @@ export const getArticleContextFromRequest = (req: any): ArticleContextT => {
currentJourneyTrack: req.context.currentJourneyTrack,
detectedPlatforms: page.detectedPlatforms || [],
detectedTools: page.detectedTools || [],
allTools: page.allToolsParsed || [], // this is set at the page level, see lib/page.js
allTools: page.allToolsParsed || [], // this is set at the page level, see lib/page.ts
supportPortalVaIframeProps,
currentLayout: req.context.currentLayoutName,
}

View File

@@ -22,7 +22,7 @@ export type VersionItem = {
latestApiVersion: string
}
// This reflects what gets exported from `all-versions.js` in the
// This reflects what gets exported from `all-versions.ts` in the
// `allVersions` object.
// It's necessary for TypeScript, but we don't need to write down
// every possible key that might be present because we don't need it

View File

@@ -22,7 +22,7 @@ export const Breadcrumbs = ({ inHeader }: Props) => {
NOTE: The breadcrumbs class and the nav tag are used by the
Lunr search scripts. The a tag generated by the Link is also used.
If these change, please also change
updating src/search/scripts/parse-page-sections-into-records.js.
updating src/search/scripts/parse-page-sections-into-records.ts.
*/
<nav
data-testid={inHeader ? 'breadcrumbs-header' : 'breadcrumbs-in-article'}

View File

@@ -18,7 +18,7 @@
// We want to keep table-layout: auto so that column widths dynamically adjust;
// otherwise entries get needlessly squashed into narrow columns. As a workaround,
// we use components/lib/wrap-code-terms.js to prevent some reference table content
// we use components/lib/wrap-code-terms.ts to prevent some reference table content
// from expanding beyond the horizontal boundaries of the parent element.
@media (min-width: 544px) {
table-layout: auto;

View File

@@ -91,7 +91,7 @@ export function getVersionStringFromPath(
// version part is *not* supported, they get back `undefined`.
// But this function is used in many other places where it potentially
// doesn't care if the version is supported.
// For example, in lib/redirects/permalinks.js it needs to know if the
// For example, in lib/redirects/permalinks.ts it needs to know if the
// URL didn't contain a valid version.
if (supportedOnly) {
return

View File

@@ -25,7 +25,7 @@ const enterpriseServerVersions = Object.keys(allVersions).filter((version) =>
)
// Supply all route handlers with a baseline `req.context` object
// Note that additional middleware in middleware/index.js adds to this context object
// Note that additional middleware in middleware/index.ts adds to this context object
export default async function contextualize(
req: ExtendedRequest,
res: Response,

View File

@@ -46,7 +46,7 @@ export default async function reloadTree(req: ExtendedRequest, res: Response, ne
path.join(languages.en.dir, 'content'),
undefined,
warmed.unversionedTree.en,
)) as UnversionedTree // Note! Have to use `as` until create-tree.js is JS
)) as UnversionedTree
const after = getMtimes(warmed.unversionedTree.en)
// The next couple of operations are much slower (in total) than
// refreshing the tree. So we want to know if the tree changed before

View File

@@ -17,7 +17,7 @@ export default function robots(req: ExtendedRequest, res: Response, next: NextFu
const host = req.get('x-host') || req.get('x-forwarded-host') || req.get('host')
// only include robots.txt when it's our production domain and adding localhost for robots-txt.js test
// only include robots.txt when it's our production domain and adding localhost for robots-txt.ts test
if (
host === 'docs.github.com' ||
req.hostname === 'docs.github.com' ||

View File

@@ -296,7 +296,7 @@ describe('static routes', () => {
})
test('rewrites /assets requests from a cache-busting prefix', async () => {
// The rewrite-asset-urls.js Markdown plugin will do this to img tags.
// The rewrite-asset-urls.ts Markdown plugin will do this to img tags.
const res = await get('/assets/cb-123456/images/site/be-social.gif')
expect(res.statusCode).toBe(200)
expect(res.headers['set-cookie']).toBeUndefined()

View File

@@ -126,7 +126,7 @@ All previously archived content lives in its own repository. For example, GHES 3
1. In your `docs-internal` checkout, create a new branch: `git checkout -b deprecate-<version>`.
1. In your `docs-internal` checkout, edit `src/versions/lib/enterprise-server-releases.js` by removing the version number to be deprecated from the `supported` array and move it to the `deprecatedWithFunctionalRedirects` array.
1. In your `docs-internal` checkout, edit `src/versions/lib/enterprise-server-releases.ts` by removing the version number to be deprecated from the `supported` array and move it to the `deprecatedWithFunctionalRedirects` array.
1. Deprecate the automated pipelines data files:

View File

@@ -63,7 +63,7 @@ async function run() {
const releaseType = process.argv[2]
if (releaseType !== 'release' && releaseType !== 'deprecation') {
throw new Error(
"Please specify either 'release' or 'deprecation'\nExample: src/versions/scripts/create-enterprise-issue.js release",
"Please specify either 'release' or 'deprecation'\nExample: src/versions/scripts/create-enterprise-issue.ts release",
)
}

View File

@@ -120,7 +120,7 @@ async function main() {
if (!singlePage) {
// create redirect html files to preserve frontmatter redirects
await createRedirectsFile(pageList, path.join(tmpArchivalDirectory, version))
console.log(`next step: deprecate ${version} in lib/enterprise-server-releases.js`)
console.log(`next step: deprecate ${version} in lib/enterprise-server-releases.ts`)
} else {
console.log('🏁 Scraping a single page is complete')
}

View File

@@ -55,7 +55,7 @@ export async function updateAutomatedPipelines() {
// src/rest/lib/config.json file. We want to update 'api-versions'
// before the allVersions object is created so we need to import it
// after calling updateAutomatedConfigFiles.
const { allVersions } = await import('@/versions/lib/all-versions.js')
const { allVersions } = await import('@/versions/lib/all-versions')
// Gets all of the base names (e.g., ghes-) in the allVersions object
// Currently, this is only ghes- but if we had more than one type of
@@ -117,7 +117,7 @@ export async function updateAutomatedPipelines() {
const addFiles = difference(expectedDirectory, existingDataDir)
if (addFiles.length > numberedReleaseBaseNames.length) {
throw new Error(
'Only one new release per numbered release version should be added at a time. Check that the lib/enterprise-server-releases.js is correct.',
'Only one new release per numbered release version should be added at a time. Check that the lib/enterprise-server-releases.ts is correct.',
)
}

View File

@@ -9,7 +9,7 @@ import { program } from 'commander'
import { allVersions } from '@/versions/lib/all-versions'
const releaseCandidateJSFile = 'src/versions/lib/enterprise-server-releases.js'
const releaseCandidateJSFile = 'src/versions/lib/enterprise-server-releases.ts'
const allowedActions = ['create', 'remove'] as const
type AllowedAction = (typeof allowedActions)[number]

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from 'vitest'
import { shouldFilterMetadataPermission, calculateAdditionalPermissions } from '../scripts/sync.js'
import { shouldFilterMetadataPermission, calculateAdditionalPermissions } from '../scripts/sync'
describe('metadata permissions filtering', () => {
// Mock data structure representing operations with metadata permissions

View File

@@ -243,7 +243,7 @@ describe('getProgAccessData', () => {
})
})
// Helper function to simulate the data processing logic from sync.js
// Helper function to simulate the data processing logic from sync.ts
// without needing to set up the full file system or remote API calls
async function processProgAccessDataMock(
progAccessDataRaw: ProgAccessDataRaw[],

View File

@@ -8,7 +8,7 @@
## Editable files
* `src/graphql/lib/validator.json`
- JSON schema used in `tests/graphql.js`.
- JSON schema used in `tests/graphql.ts`.
* `src/graphql/lib/non-schema-scalars.json`
- An array of scalar types that live in [`graphql-ruby`](https://github.com/rmosolgo/graphql-ruby/tree/356d9d369e444423bf06cab3dc767ec75fbc6745/lib/graphql/types) only. These are
not part of the core GraphQL spec.
@@ -17,7 +17,7 @@
## Data files
Generated by `src/graphql/scripts/sync.js`:
Generated by `src/graphql/scripts/sync.ts`:
* `src/graphql/data/schema-VERSION.json` (separate files per version)
* `src/graphql/data/previews.json`
@@ -26,7 +26,7 @@ Generated by `src/graphql/scripts/sync.js`:
## Rendering docs
When the server starts, `middleware/graphql.js` accesses the static JSON files, fetches the data for the current version, and adds it to the `context` object. The added properties are:
When the server starts, `middleware/graphql.ts` accesses the static JSON files, fetches the data for the current version, and adds it to the `context` object. The added properties are:
* `context.graphql.schemaForCurrentVersion`
* `context.graphql.previewsForCurrentVersion`

View File

@@ -1,4 +1,4 @@
// the tests in tests/graphql.js use this schema to ensure the integrity
// the tests in tests/graphql.ts use this schema to ensure the integrity
// of the data in src/graphql/data/*.json
// JSON Schema type definitions for AJV validation

View File

@@ -3,7 +3,7 @@
A [scheduled workflow](../../../.github/workflows/sync-graphql.yml) runs the following
scripts on a daily basis:
```
src/graphql/scripts/sync.js
src/graphql/scripts/sync.ts
```
These scripts update the [JSON data files](../../../src/graphql/data) used to
render GraphQL docs. See the [`src/graphql/README`](../../../src/graphql/README.md)

View File

@@ -52,7 +52,7 @@ describe('featuredLinks', () => {
expect(columns.length).toBe(2)
for (const column of columns) {
const $featuredLinks = $('a', column)
// See MAX_FEATURED_LINKS constant in featured-links.js middleware
// See MAX_FEATURED_LINKS constant in featured-links.ts middleware
expect($featuredLinks.length).toBeLessThanOrEqual(4)
}
},

View File

@@ -105,7 +105,7 @@ Error messages are cleaned up for security and readability:
## Code Implementation
The feature is implemented in `src/languages/lib/render-with-fallback.js`:
The feature is implemented in `src/languages/lib/render-with-fallback.ts`:
- `createTranslationFallbackComment()` - Generates the HTML comment
- Enhanced `renderContentWithFallback()` - Adds comments for page properties
@@ -185,4 +185,4 @@ HTML comments are supported by all browsers and are invisible to end users.
### Monitoring
Translation fallback frequency can be monitored by tracking comment generation in logs or analytics.
This feature helps translation teams identify and fix issues more efficiently while maintaining the reliability of the docs site for all users.
This feature helps translation teams identify and fix issues more efficiently while maintaining the reliability of the docs site for all users.

View File

@@ -28,7 +28,7 @@ Periodically, translators read the `content/**` and `data/**` directories from `
During the build step of our deployment, we checkout every translation repo into the `translations/` directory.
The enabled languages and their source directories are interpreted in [`src/languages/lib/languages.js`](https://github.com/github/docs-internal/blob/a8e52aad1a6b67f41da92d314bd7fd8cd84193a4/src/languages/lib/languages.js), which ensures English and translated content are in the same Docker image we deploy.
The enabled languages and their source directories are interpreted in [`src/languages/lib/languages.ts`](https://github.com/github/docs-internal/blob/a8e52aad1a6b67f41da92d314bd7fd8cd84193a4/src/languages/lib/languages.js), which ensures English and translated content are in the same Docker image we deploy.
When the app starts up, we:
1. Create a [tree of possible pages in English](https://github.com/github/docs-internal/blob/c535fe30bc271f35090054b21e1aaf69cb125e71/src/frame/lib/page-data.js#L45-L47) by following the `children:` property in each `index.md` file.
@@ -46,7 +46,7 @@ When a user requests a translated page, it's possible that the content contains
In these situations, we fall back to English content for the part that generated an error, not the whole page. For example, a Japanese translation might have a perfectly good `title`, `shortTitle`, and Markdown body, but the Liquid within the `intro` frontmatter property could be broken. In that case, you'll get a Japanese title, English intro, and Japanese body.
Details of how the error handling and logic for fallback works, see `src/languages/lib/render-with-fallback.js`.
For details of how the error handling and fallback logic work, see `src/languages/lib/render-with-fallback.ts`.
### Handling of moved or deleted content
@@ -81,7 +81,7 @@ If the translated `data/variables/support.yml` is corrupted, and we can't fall b
The translation repos can only be read by us. We **do not** and cannot modify their contents. When errors in translations are found, we communicate them to the translators and wait for the fixes to appear in the translated repos.
So if a translation urgently needs to be updated and we can't wait for the usual translation process, the best solution is to implement string replacement operations directly in the code. For example, with the `translateTree` function in `src/frame/lib/page-data.js`.
So if a translation urgently needs to be updated and we can't wait for the usual translation process, the best solution is to implement string replacement operations directly in the code. For example, with the `translateTree` function in `src/frame/lib/page-data.ts`.
When a file is renamed in `github/docs-internal`, say from `foo.md` to `bar.md`, the translation repos keep *both* `foo.md` and `bar.md`. Files are not deleted from translation repos. This limitation prevents us from scanning the directories for files like we do with the English content and it is the motivation for the creation of language-specific trees.
@@ -115,7 +115,7 @@ This section assumes you want your local environment to replicate the structure
git clone <ko-kr-repo-url> ko-kr
git clone <de-de-repo-url> de-de
```
1. Start the development server with `npm run start-all-languages`. If you need to enable specific languages, you can set `ENABLED_LANGUAGES`. For example, to start the server with English, Japanese, and Spanish only, use: `ENABLED_LANGUAGES=en,ja,es NODE_ENV=development nodemon server.js`
1. Start the development server with `npm run start-all-languages`. If you need to enable specific languages, you can set `ENABLED_LANGUAGES`. For example, to start the server with English, Japanese, and Spanish only, use: `ENABLED_LANGUAGES=en,ja,es NODE_ENV=development nodemon server.ts`
Note: If you ever need to place translations in a different location, use the `TRANSLATIONS_ROOT` environment variable. We do this for tests.

View File

@@ -1,4 +1,4 @@
// See also languages-schema.js
// See also languages-schema.ts
// Nota bene: If you are adding a new language,
// change accept-language handling in CDN config as well.
import path from 'path'

View File

@@ -131,7 +131,7 @@ export async function renderContentWithFallback(
const enPage = context.getEnglishPage(context)
const englishTemplate = (enPage as any)[property] as string
// If you don't change the context, it'll confuse the liquid plugins
// like `data.js` that uses `environment.scope.currentLanguage`
// like `data.ts` that uses `environment.scope.currentLanguage`
const enContext = Object.assign({}, context, { currentLanguage: 'en' })
// Render the English fallback content

View File

@@ -68,7 +68,7 @@ describe('learning tracks', () => {
}
if (fixables) {
errorMessage += `\nNOTE! To automatically fix the redirects run this command:\n`
errorMessage += `\n\t./src/links/scripts/update-internal-links.js data/learning-tracks/${topLevel}.yml\n`
errorMessage += `\n\t./src/links/scripts/update-internal-links.ts data/learning-tracks/${topLevel}.yml\n`
}
expect(troubles.length, errorMessage).toEqual(0)
})

View File

@@ -1,6 +1,6 @@
/**
* Linkinator relies
* on this in `src/links/scripts/rendered-content-link-checker-cli.js` when we encounter external
* on this in `src/links/scripts/rendered-content-link-checker-cli.ts` when we encounter external
* links that we *specifically ignore*. That means, that URLs or patterns
* mentioned in the corresponding YAML file might appear within our content but we don't
* bother checking that they actually work.

View File

@@ -45,7 +45,7 @@ main(program.opts(), program.args)
// 3. ~4000ms
//
// ...if the limit we set is 3.
// Our own timeout, in @/frame/middleware/timeout.js defaults to 10 seconds.
// Our own timeout, in @/frame/middleware/timeout.ts defaults to 10 seconds.
// So there's no point in trying more attempts than 3 because it would
// just timeout on the 10s. (i.e. 1000 + 2000 + 4000 + 8000 > 10,000)
const retryConfiguration = {

View File

@@ -13,7 +13,7 @@ See documentation below for:
Print usage info for any script in this directory:
```bash
tsx src/metrics/scripts/<SCRIPT_NAME>.js --help
tsx src/metrics/scripts/<SCRIPT_NAME>.ts --help
```
If you get `command not found: tsx`, run:
```
@@ -32,7 +32,7 @@ If the URL doesn't include a version, `docstat` will return data for **all versi
### Usage
The steps below show the [global alias](#set-a-global-alias). Use the full command path (`tsx src/metrics/scripts/docstat.js`) if you don't set up an alias.
The steps below show the [global alias](#set-a-global-alias). Use the full command path (`tsx src/metrics/scripts/docstat.ts`) if you don't set up an alias.
To see the available options:
```
@@ -66,7 +66,7 @@ To use `docstat` from any location in Terminal, set up a global alias:
1. Open your shell configuration file (like `~/.bash_profile` or `~/.zshrc`) in a text editor.
2. Add the following line, replacing the path with the actual path to your local directory, for example:
```bash
alias docstat="tsx ~/gh-repos/docs-internal/src/metrics/scripts/docstat.js"
alias docstat="tsx ~/gh-repos/docs-internal/src/metrics/scripts/docstat.ts"
```
3. Save the file and reload your configuration:
```bash
@@ -80,15 +80,15 @@ Run `docsaudit` on a top-level content directory to gather data about its files
To see the available options:
```
tsx src/metrics/scripts/docsaudit.js --help
tsx src/metrics/scripts/docsaudit.ts --help
```
Run the script on any top-level content directory:
```
tsx src/metrics/scripts/docsaudit.js <content directory name>
tsx src/metrics/scripts/docsaudit.ts <content directory name>
```
For example:
```
tsx src/metrics/scripts/docsaudit.js actions
tsx src/metrics/scripts/docsaudit.ts actions
```
## Future development
@@ -99,4 +99,4 @@ Applies to all scripts in this directory:
* In the future, we can add an option to set a custom end date.
* The only Kusto queries available are hardcoded in the `kusto/queries` directory.
* In the future, we can hardcode more queries, add the ability to send custom queries, or perhaps create pre-defined sets of queries.
* In the future, we can hardcode more queries, add the ability to send custom queries, or perhaps create pre-defined sets of queries.

View File

@@ -23,7 +23,7 @@ Error.getInitialProps = async (ctx: NextPageContext) => {
// `pages/404.tsx` which takes care of 404 messages.
if (err && res && req) {
// This is a (necessary) hack!
// You can't import `../lib/failbot.js` here in this
// You can't import `../lib/failbot.ts` here in this
// file because it gets imported by webpack to be used in the
// client-side JS bundle. It *could* be solved by overriding
// the webpack configuration in our `next.config.ts` but this is prone

View File

@@ -65,7 +65,7 @@ for (const productId of productIds) {
const applicableVersions = getApplicableVersions(tocData.versions, toc)
const href = removeFPTFromPath(path.posix.join('/', applicableVersions[0], productId))
// Note that a special middleware called `render-product-map.js` later
// Note that a special middleware called `render-product-map.ts` later
// mutates this object by adding a `nameRendered` property to each product.
// It's the outcome of rendering out possible Liquid from the
// `shortTitle` or `title` after all the other contextualizers have run.

View File

@@ -12,7 +12,7 @@ Read on for more about how redirects work under the hood.
Precompiled redirects account for the majority of the docs site's redirect handling.
When [`lib/warm-server.ts`](lib/warm-server.ts) runs on server start, it creates all pages in the site by instantiating the [`Page` class](lib/page.js) for each content file, then passes the pages to `lib/redirects/precompile.js` to create redirects. The precompile script runs `lib/redirects/permalinks.js`, which:
When [`lib/warm-server.ts`](lib/warm-server.ts) runs on server start, it creates all pages in the site by instantiating the [`Page` class](lib/page.ts) for each content file, then passes the pages to `lib/redirects/precompile.ts` to create redirects. The precompile script runs `lib/redirects/permalinks.ts`, which:
1. Includes all legacy redirects from `static/developer.json`
2. Loops over each page's [frontmatter `redirect_from` entries](content/README.md#redirect_from) and creates an array of legacy paths for each one (using the same handling as for permalinks).
@@ -24,7 +24,7 @@ Sometimes it contains the specific plan/version (e.g. `/enterprise-server@3.0/v3
All of the above are merged into a global redirects object. This object gets added to `req.context` via `src/frame/middleware/context/context.ts` and is made accessible on every request.
In the `handle-redirects.js` middleware, the language part of the URL is
In the `handle-redirects.ts` middleware, the language part of the URL is
removed, looked up, and if matched to something, redirects with language
put back in. Demonstrated with pseudo code:
@@ -40,7 +40,7 @@ if (newPath) {
Archived Enterprise redirects account for a much smaller percentage of redirects on the docs site.
Some background on archival: a snapshot of the HTML files for each deprecated Enterprise Server version is archived in a separate repo and proxied to docs.github.com via `src/archives/middleware/archived-enterprise-versions.js`.
Some background on archival: a snapshot of the HTML files for each deprecated Enterprise Server version is archived in a separate repo and proxied to docs.github.com via `src/archives/middleware/archived-enterprise-versions.ts`.
Starting with Enterprise Server 2.18, we updated the archival process to start preserving frontmatter and permalink redirects. But these redirects for 2.13 to 2.17 are not recoverable.
@@ -48,7 +48,7 @@ As a workaround for these lost redirects, we have two files in `lib/redirects/st
* `archived-redirects-from-213-to-217.json`
This file contains keys equal to old routes and values equal to new routes (aka snapshots of permalinks at the time) for versions 2.13 to 2.17. (The old routes were generated via `lib/redirects/get-old-paths-from-permalink.js`.)
This file contains keys equal to old routes and values equal to new routes (aka snapshots of permalinks at the time) for versions 2.13 to 2.17. (The old routes were generated via `lib/redirects/get-old-paths-from-permalink.ts`.)
* `archived-frontmatter-valid-urls.json`
@@ -62,10 +62,10 @@ As a workaround for these lost redirects, we have two files in `lib/redirects/st
version range of 2.13 to 2.17. So every key in `archived-frontmatter-valid-urls.json`
corresponds to a file that would work.
Here's how the `src/archives/middleware/archived-enterprise-versions.js` fallback works: if someone tries to access an article that was updated via a now-lost frontmatter redirect (for example, an article at the path `/en/enterprise/2.15/user/articles/viewing-contributions-on-your-profile-page`), the middleware will first look for a redirect in `archived-redirects-from-213-to-217.json`. If it does not find one, it will look for it in `archived-frontmatter-valid-urls.json` that contains the requested path. If it finds it, it will redirect to it to because that file knows exactly which URLs are valid in the `docs-ghes-<release number>` repos.
Here's how the `src/archives/middleware/archived-enterprise-versions.ts` fallback works: if someone tries to access an article that was updated via a now-lost frontmatter redirect (for example, an article at the path `/en/enterprise/2.15/user/articles/viewing-contributions-on-your-profile-page`), the middleware will first look for a redirect in `archived-redirects-from-213-to-217.json`. If it does not find one, it will look for it in `archived-frontmatter-valid-urls.json` that contains the requested path. If it finds it, it will redirect to it because that file knows exactly which URLs are valid in the `docs-ghes-<release number>` repos.
## Tests
Redirect tests are mainly found in `tests/routing/*`, with some additional tests in `tests/rendering/server.js`.
Redirect tests are mainly found in `tests/routing/*`, with some additional tests in `tests/rendering/server.ts`.
The `src/fixtures/fixtures/*` directory includes `developer-redirects.json`, `graphql-redirects.json`, and `rest-redirects.json`.

View File

@@ -14,7 +14,7 @@ export default function permalinkRedirects(
// The following is handling for versionless redirect fallbacks!
// We put an entry into `redirects` without any version prefix that goes to the first supported
// version in the lib/all-versions.js order. For example, we want this versionless path:
// version in the lib/all-versions.ts order. For example, we want this versionless path:
// /billing/managing-billing-for-your-github-account/managing-invoices-for-your-enterprise
// to redirect to its first supported version, which is GHEC:
// /enterprise-cloud@latest/billing/managing-billing-for-your-github-account/managing-invoices-for-your-enterprise

View File

@@ -28,13 +28,13 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
// NOTE: Exception redirects **MUST COME AFTER** pageList redirects above in order
// to properly override them. Exception redirects are unicorn one-offs that are not
// otherwise handled by the versionless redirect fallbacks (see lib/all-versions.js).
// otherwise handled by the versionless redirect fallbacks (see lib/all-versions.ts).
//
// Examples of exceptions:
// * We deprecate the FPT version of a page, and we want the FPT version to redirect
// to a different version that goes against the order in lib/all-versions.js.
// to a different version that goes against the order in lib/all-versions.ts.
// * We deprecate a non-FPT version of a page, and we want the old version to redirect
// to a different version. Because the order in lib/all-versions.js only covers
// to a different version. Because the order in lib/all-versions.ts only covers
// versionless links (like `/foo`), we need to specify an exception for the old
// versioned links (like `/enterprise-cloud@latest/foo`).
// * We deprecate a version of a page, and instead of falling back to the next

View File

@@ -1,5 +1,5 @@
# These urls are exceptions to the versionless redirect fallbacks (described in lib/all-versions.js).
# See the comment in lib/redirects/precompile.js for an explanation of these exceptions.
# These urls are exceptions to the versionless redirect fallbacks (described in lib/all-versions.ts).
# See the comment in lib/redirects/precompile.ts for an explanation of these exceptions.
# Originally shipped in pull #20947 on 10/15/21
# Updated in pull #27307 on 07/21/22

View File

@@ -142,7 +142,7 @@ export default function handleRedirects(req: ExtendedRequest, res: Response, nex
function getLanguage(req: ExtendedRequest, default_ = 'en') {
// req.context.userLanguage, if it truthy, is always a valid supported
// language. It's whatever was in the user's request in lib/languages.js
// language. It's whatever was in the user's request in lib/languages.ts
return req.context!.userLanguage || default_
}

View File

@@ -12,7 +12,7 @@ const VERSIONLESS_REDIRECTS_FILE = path.join(
'../../../../src/fixtures/fixtures/versionless-redirects.txt',
)
// This test checks the default versioning redirect fallbacks described in lib/all-versions.js.
// This test checks the default versioning redirect fallbacks described in lib/all-versions.ts.
// The fixture now contains mock URLs instead of live URLs to prevent test failures when content is moved.
// This ensures the redirect logic works correctly without being dependent on real content files.
describe('versioned redirects', () => {

View File

@@ -62,7 +62,7 @@ The `src/rest/lib/config.json` file contains metadata used by the content and eng
## Content team
The content writers can manually update parts of the autogenerated Markdown files in `content/rest`. When new Markdown files are added they will get all of the frontmatter properties defined in the `defaultFrontmatter` property in `src/rest/lib/config.js`.
The content writers can manually update parts of the autogenerated Markdown files in `content/rest`. When new Markdown files are added they will get all of the frontmatter properties defined in the `defaultFrontmatter` property in `src/rest/lib/config.ts`.
When a new Markdown file is created, a writer can manually change any of the frontmatter except `versions`. The pipeline will overwrite the `versions` property on subsequent runs, but will not modify any other frontmatter properties.

View File

@@ -114,7 +114,7 @@ async function main() {
// so that we don't spend time generating data files for them.
if (sourceRepos.includes(REST_API_DESCRIPTION_ROOT)) {
const derefDir = await readdir(TEMP_OPENAPI_DIR)
// TODO: After migrating all-version.js to TypeScript, we can remove the type assertion
// TODO: Now that all-versions.ts is TypeScript, we can remove the type assertion
const currentOpenApiVersions = Object.values(allVersions).map(
(elem) => (elem as any).openApiVersionName,
)
@@ -261,7 +261,7 @@ async function validateInputParameters(): Promise<void> {
// team that owns the data we consume. This function translates the version
// names to use the names in the src/<pipeline>/lib/config.json file.
// The names in the config.json file maps the incoming version name to
// the short name of the version defined in lib/allVersions.js.
// the short name of the version defined in lib/all-versions.ts.
// This function also translates calendar-date format from .2022-11-28 to
// -2022-11-28
export async function normalizeDataVersionNames(sourceDirectory: string): Promise<void> {

View File

@@ -78,5 +78,5 @@ The preferred way to build and sync the search indices is to do so via the [GitH
- It's not strictly necessary to set an `objectID` as the search index will create one automatically, but by creating our own we have a guarantee that subsequent invocations of this upload script will overwrite existing records instead of creating numerous duplicate records with differing IDs.
- Our search querying has typo tolerance. Try spelling something wrong and see what you get!
- Our search querying has lots of controls for customizing each index, so we can add weights to certain attributes and create rules like "title is more important than body", etc. But it works pretty well as-is without any configuration.
- Our search querying has support for "advanced query syntax" for exact matching of quoted expressions and exclusion of words preceded by a `-` sign. This is off by default, but it is enabled in our browser client. The settings in the web interface can be overridden by the search endpoint. See [middleware/search.js](middleware/search.js).
- Our search querying has support for "advanced query syntax" for exact matching of quoted expressions and exclusion of words preceded by a `-` sign. This is off by default, but it is enabled in our browser client. The settings in the web interface can be overridden by the search endpoint. See [middleware/search.ts](middleware/search.ts).
- When needed, the Docs Engineering team can commit updates to the search index, as long as the label `skip-index-check` is applied to the PR.

View File

@@ -2,7 +2,7 @@ import { schema } from '@/frame/lib/frontmatter'
// Secret scanning entries have `versions` blocks that match `versions` frontmatter,
// so we can import that part of the FM schema.
// Access the versions property which is defined dynamically in frontmatter.js
// Access the versions property which is defined dynamically in frontmatter.ts
const versionsProps = Object.assign({}, (schema.properties as Record<string, any>).versions)
// The secret-scanning.json contains an array of objects that look like this:

View File

@@ -26,7 +26,7 @@ export type ExtendedRequest = Request & {
}
// TODO: Make this type from inference using AJV based on the schema.
// For now, it's based on `schema` in frame/lib/frontmatter.js
// For now, it's based on `schema` in frame/lib/frontmatter.ts
export type PageFrontmatter = {
title: string
versions: FrontmatterVersions

Some files were not shown because too many files have changed in this diff Show More