1
0
mirror of synced 2025-12-19 18:10:59 -05:00

Merge pull request #41687 from github/repo-sync

Repo sync
This commit is contained in:
docs-bot
2025-12-04 13:49:17 -08:00
committed by GitHub
51 changed files with 1423 additions and 190 deletions

View File

@@ -57,18 +57,20 @@ By default, {% data variables.product.prodname_dependabot %} will stop rebasing
You can use any of the following commands on a {% data variables.product.prodname_dependabot %} pull request. You can use any of the following commands on a {% data variables.product.prodname_dependabot %} pull request.
* `@dependabot cancel merge` cancels a previously requested merge. | Command | Description |
* `@dependabot close` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from recreating that pull request. You can achieve the same result by closing the pull request manually. | --- | --- |
* `@dependabot ignore this dependency` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this dependency (unless you reopen the pull request or upgrade to the suggested version of the dependency yourself). | `@dependabot cancel merge` | Cancels a previously requested merge. |
* `@dependabot ignore this major version` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this major version (unless you reopen the pull request or upgrade to this major version yourself). | `@dependabot close` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from recreating that pull request. You can achieve the same result by closing the pull request manually. |
* `@dependabot ignore this minor version` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this minor version (unless you reopen the pull request or upgrade to this minor version yourself). | `@dependabot ignore this dependency` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this dependency (unless you reopen the pull request or upgrade to the suggested version yourself). |
* `@dependabot ignore this patch version` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this patch version (unless you reopen the pull request or upgrade to this patch version yourself). | `@dependabot ignore this major version` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this major version (unless you reopen the pull request or upgrade to this major version yourself). |
* `@dependabot merge` merges the pull request once your CI tests have passed. | `@dependabot ignore this minor version` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this minor version (unless you reopen the pull request or upgrade to this minor version yourself). |
* `@dependabot rebase` rebases the pull request. | `@dependabot ignore this patch version` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from creating any more pull requests for this patch version (unless you reopen the pull request or upgrade to this patch version yourself). |
* `@dependabot recreate` recreates the pull request, overwriting any edits that have been made to the pull request. | `@dependabot merge` | Merges the pull request once your CI tests have passed. |
* `@dependabot reopen` reopens the pull request if the pull request is closed. | `@dependabot rebase` | Rebases the pull request. |
* `@dependabot show DEPENDENCY_NAME ignore conditions` retrieves information on the ignore conditions for the specified dependency, and comments on the pull request with a table that displays all ignore conditions for the dependency. For example, `@dependabot show express ignore conditions` would find all `ignore` conditions stored for the Express dependency, and comment on the pull request with that information. | `@dependabot recreate` | Recreates the pull request, overwriting any edits that have been made to the pull request. |
* `@dependabot squash and merge` squashes and merges the pull request once your CI tests have passed. | `@dependabot reopen` | Reopens the pull request if the pull request is closed. |
| `@dependabot show DEPENDENCY_NAME ignore conditions` | Retrieves information on the ignore conditions for the specified dependency, and comments on the pull request with a table that displays all ignore conditions for the dependency. For example, `@dependabot show express ignore conditions` would find all `ignore` conditions stored for the Express dependency, and comment on the pull request with that information. |
| `@dependabot squash and merge` | Squashes and merges the pull request once your CI tests have passed. |
{% data variables.product.prodname_dependabot %} will react with a "thumbs up" emoji to acknowledge the command, and may respond with a comment on the pull request. While {% data variables.product.prodname_dependabot %} usually responds quickly, some commands may take several minutes to complete if {% data variables.product.prodname_dependabot %} is busy processing other updates or commands. {% data variables.product.prodname_dependabot %} will react with a "thumbs up" emoji to acknowledge the command, and may respond with a comment on the pull request. While {% data variables.product.prodname_dependabot %} usually responds quickly, some commands may take several minutes to complete if {% data variables.product.prodname_dependabot %} is busy processing other updates or commands.
@@ -80,13 +82,15 @@ For more information, see [AUTOTITLE](/code-security/dependabot/working-with-dep
In {% data variables.product.prodname_dependabot %} pull requests for grouped version updates and security updates, you can use comment commands to ignore and un-ignore updates for specific dependencies and versions. You can use any of the following commands to manage ignore conditions for grouped updates. In {% data variables.product.prodname_dependabot %} pull requests for grouped version updates and security updates, you can use comment commands to ignore and un-ignore updates for specific dependencies and versions. You can use any of the following commands to manage ignore conditions for grouped updates.
* `@dependabot ignore DEPENDENCY_NAME` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency. | Command | Description |
* `@dependabot ignore DEPENDENCY_NAME major version` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency's major version. | --- | --- |
* `@dependabot ignore DEPENDENCY_NAME minor version` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency's minor version. | `@dependabot ignore DEPENDENCY_NAME` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency. |
* `@dependabot ignore DEPENDENCY_NAME patch version` closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency's patch version. | `@dependabot ignore DEPENDENCY_NAME major version` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency's major version. |
* `@dependabot unignore *` closes the current pull request, clears all `ignore` conditions stored for all dependencies in the group, then opens a new pull request. | `@dependabot ignore DEPENDENCY_NAME minor version` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency's minor version. |
* `@dependabot unignore DEPENDENCY_NAME` closes the current pull request, clears all `ignore` conditions stored for the dependency, then opens a new pull request that includes available updates for the specified dependency. For example, `@dependabot unignore lodash` would open a new pull request that includes updates for the Lodash dependency. | `@dependabot ignore DEPENDENCY_NAME patch version` | Closes the pull request and prevents {% data variables.product.prodname_dependabot %} from updating this dependency's patch version. |
* `@dependabot unignore DEPENDENCY_NAME IGNORE_CONDITION` closes the current pull request, clears the stored `ignore` condition, then opens a new pull request that includes available updates for the specified ignore condition. For example, `@dependabot unignore express [< 1.9, > 1.8.0]` would open a new pull request that includes updates for Express between versions 1.8.0 and 1.9.0. | `@dependabot unignore *` | Closes the current pull request, clears all `ignore` conditions stored for all dependencies in the group, then opens a new pull request. |
| `@dependabot unignore DEPENDENCY_NAME` | Closes the current pull request, clears all `ignore` conditions stored for the dependency, then opens a new pull request that includes available updates for the specified dependency. For example, `@dependabot unignore lodash` would open a new pull request that includes updates for the Lodash dependency. |
| `@dependabot unignore DEPENDENCY_NAME IGNORE_CONDITION` | Closes the current pull request, clears the stored `ignore` condition, then opens a new pull request that includes available updates for the specified ignore condition. For example, `@dependabot unignore express [< 1.9, > 1.8.0]` would open a new pull request that includes updates for Express between versions 1.8.0 and 1.9.0. |
> [!TIP] > [!TIP]
> When you want to un-ignore a specific ignore condition, use the `@dependabot show DEPENDENCY_NAME ignore conditions` command to quickly check what ignore conditions a dependency currently has. > When you want to un-ignore a specific ignore condition, use the `@dependabot show DEPENDENCY_NAME ignore conditions` command to quickly check what ignore conditions a dependency currently has.

View File

@@ -17,7 +17,7 @@ category:
Experience less rate limiting and reduce the mental load of choosing a model by letting {% data variables.copilot.copilot_auto_model_selection %} automatically choose the best available model on your behalf. Experience less rate limiting and reduce the mental load of choosing a model by letting {% data variables.copilot.copilot_auto_model_selection %} automatically choose the best available model on your behalf.
{% data variables.copilot.copilot_auto_model_selection %} is currently optimized for model availability, choosing from a list of models that may change over time. It currently chooses from {% data variables.copilot.copilot_gpt_41 %}, {% data variables.copilot.copilot_gpt_5_mini %}, {% data variables.copilot.copilot_gpt_5 %}, {% data variables.copilot.copilot_claude_haiku_45 %}, and {% data variables.copilot.copilot_claude_sonnet_45 %}, based on your subscription type. {% data variables.copilot.copilot_auto_model_selection %} is currently optimized for model availability, choosing from a list of models that may change over time. It currently chooses from {% data variables.copilot.copilot_gpt_41 %}, {% data variables.copilot.copilot_gpt_5_mini %}, {% data variables.copilot.copilot_gpt_51_codex_max %}, {% data variables.copilot.copilot_claude_haiku_45 %}, {% data variables.copilot.copilot_claude_sonnet_45 %}, and {% data variables.copilot.copilot_gemini_3_pro %}, based on your subscription type.
With {% data variables.copilot.copilot_auto_model_selection %}, you benefit from: With {% data variables.copilot.copilot_auto_model_selection %}, you benefit from:
* Reduced chances of rate limiting * Reduced chances of rate limiting

View File

@@ -310,14 +310,21 @@ If you want to allow {% data variables.product.prodname_copilot_short %} to acce
"github-mcp-server": { "github-mcp-server": {
"type": "http", "type": "http",
// Remove "/readonly" to enable wider access to all tools. // Remove "/readonly" to enable wider access to all tools.
// Then, use the "tools" key to specify the subset of tools you'd like to include. // Then, use the "X-MCP-Toolsets" header to specify which toolsets you'd like to include.
// Use the "tools" field to select individual tools from the toolsets.
"url": "https://api.githubcopilot.com/mcp/readonly", "url": "https://api.githubcopilot.com/mcp/readonly",
"tools": ["*"] "tools": ["*"],
"headers": {
"X-MCP-Toolsets": "repos,issues,users,pull_requests,code_security,secret_protection,actions,web_search"
}
} }
} }
} }
``` ```
For more information on toolsets, refer to the [README](https://github.com/github/github-mcp-server?tab=readme-ov-file#available-toolsets) in the {% data variables.product.github %} Remote MCP Server documentation.
1. Click **Save**. 1. Click **Save**.
{% data reusables.actions.sidebar-environment %} {% data reusables.actions.sidebar-environment %}
1. Click the `copilot` environment. 1. Click the `copilot` environment.

View File

@@ -27,6 +27,7 @@ Used for:
* {% data variables.copilot.copilot_gpt_51 %} * {% data variables.copilot.copilot_gpt_51 %}
* {% data variables.copilot.copilot_gpt_51_codex %} * {% data variables.copilot.copilot_gpt_51_codex %}
* {% data variables.copilot.copilot_gpt_51_codex_mini %} * {% data variables.copilot.copilot_gpt_51_codex_mini %}
* {% data variables.copilot.copilot_gpt_51_codex_max %}
These models are hosted by OpenAI and {% data variables.product.github %}'s Azure infrastructure. These models are hosted by OpenAI and {% data variables.product.github %}'s Azure infrastructure.

View File

@@ -69,6 +69,10 @@
multiplier_paid: 0.33 multiplier_paid: 0.33
multiplier_free: Not applicable multiplier_free: Not applicable
- name: GPT-5.1-Codex-Max
multiplier_paid: 1.0
multiplier_free: Not applicable
- name: Grok Code Fast 1 - name: Grok Code Fast 1
multiplier_paid: 0.25 multiplier_paid: 0.25
multiplier_free: Not applicable multiplier_free: Not applicable

View File

@@ -67,6 +67,13 @@
ask_mode: true ask_mode: true
edit_mode: true edit_mode: true
- name: 'GPT-5.1-Codex-Max'
provider: 'OpenAI'
release_status: 'Public preview'
agent_mode: true
ask_mode: true
edit_mode: true
# Anthropic models # Anthropic models
- name: 'Claude Haiku 4.5' - name: 'Claude Haiku 4.5'
provider: 'Anthropic' provider: 'Anthropic'

View File

@@ -125,6 +125,14 @@
xcode: true xcode: true
jetbrains: true jetbrains: true
- name: GPT-5.1-Codex-Max
dotcom: true
vscode: true
vs: false
eclipse: false
xcode: false
jetbrains: false
- name: Grok Code Fast 1 - name: Grok Code Fast 1
dotcom: true dotcom: true
vscode: true vscode: true

View File

@@ -110,6 +110,13 @@
business: true business: true
enterprise: true enterprise: true
- name: GPT-5.1-Codex-Max
free: false
pro: true
pro_plus: true
business: true
enterprise: true
- name: Grok Code Fast 1 - name: Grok Code Fast 1
free: false free: false
pro: true pro: true

View File

@@ -158,6 +158,7 @@ copilot_gpt_5_mini: 'GPT-5 mini'
copilot_gpt_51: 'GPT-5.1' copilot_gpt_51: 'GPT-5.1'
copilot_gpt_51_codex: 'GPT-5.1-Codex' copilot_gpt_51_codex: 'GPT-5.1-Codex'
copilot_gpt_51_codex_mini: 'GPT-5.1-Codex-Mini' copilot_gpt_51_codex_mini: 'GPT-5.1-Codex-Mini'
copilot_gpt_51_codex_max: 'GPT-5.1-Codex-Max'
# OpenAI 'o' series: # OpenAI 'o' series:
copilot_o3: 'o3' copilot_o3: 'o3'
copilot_o4_mini: 'o4-mini' copilot_o4_mini: 'o4-mini'

View File

@@ -13,12 +13,24 @@ Article API endpoints allow consumers to query GitHub Docs for listings of curre
The `/api/article/meta` endpoint powers hovercards, which provide a preview for internal links on <docs.github.com>. The `/api/article/meta` endpoint powers hovercards, which provide a preview for internal links on <docs.github.com>.
The `/api/article/body` endpoint can serve markdown for both regular articles and autogenerated content (such as REST API documentation) using specialized transformers.
## How it works ## How it works
The `/api/article` endpoints return information about a page by `pathname`. The `/api/article` endpoints return information about a page by `pathname`.
`api/article/meta` is highly cached, in JSON format. `api/article/meta` is highly cached, in JSON format.
### Autogenerated Content Transformers
For autogenerated pages (REST, landing pages, audit logs, webhooks, GraphQL, etc.), the Article API uses specialized transformers to convert the rendered content into markdown format. These transformers are located in `src/article-api/transformers/` and use an extensible architecture:
To add a new transformer for other autogenerated content types:
1. Create a new transformer file implementing the `PageTransformer` interface.
2. Register it in `transformers/index.ts`.
3. Create a template in `templates/` to configure how the transformer will organize the autogenerated content.
4. The transformer will automatically be used by `/api/article/body`.
## How to get help ## How to get help
For internal folks ask in the Docs Engineering slack channel. For internal folks ask in the Docs Engineering slack channel.
@@ -34,12 +46,13 @@ Get article metadata and content in a single object. Equivalent to calling `/art
**Parameters**: **Parameters**:
- **pathname** (string) - Article path (e.g. '/en/get-started/article-name') - **pathname** (string) - Article path (e.g. '/en/get-started/article-name')
- **[apiVersion]** (string) - API version for REST pages (optional, defaults to latest)
**Returns**: (object) - JSON object with article metadata and content (`meta` and `body` keys) **Returns**: (object) - JSON object with article metadata and content (`meta` and `body` keys)
**Throws**: **Throws**:
- (Error): 403 - If the article body cannot be retrieved. Reason is given in the error message. - (Error): 403 - If the article body cannot be retrieved. Reason is given in the error message.
- (Error): 400 - If pathname parameter is invalid. - (Error): 400 - If pathname or apiVersion parameters are invalid.
- (Error): 404 - If the path is valid, but the page couldn't be resolved. - (Error): 404 - If the path is valid, but the page couldn't be resolved.
**Example**: **Example**:
@@ -63,12 +76,13 @@ Get the contents of an article's body.
**Parameters**: **Parameters**:
- **pathname** (string) - Article path (e.g. '/en/get-started/article-name') - **pathname** (string) - Article path (e.g. '/en/get-started/article-name')
- **[apiVersion]** (string) - API version (optional, defaults to latest)
**Returns**: (string) - Article body content in markdown format. **Returns**: (string) - Article body content in markdown format.
**Throws**: **Throws**:
- (Error): 403 - If the article body cannot be retrieved. Reason is given in the error message. - (Error): 403 - If the article body cannot be retrieved. Reason is given in the error message.
- (Error): 400 - If pathname parameter is invalid. - (Error): 400 - If pathname or apiVersion parameters are invalid.
- (Error): 404 - If the path is valid, but the page couldn't be resolved. - (Error): 404 - If the path is valid, but the page couldn't be resolved.
**Example**: **Example**:

View File

@@ -0,0 +1,16 @@
/**
* API Transformer Liquid Tags
*
* This module contains custom Liquid tags used by article-api transformers
* to render API documentation in a consistent format.
*/
import { restTags } from './rest-tags'
// Export all API transformer tags for registration
export const apiTransformerTags = {
...restTags,
}
// Re-export individual tag modules for direct access if needed
export { restTags } from './rest-tags'

View File

@@ -0,0 +1,230 @@
import type { TagToken, Context as LiquidContext } from 'liquidjs'
import { fastTextOnly } from '@/content-render/unified/text-only'
import { renderContent } from '@/content-render/index'
import type { Context } from '@/types'
import type { Parameter, BodyParameter, ChildParameter, StatusCode } from '@/rest/components/types'
import { createLogger } from '@/observability/logger'
const logger = createLogger('article-api/liquid-renderers/rest-tags')
/**
* Custom Liquid tag for rendering REST API parameters
* Usage: {% rest_parameter param %}
*/
export class RestParameter {
private paramName: string
constructor(
token: TagToken,
remainTokens: TagToken[],
liquid: { options: any; parser: any },
private liquidContext?: LiquidContext,
) {
// The tag receives the parameter object from the template context
this.paramName = token.args.trim()
}
async render(ctx: LiquidContext, emitter: any): Promise<void> {
const param = ctx.get([this.paramName]) as Parameter
const context = ctx.get(['context']) as Context
if (!param) {
emitter.write('')
return
}
const lines: string[] = []
const required = param.required ? ' (required)' : ''
const type = param.schema?.type || 'string'
lines.push(`- **\`${param.name}\`** (${type})${required}`)
if (param.description) {
const description = await htmlToMarkdown(param.description, context)
lines.push(` ${description}`)
}
if (param.schema?.default !== undefined) {
lines.push(` Default: \`${param.schema.default}\``)
}
if (param.schema?.enum && param.schema.enum.length > 0) {
lines.push(` Can be one of: ${param.schema.enum.map((v) => `\`${v}\``).join(', ')}`)
}
emitter.write(lines.join('\n'))
}
}
/**
* Custom Liquid tag for rendering REST API body parameters
* Usage: {% rest_body_parameter param indent %}
*/
export class RestBodyParameter {
constructor(
token: TagToken,
remainTokens: TagToken[],
liquid: { options: any; parser: any },
private liquidContext?: LiquidContext,
) {
// Parse arguments - param name and optional indent level
const args = token.args.trim().split(/\s+/)
this.param = args[0]
this.indent = args[1] ? parseInt(args[1]) : 0
}
private param: string
private indent: number
async render(ctx: LiquidContext, emitter: any): Promise<void> {
const param = ctx.get([this.param]) as BodyParameter
const context = ctx.get(['context']) as Context
const indent = this.indent
if (!param) {
emitter.write('')
return
}
const lines: string[] = []
const prefix = ' '.repeat(indent)
const required = param.isRequired ? ' (required)' : ''
const type = param.type || 'string'
lines.push(`${prefix}- **\`${param.name}\`** (${type})${required}`)
if (param.description) {
const description = await htmlToMarkdown(param.description, context)
lines.push(`${prefix} ${description}`)
}
if (param.default !== undefined) {
lines.push(`${prefix} Default: \`${param.default}\``)
}
if (param.enum && param.enum.length > 0) {
lines.push(`${prefix} Can be one of: ${param.enum.map((v) => `\`${v}\``).join(', ')}`)
}
// Handle nested parameters
if (param.childParamsGroups && param.childParamsGroups.length > 0) {
for (const childGroup of param.childParamsGroups) {
lines.push(await renderChildParameter(childGroup, context, indent + 1))
}
}
emitter.write(lines.join('\n'))
}
}
/**
* Custom Liquid tag for rendering REST API status codes
* Usage: {% rest_status_code statusCode %}
*/
export class RestStatusCode {
private statusCodeName: string
constructor(
token: TagToken,
remainTokens: TagToken[],
liquid: { options: any; parser: any },
private liquidContext?: LiquidContext,
) {
this.statusCodeName = token.args.trim()
}
async render(ctx: LiquidContext, emitter: any): Promise<void> {
const statusCode = ctx.get([this.statusCodeName]) as StatusCode
const context = ctx.get(['context']) as Context
if (!statusCode) {
emitter.write('')
return
}
const lines: string[] = []
if (statusCode.description) {
const description = await htmlToMarkdown(statusCode.description, context)
lines.push(`- **${statusCode.httpStatusCode}**`)
if (description.trim()) {
lines.push(` ${description.trim()}`)
}
} else if (statusCode.httpStatusMessage) {
lines.push(`- **${statusCode.httpStatusCode}** - ${statusCode.httpStatusMessage}`)
} else {
lines.push(`- **${statusCode.httpStatusCode}**`)
}
emitter.write(lines.join('\n'))
}
}
/**
* Helper function to render child parameters recursively
*/
async function renderChildParameter(
param: ChildParameter,
context: Context,
indent: number,
): Promise<string> {
const lines: string[] = []
const prefix = ' '.repeat(indent)
const required = param.isRequired ? ' (required)' : ''
const type = param.type || 'string'
lines.push(`${prefix}- **\`${param.name}\`** (${type})${required}`)
if (param.description) {
const description = await htmlToMarkdown(param.description, context)
lines.push(`${prefix} ${description}`)
}
if (param.default !== undefined) {
lines.push(`${prefix} Default: \`${param.default}\``)
}
if (param.enum && param.enum.length > 0) {
lines.push(`${prefix} Can be one of: ${param.enum.map((v: string) => `\`${v}\``).join(', ')}`)
}
// Recursively handle nested parameters
if (param.childParamsGroups && param.childParamsGroups.length > 0) {
for (const child of param.childParamsGroups) {
lines.push(await renderChildParameter(child, context, indent + 1))
}
}
return lines.join('\n')
}
/**
* Helper function to convert HTML to markdown
*/
async function htmlToMarkdown(html: string, context: Context): Promise<string> {
if (!html) return ''
try {
const rendered = await renderContent(html, context, { textOnly: false })
return fastTextOnly(rendered)
} catch (error) {
logger.error('Failed to render HTML content to markdown in REST tag', {
error,
html: html.substring(0, 100), // First 100 chars for context
contextInfo: context && context.page ? { page: context.page.relativePath } : undefined,
})
// In non-production, re-throw to aid debugging
if (process.env.NODE_ENV !== 'production') {
throw error
}
// Fallback to simple text extraction
return fastTextOnly(html)
}
}
// Export tag names for registration
export const restTags = {
rest_parameter: RestParameter,
rest_body_parameter: RestBodyParameter,
rest_status_code: RestStatusCode,
}

View File

@@ -3,20 +3,15 @@ import type { Response } from 'express'
import { Context } from '@/types' import { Context } from '@/types'
import { ExtendedRequestWithPageInfo } from '@/article-api/types' import { ExtendedRequestWithPageInfo } from '@/article-api/types'
import contextualize from '@/frame/middleware/context/context' import contextualize from '@/frame/middleware/context/context'
import { transformerRegistry } from '@/article-api/transformers'
import { allVersions } from '@/versions/lib/all-versions'
import type { Page } from '@/types'
export async function getArticleBody(req: ExtendedRequestWithPageInfo) { /**
// req.pageinfo is set from pageValidationMiddleware and pathValidationMiddleware * Creates a mocked rendering request and contextualizes it.
// and is in the ExtendedRequestWithPageInfo * This is used to prepare a request for rendering pages in markdown format.
const { page, pathname, archived } = req.pageinfo */
async function createContextualizedRenderingRequest(pathname: string, page: Page) {
if (archived?.isArchived)
throw new Error(`Page ${pathname} is archived and can't be rendered in markdown.`)
// for anything that's not an article (like index pages), don't try to render and
// tell the user what's going on
if (page.documentType !== 'article') {
throw new Error(`Page ${pathname} isn't yet available in markdown.`)
}
// these parts allow us to render the page
const mockedContext: Context = {} const mockedContext: Context = {}
const renderingReq = { const renderingReq = {
path: pathname, path: pathname,
@@ -29,9 +24,51 @@ export async function getArticleBody(req: ExtendedRequestWithPageInfo) {
}, },
} }
// contextualize and render the page // contextualize the request to get proper version info
await contextualize(renderingReq as ExtendedRequestWithPageInfo, {} as Response, () => {}) await contextualize(renderingReq as ExtendedRequestWithPageInfo, {} as Response, () => {})
renderingReq.context.page = page renderingReq.context.page = page
return renderingReq
}
export async function getArticleBody(req: ExtendedRequestWithPageInfo) {
// req.pageinfo is set from pageValidationMiddleware and pathValidationMiddleware
// and is in the ExtendedRequestWithPageInfo
const { page, pathname, archived } = req.pageinfo
if (archived?.isArchived)
throw new Error(`Page ${pathname} is archived and can't be rendered in markdown.`)
// Extract apiVersion from query params if provided
const apiVersion = req.query.apiVersion as string | undefined
// Check if there's a transformer for this page type (e.g., REST, webhooks, etc.)
const transformer = transformerRegistry.findTransformer(page)
if (transformer) {
// Use the transformer for autogenerated pages
const renderingReq = await createContextualizedRenderingRequest(pathname, page)
// Determine the API version to use (provided or latest)
// Validation is handled by apiVersionValidationMiddleware
const currentVersion = renderingReq.context.currentVersion
let effectiveApiVersion = apiVersion
// Use latest version if not provided
if (!effectiveApiVersion && currentVersion && allVersions[currentVersion]) {
effectiveApiVersion = allVersions[currentVersion].latestApiVersion || undefined
}
return await transformer.transform(page, pathname, renderingReq.context, effectiveApiVersion)
}
// For regular articles (non-autogenerated)
if (page.documentType !== 'article') {
throw new Error(`Page ${pathname} isn't yet available in markdown.`)
}
// these parts allow us to render the page
const renderingReq = await createContextualizedRenderingRequest(pathname, page)
renderingReq.context.markdownRequested = true renderingReq.context.markdownRequested = true
return await page.render(renderingReq.context) return await page.render(renderingReq.context)
} }

View File

@@ -4,7 +4,11 @@ import express from 'express'
import { defaultCacheControl } from '@/frame/middleware/cache-control' import { defaultCacheControl } from '@/frame/middleware/cache-control'
import catchMiddlewareError from '@/observability/middleware/catch-middleware-error' import catchMiddlewareError from '@/observability/middleware/catch-middleware-error'
import { ExtendedRequestWithPageInfo } from '../types' import { ExtendedRequestWithPageInfo } from '../types'
import { pageValidationMiddleware, pathValidationMiddleware } from './validation' import {
pageValidationMiddleware,
pathValidationMiddleware,
apiVersionValidationMiddleware,
} from './validation'
import { getArticleBody } from './article-body' import { getArticleBody } from './article-body'
import { getMetadata } from './article-pageinfo' import { getMetadata } from './article-pageinfo'
import { import {
@@ -24,9 +28,10 @@ const router = express.Router()
* Get article metadata and content in a single object. Equivalent to calling `/article/meta` concatenated with `/article/body`. * Get article metadata and content in a single object. Equivalent to calling `/article/meta` concatenated with `/article/body`.
* @route GET /api/article * @route GET /api/article
* @param {string} pathname - Article path (e.g. '/en/get-started/article-name') * @param {string} pathname - Article path (e.g. '/en/get-started/article-name')
* @param {string} [apiVersion] - API version for REST pages (optional, defaults to latest)
* @returns {object} JSON object with article metadata and content (`meta` and `body` keys) * @returns {object} JSON object with article metadata and content (`meta` and `body` keys)
* @throws {Error} 403 - If the article body cannot be retrieved. Reason is given in the error message. * @throws {Error} 403 - If the article body cannot be retrieved. Reason is given in the error message.
* @throws {Error} 400 - If pathname parameter is invalid. * @throws {Error} 400 - If pathname or apiVersion parameters are invalid.
* @throws {Error} 404 - If the path is valid, but the page couldn't be resolved. * @throws {Error} 404 - If the path is valid, but the page couldn't be resolved.
* @example * @example
* curl -s "https://docs.github.com/api/article?pathname=/en/get-started/start-your-journey/about-github-and-git" * curl -s "https://docs.github.com/api/article?pathname=/en/get-started/start-your-journey/about-github-and-git"
@@ -43,6 +48,7 @@ router.get(
'/', '/',
pathValidationMiddleware as RequestHandler, pathValidationMiddleware as RequestHandler,
pageValidationMiddleware as RequestHandler, pageValidationMiddleware as RequestHandler,
apiVersionValidationMiddleware as RequestHandler,
catchMiddlewareError(async function (req: ExtendedRequestWithPageInfo, res: Response) { catchMiddlewareError(async function (req: ExtendedRequestWithPageInfo, res: Response) {
const { meta, cacheInfo } = await getMetadata(req) const { meta, cacheInfo } = await getMetadata(req)
let bodyContent let bodyContent
@@ -66,9 +72,10 @@ router.get(
* Get the contents of an article's body. * Get the contents of an article's body.
* @route GET /api/article/body * @route GET /api/article/body
* @param {string} pathname - Article path (e.g. '/en/get-started/article-name') * @param {string} pathname - Article path (e.g. '/en/get-started/article-name')
* @param {string} [apiVersion] - API version (optional, defaults to latest)
* @returns {string} Article body content in markdown format. * @returns {string} Article body content in markdown format.
* @throws {Error} 403 - If the article body cannot be retrieved. Reason is given in the error message. * @throws {Error} 403 - If the article body cannot be retrieved. Reason is given in the error message.
* @throws {Error} 400 - If pathname parameter is invalid. * @throws {Error} 400 - If pathname or apiVersion parameters are invalid.
* @throws {Error} 404 - If the path is valid, but the page couldn't be resolved. * @throws {Error} 404 - If the path is valid, but the page couldn't be resolved.
* @example * @example
* curl -s https://docs.github.com/api/article/body\?pathname=/en/get-started/start-your-journey/about-github-and-git * curl -s https://docs.github.com/api/article/body\?pathname=/en/get-started/start-your-journey/about-github-and-git
@@ -83,6 +90,7 @@ router.get(
'/body', '/body',
pathValidationMiddleware as RequestHandler, pathValidationMiddleware as RequestHandler,
pageValidationMiddleware as RequestHandler, pageValidationMiddleware as RequestHandler,
apiVersionValidationMiddleware as RequestHandler,
catchMiddlewareError(async function (req: ExtendedRequestWithPageInfo, res: Response) { catchMiddlewareError(async function (req: ExtendedRequestWithPageInfo, res: Response) {
let bodyContent let bodyContent
try { try {

View File

@@ -6,6 +6,7 @@ import { isArchivedVersionByPath } from '@/archives/lib/is-archived-version'
import getRedirect from '@/redirects/lib/get-redirect' import getRedirect from '@/redirects/lib/get-redirect'
import { getVersionStringFromPath, getLangFromPath } from '@/frame/lib/path-utils' import { getVersionStringFromPath, getLangFromPath } from '@/frame/lib/path-utils'
import nonEnterpriseDefaultVersion from '@/versions/lib/non-enterprise-default-version' import nonEnterpriseDefaultVersion from '@/versions/lib/non-enterprise-default-version'
import { allVersions } from '@/versions/lib/all-versions'
// validates the path for pagelist endpoint // validates the path for pagelist endpoint
// specifically, defaults to `/en/free-pro-team@latest` when those values are missing // specifically, defaults to `/en/free-pro-team@latest` when those values are missing
@@ -123,3 +124,47 @@ export const pageValidationMiddleware = (
return next() return next()
} }
/**
 * Express middleware that validates the optional `apiVersion` query parameter
 * against the calendar API versions available for the docs version found in
 * the request's pathname.
 *
 * Responds 400 for a repeated `apiVersion` key, an unknown docs version, or an
 * apiVersion not offered by that docs version; otherwise calls `next()`.
 * A missing `apiVersion` is allowed (downstream code defaults to latest).
 */
export const apiVersionValidationMiddleware = (
  req: ExtendedRequestWithPageInfo,
  res: Response,
  next: NextFunction,
) => {
  const requestedApiVersion = req.query.apiVersion as string | string[] | undefined

  // Nothing to validate; callers fall back to the latest API version
  if (!requestedApiVersion) return next()

  // Express parses repeated ?apiVersion=... keys into an array — reject those
  if (Array.isArray(requestedApiVersion)) {
    return res.status(400).json({ error: "Multiple 'apiVersion' keys" })
  }

  // pathValidationMiddleware runs earlier in the chain, so a pathname should
  // always be available here; treat its absence as a programming error
  const pathname = req.pageinfo?.pathname || (req.query.pathname as string)
  if (!pathname) {
    throw new Error('pathname not available for apiVersion validation')
  }

  // Derive the docs version from the pathname, defaulting to free-pro-team
  const currentVersion = getVersionStringFromPath(pathname) || nonEnterpriseDefaultVersion
  const versionInfo = allVersions[currentVersion]
  if (!versionInfo) {
    return res.status(400).json({ error: `Invalid version '${currentVersion}'` })
  }

  // Only docs versions that expose calendar API versions need checking;
  // versions without any apiVersions accept whatever was passed
  const validApiVersions = versionInfo.apiVersions || []
  const isKnownApiVersion =
    validApiVersions.length === 0 || validApiVersions.includes(requestedApiVersion)
  if (!isKnownApiVersion) {
    return res.status(400).json({
      error: `Invalid apiVersion '${requestedApiVersion}' for ${currentVersion}. Valid API versions are: ${validApiVersions.join(', ')}`,
    })
  }
  return next()
}

View File

@@ -0,0 +1,100 @@
# {{ page.title }}
{{ page.intro }}
{{ manualContent }}
{% for operation in restOperations %}
## {{ operation.title }}
```
{{ operation.verb | upcase }} {{ operation.requestPath }}
```
{{ operation.description }}
{% if operation.hasParameters %}
### Parameters
{% if operation.showHeaders %}
#### Headers
{% if operation.needsContentTypeHeader %}
- **`content-type`** (string, required)
Setting to `application/json` is required.
{% endif %}
- **`accept`** (string)
Setting to `application/vnd.github+json` is recommended.
{% endif %}
{% if operation.parameters.size > 0 %}
#### Path and query parameters
{% for param in operation.parameters %}
{% rest_parameter param %}
{% endfor %}
{% endif %}
{% if operation.bodyParameters.size > 0 %}
#### Body parameters
{% for param in operation.bodyParameters %}
{% rest_body_parameter param %}
{% endfor %}
{% endif %}
{% endif %}
{% if operation.statusCodes.size > 0 %}
### HTTP response status codes
{% for statusCode in operation.statusCodes %}
- **{{ statusCode.httpStatusCode }}**{% if statusCode.description %} - {{ statusCode.description }}{% elsif statusCode.httpStatusMessage %} - {{ statusCode.httpStatusMessage }}{% endif %}
{% endfor %}
{% endif %}
{% if operation.codeExamples.size > 0 %}
### Code examples
{% for example in operation.codeExamples %}
{% if example.request.description %}
#### {{ example.request.description }}
{% endif %}
**Request:**
```curl
curl -L \
-X {{ operation.verb | upcase }} \
{{ example.request.url }} \
{%- if example.request.acceptHeader %}
-H "Accept: {{ example.request.acceptHeader }}" \
{%- endif %}
-H "Authorization: Bearer <YOUR-TOKEN>"{% if apiVersion %} \
-H "X-GitHub-Api-Version: {{ apiVersion }}"{% endif -%}
{%- if example.request.bodyParameters %} \
-d '{{ example.request.bodyParameters }}'{% endif %}
```
**Response schema:**
{% if example.response.schema %}
```json
Status: {{ example.response.statusCode }}
{{ example.response.schema }}
```
{% else %}
```
Status: {{ example.response.statusCode }}
```
{% endif %}
{% endfor %}
{% endif %}
{% endfor %}

View File

@@ -0,0 +1,309 @@
import { beforeAll, describe, expect, test } from 'vitest'
import { get } from '@/tests/helpers/e2etest'
// Builds an /api/article/body URL, percent-encoding the pathname (and the
// optional apiVersion) as query-string parameters.
const makeURL = (pathname: string, apiVersion?: string): string => {
  const query = new URLSearchParams([['pathname', pathname]])
  if (apiVersion) query.append('apiVersion', apiVersion)
  return `/api/article/body?${query.toString()}`
}
describe('REST transformer', () => {
beforeAll(() => {
if (!process.env.ROOT) {
console.warn(
'WARNING: The REST transformer tests require the ROOT environment variable to be set to the fixture root',
)
}
})
test('REST page renders with markdown structure', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
expect(res.headers['content-type']).toContain('text/markdown')
// Check for the main heading
expect(res.body).toContain('# GitHub Actions Artifacts')
// Check for intro (using fixture's prodname_actions which is 'HubGit Actions')
expect(res.body).toContain('Use the REST API to interact with artifacts in HubGit Actions.')
// Check for manual content section heading
expect(res.body).toContain('## About artifacts in HubGit Actions')
})
test('REST operations are formatted correctly', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for operation heading
expect(res.body).toContain('## List artifacts for a repository')
// Check for HTTP method and endpoint
expect(res.body).toContain('GET /repos/{owner}/{repo}/actions/artifacts')
// Check for operation description
expect(res.body).toContain('Lists all artifacts for a repository.')
})
test('Parameters section includes headers', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for parameters heading
expect(res.body).toContain('### Parameters')
// Check for headers section
expect(res.body).toContain('#### Headers')
// Check for accept header
expect(res.body).toContain('**`accept`** (string)')
expect(res.body).toContain('Setting to `application/vnd.github+json` is recommended.')
})
test('Path and query parameters are listed', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for path and query parameters section
expect(res.body).toContain('#### Path and query parameters')
// Check for specific parameters
expect(res.body).toContain('**`owner`** (string) (required)')
expect(res.body).toContain('The account owner of the repository.')
expect(res.body).toContain('**`repo`** (string) (required)')
expect(res.body).toContain('**`per_page`** (integer)')
expect(res.body).toContain('Default: `30`')
})
test('Status codes are formatted correctly', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for status codes section
expect(res.body).toContain('### HTTP response status codes')
// Check for specific status code
expect(res.body).toContain('**200**')
expect(res.body).toContain('OK')
})
test('Code examples include curl with proper formatting', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for code examples section
expect(res.body).toContain('### Code examples')
// Check for request/response labels
expect(res.body).toContain('**Request:**')
expect(res.body).toContain('**Response schema:**')
// Check for curl code block
expect(res.body).toContain('```curl')
expect(res.body).toContain('curl -L \\')
expect(res.body).toContain('-X GET \\')
expect(res.body).toContain('https://api.github.com/repos/OWNER/REPO/actions/artifacts \\')
expect(res.body).toContain('-H "Accept: application/vnd.github.v3+json" \\')
expect(res.body).toContain('-H "Authorization: Bearer <YOUR-TOKEN>"')
})
test('Code examples include X-GitHub-Api-Version header by default', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for API version header in curl example
expect(res.body).toContain('-H "X-GitHub-Api-Version: 2022-11-28"')
})
test('Code examples include specified API version', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts', '2022-11-28'))
expect(res.statusCode).toBe(200)
// Check for the specified API version header
expect(res.body).toContain('-H "X-GitHub-Api-Version: 2022-11-28"')
})
test('Liquid tags are rendered in intro', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Liquid tags should be rendered, not shown as raw tags (fixture uses 'HubGit Actions')
expect(res.body).toContain('HubGit Actions')
expect(res.body).not.toContain('{% data variables.product.prodname_actions %}')
// Check in both the intro and the manual content section
expect(res.body).toMatch(/Use the REST API to interact with artifacts in HubGit Actions/)
expect(res.body).toMatch(/About artifacts in HubGit Actions/)
})
test('AUTOTITLE links are resolved', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check that AUTOTITLE has been resolved to actual link text
// The link should have the actual page title, not "AUTOTITLE"
expect(res.body).toContain('[Storing workflow data as artifacts]')
expect(res.body).toContain('(/en/actions/using-workflows/storing-workflow-data-as-artifacts)')
// Make sure the raw AUTOTITLE tag is not present
expect(res.body).not.toContain('[AUTOTITLE]')
// Verify the link appears in the manual content section
expect(res.body).toMatch(
/About artifacts in HubGit Actions[\s\S]*Storing workflow data as artifacts/,
)
})
test('Markdown links are preserved in descriptions', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check that markdown links are preserved
expect(res.body).toMatch(/\[.*?\]\(\/en\/.*?\)/)
})
test('Response schema is formatted correctly', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for JSON code block with schema label
expect(res.body).toContain('**Response schema:**')
expect(res.body).toContain('```json')
expect(res.body).toContain('Status: 200')
// Verify schema structure is present (not an example)
expect(res.body).toContain('"type":')
expect(res.body).toContain('"properties":')
// Check for common schema keywords
const schemaMatch = res.body.match(/```json\s+Status: 200\s+([\s\S]*?)```/)
expect(schemaMatch).toBeTruthy()
if (schemaMatch) {
const schemaContent = schemaMatch[1]
const schema = JSON.parse(schemaContent)
// Verify it's a valid OpenAPI/JSON schema structure
expect(schema).toHaveProperty('type')
expect(schema.type).toBe('object')
expect(schema).toHaveProperty('properties')
// Verify it has expected properties for artifacts response
expect(schema.properties).toHaveProperty('total_count')
expect(schema.properties).toHaveProperty('artifacts')
}
})
test('Non-REST pages return appropriate error', async () => {
const res = await get(makeURL('/en/get-started/start-your-journey/hello-world'))
expect(res.statusCode).toBe(200)
// Regular article pages should still work, they just won't use the transformer
expect(res.body).toContain('## Introduction')
})
test('Invalid apiVersion returns 400 error', async () => {
// An invalid API version should return a validation error with 400 status
const res = await get(makeURL('/en/rest/actions/artifacts', 'invalid-version'))
// Returns 400 because the apiVersion is invalid (client error)
expect(res.statusCode).toBe(400)
const parsed = JSON.parse(res.body)
expect(parsed.error).toContain("Invalid apiVersion 'invalid-version'")
expect(parsed.error).toContain('Valid API versions are:')
expect(parsed.error).toContain('2022-11-28')
})
test('Multiple apiVersion query parameters returns 400 error', async () => {
// Multiple apiVersion parameters should be rejected
const res = await get(
'/api/article/body?pathname=/en/rest/actions/artifacts&apiVersion=2022-11-28&apiVersion=2023-01-01',
)
expect(res.statusCode).toBe(400)
const parsed = JSON.parse(res.body)
expect(parsed.error).toBe("Multiple 'apiVersion' keys")
})
test('Valid apiVersion passes validation', async () => {
// A valid API version should work
const res = await get(makeURL('/en/rest/actions/artifacts', '2022-11-28'))
expect(res.statusCode).toBe(200)
expect(res.body).toContain('-H "X-GitHub-Api-Version: 2022-11-28"')
})
test('Missing apiVersion defaults to latest', async () => {
// When no apiVersion is provided, it should default to the latest version
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Should include the default API version header
expect(res.body).toContain('-H "X-GitHub-Api-Version: 2022-11-28"')
})
test('Multiple operations on a page are all rendered', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Check for multiple operation headings
expect(res.body).toContain('## List artifacts for a repository')
expect(res.body).toContain('## Get an artifact')
expect(res.body).toContain('## Delete an artifact')
})
test('Body parameters are formatted correctly for POST/PUT operations', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// For operations with body parameters, check formatting
// (artifacts endpoint is mostly GET/DELETE, but structure should be there)
// The transformer handles body parameters when present
})
test('Content-type header is included for operations that need it', async () => {
const res = await get(makeURL('/en/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// Content-type header appears for operations that require it
// The REST transformer adds this based on the operation data
})
test('Non-English language paths work correctly', async () => {
// Note: This test may fail in dev mode with ENABLED_LANGUAGES=en
// but the transformer itself should handle any language path
const res = await get(makeURL('/ja/rest/actions/artifacts'))
expect(res.statusCode).toBe(200)
// The transformer should work regardless of language prefix
// because it looks for 'rest' in the path and gets the category/subcategory after it
// e.g. /ja/rest/actions/artifacts should work the same as /en/rest/actions/artifacts
// Verify the operation content is present (in English, since REST data is not translated)
expect(res.body).toContain('## List artifacts for a repository')
expect(res.body).toContain('GET /repos/{owner}/{repo}/actions/artifacts')
// Check what language is actually being served by examining the response
// If Japanese translations are loaded, the title will be in Japanese
// Otherwise, it falls back to English
const hasJapaneseTitle = res.body.includes('# GitHub Actions アーティファクト')
const hasEnglishTitle = res.body.includes('# GitHub Actions Artifacts')
// One of them must be present
expect(hasJapaneseTitle || hasEnglishTitle).toBe(true)
// Verify the appropriate content based on which language was served
if (hasJapaneseTitle) {
// If Japanese is loaded, expect Japanese intro text
expect(res.body).toContain('アーティファクト')
} else {
// If Japanese is not loaded, expect English fallback
expect(res.body).toContain('Use the REST API to interact with artifacts in HubGit Actions')
}
})
})

View File

@@ -0,0 +1,18 @@
import { TransformerRegistry } from './types'
import { RestTransformer } from './rest-transformer'

/**
 * Global transformer registry
 * Registers all available page-to-markdown transformers
 *
 * Transformers are matched in registration order, so register more specific
 * transformers before more general ones. All registration happens here at
 * module load time, before any requests are handled.
 */
export const transformerRegistry = new TransformerRegistry()

// Register REST transformer
transformerRegistry.register(new RestTransformer())

// Future transformers can be registered here:
// transformerRegistry.register(new WebhooksTransformer())
// transformerRegistry.register(new GitHubAppsTransformer())

// Re-export the registry class and transformer contract for consumers
export { TransformerRegistry } from './types'
export type { PageTransformer } from './types'

View File

@@ -0,0 +1,210 @@
import type { Context, Page } from '@/types'
import type { PageTransformer } from './types'
import type { Operation } from '@/rest/components/types'
import { renderContent } from '@/content-render/index'
import matter from '@gr2m/gray-matter'
import { readFileSync } from 'fs'
import { join, dirname } from 'path'
import { fileURLToPath } from 'url'
import { fastTextOnly } from '@/content-render/unified/text-only'
// Recreate CommonJS-style __filename/__dirname in this ES module so the
// Liquid template can be located relative to this file on disk.
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)
/**
 * Transformer for REST API pages
 * Converts REST operations and their data into markdown format using a Liquid template
 */
export class RestTransformer implements PageTransformer {
  /**
   * Returns true for autogenerated REST pages, excluding landing pages.
   */
  canTransform(page: Page): boolean {
    // Only transform REST pages that are not landing pages
    // Landing pages (like /en/rest) will be handled by a separate transformer
    return page.autogenerated === 'rest' && !page.relativePath.endsWith('index.md')
  }

  /**
   * Render the page as markdown: resolve the REST operations for the
   * category/subcategory in `pathname`, render any manual content above the
   * autogeneration marker, then render the Liquid page template with both.
   *
   * @param page - The REST page to transform
   * @param pathname - Request pathname, e.g. '/en/rest/actions/artifacts'
   * @param context - The rendering context (must carry currentVersion)
   * @param apiVersion - Optional calendar API version; falls back to the
   *   latest version from context when the docs version has API versions
   * @throws Error if `pathname` has no category segment after 'rest'
   */
  async transform(
    page: Page,
    pathname: string,
    context: Context,
    apiVersion?: string,
  ): Promise<string> {
    // Import getRest dynamically to avoid circular dependencies
    const { default: getRest } = await import('@/rest/lib/index')

    // Extract version from context
    // NOTE(review): non-null assertion — assumes upstream middleware always
    // sets currentVersion; confirm against callers
    const currentVersion = context.currentVersion!

    // Use the provided apiVersion, or fall back to the latest from context
    const effectiveApiVersion =
      apiVersion ||
      (context.currentVersionObj?.apiVersions?.length
        ? context.currentVersionObj.latestApiVersion
        : undefined)

    // Parse the category and subcategory from the page path
    // e.g. /en/rest/actions/artifacts -> category: actions, subcategory: artifacts
    const pathParts = pathname.split('/').filter(Boolean)
    const restIndex = pathParts.indexOf('rest')
    // Require at least one segment (the category) after 'rest'
    if (restIndex === -1 || restIndex >= pathParts.length - 1) {
      throw new Error(`Invalid REST path: ${pathname}`)
    }
    const category = pathParts[restIndex + 1]
    const subcategory = pathParts[restIndex + 2] // May be undefined for category-only pages

    // Get the REST operations data
    const restData = await getRest(currentVersion, effectiveApiVersion)
    let operations: Operation[] = []
    if (subcategory && restData[category]?.[subcategory]) {
      operations = restData[category][subcategory]
    } else if (category && restData[category]) {
      // For categories without subcategories, operations are nested directly
      const categoryData = restData[category]
      // Flatten all operations from all subcategories
      operations = Object.values(categoryData).flat()
    }

    // Prepare manual content: everything the author wrote above the
    // autogeneration marker, rendered as markdown. The marker is looked up
    // twice — first in the raw file (including frontmatter), then in the
    // frontmatter-stripped content — and ignored when it sits at index 0.
    let manualContent = ''
    if (page.markdown) {
      const markerIndex = page.markdown.indexOf(
        '<!-- Content after this section is automatically generated -->',
      )
      if (markerIndex > 0) {
        const { content } = matter(page.markdown)
        const manualContentMarkerIndex = content.indexOf(
          '<!-- Content after this section is automatically generated -->',
        )
        if (manualContentMarkerIndex > 0) {
          const rawManualContent = content.substring(0, manualContentMarkerIndex).trim()
          if (rawManualContent) {
            manualContent = await renderContent(rawManualContent, {
              ...context,
              markdownRequested: true,
            })
          }
        }
      }
    }

    // Prepare data for template
    const templateData = await this.prepareTemplateData(
      page,
      operations,
      context,
      manualContent,
      effectiveApiVersion,
    )

    // Load and render template (path is relative to this source file)
    const templatePath = join(__dirname, '../templates/rest-page.template.md')
    const templateContent = readFileSync(templatePath, 'utf8')

    // Render the template with Liquid
    const rendered = await renderContent(templateContent, {
      ...context,
      ...templateData,
      markdownRequested: true,
    })

    return rendered
  }

  /**
   * Prepare data for the Liquid template: the page title/intro (intro rendered
   * as plain text), the manual content, the prepared operations, and the
   * effective apiVersion for the curl examples.
   */
  private async prepareTemplateData(
    page: Page,
    operations: Operation[],
    context: Context,
    manualContent: string,
    apiVersion?: string,
  ): Promise<Record<string, any>> {
    // Prepare page intro
    const intro = page.intro ? await page.renderProp('intro', context, { textOnly: true }) : ''
    // Prepare operations for the template (concurrently)
    const preparedOperations = await Promise.all(
      operations.map(async (operation) => await this.prepareOperation(operation)),
    )
    return {
      page: {
        title: page.title,
        intro,
      },
      manualContent,
      restOperations: preparedOperations,
      apiVersion,
    }
  }

  /**
   * Prepare a single operation for template rendering: flatten HTML
   * descriptions to text, compute header/parameter display flags, and build
   * code-example request URLs with path parameters substituted.
   */
  private async prepareOperation(operation: Operation): Promise<Record<string, any>> {
    // Convert HTML description to text
    const description = operation.descriptionHTML ? fastTextOnly(operation.descriptionHTML) : ''
    // Determine header settings: 'inference' requires a content-type header;
    // the GHES management endpoints render no header section at all
    const needsContentTypeHeader = operation.subcategory === 'inference'
    const omitHeaders =
      operation.subcategory === 'management-console' || operation.subcategory === 'manage-ghes'
    const showHeaders = !omitHeaders
    // Check if operation has parameters
    const hasParameters =
      (operation.parameters?.length || 0) > 0 || (operation.bodyParameters?.length || 0) > 0
    // Process status codes to convert HTML descriptions to plain text
    const statusCodes = operation.statusCodes?.map((statusCode) => ({
      ...statusCode,
      description: statusCode.description ? fastTextOnly(statusCode.description) : undefined,
    }))
    // Prepare code examples with processed URLs
    const codeExamples =
      operation.codeExamples?.map((example) => {
        let url = `${operation.serverUrl}${operation.requestPath}`
        // Replace path parameters in URL
        // (String.replace substitutes the first occurrence of each {key};
        // path placeholders are unique within a request path)
        if (example.request?.parameters && Object.keys(example.request.parameters).length > 0) {
          for (const [key, value] of Object.entries(example.request.parameters)) {
            url = url.replace(`{${key}}`, String(value))
          }
        }
        return {
          request: {
            description: example.request?.description
              ? fastTextOnly(example.request.description)
              : '',
            url,
            acceptHeader: example.request?.acceptHeader,
            // Pretty-print body parameters for the curl -d flag
            bodyParameters: example.request?.bodyParameters
              ? JSON.stringify(example.request.bodyParameters, null, 2)
              : null,
          },
          response: {
            statusCode: example.response?.statusCode,
            // NOTE(review): 'schema' is not on the declared response type,
            // hence the casts — confirm against the rest pipeline's data
            schema: (example.response as any)?.schema
              ? JSON.stringify((example.response as any).schema, null, 2)
              : null,
          },
        }
      }) || []
    return {
      ...operation,
      description,
      hasParameters,
      showHeaders,
      needsContentTypeHeader,
      statusCodes,
      codeExamples,
    }
  }
}

View File

@@ -0,0 +1,103 @@
import type { Context, Page } from '@/types'
/**
 * Base interface for page-to-markdown transformers
 *
 * Transformers convert autogenerated pages (REST, webhooks, etc.)
 * into markdown format for the Article API
 */
export interface PageTransformer {
  /**
   * Check if this transformer can handle the given page
   * @returns true if `transform` can be called with this page
   */
  canTransform(page: Page): boolean

  /**
   * Transform the page into markdown format
   * @param page - The page to transform
   * @param pathname - The pathname of the page
   * @param context - The rendering context
   * @param apiVersion - Optional API version (e.g., '2022-11-28' for REST API calendar versioning)
   * @returns The page rendered as a markdown string
   */
  transform(page: Page, pathname: string, context: Context, apiVersion?: string): Promise<string>
}
/**
 * Registry of page-to-markdown transformers.
 *
 * Holds an ordered collection of PageTransformer instances and selects the
 * appropriate one for a page: transformers are tried in registration order and
 * the first whose `canTransform()` returns true wins — so register more
 * specific transformers before more general ones.
 *
 * @example
 * ```typescript
 * const registry = new TransformerRegistry()
 * registry.register(new RestTransformer())
 *
 * const transformer = registry.findTransformer(page)
 * if (transformer) {
 *   const markdown = await transformer.transform(page, pathname, context)
 * }
 * ```
 *
 * @remarks
 * Not thread-safe: in server environments, complete all registration during
 * initialization, before any requests are handled.
 */
export class TransformerRegistry {
  private registered: PageTransformer[] = []

  /**
   * Add a transformer to the registry.
   *
   * Order matters — earlier registrations take precedence when several
   * transformers could handle the same page.
   *
   * @param transformer - The transformer to register
   */
  register(transformer: PageTransformer): void {
    this.registered.push(transformer)
  }

  /**
   * Locate the first registered transformer that can handle `page`.
   *
   * @param page - The page to find a transformer for
   * @returns The first matching transformer, or null when the page is
   *   null/undefined or no registered transformer accepts it
   */
  findTransformer(page: Page): PageTransformer | null {
    if (page == null) {
      return null
    }
    for (const candidate of this.registered) {
      if (candidate.canTransform(page)) {
        return candidate
      }
    }
    return null
  }
}

View File

@@ -1,20 +1,16 @@
import { Tokenizer, TokenKind } from 'liquidjs' import { Tokenizer, TokenKind } from 'liquidjs'
import type { TopLevelToken, TagToken } from 'liquidjs'
import { deprecated } from '@/versions/lib/enterprise-server-releases' import { deprecated } from '@/versions/lib/enterprise-server-releases'
// Using `any` for the cache because TopLevelToken is a complex union type from liquidjs // Cache for liquid tokens to improve performance
// that includes TagToken, OutputToken, and HTMLToken with different properties. const liquidTokenCache = new Map<string, TopLevelToken[]>()
// The cache is private to this module and we control all access to it.
const liquidTokenCache = new Map<string, any>()
// Returns `any[]` instead of `TopLevelToken[]` because TopLevelToken is a union type // Returns TopLevelToken array from liquidjs which is a union of TagToken, OutputToken, and HTMLToken
// (TagToken | OutputToken | HTMLToken) and consumers of this function access properties
// like `name` and `args` that only exist on TagToken. Using `any` here avoids complex
// type narrowing throughout the codebase.
export function getLiquidTokens( export function getLiquidTokens(
content: string, content: string,
{ noCache = false }: { noCache?: boolean } = {}, { noCache = false }: { noCache?: boolean } = {},
): any[] { ): TopLevelToken[] {
if (!content) return [] if (!content) return []
if (noCache) { if (noCache) {
@@ -23,13 +19,13 @@ export function getLiquidTokens(
} }
if (liquidTokenCache.has(content)) { if (liquidTokenCache.has(content)) {
return liquidTokenCache.get(content) return liquidTokenCache.get(content)!
} }
const tokenizer = new Tokenizer(content) const tokenizer = new Tokenizer(content)
const tokens = tokenizer.readTopLevelTokens() const tokens = tokenizer.readTopLevelTokens()
liquidTokenCache.set(content, tokens) liquidTokenCache.set(content, tokens)
return liquidTokenCache.get(content) return liquidTokenCache.get(content)!
} }
export const OUTPUT_OPEN = '{%' export const OUTPUT_OPEN = '{%'
@@ -40,10 +36,9 @@ export const TAG_CLOSE = '}}'
export const conditionalTags = ['if', 'elseif', 'unless', 'case', 'ifversion'] export const conditionalTags = ['if', 'elseif', 'unless', 'case', 'ifversion']
const CONDITIONAL_TAG_NAMES = ['if', 'ifversion', 'elsif', 'else', 'endif'] const CONDITIONAL_TAG_NAMES = ['if', 'ifversion', 'elsif', 'else', 'endif']
// Token is `any` because it's used with different token types from liquidjs // Token parameter uses TopLevelToken which has begin and end properties
// that all have `begin` and `end` properties but are part of complex union types.
export function getPositionData( export function getPositionData(
token: any, token: TopLevelToken,
lines: string[], lines: string[],
): { lineNumber: number; column: number; length: number } { ): { lineNumber: number; column: number; length: number } {
// Liquid indexes are 0-based, but we want to // Liquid indexes are 0-based, but we want to
@@ -77,9 +72,9 @@ export function getPositionData(
* by Markdownlint: * by Markdownlint:
* [ { lineNumber: 1, column: 1, deleteCount: 3, }] * [ { lineNumber: 1, column: 1, deleteCount: 3, }]
*/ */
// Token is `any` because it's used with different token types from liquidjs. // Token parameter uses TopLevelToken from liquidjs
export function getContentDeleteData( export function getContentDeleteData(
token: any, token: TopLevelToken,
tokenEnd: number, tokenEnd: number,
lines: string[], lines: string[],
): Array<{ lineNumber: number; column: number; deleteCount: number }> { ): Array<{ lineNumber: number; column: number; deleteCount: number }> {
@@ -123,15 +118,14 @@ export function getContentDeleteData(
// related elsif, else, and endif tags). // related elsif, else, and endif tags).
// Docs doesn't use the standard `if` tag for versioning, instead the // Docs doesn't use the standard `if` tag for versioning, instead the
// `ifversion` tag is used. // `ifversion` tag is used.
// Returns `any[]` because the tokens need to be accessed as TagToken with `name` and `args` properties, // Returns TagToken array since we filter to only Tag tokens
// but TopLevelToken union type would require complex type narrowing. export function getLiquidIfVersionTokens(content: string): TagToken[] {
export function getLiquidIfVersionTokens(content: string): any[] {
const tokens = getLiquidTokens(content) const tokens = getLiquidTokens(content)
.filter((token) => token.kind === TokenKind.Tag) .filter((token): token is TagToken => token.kind === TokenKind.Tag)
.filter((token) => CONDITIONAL_TAG_NAMES.includes(token.name)) .filter((token) => CONDITIONAL_TAG_NAMES.includes(token.name))
let inIfStatement = false let inIfStatement = false
const ifVersionTokens: any[] = [] const ifVersionTokens: TagToken[] = []
for (const token of tokens) { for (const token of tokens) {
if (token.name === 'if') { if (token.name === 'if') {
inIfStatement = true inIfStatement = true

View File

@@ -11,8 +11,8 @@ export function addFixErrorDetail(
actual: string, actual: string,
// Using flexible type to accommodate different range formats from various linting rules // Using flexible type to accommodate different range formats from various linting rules
range: [number, number] | number[] | null, range: [number, number] | number[] | null,
// Using any for fixInfo as markdownlint-rule-helpers accepts various fix info structures // Using unknown for fixInfo as markdownlint-rule-helpers accepts various fix info structures
fixInfo: any, fixInfo: unknown,
): void { ): void {
addError(onError, lineNumber, `Expected: ${expected}`, ` Actual: ${actual}`, range, fixInfo) addError(onError, lineNumber, `Expected: ${expected}`, ` Actual: ${actual}`, range, fixInfo)
} }
@@ -20,9 +20,11 @@ export function addFixErrorDetail(
export function forEachInlineChild( export function forEachInlineChild(
params: RuleParams, params: RuleParams,
type: string, type: string,
// Using any for child and token types because different linting rules pass tokens with varying structures // Handler uses `any` for function parameter variance reasons. TypeScript's contravariance rules for function
// beyond the base MarkdownToken interface (e.g., ImageToken with additional properties) // parameters mean that a function accepting a specific type cannot be assigned to a parameter of type `unknown`.
handler: (child: any, token: any) => void, // Therefore, `unknown` cannot be used here, as different linting rules pass tokens with varying structures
// beyond the base MarkdownToken interface, and some handlers are async.
handler: (child: any, token?: any) => void | Promise<void>,
): void { ): void {
filterTokens(params, 'inline', (token: MarkdownToken) => { filterTokens(params, 'inline', (token: MarkdownToken) => {
for (const child of token.children!.filter((c) => c.type === type)) { for (const child of token.children!.filter((c) => c.type === type)) {
@@ -146,8 +148,8 @@ export const docsDomains = ['docs.github.com', 'help.github.com', 'developer.git
// This is the format we get from Markdownlint. // This is the format we get from Markdownlint.
// Returns null if the lines do not contain // Returns null if the lines do not contain
// frontmatter properties. // frontmatter properties.
// Returns frontmatter as a Record with any values since YAML can contain various types // Returns frontmatter as a Record with unknown values since YAML can contain various types
export function getFrontmatter(lines: string[]): Record<string, any> | null { export function getFrontmatter(lines: string[]): Record<string, unknown> | null {
const fmString = lines.join('\n') const fmString = lines.join('\n')
const { data } = matter(fmString) const { data } = matter(fmString)
// If there is no frontmatter or the frontmatter contains // If there is no frontmatter or the frontmatter contains

View File

@@ -1,7 +1,7 @@
import { filterTokens } from 'markdownlint-rule-helpers' import { filterTokens } from 'markdownlint-rule-helpers'
import { addFixErrorDetail, getRange } from '../helpers/utils' import { addFixErrorDetail, getRange } from '../helpers/utils'
import type { RuleParams, RuleErrorCallback, Rule } from '../../types' import type { RuleParams, RuleErrorCallback, Rule, MarkdownToken } from '../../types'
export const internalLinksSlash: Rule = { export const internalLinksSlash: Rule = {
names: ['GHD003', 'internal-links-slash'], names: ['GHD003', 'internal-links-slash'],
@@ -9,8 +9,8 @@ export const internalLinksSlash: Rule = {
tags: ['links', 'url'], tags: ['links', 'url'],
parser: 'markdownit', parser: 'markdownit',
function: (params: RuleParams, onError: RuleErrorCallback) => { function: (params: RuleParams, onError: RuleErrorCallback) => {
// Using 'any' type for token as markdownlint-rule-helpers doesn't provide TypeScript types filterTokens(params, 'inline', (token: MarkdownToken) => {
filterTokens(params, 'inline', (token: any) => { if (!token.children) return
for (const child of token.children) { for (const child of token.children) {
if (child.type !== 'link_open') continue if (child.type !== 'link_open') continue
@@ -20,6 +20,7 @@ export const internalLinksSlash: Rule = {
// ['rel', 'canonical'], // ['rel', 'canonical'],
// ] // ]
// Attribute arrays are tuples of [attributeName, attributeValue] from markdownit parser // Attribute arrays are tuples of [attributeName, attributeValue] from markdownit parser
if (!child.attrs) continue
const hrefsMissingSlashes = child.attrs const hrefsMissingSlashes = child.attrs
// The attribute could also be `target` or `rel` // The attribute could also be `target` or `rel`
.filter((attr: [string, string]) => attr[0] === 'href') .filter((attr: [string, string]) => attr[0] === 'href')

View File

@@ -1,5 +1,6 @@
import { addError } from 'markdownlint-rule-helpers' import { addError } from 'markdownlint-rule-helpers'
import { TokenKind } from 'liquidjs' import { TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import { getDataByLanguage } from '@/data-directory/lib/get-data' import { getDataByLanguage } from '@/data-directory/lib/get-data'
import { import {
@@ -23,10 +24,9 @@ export const liquidDataReferencesDefined = {
parser: 'markdownit', parser: 'markdownit',
function: (params: RuleParams, onError: RuleErrorCallback) => { function: (params: RuleParams, onError: RuleErrorCallback) => {
const content = params.lines.join('\n') const content = params.lines.join('\n')
// Using any type because getLiquidTokens returns tokens from liquidjs library without complete type definitions
const tokens = getLiquidTokens(content) const tokens = getLiquidTokens(content)
.filter((token: any) => token.kind === TokenKind.Tag) .filter((token): token is TagToken => token.kind === TokenKind.Tag)
.filter((token: any) => token.name === 'data' || token.name === 'indented_data_reference') .filter((token) => token.name === 'data' || token.name === 'indented_data_reference')
if (!tokens.length) return if (!tokens.length) return
@@ -60,13 +60,11 @@ export const liquidDataTagFormat = {
function: (params: RuleParams, onError: RuleErrorCallback) => { function: (params: RuleParams, onError: RuleErrorCallback) => {
const CHECK_LIQUID_TAGS = [OUTPUT_OPEN, OUTPUT_CLOSE, '{', '}'] const CHECK_LIQUID_TAGS = [OUTPUT_OPEN, OUTPUT_CLOSE, '{', '}']
const content = params.lines.join('\n') const content = params.lines.join('\n')
// Using any type because getLiquidTokens returns tokens from liquidjs library without complete type definitions const tokenTags = getLiquidTokens(content).filter(
// Tokens have properties like 'kind', 'name', 'args', and 'content' that aren't fully typed (token): token is TagToken => token.kind === TokenKind.Tag,
const tokenTags = getLiquidTokens(content).filter((token: any) => token.kind === TokenKind.Tag)
const dataTags = tokenTags.filter((token: any) => token.name === 'data')
const indentedDataTags = tokenTags.filter(
(token: any) => token.name === 'indented_data_reference',
) )
const dataTags = tokenTags.filter((token) => token.name === 'data')
const indentedDataTags = tokenTags.filter((token) => token.name === 'indented_data_reference')
for (const token of dataTags) { for (const token of dataTags) {
// A data tag has only one argument, the data directory path. // A data tag has only one argument, the data directory path.

View File

@@ -1,4 +1,5 @@
import { addError } from 'markdownlint-rule-helpers' import { addError } from 'markdownlint-rule-helpers'
import type { TopLevelToken } from 'liquidjs'
import { import {
getLiquidIfVersionTokens, getLiquidIfVersionTokens,
@@ -35,8 +36,11 @@ export const liquidIfversionVersions = {
const fileVersionsFm = params.name.startsWith('data') const fileVersionsFm = params.name.startsWith('data')
? { ghec: '*', ghes: '*', fpt: '*' } ? { ghec: '*', ghes: '*', fpt: '*' }
: fm : fm
? fm.versions ? (fm.versions as string | Record<string, string> | undefined)
: getFrontmatter(params.frontMatterLines)?.versions : (getFrontmatter(params.frontMatterLines)?.versions as
| string
| Record<string, string>
| undefined)
// This will only contain valid (non-deprecated) and future versions // This will only contain valid (non-deprecated) and future versions
const fileVersions = getApplicableVersions(fileVersionsFm, '', { const fileVersions = getApplicableVersions(fileVersionsFm, '', {
doNotThrow: true, doNotThrow: true,
@@ -134,7 +138,7 @@ function setLiquidErrors(condTagItems: any[], onError: RuleErrorCallback, lines:
{ {
begin: item.begin, begin: item.begin,
end: item.end, end: item.end,
}, } as TopLevelToken,
lines, lines,
) )
const deleteCount = length - column + 1 === lines[lineNumber - 1].length ? -1 : length const deleteCount = length - column + 1 === lines[lineNumber - 1].length ? -1 : length
@@ -159,7 +163,7 @@ function setLiquidErrors(condTagItems: any[], onError: RuleErrorCallback, lines:
{ {
begin: item.contentrange[0], begin: item.contentrange[0],
end: item.contentrange[1], end: item.contentrange[1],
}, } as TopLevelToken,
lines, lines,
) )
const insertText = `${item.action.name || item.name} ${item.action.cond || item.cond}` const insertText = `${item.action.name || item.name} ${item.action.cond || item.cond}`

View File

@@ -1,4 +1,5 @@
import { TokenKind } from 'liquidjs' import { TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import { addError } from 'markdownlint-rule-helpers' import { addError } from 'markdownlint-rule-helpers'
import { getLiquidTokens, conditionalTags, getPositionData } from '../helpers/liquid-utils' import { getLiquidTokens, conditionalTags, getPositionData } from '../helpers/liquid-utils'
@@ -19,14 +20,12 @@ export const liquidQuotedConditionalArg: Rule = {
tags: ['liquid', 'format'], tags: ['liquid', 'format'],
function: (params: RuleParams, onError: RuleErrorCallback) => { function: (params: RuleParams, onError: RuleErrorCallback) => {
const content = params.lines.join('\n') const content = params.lines.join('\n')
// Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.ts which lacks type definitions
const tokens = getLiquidTokens(content) const tokens = getLiquidTokens(content)
.filter((token: any) => token.kind === TokenKind.Tag) .filter((token): token is TagToken => token.kind === TokenKind.Tag)
.filter((token: any) => conditionalTags.includes(token.name)) .filter((token) => conditionalTags.includes(token.name))
.filter((token: any) => { .filter((token) => {
const tokensArray = token.args.split(/\s+/g) const tokensArray = token.args.split(/\s+/g)
// Using 'any' for args as they come from the untyped liquid token structure if (tokensArray.some((arg) => isStringQuoted(arg))) return true
if (tokensArray.some((arg: any) => isStringQuoted(arg))) return true
return false return false
}) })

View File

@@ -33,6 +33,7 @@ export const frontmatterLiquidSyntax = {
for (const key of keysWithLiquid) { for (const key of keysWithLiquid) {
const value = fm[key] const value = fm[key]
if (typeof value !== 'string') continue
try { try {
liquid.parse(value) liquid.parse(value)
} catch (error) { } catch (error) {

View File

@@ -1,4 +1,5 @@
import { TokenKind } from 'liquidjs' import { TokenKind } from 'liquidjs'
import type { TopLevelToken } from 'liquidjs'
import { getLiquidTokens, getPositionData } from '../helpers/liquid-utils' import { getLiquidTokens, getPositionData } from '../helpers/liquid-utils'
import { addFixErrorDetail } from '../helpers/utils' import { addFixErrorDetail } from '../helpers/utils'
@@ -36,7 +37,10 @@ export const liquidTagWhitespace: Rule = {
(token: LiquidToken) => token.kind === TokenKind.Tag, (token: LiquidToken) => token.kind === TokenKind.Tag,
) )
for (const token of tokens) { for (const token of tokens) {
const { lineNumber, column, length } = getPositionData(token, params.lines) const { lineNumber, column, length } = getPositionData(
token as unknown as TopLevelToken,
params.lines,
)
const range = [column, length] const range = [column, length]
const tag = params.lines[lineNumber - 1].slice(column - 1, column - 1 + length) const tag = params.lines[lineNumber - 1].slice(column - 1, column - 1 + length)

View File

@@ -1,5 +1,6 @@
import semver from 'semver' import semver from 'semver'
import { TokenKind } from 'liquidjs' import { TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import { addError } from 'markdownlint-rule-helpers' import { addError } from 'markdownlint-rule-helpers'
import { getRange, addFixErrorDetail } from '../helpers/utils' import { getRange, addFixErrorDetail } from '../helpers/utils'
@@ -13,7 +14,7 @@ import type { RuleParams, RuleErrorCallback } from '@/content-linter/types'
interface Feature { interface Feature {
versions: Record<string, string> versions: Record<string, string>
[key: string]: any [key: string]: unknown
} }
type AllFeatures = Record<string, Feature> type AllFeatures = Record<string, Feature>
@@ -60,12 +61,13 @@ export const liquidIfTags = {
function: (params: RuleParams, onError: RuleErrorCallback) => { function: (params: RuleParams, onError: RuleErrorCallback) => {
const content = params.lines.join('\n') const content = params.lines.join('\n')
const tokens = getLiquidTokens(content).filter( const tokens = getLiquidTokens(content)
(token) => .filter((token): token is TagToken => token.kind === TokenKind.Tag)
token.kind === TokenKind.Tag && .filter(
token.name === 'if' && (token) =>
token.args.split(/\s+/).some((arg: string) => getAllPossibleVersionNames().has(arg)), token.name === 'if' &&
) token.args.split(/\s+/).some((arg: string) => getAllPossibleVersionNames().has(arg)),
)
for (const token of tokens) { for (const token of tokens) {
const args = token.args const args = token.args
@@ -90,7 +92,7 @@ export const liquidIfVersionTags = {
function: (params: RuleParams, onError: RuleErrorCallback) => { function: (params: RuleParams, onError: RuleErrorCallback) => {
const content = params.lines.join('\n') const content = params.lines.join('\n')
const tokens = getLiquidTokens(content) const tokens = getLiquidTokens(content)
.filter((token) => token.kind === TokenKind.Tag) .filter((token): token is TagToken => token.kind === TokenKind.Tag)
.filter((token) => token.name === 'ifversion' || token.name === 'elsif') .filter((token) => token.name === 'ifversion' || token.name === 'elsif')
for (const token of tokens) { for (const token of tokens) {

View File

@@ -1,5 +1,6 @@
import { addError } from 'markdownlint-rule-helpers' import { addError } from 'markdownlint-rule-helpers'
import { TokenKind } from 'liquidjs' import { TokenKind } from 'liquidjs'
import type { TopLevelToken } from 'liquidjs'
import path from 'path' import path from 'path'
import { getFrontmatter } from '../helpers/utils' import { getFrontmatter } from '../helpers/utils'
@@ -45,7 +46,10 @@ export const raiReusableUsage: Rule = {
if (dataDirectoryReference.startsWith('reusables.rai')) continue if (dataDirectoryReference.startsWith('reusables.rai')) continue
const lines = params.lines const lines = params.lines
const { lineNumber, column, length } = getPositionData(token, lines) const { lineNumber, column, length } = getPositionData(
token as unknown as TopLevelToken,
lines,
)
addError( addError(
onError, onError,
lineNumber, lineNumber,

View File

@@ -22,7 +22,8 @@ import yaml from 'js-yaml'
import { program } from 'commander' import { program } from 'commander'
import { loadPages, loadUnversionedTree } from '@/frame/lib/page-data' import { loadPages, loadUnversionedTree } from '@/frame/lib/page-data'
import { TokenizationError } from 'liquidjs' import { TokenizationError, TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import readFrontmatter from '@/frame/lib/read-frontmatter' import readFrontmatter from '@/frame/lib/read-frontmatter'
import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils' import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils'
@@ -137,7 +138,10 @@ function getReusableFiles(root = 'data') {
function checkString(string: string, variables: Map<string, string>) { function checkString(string: string, variables: Map<string, string>) {
try { try {
for (const token of getLiquidTokens(string)) { const tokens = getLiquidTokens(string).filter(
(token): token is TagToken => token.kind === TokenKind.Tag,
)
for (const token of tokens) {
if (token.name === 'data') { if (token.name === 'data') {
const { args } = token const { args } = token
variables.delete(args) variables.delete(args)

View File

@@ -10,6 +10,7 @@ import { Tool, tags as toolTags } from './tool'
import { Spotlight, tags as spotlightTags } from './spotlight' import { Spotlight, tags as spotlightTags } from './spotlight'
import { Prompt } from './prompt' import { Prompt } from './prompt'
import IndentedDataReference from './indented-data-reference' import IndentedDataReference from './indented-data-reference'
import { apiTransformerTags } from '@/article-api/liquid-renderers'
// Type assertions for .js files without type definitions // Type assertions for .js files without type definitions
// Copilot: Remove these assertions when the corresponding .js files are converted to TypeScript // Copilot: Remove these assertions when the corresponding .js files are converted to TypeScript
@@ -40,6 +41,11 @@ for (const tag in spotlightTags) {
engine.registerTag('prompt', anyPrompt) engine.registerTag('prompt', anyPrompt)
// Register API transformer tags
for (const [tagName, tagClass] of Object.entries(apiTransformerTags)) {
engine.registerTag(tagName, tagClass as any)
}
/** /**
* Like the `size` filter, but specifically for * Like the `size` filter, but specifically for
* getting the number of keys in an object * getting the number of keys in an object

View File

@@ -2,25 +2,25 @@
// Defines {% prompt %}…{% endprompt %} to wrap its content in <code> and append the Copilot icon. // Defines {% prompt %}…{% endprompt %} to wrap its content in <code> and append the Copilot icon.
import octicons from '@primer/octicons' import octicons from '@primer/octicons'
import type { TagToken, TopLevelToken } from 'liquidjs'
import { generatePromptId } from '../lib/prompt-id' import { generatePromptId } from '../lib/prompt-id'
interface LiquidTag { interface LiquidTag {
type: 'block' type: 'block'
templates?: any[] // Note: Using 'any' because liquidjs doesn't provide proper types for template objects templates?: unknown[]
// Note: Using 'any' for liquid-related parameters because liquidjs doesn't provide comprehensive TypeScript definitions parse(tagToken: TagToken, remainTokens: TopLevelToken[]): void
parse(tagToken: any, remainTokens: any): void render(scope: unknown): Generator<unknown, string, unknown>
render(scope: any): Generator<any, string, unknown>
} }
export const Prompt: LiquidTag = { export const Prompt: LiquidTag = {
type: 'block', type: 'block',
// Collect everything until {% endprompt %} // Collect everything until {% endprompt %}
parse(tagToken: any, remainTokens: any): void { parse(tagToken: TagToken, remainTokens: TopLevelToken[]): void {
this.templates = [] this.templates = []
const stream = this.liquid.parser.parseStream(remainTokens) const stream = this.liquid.parser.parseStream(remainTokens)
stream stream
.on('template', (tpl: any) => this.templates.push(tpl)) .on('template', (tpl: unknown) => this.templates.push(tpl))
.on('tag:endprompt', () => stream.stop()) .on('tag:endprompt', () => stream.stop())
.on('end', () => { .on('end', () => {
throw new Error(`{% prompt %} tag not closed`) throw new Error(`{% prompt %} tag not closed`)
@@ -29,7 +29,7 @@ export const Prompt: LiquidTag = {
}, },
// Render the inner Markdown, wrap in <code>, then append the SVG // Render the inner Markdown, wrap in <code>, then append the SVG
*render(scope: any): Generator<any, string, unknown> { *render(scope: unknown): Generator<unknown, string, unknown> {
const content = yield this.liquid.renderer.renderTemplates(this.templates, scope) const content = yield this.liquid.renderer.renderTemplates(this.templates, scope)
const contentString = String(content) const contentString = String(content)

View File

@@ -1,5 +1,7 @@
import fs from 'fs' import fs from 'fs'
import path from 'path' import path from 'path'
import { TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils' import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils'
import { import {
getAllContentFilePaths, getAllContentFilePaths,
@@ -21,7 +23,9 @@ export function findUnused({ absolute }: { absolute: boolean }) {
for (let i = 0; i < totalFiles; i++) { for (let i = 0; i < totalFiles; i++) {
const filePath = allFilePaths[i] const filePath = allFilePaths[i]
const fileContents = fs.readFileSync(filePath, 'utf-8') const fileContents = fs.readFileSync(filePath, 'utf-8')
const liquidTokens = getLiquidTokens(fileContents) const liquidTokens = getLiquidTokens(fileContents).filter(
(token): token is TagToken => token.kind === TokenKind.Tag,
)
for (const token of liquidTokens) { for (const token of liquidTokens) {
const { args, name } = token const { args, name } = token
if ( if (

View File

@@ -1,5 +1,7 @@
import fs from 'fs' import fs from 'fs'
import path from 'path' import path from 'path'
import { TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils' import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils'
import { import {
FilesWithLineNumbers, FilesWithLineNumbers,
@@ -51,7 +53,9 @@ export function findTopUsed(numberOfMostUsedToFind: number, { absolute }: { abso
const reusableCounts = new Map<string, number>() const reusableCounts = new Map<string, number>()
for (const filePath of allFilePaths) { for (const filePath of allFilePaths) {
const fileContents = fs.readFileSync(filePath, 'utf-8') const fileContents = fs.readFileSync(filePath, 'utf-8')
const liquidTokens = getLiquidTokens(fileContents) const liquidTokens = getLiquidTokens(fileContents).filter(
(token): token is TagToken => token.kind === TokenKind.Tag,
)
for (const token of liquidTokens) { for (const token of liquidTokens) {
const { args, name } = token const { args, name } = token
if (name === 'data' && args.startsWith('reusables.')) { if (name === 'data' && args.startsWith('reusables.')) {

View File

@@ -1,6 +1,7 @@
import walk from 'walk-sync' import walk from 'walk-sync'
import path from 'path' import path from 'path'
import { TokenizationError } from 'liquidjs' import { TokenizationError, TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils' import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils'
const __dirname = path.dirname(new URL(import.meta.url).pathname) const __dirname = path.dirname(new URL(import.meta.url).pathname)
@@ -56,7 +57,10 @@ export function getReusableLiquidString(reusablePath: string): string {
export function getIndicesOfLiquidVariable(liquidVariable: string, fileContents: string): number[] { export function getIndicesOfLiquidVariable(liquidVariable: string, fileContents: string): number[] {
const indices: number[] = [] const indices: number[] = []
try { try {
for (const token of getLiquidTokens(fileContents)) { const tokens = getLiquidTokens(fileContents).filter(
(token): token is TagToken => token.kind === TokenKind.Tag,
)
for (const token of tokens) {
if (token.name === 'data' && token.args.trim() === liquidVariable) { if (token.name === 'data' && token.args.trim() === liquidVariable) {
indices.push(token.begin) indices.push(token.begin)
} }

View File

@@ -11,13 +11,13 @@ export interface Context {
currentVersion?: string currentVersion?: string
currentProduct?: string currentProduct?: string
markdownRequested?: boolean markdownRequested?: boolean
pages?: any pages?: Record<string, unknown>
redirects?: any redirects?: Record<string, string>
page?: { page?: {
fullPath: string fullPath: string
[key: string]: any [key: string]: unknown
} }
[key: string]: any [key: string]: unknown
} }
/** /**
@@ -27,20 +27,20 @@ export interface RenderOptions {
cache?: boolean | ((template: string, context: Context) => string | null) cache?: boolean | ((template: string, context: Context) => string | null)
filename?: string filename?: string
textOnly?: boolean textOnly?: boolean
[key: string]: any [key: string]: unknown
} }
/** /**
* Unified processor plugin function type * Unified processor plugin function type
*/ */
export type UnifiedPlugin = (context?: Context) => any export type UnifiedPlugin = (context?: Context) => unknown
/** /**
* VFile interface for unified processing * VFile interface for unified processing
*/ */
export interface VFile { export interface VFile {
toString(): string toString(): string
[key: string]: any [key: string]: unknown
} }
/** /**
@@ -48,5 +48,5 @@ export interface VFile {
*/ */
export interface UnifiedProcessor { export interface UnifiedProcessor {
process(content: string): Promise<VFile> process(content: string): Promise<VFile>
use(plugin: any, ...args: any[]): UnifiedProcessor use(plugin: unknown, ...args: unknown[]): UnifiedProcessor
} }

View File

@@ -40,21 +40,21 @@ export function createProcessor(context: Context): UnifiedProcessor {
.use(gfm) .use(gfm)
// Markdown AST below vvv // Markdown AST below vvv
.use(parseInfoString) .use(parseInfoString)
// Using 'as any' because rewriteLocalLinks is a factory function that takes context // Using type assertion because rewriteLocalLinks is a factory function that takes context
// and returns a transformer, but TypeScript's unified plugin types don't handle this pattern // and returns a transformer, but TypeScript's unified plugin types don't handle this pattern
.use(rewriteLocalLinks as any, context) .use(rewriteLocalLinks as unknown as (ctx: Context) => void, context)
.use(emoji) .use(emoji)
// Markdown AST above ^^^ // Markdown AST above ^^^
.use(remark2rehype, { allowDangerousHtml: true }) .use(remark2rehype, { allowDangerousHtml: true })
// HTML AST below vvv // HTML AST below vvv
.use(slug) .use(slug)
// useEnglishHeadings plugin requires context with englishHeadings property // useEnglishHeadings plugin requires context with englishHeadings property
.use(useEnglishHeadings as any, context || {}) .use(useEnglishHeadings as unknown as (ctx: Context) => void, context || {})
.use(headingLinks) .use(headingLinks)
.use(codeHeader) .use(codeHeader)
.use(annotate, context) .use(annotate, context)
// Using 'as any' for highlight plugin due to complex type mismatch between unified and rehype-highlight // Using type assertion for highlight plugin due to complex type mismatch between unified and rehype-highlight
.use(highlight as any, { .use(highlight as unknown as (options: unknown) => void, {
languages: { ...common, graphql, dockerfile, http, groovy, erb, powershell }, languages: { ...common, graphql, dockerfile, http, groovy, erb, powershell },
subset: false, subset: false,
aliases: { aliases: {
@@ -82,9 +82,9 @@ export function createProcessor(context: Context): UnifiedProcessor {
.use(rewriteImgSources) .use(rewriteImgSources)
.use(rewriteAssetImgTags) .use(rewriteAssetImgTags)
// alerts plugin requires context with alertTitles property // alerts plugin requires context with alertTitles property
.use(alerts as any, context || {}) .use(alerts as unknown as (ctx: Context) => void, context || {})
// HTML AST above ^^^ // HTML AST above ^^^
.use(html) as UnifiedProcessor // String below vvv .use(html) as unknown as UnifiedProcessor // String below vvv
) )
} }
@@ -93,10 +93,10 @@ export function createMarkdownOnlyProcessor(context: Context): UnifiedProcessor
unified() unified()
.use(remarkParse) .use(remarkParse)
.use(gfm) .use(gfm)
// Using 'as any' because rewriteLocalLinks is a factory function that takes context // Using type assertion because rewriteLocalLinks is a factory function that takes context
// and returns a transformer, but TypeScript's unified plugin types don't handle this pattern // and returns a transformer, but TypeScript's unified plugin types don't handle this pattern
.use(rewriteLocalLinks as any, context) .use(rewriteLocalLinks as unknown as (ctx: Context) => void, context)
.use(remarkStringify) as UnifiedProcessor .use(remarkStringify) as unknown as UnifiedProcessor
) )
} }
@@ -105,12 +105,12 @@ export function createMinimalProcessor(context: Context): UnifiedProcessor {
unified() unified()
.use(remarkParse) .use(remarkParse)
.use(gfm) .use(gfm)
// Using 'as any' because rewriteLocalLinks is a factory function that takes context // Using type assertion because rewriteLocalLinks is a factory function that takes context
// and returns a transformer, but TypeScript's unified plugin types don't handle this pattern // and returns a transformer, but TypeScript's unified plugin types don't handle this pattern
.use(rewriteLocalLinks as any, context) .use(rewriteLocalLinks as unknown as (ctx: Context) => void, context)
.use(remark2rehype, { allowDangerousHtml: true }) .use(remark2rehype, { allowDangerousHtml: true })
.use(slug) .use(slug)
.use(raw) .use(raw)
.use(html) as UnifiedProcessor .use(html) as unknown as UnifiedProcessor
) )
} }

View File

@@ -2,14 +2,10 @@ import GithubSlugger from 'github-slugger'
import { encode } from 'html-entities' import { encode } from 'html-entities'
import { toString } from 'hast-util-to-string' import { toString } from 'hast-util-to-string'
import { visit } from 'unist-util-visit' import { visit } from 'unist-util-visit'
import type { Element, Root } from 'hast'
const slugger = new GithubSlugger() const slugger = new GithubSlugger()
// Note: Using 'any' for node because the unist/hast type system is complex and
// the visit function's type constraints don't easily allow for proper element typing
// without extensive type gymnastics. The runtime check ensures type safety.
const matcher = (node: any) => node.type === 'element' && ['h2', 'h3', 'h4'].includes(node.tagName)
interface UseEnglishHeadingsOptions { interface UseEnglishHeadingsOptions {
englishHeadings?: Record<string, string> englishHeadings?: Record<string, string>
} }
@@ -17,12 +13,9 @@ interface UseEnglishHeadingsOptions {
// replace translated IDs and links in headings with English // replace translated IDs and links in headings with English
export default function useEnglishHeadings({ englishHeadings }: UseEnglishHeadingsOptions) { export default function useEnglishHeadings({ englishHeadings }: UseEnglishHeadingsOptions) {
if (!englishHeadings) return if (!englishHeadings) return
// Note: Using 'any' for tree because unified's AST types are complex and return (tree: Root) => {
// this function works with different tree types depending on the processor visit(tree, 'element', (node: Element) => {
return (tree: any) => { if (!['h2', 'h3', 'h4'].includes(node.tagName)) return
// Note: Using 'any' for node because visit() callback typing is restrictive
// and doesn't easily allow for proper element typing without complex generics
visit(tree, matcher, (node: any) => {
slugger.reset() slugger.reset()
// Get the plain text content of the heading node // Get the plain text content of the heading node
const text: string = toString(node) const text: string = toString(node)

View File

@@ -32,7 +32,8 @@ import fs from 'fs'
import path from 'path' import path from 'path'
import chalk from 'chalk' import chalk from 'chalk'
import { TokenizationError } from 'liquidjs' import { TokenizationError, TokenKind } from 'liquidjs'
import type { TagToken } from 'liquidjs'
import type { Page } from '@/types' import type { Page } from '@/types'
import warmServer from '@/frame/lib/warm-server' import warmServer from '@/frame/lib/warm-server'
@@ -246,7 +247,10 @@ function checkString(
// a LOT of different strings in and the cache will fill up rapidly // a LOT of different strings in and the cache will fill up rapidly
// when testing every possible string in every possible language for // when testing every possible string in every possible language for
// every page. // every page.
for (const token of getLiquidTokens(string, { noCache: true })) { const tokens = getLiquidTokens(string, { noCache: true }).filter(
(token): token is TagToken => token.kind === TokenKind.Tag,
)
for (const token of tokens) {
if (token.name === 'ifversion' || token.name === 'elsif') { if (token.name === 'ifversion' || token.name === 'elsif') {
for (const arg of token.args.split(/\s+/)) { for (const arg of token.args.split(/\s+/)) {
if (IGNORE_ARGS.has(arg)) continue if (IGNORE_ARGS.has(arg)) continue

View File

@@ -11,4 +11,5 @@ versions:
ghec: '*' ghec: '*'
children: children:
- /category - /category
- /using-workflows
--- ---

View File

@@ -0,0 +1,12 @@
---
title: Using workflows
intro: Learn how to use workflows in GitHub Actions.
versions:
fpt: '*'
ghec: '*'
ghes: '*'
children:
- /storing-workflow-data-as-artifacts
---
This is a fixture index page for testing.

View File

@@ -0,0 +1,10 @@
---
title: Storing workflow data as artifacts
intro: Artifacts allow you to share data between jobs in a workflow and store data once that workflow has completed.
versions:
fpt: '*'
ghec: '*'
ghes: '*'
---
This is a fixture file for testing links in the REST API artifacts documentation.

View File

@@ -16,4 +16,6 @@ autogenerated: rest
## About artifacts in {% data variables.product.prodname_actions %} ## About artifacts in {% data variables.product.prodname_actions %}
You can use the REST API to download, delete, and retrieve information about workflow artifacts in {% data variables.product.prodname_actions %}. Artifacts enable you to share data between jobs in a workflow and store data once that workflow has completed. For more information, see [AUTOTITLE](/actions/using-workflows/storing-workflow-data-as-artifacts).
<!-- Content after this section is automatically generated --> <!-- Content after this section is automatically generated -->

View File

@@ -18,6 +18,7 @@ import {
import { useTheme } from '@/color-schemes/components/useTheme' import { useTheme } from '@/color-schemes/components/useTheme'
import { SharedUIContextProvider } from '@/frame/components/context/SharedUIContext' import { SharedUIContextProvider } from '@/frame/components/context/SharedUIContext'
import { CTAPopoverProvider } from '@/frame/components/context/CTAContext' import { CTAPopoverProvider } from '@/frame/components/context/CTAContext'
import type { ExtendedRequest } from '@/types'
type MyAppProps = AppProps & { type MyAppProps = AppProps & {
isDotComAuthenticated: boolean isDotComAuthenticated: boolean
@@ -158,7 +159,7 @@ MyApp.getInitialProps = async (appContext: AppContext) => {
const { ctx } = appContext const { ctx } = appContext
// calls page's `getInitialProps` and fills `appProps.pageProps` // calls page's `getInitialProps` and fills `appProps.pageProps`
const appProps = await App.getInitialProps(appContext) const appProps = await App.getInitialProps(appContext)
const req: any = ctx.req const req = ctx.req as unknown as ExtendedRequest
// Have to define the type manually here because `req.context.languages` // Have to define the type manually here because `req.context.languages`
// comes from Node JS and is not type-aware. // comes from Node JS and is not type-aware.
@@ -188,11 +189,14 @@ MyApp.getInitialProps = async (appContext: AppContext) => {
} }
} }
} }
const stagingName = req.headers['x-ong-external-url']?.match(/staging-(\w+)\./)?.[1] const headerValue = req.headers['x-ong-external-url']
const stagingName = (typeof headerValue === 'string' ? headerValue : headerValue?.[0])?.match(
/staging-(\w+)\./,
)?.[1]
return { return {
...appProps, ...appProps,
languagesContext, languagesContext,
stagingName: stagingNames.has(stagingName) ? stagingName : undefined, stagingName: stagingName && stagingNames.has(stagingName) ? stagingName : undefined,
} }
} }

View File

@@ -190,7 +190,7 @@ async function createIssue(
body, body,
labels, labels,
}) })
} catch (error: any) { } catch (error: unknown) {
console.log(`#ERROR# ${error}\n🛑 There was an error creating the issue.`) console.log(`#ERROR# ${error}\n🛑 There was an error creating the issue.`)
throw error throw error
} }
@@ -223,7 +223,7 @@ async function updateIssue(
body, body,
labels, labels,
}) })
} catch (error: any) { } catch (error: unknown) {
console.log( console.log(
`#ERROR# ${error}\n🛑 There was an error updating issue ${issueNumber} in ${fullRepo}.`, `#ERROR# ${error}\n🛑 There was an error updating issue ${issueNumber} in ${fullRepo}.`,
) )
@@ -244,8 +244,13 @@ async function addRepoLabels(fullRepo: string, labels: string[]) {
repo, repo,
name, name,
}) })
} catch (error: any) { } catch (error: unknown) {
if (error.status === 404) { if (
typeof error === 'object' &&
error !== null &&
'status' in error &&
(error as { status: number }).status === 404
) {
labelsToAdd.push(name) labelsToAdd.push(name)
} else { } else {
console.log(`#ERROR# ${error}\n🛑 There was an error getting the label ${name}.`) console.log(`#ERROR# ${error}\n🛑 There was an error getting the label ${name}.`)
@@ -260,7 +265,7 @@ async function addRepoLabels(fullRepo: string, labels: string[]) {
repo, repo,
name, name,
}) })
} catch (error: any) { } catch (error: unknown) {
console.log(`#ERROR# ${error}\n🛑 There was an error adding the label ${name}.`) console.log(`#ERROR# ${error}\n🛑 There was an error adding the label ${name}.`)
throw error throw error
} }

View File

@@ -1,4 +1,5 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
import type { Request, Response } from 'express'
import { createLogger } from '@/observability/logger' import { createLogger } from '@/observability/logger'
import { initLoggerContext, updateLoggerContext } from '@/observability/logger/lib/logger-context' import { initLoggerContext, updateLoggerContext } from '@/observability/logger/lib/logger-context'
@@ -8,7 +9,7 @@ describe('logger integration tests', () => {
let originalConsoleError: typeof console.error let originalConsoleError: typeof console.error
let originalEnv: typeof process.env let originalEnv: typeof process.env
const consoleLogs: string[] = [] const consoleLogs: string[] = []
const consoleErrors: any[] = [] const consoleErrors: unknown[] = []
beforeEach(() => { beforeEach(() => {
// Store original console methods and environment // Store original console methods and environment
@@ -20,7 +21,7 @@ describe('logger integration tests', () => {
console.log = vi.fn((message: string) => { console.log = vi.fn((message: string) => {
consoleLogs.push(message) consoleLogs.push(message)
}) })
console.error = vi.fn((error: any) => { console.error = vi.fn((error: unknown) => {
consoleErrors.push(error) consoleErrors.push(error)
}) })
@@ -78,9 +79,9 @@ describe('logger integration tests', () => {
'accept-language': 'en-US,en;q=0.9', 'accept-language': 'en-US,en;q=0.9',
}, },
query: { filter: 'active' }, query: { filter: 'active' },
} as any } as unknown as Request
const mockRes = {} as any const mockRes = {} as unknown as Response
// Use a Promise to handle the async local storage execution // Use a Promise to handle the async local storage execution
const result = await new Promise<void>((resolve, reject) => { const result = await new Promise<void>((resolve, reject) => {

View File

@@ -1,6 +1,8 @@
import { GetServerSideProps } from 'next' import { GetServerSideProps } from 'next'
import { Liquid } from 'liquidjs' import { Liquid } from 'liquidjs'
import pick from 'lodash/pick' import pick from 'lodash/pick'
import get from 'lodash/get'
import type { Response } from 'express'
import { import {
MainContextT, MainContextT,
@@ -11,6 +13,7 @@ import {
import { DefaultLayout } from '@/frame/components/DefaultLayout' import { DefaultLayout } from '@/frame/components/DefaultLayout'
import { GHESReleaseNotes } from '@/release-notes/components/GHESReleaseNotes' import { GHESReleaseNotes } from '@/release-notes/components/GHESReleaseNotes'
import { GHESReleaseNotesContextT } from '@/release-notes/components/types' import { GHESReleaseNotesContextT } from '@/release-notes/components/types'
import type { ExtendedRequest } from '@/types'
const liquid = new Liquid() const liquid = new Liquid()
type Props = { type Props = {
@@ -33,22 +36,30 @@ export default function ReleaseNotes({ mainContext, ghesContext }: Props) {
) )
} }
export const getServerSideProps: GetServerSideProps<Props> = async (context) => { export const getServerSideProps: GetServerSideProps<Props> = async (
const req = context.req as any context,
const res = context.res as any ): Promise<{ props: Props }> => {
const req = context.req as unknown as ExtendedRequest
const res = context.res as unknown as Response
// The `req.context.allVersion[X]` entries contains more keys (and values) // The `req.context.allVersion[X]` entries contains more keys (and values)
// than we need so only pick out the keys that are actually needed // than we need so only pick out the keys that are actually needed
// explicitly in the components served from these props. // explicitly in the components served from these props.
const currentVersion = pick(req.context.allVersions[req.context.currentVersion], [ const currentVersion = pick(req.context!.allVersions?.[req.context!.currentVersion!] || {}, [
'plan', 'plan',
'planTitle', 'planTitle',
'versionTitle', 'versionTitle',
'currentRelease', 'currentRelease',
'releases', 'releases',
]) ]) as {
plan?: string
planTitle?: string
versionTitle?: string
currentRelease?: string
releases?: string[]
}
const { latestPatch = '', latestRelease = '' } = req.context const { latestPatch = '', latestRelease = '' } = req.context!
const mainContext = await getMainContext(req, res) const mainContext = await getMainContext(req, res)
addUINamespaces(req, mainContext.data.ui, ['release_notes']) addUINamespaces(req, mainContext.data.ui, ['release_notes'])
@@ -58,28 +69,39 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
mainContext, mainContext,
ghesContext: ghesContext:
currentVersion.plan === 'enterprise-server' currentVersion.plan === 'enterprise-server'
? { ? ({
currentVersion, currentVersion,
latestPatch, latestPatch,
latestRelease, latestRelease,
releaseNotes: req.context.ghesReleaseNotes, releaseNotes: req.context!.ghesReleaseNotes || [],
releases: req.context.ghesReleases, releases: req.context!.ghesReleases || [],
message: { message: {
ghes_release_notes_upgrade_patch_only: liquid.parseAndRenderSync( ghes_release_notes_upgrade_patch_only: liquid.parseAndRenderSync(
req.context.site.data.ui.header.notices.ghes_release_notes_upgrade_patch_only, get(
req.context,
'site.data.ui.header.notices.ghes_release_notes_upgrade_patch_only',
'',
) as string,
{ latestPatch, latestRelease }, { latestPatch, latestRelease },
), ),
ghes_release_notes_upgrade_release_only: liquid.parseAndRenderSync( ghes_release_notes_upgrade_release_only: liquid.parseAndRenderSync(
req.context.site.data.ui.header.notices.ghes_release_notes_upgrade_release_only, get(
req.context,
'site.data.ui.header.notices.ghes_release_notes_upgrade_release_only',
'',
) as string,
{ latestPatch, latestRelease }, { latestPatch, latestRelease },
), ),
ghes_release_notes_upgrade_patch_and_release: liquid.parseAndRenderSync( ghes_release_notes_upgrade_patch_and_release: liquid.parseAndRenderSync(
req.context.site.data.ui.header.notices get(
.ghes_release_notes_upgrade_patch_and_release, req.context,
'site.data.ui.header.notices.ghes_release_notes_upgrade_patch_and_release',
'',
) as string,
{ latestPatch, latestRelease }, { latestPatch, latestRelease },
), ),
}, },
} } as unknown as GHESReleaseNotesContextT)
: null, : null,
}, },
} }

View File

@@ -130,12 +130,15 @@ export default async function buildRecords(
}) })
.on('error', (err) => { .on('error', (err) => {
// Track the failure // Track the failure
const url = (err as any).url const url = (err as unknown as { url?: string }).url
const relativePath = (err as any).relativePath const relativePath = (err as unknown as { relativePath?: string }).relativePath
// Check for HTTPError by name since it may come from a different module // Check for HTTPError by name since it may come from a different module
if ((err instanceof HTTPError || err?.name === 'HTTPError') && (err as any).response) { if (
const httpErr = err as any (err instanceof HTTPError || err?.name === 'HTTPError') &&
(err as unknown as HTTPError).response
) {
const httpErr = err as unknown as HTTPError
failedPages.push({ failedPages.push({
url: httpErr.request?.requestUrl?.pathname || url, url: httpErr.request?.requestUrl?.pathname || url,
relativePath, relativePath,
@@ -146,7 +149,7 @@ export default async function buildRecords(
if (!noMarkers) process.stdout.write(chalk.red('✗')) if (!noMarkers) process.stdout.write(chalk.red('✗'))
} else if (err instanceof Error) { } else if (err instanceof Error) {
// Enhanced error handling for timeout and network errors // Enhanced error handling for timeout and network errors
const errorType = (err.cause as any)?.code || err.name const errorType = (err.cause as unknown as { code?: string })?.code || err.name
const isTimeout = const isTimeout =
errorType === 'UND_ERR_HEADERS_TIMEOUT' || errorType === 'UND_ERR_HEADERS_TIMEOUT' ||
errorType === 'UND_ERR_CONNECT_TIMEOUT' || errorType === 'UND_ERR_CONNECT_TIMEOUT' ||

View File

@@ -14,8 +14,14 @@ interface GetApplicableVersionsOptions {
includeNextVersion?: boolean includeNextVersion?: boolean
} }
// Using any for feature data as it's dynamically loaded from YAML files interface FeatureData {
let featureData: any = null [featureName: string]: {
versions: VersionsObject
}
}
// Feature data is dynamically loaded from YAML files
let featureData: FeatureData | null = null
const allVersionKeys = Object.keys(allVersions) const allVersionKeys = Object.keys(allVersions)
@@ -55,13 +61,13 @@ function getApplicableVersions(
? {} ? {}
: reduce( : reduce(
versionsObj, versionsObj,
(result: any, value, key) => { (result: VersionsObject, value, key) => {
if (key === 'feature') { if (key === 'feature') {
if (typeof value === 'string') { if (typeof value === 'string') {
Object.assign(result, { ...featureData[value]?.versions }) Object.assign(result, { ...featureData?.[value]?.versions })
} else if (Array.isArray(value)) { } else if (Array.isArray(value)) {
for (const str of value) { for (const str of value) {
Object.assign(result, { ...featureData[str].versions }) Object.assign(result, { ...featureData?.[str]?.versions })
} }
} }
delete result[key] delete result[key]

View File

@@ -1,17 +1,27 @@
import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils' import { getLiquidTokens } from '@/content-linter/lib/helpers/liquid-utils'
import type { TagToken } from 'liquidjs'
import { TokenKind } from 'liquidjs'
type Token = { // Type guard to check if a token is a TagToken
name?: string function isTagToken(token: unknown): token is TagToken {
args?: string return (
token !== null &&
typeof token === 'object' &&
'kind' in token &&
token.kind === TokenKind.Tag &&
'name' in token &&
typeof token.name === 'string' &&
'args' in token
)
} }
const parsedLiquidTokensCache = new Map<string, Token[]>() const parsedLiquidTokensCache = new Map<string, TagToken[]>()
export function inLiquid(filePath: string, fileContents: string, needle: string) { export function inLiquid(filePath: string, fileContents: string, needle: string) {
if (!parsedLiquidTokensCache.has(filePath)) { if (!parsedLiquidTokensCache.has(filePath)) {
parsedLiquidTokensCache.set(filePath, getLiquidTokens(fileContents)) parsedLiquidTokensCache.set(filePath, getLiquidTokens(fileContents).filter(isTagToken))
} }
const tokens = parsedLiquidTokensCache.get(filePath) as Token[] const tokens = parsedLiquidTokensCache.get(filePath)!
for (const token of tokens) { for (const token of tokens) {
if (token.name === 'data') { if (token.name === 'data') {
const { args } = token const { args } = token