Migrate 13 JS files to TypeScript (#57881)
@@ -24,8 +24,8 @@ import ajv from '@/tests/lib/validate-json-schema'
 // mdDict will be populated with:
 //
 // { '/foo/bar/0': 'item 1', '/foo/bar/1': 'item 2' }
-const mdDict = new Map()
-const lintableData = Object.keys(dataSchemas)
+const mdDict = new Map<string, string>()
+const lintableData: string[] = Object.keys(dataSchemas)

 // To redefine a custom keyword, you must remove it
 // then re-add it with the new definition. The default
@@ -37,7 +37,8 @@ ajv.addKeyword({
 type: 'string',
 // For docs on defining validate see
 // https://ajv.js.org/keywords.html#define-keyword-with-validate-function
-validate: (compiled, data, schema, parentInfo) => {
+// Using any for validate function params because AJV's type definitions for custom keywords are complex
+validate: (compiled: any, data: any, schema: any, parentInfo: any): boolean => {
 mdDict.set(parentInfo.instancePath, data)
 return true
 },
@@ -55,13 +56,14 @@ ajv.addKeyword({
 // back to the location in the original schema file,
 // so we also need the parent path of the `lintable`
 // property in the schema.
-export async function getLintableYml(dataFilePath) {
+export async function getLintableYml(dataFilePath: string): Promise<Record<string, string> | null> {
 const matchingDataPath = lintableData.find(
 (ref) => dataFilePath === ref || dataFilePath.startsWith(ref),
 )
 if (!matchingDataPath) return null

 const schemaFilePath = dataSchemas[matchingDataPath]
 if (!schemaFilePath) return null
 const schema = (await import(schemaFilePath)).default
 if (!schema) return null

@@ -78,13 +80,15 @@ export async function getLintableYml(dataFilePath) {
 // back to a file in the data directory.
 // The resulting key looks like:
 // 'data/variables/product.yml /pat_v1_caps'
-function addPathToKey(mdDict, dataFilePath) {
+function addPathToKey(mdDict: Map<string, string>, dataFilePath: string): Map<string, string> {
 const keys = Array.from(mdDict.keys())
 keys.forEach((key) => {
 const newKey = `${dataFilePath} ${key}`
 const value = mdDict.get(key)
+if (value !== undefined) {
 mdDict.delete(key)
 mdDict.set(newKey, value)
+}
 })
 return mdDict
 }
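The first two hunks lean on AJV's custom-keyword API. A minimal sketch of the remove-then-re-add pattern the comment describes, assuming the keyword is named `lintable` (the keyword name and the fresh Ajv instance are assumptions; the repo uses a preconfigured instance from '@/tests/lib/validate-json-schema'):

import Ajv from 'ajv'

const ajv = new Ajv()
const mdDict = new Map<string, string>()

// Remove any existing definition, then register the keyword again with a
// validate function that records where each lintable string lives.
ajv.removeKeyword('lintable') // assumed keyword name
ajv.addKeyword({
  keyword: 'lintable',
  type: 'string',
  validate: (_schema: any, data: any, _parentSchema: any, dataCxt: any): boolean => {
    mdDict.set(dataCxt.instancePath, data) // e.g. '/foo/bar/0' -> 'item 1'
    return true
  },
})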
@@ -1,7 +1,15 @@
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
 import { addError } from 'markdownlint-rule-helpers'
 import yaml from 'js-yaml'

 import { getRange, getFrontmatter } from '../helpers/utils'
+import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

+interface Frontmatter {
+redirect_from?: string | string[]
+children?: string[]
+[key: string]: any
+}

 const ERROR_MESSAGE =
 'An early access reference appears to be used in a non-early access doc. Remove early access references or disable this rule.'
@@ -10,20 +18,20 @@ const ERROR_MESSAGE =
 // There are several existing allowed references to `early access`
 // as a GitHub feature. This rule focuses on references to early
 // access pages.
-const isEarlyAccessFilepath = (filepath) => filepath.includes('early-access')
+const isEarlyAccessFilepath = (filepath: string): boolean => filepath.includes('early-access')

 const EARLY_ACCESS_REGEX = /early-access/gi
 // This is a pattern seen in link paths for articles about
 // early access. This pattern is ok.
 const EARLY_ACCESS_ARTICLE_REGEX = /-early-access-/

-export const earlyAccessReferences = {
+export const earlyAccessReferences: Rule = {
 names: ['GHD008', 'early-access-references'],
 description:
 'Files that are not early access should not reference early-access or early-access files',
 tags: ['feature', 'early-access'],
 severity: 'error',
-function: (params, onError) => {
+function: (params: RuleParams, onError: RuleErrorCallback) => {
 if (isEarlyAccessFilepath(params.name)) return

 // Find errors in content
@@ -44,17 +52,17 @@ export const earlyAccessReferences = {
 },
 }

-export const frontmatterEarlyAccessReferences = {
+export const frontmatterEarlyAccessReferences: Rule = {
 names: ['GHD009', 'frontmatter-early-access-references'],
 description:
 'Files that are not early access should not have frontmatter that references early-access',
 tags: ['frontmatter', 'feature', 'early-access'],
-function: (params, onError) => {
+function: (params: RuleParams, onError: RuleErrorCallback) => {
 const filepath = params.name
 if (isEarlyAccessFilepath(filepath)) return

 // Find errors in frontmatter
-const fm = getFrontmatter(params.lines)
+const fm = getFrontmatter(params.lines) as Frontmatter | null
 if (!fm) return

 // The redirect_from property is allowed to contain early-access paths
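The `Rule`, `RuleParams`, `RuleErrorCallback`, and `MarkdownToken` types imported from '@/content-linter/types' are not part of this diff. Based on how the rules in this commit use them, they are roughly of this shape (a sketch, not the actual declarations):

interface MarkdownToken {
  type: string
  content?: string
  info?: string
  lineNumber: number
  [key: string]: any
}

interface RuleParams {
  name: string // path of the file being linted
  lines: string[]
  frontMatterLines: string[]
  tokens?: MarkdownToken[]
}

type RuleErrorCallback = (errorInfo: any) => void

interface Rule {
  names: string[]
  description: string
  tags: string[]
  severity?: string
  parser?: string
  asynchronous?: boolean
  function: (params: RuleParams, onError: RuleErrorCallback) => void
}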
@@ -1,5 +1,8 @@
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
 import { addError, newLineRe } from 'markdownlint-rule-helpers'

+import type { RuleParams, RuleErrorCallback, MarkdownToken, Rule } from '@/content-linter/types'

 // This rule looks for opening and closing HTML comment tags that
 // contain an expiration date in the format:
 //
@@ -8,20 +11,20 @@ import { addError, newLineRe } from 'markdownlint-rule-helpers'
 //
 // The `end expires` closing tag closes the content that is expired
 // and must be removed.
-export const expiredContent = {
+export const expiredContent: Rule = {
 names: ['GHD038', 'expired-content'],
 description: 'Expired content must be remediated.',
 tags: ['expired'],
-function: (params, onError) => {
-const tokensToCheck = params.tokens.filter(
-(token) => token.type === 'inline' || token.type === 'html_block',
+function: (params: RuleParams, onError: RuleErrorCallback) => {
+const tokensToCheck = (params.tokens || []).filter(
+(token: MarkdownToken) => token.type === 'inline' || token.type === 'html_block',
 )

-tokensToCheck.forEach((token) => {
+tokensToCheck.forEach((token: MarkdownToken) => {
 // Looking for just opening tag with format:
 // <!-- expires yyyy-mm-dd -->
-const match = token.content.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
-if (!match) return
+const match = token.content?.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
+if (!match || !token.content) return

 const expireDate = new Date(match.splice(1, 3).join(' '))
 const today = new Date()
@@ -57,20 +60,20 @@ export const DAYS_TO_WARN_BEFORE_EXPIRED = 14
 //
 // The `end expires` closing tag closes the content that is expired
 // and must be removed.
-export const expiringSoon = {
+export const expiringSoon: Rule = {
 names: ['GHD039', 'expiring-soon'],
 description: 'Content that expires soon should be proactively addressed.',
 tags: ['expired'],
-function: (params, onError) => {
-const tokensToCheck = params.tokens.filter(
-(token) => token.type === 'inline' || token.type === 'html_block',
+function: (params: RuleParams, onError: RuleErrorCallback) => {
+const tokensToCheck = (params.tokens || []).filter(
+(token: MarkdownToken) => token.type === 'inline' || token.type === 'html_block',
 )

-tokensToCheck.forEach((token) => {
+tokensToCheck.forEach((token: MarkdownToken) => {
 // Looking for just opening tag with format:
 // <!-- expires yyyy-mm-dd -->
-const match = token.content.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
-if (!match) return
+const match = token.content?.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
+if (!match || !token.content) return

 const expireDate = new Date(match.splice(1, 3).join(' '))
 const today = new Date()
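A quick illustration of how the opening tag's date is parsed by the lines above (a standalone sketch; the comparison against today is paraphrased, the rest of the rule logic is unchanged in this commit):

const sample = '<!-- expires 2024-12-31 -->'
const match = sample.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
if (match) {
  // splice(1, 3) pulls out ['2024', '12', '31']; joined with spaces it becomes
  // '2024 12 31', which the Date constructor accepts in Node.
  const expireDate = new Date(match.splice(1, 3).join(' '))
  const expired = expireDate.getTime() <= Date.now()
  console.log(expireDate.toDateString(), expired)
}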
@@ -1,12 +1,19 @@
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
 import { addError } from 'markdownlint-rule-helpers'
 import { getFrontmatter } from '@/content-linter/lib/helpers/utils'
+import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

-export const frontmatterVersionsWhitespace = {
+interface Frontmatter {
+versions?: Record<string, string | string[]>
+[key: string]: any
+}

+export const frontmatterVersionsWhitespace: Rule = {
 names: ['GHD051', 'frontmatter-versions-whitespace'],
 description: 'Versions frontmatter should not contain unnecessary whitespace',
 tags: ['frontmatter', 'versions'],
-function: (params, onError) => {
-const fm = getFrontmatter(params.lines)
+function: (params: RuleParams, onError: RuleErrorCallback) => {
+const fm = getFrontmatter(params.lines) as Frontmatter | null
 if (!fm || !fm.versions) return

 const versionsObj = fm.versions
@@ -58,7 +65,7 @@ export const frontmatterVersionsWhitespace = {
 * Allows whitespace in complex expressions like '<3.6 >3.8'
 * but disallows leading/trailing whitespace
 */
-function checkForUnwantedWhitespace(value) {
+function checkForUnwantedWhitespace(value: string): boolean {
 // Don't flag if the value is just whitespace or empty
 if (!value || value.trim() === '') return false

@@ -82,7 +89,7 @@ function checkForUnwantedWhitespace(value) {
 /**
 * Get the cleaned version of a value by removing appropriate whitespace
 */
-function getCleanedValue(value) {
+function getCleanedValue(value: string): string {
 // For values with operators, just trim leading/trailing whitespace
 const hasOperators = /[<>=]/.test(value)
 if (hasOperators) {
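Per the doc comments above, the intended behavior of the two helpers is roughly the following (illustrative inputs and expected results; the full function bodies sit outside these hunks):

checkForUnwantedWhitespace(' >3.2')     // true  - leading whitespace should be flagged
checkForUnwantedWhitespace('<3.6 >3.8') // false - internal whitespace in a range expression is allowed
getCleanedValue(' >3.2 ')               // '>3.2' - value has operators, so only leading/trailing whitespace is trimmed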
@@ -1,16 +1,26 @@
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
 import { addError } from 'markdownlint-rule-helpers'
 import path from 'path'

 import { getFrontmatter } from '../helpers/utils'
+import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

-export const frontmatterVideoTranscripts = {
+interface Frontmatter {
+product_video?: string
+product_video_transcript?: string
+title?: string
+layout?: string
+[key: string]: any
+}

+export const frontmatterVideoTranscripts: Rule = {
 names: ['GHD011', 'frontmatter-video-transcripts'],
 description: 'Video transcript must be configured correctly',
 tags: ['frontmatter', 'feature', 'video-transcripts'],
-function: (params, onError) => {
+function: (params: RuleParams, onError: RuleErrorCallback) => {
 const filepath = params.name

-const fm = getFrontmatter(params.lines)
+const fm = getFrontmatter(params.lines) as Frontmatter | null
 if (!fm) return

 const isTranscriptContent =
@@ -29,7 +39,7 @@ export const frontmatterVideoTranscripts = {
 null, // No fix possible
 )
 }
-if (!fm.title.startsWith('Transcript - ')) {
+if (fm.title && !fm.title.startsWith('Transcript - ')) {
 const lineNumber = params.lines.findIndex((line) => line.startsWith('title:')) + 1
 const lineContent = params.lines[lineNumber - 1]
 addError(
@@ -1,16 +1,23 @@
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
 import { addError } from 'markdownlint-rule-helpers'
 import { getRange } from '../helpers/utils'
 import frontmatter from '@/frame/lib/read-frontmatter'
+import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

-export const multipleEmphasisPatterns = {
+interface Frontmatter {
+autogenerated?: boolean
+[key: string]: any
+}

+export const multipleEmphasisPatterns: Rule = {
 names: ['GHD050', 'multiple-emphasis-patterns'],
 description: 'Do not use more than one emphasis/strong, italics, or uppercase for a string',
 tags: ['formatting', 'emphasis', 'style'],
 severity: 'warning',
-function: (params, onError) => {
+function: (params: RuleParams, onError: RuleErrorCallback) => {
 // Skip autogenerated files
 const frontmatterString = params.frontMatterLines.join('\n')
-const fm = frontmatter(frontmatterString).data
+const fm = frontmatter(frontmatterString).data as Frontmatter
 if (fm && fm.autogenerated) return

 const lines = params.lines
@@ -38,9 +45,9 @@ export const multipleEmphasisPatterns = {
 /**
 * Check for multiple emphasis types in a single text segment
 */
-function checkMultipleEmphasis(line, lineNumber, onError) {
+function checkMultipleEmphasis(line: string, lineNumber: number, onError: RuleErrorCallback): void {
 // Focus on the clearest violations of the style guide
-const multipleEmphasisPatterns = [
+const multipleEmphasisPatterns: Array<{ regex: RegExp; types: string[] }> = [
 // Bold + italic combinations (***text***)
 { regex: /\*\*\*([^*]+)\*\*\*/g, types: ['bold', 'italic'] },
 { regex: /___([^_]+)___/g, types: ['bold', 'italic'] },
@@ -76,7 +83,7 @@ function checkMultipleEmphasis(line, lineNumber, onError) {
 /**
 * Determine if a match should be skipped (likely intentional formatting)
 */
-function shouldSkipMatch(fullMatch, content) {
+function shouldSkipMatch(fullMatch: string, content: string): boolean {
 // Skip common false positives
 if (!content) return true
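What the first pattern in the list above is meant to catch, shown on two sample strings (sketch):

'Use ***caution*** here'.match(/\*\*\*([^*]+)\*\*\*/g) // ['***caution***'] -> bold + italic combined, flagged
'Use **caution** here'.match(/\*\*\*([^*]+)\*\*\*/g)   // null -> a single emphasis style, not flagged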
@@ -1,4 +1,7 @@
-import { addError, filterTokens } from 'markdownlint-rule-helpers'
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
+import { addError } from 'markdownlint-rule-helpers'

+import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

 // Detects a Markdown table delimiter row
 const delimiterRegexPure = /(\s)*(:)?(-+)(:)?(\s)*(\|)/
@@ -9,13 +12,13 @@ const liquidRegex = /^{%-?\s*(ifversion|else|endif).*-?%}/
 // Detects a Markdown table row with a Liquid versioning tag
 const liquidAfterRowRegex = /(\|{1}).*(\|{1}).*{%\s*(ifversion|else|endif).*%}$/

-export const tableLiquidVersioning = {
+export const tableLiquidVersioning: Rule = {
 names: ['GHD040', 'table-liquid-versioning'],
 description: 'Tables must use the correct liquid versioning format',
 severity: 'error',
 tags: ['tables'],

-function: function GHD040(params, onError) {
+function: function GHD040(params: RuleParams, onError: RuleErrorCallback) {
 const lines = params.lines
 let inTable = false
 for (let i = 0; i < lines.length; i++) {
@@ -75,7 +78,7 @@ export const tableLiquidVersioning = {
 },
 }

-function isPreviousLineIndented(line, previousLine) {
+function isPreviousLineIndented(line: string, previousLine: string): boolean {
 if (!line || !previousLine) return false
 const numWhitespaceLine = line.length - line.trimLeft().length
 const numWhitespacePrevLine = previousLine.length - previousLine.trimLeft().length
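How the two row-level regexes above classify a line (sketch; the remediation the rule applies lives in the part of the function not shown in these hunks):

const liquidRegex = /^{%-?\s*(ifversion|else|endif).*-?%}/
const liquidAfterRowRegex = /(\|{1}).*(\|{1}).*{%\s*(ifversion|else|endif).*%}$/

liquidRegex.test('{% ifversion ghes %}')                           // true: the tag sits on its own row
liquidAfterRowRegex.test('| Octocat | Cat | {% ifversion ghes %}') // true: the tag trails a table row
liquidAfterRowRegex.test('| Octocat | Cat |')                      // false: plain table row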
@@ -1,8 +1,10 @@
-import yaml from 'js-yaml'
+// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
 import { addError, filterTokens } from 'markdownlint-rule-helpers'
+import yaml from 'js-yaml'

 import { liquid } from '@/content-render/index'
 import { allVersions } from '@/versions/lib/all-versions'
+import type { RuleParams, RuleErrorCallback, MarkdownToken, Rule } from '@/content-linter/types'

 // Detects third-party actions in the format `owner/repo@ref`
 const actionRegex = /[\w-]+\/[\w-]+@[\w-]+/
@@ -11,16 +13,33 @@ const shaRegex = /[\w-]+\/[\w-]+@[0-9a-fA-F]{40}/
 // Detects first-party actions
 const firstPartyPrefixes = ['actions/', './.github/actions/', 'github/', 'octo-org/', 'OWNER/']

-export const thirdPartyActionPinning = {
+interface WorkflowStep {
+uses?: string
+[key: string]: any
+}

+interface WorkflowJob {
+steps?: WorkflowStep[]
+[key: string]: any
+}

+interface WorkflowYaml {
+jobs?: Record<string, WorkflowJob>
+steps?: WorkflowStep[]
+[key: string]: any
+}

+export const thirdPartyActionPinning: Rule = {
 names: ['GHD041', 'third-party-action-pinning'],
 description:
 'Code examples that use third-party actions must always pin to a full length commit SHA',
 tags: ['feature', 'actions'],
 parser: 'markdownit',
 asynchronous: true,
-function: (params, onError) => {
-filterTokens(params, 'fence', async (token) => {
-const lang = token.info.trim().split(/\s+/u).shift().toLowerCase()
+function: (params: RuleParams, onError: RuleErrorCallback) => {
+filterTokens(params, 'fence', async (token: MarkdownToken) => {
+if (!token.info || !token.content) return
+const lang = token.info.trim().split(/\s+/u).shift()?.toLowerCase()
 if (lang !== 'yaml' && lang !== 'yml') return
 if (!token.content.includes('steps:')) return
 if (!token.content.includes('uses:')) return
@@ -32,7 +51,7 @@ export const thirdPartyActionPinning = {
 // If we don't parse the Liquid first, yaml loading chokes on {% raw %} tags
 const renderedYaml = await liquid.parseAndRender(token.content, context)
 try {
-const yamlObj = yaml.load(renderedYaml)
+const yamlObj = yaml.load(renderedYaml) as WorkflowYaml
 const steps = getWorkflowSteps(yamlObj)
 if (!steps.some((step) => step.uses)) return

@@ -40,11 +59,13 @@ export const thirdPartyActionPinning = {
 if (step.uses) {
 const actionMatch = step.uses.match(actionRegex)
 if (actionMatch) {
-const isFirstParty = firstPartyPrefixes.some((prefix) => step.uses.startsWith(prefix))
+const isFirstParty = firstPartyPrefixes.some((prefix) =>
+step.uses!.startsWith(prefix),
+)
 if (!isFirstParty && !shaRegex.test(step.uses)) {
 addError(
 onError,
-getLineNumber(token.content, step.uses) + token.lineNumber,
+getLineNumber(token.content!, step.uses) + token.lineNumber,
 'Code examples that use third-party actions must always pin to a full length commit SHA',
 step.uses,
 )
@@ -64,7 +85,7 @@ export const thirdPartyActionPinning = {
 },
 }

-function getWorkflowSteps(yamlObj) {
+function getWorkflowSteps(yamlObj: WorkflowYaml): WorkflowStep[] {
 if (yamlObj?.jobs) {
 const jobs = Object.values(yamlObj.jobs)
 return jobs.flatMap((job) => job.steps || [])
@@ -74,7 +95,7 @@ function getWorkflowSteps(yamlObj) {
 return []
 }

-function getLineNumber(tokenContent, step) {
+function getLineNumber(tokenContent: string, step: string): number {
 const contentLines = tokenContent.split('\n')
 return contentLines.findIndex((line) => line.includes(step)) + 1
 }
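How the two regexes near the top of this file separate pinned from unpinned references (sketch; the action names below are made up):

const actionRegex = /[\w-]+\/[\w-]+@[\w-]+/
const shaRegex = /[\w-]+\/[\w-]+@[0-9a-fA-F]{40}/

const unpinned = 'some-org/some-action@v2'
const pinned = 'some-org/some-action@8f4b7f84864484a7bf31766abe9204da3cbe65b3'

actionRegex.test(unpinned) // true  -> looks like a third-party action reference
shaRegex.test(unpinned)    // false -> not pinned to a 40-character SHA, so the rule reports it
shaRegex.test(pinned)      // true  -> pinned to a full-length commit SHA, allowed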
@@ -1,4 +1,5 @@
 import { TokenizationError } from 'liquidjs'
+import type { TagToken, Liquid, Template } from 'liquidjs'

 import { THROW_ON_EMPTY, DataReferenceError } from './error-handling'
 import { getDataByLanguage } from '@/data-directory/lib/get-data'
@@ -6,8 +7,22 @@ import { getDataByLanguage } from '@/data-directory/lib/get-data'
 const Syntax = /([a-z0-9/\\_.\-[\]]+)/i
 const SyntaxHelp = "Syntax Error in 'data' - Valid syntax: data [path]"

+// Using any for scope because it has custom environments property not in Liquid's Scope type
+interface CustomScope {
+environments: any
+[key: string]: any
+}

+interface DataTag {
+path: string
+tagToken: TagToken
+liquid: Liquid
+parse(tagToken: TagToken): void
+render(scope: CustomScope): Promise<Template | undefined>
+}

 export default {
-parse(tagToken) {
+parse(tagToken: TagToken) {
 if (!tagToken || !Syntax.test(tagToken.args)) {
 throw new TokenizationError(SyntaxHelp, tagToken)
 }
@@ -16,7 +31,7 @@ export default {
 this.tagToken = tagToken
 },

-async render(scope) {
+async render(scope: CustomScope) {
 let text = getDataByLanguage(this.path, scope.environments.currentLanguage)
 if (text === undefined) {
 if (scope.environments.currentLanguage === 'en') {
@@ -35,9 +50,9 @@ export default {

 return this.liquid.parseAndRender(text, scope.environments)
 },
-}
+} as DataTag

-function handleIndent(tagToken, text) {
+function handleIndent(tagToken: TagToken, text: string): string {
 // Any time what we're about to replace in here has more than one line,
 // if the use of `{% data ... %}` was itself indented, from the left,
 // keep *that* indentation, in replaced output, for every line.
@@ -67,17 +82,20 @@ function handleIndent(tagToken, text) {
 // When a reusable has multiple lines, and the input line is a blockquote,
 // keep the blockquote character on every successive line.
 const blockquoteRegexp = /^\n?([ \t]*>[ \t]?)/
-function handleBlockquote(tagToken, text) {
+function handleBlockquote(tagToken: TagToken, text: string): string {
 // If the text isn't multiline, skip
 if (text.split('\n').length <= 1) return text

 // If the line with the liquid tag starts with a blockquote...
 const { input, content } = tagToken
+if (!content) return text
 const inputLine = input.split('\n').find((line) => line.includes(content))
-if (!blockquoteRegexp.test(inputLine)) return text
+if (!inputLine || !blockquoteRegexp.test(inputLine)) return text

 // Keep the character on successive lines
-const start = inputLine.match(blockquoteRegexp)[0]
+const match = inputLine.match(blockquoteRegexp)
+if (!match) return text
+const start = match[0]
 return text
 .split('\n')
 .map((line, i) => {
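Not shown in this diff is how the exported tag object gets attached to a Liquid engine. LiquidJS exposes registerTag for that; a minimal sketch, where the import path and the tag name 'data' are assumptions based on the file's syntax help string:

import { Liquid } from 'liquidjs'
import data from './data' // hypothetical path to the default export typed as DataTag above

const engine = new Liquid()
engine.registerTag('data', data)

// After registration, templates can resolve data references, e.g.:
//   {% data variables.product.prodname_dotcom %}
// render() looks the path up via getDataByLanguage for the current language.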
@@ -1,27 +1,42 @@
 import path from 'path'
 import { existsSync } from 'fs'
+import type { Response, NextFunction } from 'express'

 import { ROOT } from '@/frame/lib/constants'
 import Page from '@/frame/lib/page'
 import { languagePrefixPathRegex } from '@/languages/lib/languages'
+import type { ExtendedRequest } from '@/types'

+interface FindPageOptions {
+isDev?: boolean
+contentRoot?: string
+}

 const englishPrefixRegex = /^\/en(\/|$)/
 const CONTENT_ROOT = path.join(ROOT, 'content')

 export default async function findPage(
-req,
-res,
-next,
+req: ExtendedRequest,
+res: Response,
+next: NextFunction,
 // Express won't execute these but it makes it easier to unit test
 // the middleware.
-{ isDev = process.env.NODE_ENV === 'development', contentRoot = CONTENT_ROOT } = {},
-) {
+{
+isDev = process.env.NODE_ENV === 'development',
+contentRoot = CONTENT_ROOT,
+}: FindPageOptions = {},
+): Promise<any> {
 // Filter out things like `/will/redirect` or `/_next/data/...`
-if (!languagePrefixPathRegex.test(req.pagePath)) {
+if (!req.pagePath || !languagePrefixPathRegex.test(req.pagePath)) {
 return next()
 }

-let page = req.context.pages[req.pagePath]
+if (!req.context?.pages) {
+return next()
+}

+// Using any for page because it's dynamically assigned properties (like version) that aren't in the Page type
+let page: any = req.context.pages[req.pagePath]
 if (page && isDev && englishPrefixRegex.test(req.pagePath)) {
 // The .applicableVersions and .permalinks properties are computed
 // when the page is read in from disk. But when the initial tree
@@ -32,7 +47,14 @@ export default async function findPage(
 const oldApplicableVersions = page.applicableVersions
 const oldPermalinks = page.permalinks

-page = await rereadByPath(req.pagePath, contentRoot, req.context.currentVersion)
+const rereadPage = await rereadByPath(
+req.pagePath,
+contentRoot,
+req.context?.currentVersion || '',
+)
+if (rereadPage) {
+page = rereadPage
+}
 if (reuseOldVersions) {
 page.applicableVersions = oldApplicableVersions
 page.permalinks = oldPermalinks
@@ -41,24 +63,28 @@ export default async function findPage(
 // This can happen if the page we just re-read has changed which
 // versions it's available in (the `versions` frontmatter) meaning
 // it might no longer be available on the current URL.
-if (!page.applicableVersions.includes(req.context.currentVersion)) {
+if (
+req.context?.currentVersion &&
+!page.applicableVersions.includes(req.context.currentVersion)
+) {
 return res
 .status(404)
 .send(
-`After re-reading the page, '${req.context.currentVersion}' is no longer an applicable version. ` +
+`After re-reading the page, '${req.context?.currentVersion}' is no longer an applicable version. ` +
 'A restart is required.',
 )
 }
 }

-if (page) {
+if (page && req.context) {
 req.context.page = page
-req.context.page.version = req.context.currentVersion
+// Note: Page doesn't have a version property, this might be setting it dynamically
+;(req.context.page as any).version = req.context.currentVersion

 // We can't depend on `page.hidden` because the dedicated search
 // results page is a hidden page but it needs to offer all possible
 // languages.
-if (page.relativePath.startsWith('early-access')) {
+if (page.relativePath.startsWith('early-access') && req.context?.languages?.en) {
 // Override the languages to be only English
 req.context.languages = {
 en: req.context.languages.en,
@@ -69,8 +95,14 @@ export default async function findPage(
 return next()
 }

-async function rereadByPath(uri, contentRoot, currentVersion) {
-const languageCode = uri.match(languagePrefixPathRegex)[1]
+async function rereadByPath(
+uri: string,
+contentRoot: string,
+currentVersion: string,
+): Promise<Page | null> {
+const match = uri.match(languagePrefixPathRegex)
+if (!match) return null
+const languageCode = match[1]
 const withoutLanguage = uri.replace(languagePrefixPathRegex, '/')
 const withoutVersion = withoutLanguage.replace(`/${currentVersion}`, '')
 // TODO: Support loading translations the same way.
@@ -86,9 +118,10 @@ async function rereadByPath(uri, contentRoot, currentVersion) {
 // if it can't read the file in from disk. E.g. a request for /en/non/existent.
 // In other words, it's fine if it can't be read from disk. It'll get
 // handled and turned into a nice 404 message.
-return await Page.init({
+const page = await Page.init({
 basePath,
 relativePath,
 languageCode,
 })
+return page || null
 }
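The ExtendedRequest type comes from '@/types' and is not shown in this diff. The fields this middleware actually touches suggest a shape roughly like the following (sketch only, not the real declaration):

import type { Request } from 'express'

interface ExtendedRequestSketch extends Request {
  pagePath?: string
  context?: {
    pages: Record<string, any>
    currentVersion?: string
    page?: any
    languages?: Record<string, any>
    [key: string]: any
  }
}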
@@ -2,8 +2,8 @@ import { describe, expect, test } from 'vitest'

 import getMiniTocItems from '@/frame/lib/get-mini-toc-items'

-function generateHeading(h) {
-return (slug) => `<${h} id="${slug}">
+function generateHeading(h: string): (slug: string) => string {
+return (slug: string) => `<${h} id="${slug}">
 <a href="${slug}" class="heading-link">
 ${slug}
 </a>
@@ -129,7 +129,7 @@ async function renderInnerHTML(page: Page, permalink: Permalink) {
 }
 await contextualize(req as ExtendedRequest, res as Response, next)
 await shortVersions(req as ExtendedRequest, res as Response, next)
-await findPage(req, res, next)
+await findPage(req as ExtendedRequest, res as Response, next)
 features(req as ExtendedRequest, res as Response, next)

 const markdown = await liquid.parseAndRender(page.markdown, req.context)
@@ -4,6 +4,10 @@ import path from 'path'

 import yaml from 'js-yaml'

+interface DataStructure {
+[key: string]: string | DataStructure
+}

 // This helper class exists so you can create a temporary root directory
 // full of data files.
 // For example, if you want to unit test with files that are not part
@@ -41,17 +45,24 @@ import yaml from 'js-yaml'
 // will create a single <tempdir>/data/ui.yml file.
 //
 export class DataDirectory {
-constructor(data, root) {
+root: string

+constructor(data: DataStructure, root?: string) {
 this.root = root || this.createTempRoot('data-directory')
 this.create(data)
 }

-createTempRoot(prefix) {
+createTempRoot(prefix: string): string {
 const fullPath = path.join(os.tmpdir(), prefix)
 return fs.mkdtempSync(fullPath)
 }

-create(data, root = null, isVariables = false, isReusables = false) {
+create(
+data: DataStructure,
+root: string | null = null,
+isVariables = false,
+isReusables = false,
+): void {
 const here = root || this.root

 for (const [key, value] of Object.entries(data)) {
@@ -60,7 +71,8 @@ export class DataDirectory {
 fs.writeFileSync(path.join(here, `${key}.md`), value, 'utf-8')
 } else {
 fs.mkdirSync(path.join(here, key))
-this.create(value, path.join(here, key), false, true)
+// Using 'as' assertion because we know value must be an object when it's not a string in reusables context
+this.create(value as DataStructure, path.join(here, key), false, true)
 }
 } else if (isVariables) {
 fs.writeFileSync(path.join(here, `${key}.yml`), yaml.dump(value), 'utf-8')
@@ -70,19 +82,20 @@ export class DataDirectory {
 } else {
 const there = path.join(here, key)
 fs.mkdirSync(there)
+// Using 'as' assertions because nested directory values are always objects, not strings
 if (key === 'reusables') {
-this.create(value, there, false, true)
+this.create(value as DataStructure, there, false, true)
 } else if (key === 'variables') {
-this.create(value, there, true, false)
+this.create(value as DataStructure, there, true, false)
 } else {
-this.create(value, there)
+this.create(value as DataStructure, there)
 }
 }
 }
 }
 }

-destroy() {
+destroy(): void {
 fs.rmSync(this.root, { recursive: true })
 }
 }
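A sketch of how the helper is used in a test, following the comments and branches above (the data shape and the variable value are illustrative):

const dd = new DataDirectory({
  data: {
    variables: { product: { pat_v1_caps: 'Personal access token (classic)' } },
  },
})
// dd.root is a fresh temp directory; the 'variables' branch above writes
// data/variables/product.yml from the object via yaml.dump. Point the code
// under test at dd.root, then clean up:
dd.destroy()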
@@ -7,9 +7,13 @@ import { allVersions } from '@/versions/lib/all-versions'
 import getApplicableVersions from '@/versions/lib/get-applicable-versions'
 import { latest } from '@/versions/lib/enterprise-server-releases'

+interface Versions {
+[key: string]: string | string[]
+}

 describe('Versions frontmatter', () => {
 test('wildcard', async () => {
-const versions = {
+const versions: Versions = {
 fpt: '*',
 ghes: '*',
 }
@@ -19,7 +23,7 @@ describe('Versions frontmatter', () => {
 })

 test('greater than', async () => {
-const versions = {
+const versions: Versions = {
 fpt: '*',
 ghes: '>3.2',
 }
@@ -28,7 +32,7 @@ describe('Versions frontmatter', () => {
 })

 test('less than', async () => {
-const versions = {
+const versions: Versions = {
 fpt: '*',
 ghes: '<3.2',
 }
@@ -43,7 +47,7 @@ describe('general cases', () => {
 expect.assertions(2)
 try {
 getApplicableVersions('*')
-} catch (e) {
+} catch (e: any) {
 expect(e).toBeInstanceOf(Error)
 expect(e).toHaveProperty(
 'message',
@@ -57,13 +61,13 @@ describe('general cases', () => {
 .filter((name) => name !== 'README.md')
 .map((name) => path.basename(name, '.yml'))
 for (const possibleFeature of possibleFeatures) {
-const versions = { feature: possibleFeature }
+const versions: Versions = { feature: possibleFeature }
 const applicableVersions = getApplicableVersions(versions)
 expect(applicableVersions.every((v) => Object.keys(allVersions).includes(v)))
 }
 // Same thing but as an array each time
 for (const possibleFeature of possibleFeatures) {
-const versions = { feature: [possibleFeature] }
+const versions: Versions = { feature: [possibleFeature] }
 const applicableVersions = getApplicableVersions(versions)
 expect(applicableVersions.every((v) => Object.keys(allVersions).includes(v)))
 }
@@ -78,7 +82,7 @@ describe('invalid versions', () => {
 })

 test('no valid versions found at all', () => {
-const versions = {
+const versions: Versions = {
 never: '*',
 heard: 'of',
 }