diff --git a/README.md b/README.md index bd02c836ac..01eee7da20 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ You can also contribute by creating a local environment or opening a Codespace. Contribution call-to-action -For more complex contributions, please open an issue using the most appropriate [issue template](https://github.com/github/docs/issues/new/choose) to describe the changes you'd like to see. +For more complex contributions, please [open an issue in the docs-content repo](https://github.com/github/docs-content/issues/new/choose) describing the changes you'd like to see. If you're looking for a way to contribute, you can scan through our [help wanted board](https://github.com/github/docs/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) to find open issues already approved for work. diff --git a/data/reusables/dependabot/vnet-support-private-preview-note.md b/data/reusables/dependabot/vnet-support-private-preview-note.md deleted file mode 100644 index 86b409be5f..0000000000 --- a/data/reusables/dependabot/vnet-support-private-preview-note.md +++ /dev/null @@ -1,2 +0,0 @@ -> [!NOTE] -> VNET support for {% data variables.product.prodname_dependabot %} on {% data variables.product.prodname_actions %} is currently in {% data variables.release-phases.public_preview %} and subject to change. diff --git a/src/audit-logs/data/ghes-3.14/organization.json b/src/audit-logs/data/ghes-3.14/organization.json index 75288da17a..497a46d546 100644 --- a/src/audit-logs/data/ghes-3.14/organization.json +++ b/src/audit-logs/data/ghes-3.14/organization.json @@ -389,6 +389,21 @@ "description": "Triggered when a team discussion post is edited.", "docs_reference_links": "/communities/moderating-comments-and-conversations/managing-disruptive-comments#editing-a-comment" }, + { + "action": "enterprise_announcement.create", + "description": "A global announcement banner was created for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise#creating-a-global-announcement-banner" + }, + { + "action": "enterprise_announcement.destroy", + "description": "A global announcement banner was removed from the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, + { + "action": "enterprise_announcement.update", + "description": "A global announcement banner was updated for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, { "action": "enterprise_installation.create", "description": "The GitHub App associated with a GitHub Connect connection was created.", diff --git a/src/audit-logs/data/ghes-3.15/organization.json b/src/audit-logs/data/ghes-3.15/organization.json index 5f5c680d88..2b74016424 100644 --- a/src/audit-logs/data/ghes-3.15/organization.json +++ b/src/audit-logs/data/ghes-3.15/organization.json @@ -389,6 +389,21 @@ "description": "Triggered when a team discussion post is edited.", "docs_reference_links": "/communities/moderating-comments-and-conversations/managing-disruptive-comments#editing-a-comment" }, + { + "action": "enterprise_announcement.create", + "description": "A global announcement banner was created for the enterprise.", + "docs_reference_links": 
"/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise#creating-a-global-announcement-banner" + }, + { + "action": "enterprise_announcement.destroy", + "description": "A global announcement banner was removed from the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, + { + "action": "enterprise_announcement.update", + "description": "A global announcement banner was updated for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, { "action": "enterprise_installation.create", "description": "The GitHub App associated with a GitHub Connect connection was created.", diff --git a/src/audit-logs/data/ghes-3.16/organization.json b/src/audit-logs/data/ghes-3.16/organization.json index d7736e5c3a..381ea8cff9 100644 --- a/src/audit-logs/data/ghes-3.16/organization.json +++ b/src/audit-logs/data/ghes-3.16/organization.json @@ -419,6 +419,21 @@ "description": "Triggered when a team discussion post is edited.", "docs_reference_links": "/communities/moderating-comments-and-conversations/managing-disruptive-comments#editing-a-comment" }, + { + "action": "enterprise_announcement.create", + "description": "A global announcement banner was created for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise#creating-a-global-announcement-banner" + }, + { + "action": "enterprise_announcement.destroy", + "description": "A global announcement banner was removed from the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, + { + "action": "enterprise_announcement.update", + "description": "A global announcement banner was updated for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, { "action": "enterprise_installation.create", "description": "The GitHub App associated with a GitHub Connect connection was created.", diff --git a/src/audit-logs/data/ghes-3.17/organization.json b/src/audit-logs/data/ghes-3.17/organization.json index 52e050f48a..b058924405 100644 --- a/src/audit-logs/data/ghes-3.17/organization.json +++ b/src/audit-logs/data/ghes-3.17/organization.json @@ -454,6 +454,21 @@ "description": "Triggered when a team discussion post is edited.", "docs_reference_links": "/communities/moderating-comments-and-conversations/managing-disruptive-comments#editing-a-comment" }, + { + "action": "enterprise_announcement.create", + "description": "A global announcement banner was created for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise#creating-a-global-announcement-banner" + }, + { + "action": "enterprise_announcement.destroy", + "description": "A global announcement banner was removed from the enterprise.", + "docs_reference_links": 
"/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, + { + "action": "enterprise_announcement.update", + "description": "A global announcement banner was updated for the enterprise.", + "docs_reference_links": "/admin/managing-accounts-and-repositories/communicating-information-to-users-in-your-enterprise/customizing-user-messages-for-your-enterprise" + }, { "action": "enterprise_installation.create", "description": "The GitHub App associated with a GitHub Connect connection was created.", diff --git a/src/audit-logs/lib/config.json b/src/audit-logs/lib/config.json index 2a81a03a10..ed4bb2b015 100644 --- a/src/audit-logs/lib/config.json +++ b/src/audit-logs/lib/config.json @@ -3,5 +3,5 @@ "apiOnlyEvents": "This event is not available in the web interface, only via the REST API, audit log streaming, or JSON/CSV exports.", "apiRequestEvent": "This event is only available via audit log streaming." }, - "sha": "ec298358d957110bcdd9b7921afd7d4ab23107dd" + "sha": "e14b4ba692f0019ca81fd2843ae8445239cf480e" } \ No newline at end of file diff --git a/src/automated-pipelines/README.md b/src/automated-pipelines/README.md index 4f90f6b57e..5ee2b55697 100644 --- a/src/automated-pipelines/README.md +++ b/src/automated-pipelines/README.md @@ -21,13 +21,15 @@ Automated pages allow for manually created content to be prepended to the automa ## How does it work We currently have two patterns that we used to create automated pipelines: + - REST, Webhooks, GitHub Apps, and GraphQL pipelines consume external structured data and transform that data into a JSON file that is used to create content for a page on docs.github.com. Typically, data files are a 1:1 mapping to a specific page on docs.github.com. - The CodeQL CLI pipeline takes an unstructured ReStructuredText file and transforms it directly into a Markdown file with frontmatter, that uses the same authoring format as the rest of the docs. ## Creating a new pipeline -Each pipeline should be evaluated individually to determine the best architecture for simplicity, maintainability, and requirements. +Each pipeline should be evaluated individually to determine the best architecture for simplicity, maintainability, and requirements. For example: + - Is the content being displayed basic Markdown content? For example, does the content avoid using complex tables and interactive elements? If so, then writing the Markdown content directly and avoiding the need to create a structured data file that requires a React component may be the best approach. This was the case for the CodeQL CLI pipeline. One caveat to think about before writing Markdown directly is whether the content will need liquid versioning. The current pipeline that writes Markdown directly does not need to use liquid versioning. Liquid versioning which would increase the complexity quite a bit. All of the Markdown content in each article that is generated from the CodeQL CLI pipeline applies to all versions listed in the `versions` frontmatter property, simplifying the Markdown generation process. - Is the page interactive like the REST and Webhooks pages? If so, then the data will likely need to be structured data. In that case, a new React component may be needed to display the data. 
diff --git a/src/automated-pipelines/lib/config.json b/src/automated-pipelines/lib/config.json index ea8a3b105f..080034d61d 100644 --- a/src/automated-pipelines/lib/config.json +++ b/src/automated-pipelines/lib/config.json @@ -6,4 +6,4 @@ "rest", "webhooks" ] -} \ No newline at end of file +} diff --git a/src/automated-pipelines/lib/update-markdown.js b/src/automated-pipelines/lib/update-markdown.ts similarity index 81% rename from src/automated-pipelines/lib/update-markdown.js rename to src/automated-pipelines/lib/update-markdown.ts index ba9a13e34c..888165869c 100644 --- a/src/automated-pipelines/lib/update-markdown.js +++ b/src/automated-pipelines/lib/update-markdown.ts @@ -9,6 +9,57 @@ import { difference, isEqual } from 'lodash-es' import { allVersions } from '#src/versions/lib/all-versions.js' import getApplicableVersions from '#src/versions/lib/get-applicable-versions.js' +import type { MarkdownFrontmatter } from '@/types' + +// Type definitions - extending existing type to add missing fields and make most fields optional +type FrontmatterData = Partial & { + autogenerated?: string + [key: string]: any +} + +type SourceContentItem = { + data: FrontmatterData + content: string +} + +type SourceContent = { + [key: string]: SourceContentItem +} + +type IndexOrder = { + [key: string]: { + startsWith?: string[] + } +} + +type UpdateContentDirectoryOptions = { + targetDirectory: string + sourceContent: SourceContent + frontmatter: FrontmatterData + indexOrder?: IndexOrder +} + +type UpdateDirectoryOptions = { + rootDirectoryOnly?: boolean + shortTitle?: boolean + indexOrder?: IndexOrder +} + +type ChildUpdates = { + itemsToAdd: string[] + itemsToRemove: string[] +} + +type DirectoryInfo = { + directoryContents: string[] + directoryFiles: string[] + childDirectories: string[] +} + +type ChildrenComparison = { + childrenOnDisk: string[] + indexChildren: string[] +} const ROOT_INDEX_FILE = 'content/index.md' export const MARKDOWN_COMMENT = '\n\n' @@ -20,7 +71,7 @@ export async function updateContentDirectory({ sourceContent, frontmatter, indexOrder, -}) { +}: UpdateContentDirectoryOptions): Promise { const sourceFiles = Object.keys(sourceContent) await createDirectory(targetDirectory) await removeMarkdownFiles(targetDirectory, sourceFiles, frontmatter.autogenerated) @@ -28,7 +79,11 @@ export async function updateContentDirectory({ } // Remove markdown files that are no longer in the source data -async function removeMarkdownFiles(targetDirectory, sourceFiles, autogeneratedType) { +async function removeMarkdownFiles( + targetDirectory: string, + sourceFiles: string[], + autogeneratedType: string | undefined, +): Promise { // Copy the autogenerated Markdown files to the target directory const autogeneratedFiles = await getAutogeneratedFiles(targetDirectory, autogeneratedType) // If the first array contains items that the second array does not, @@ -42,29 +97,37 @@ async function removeMarkdownFiles(targetDirectory, sourceFiles, autogeneratedTy // Gets a list of all files under targetDirectory that have the // `autogenerated` frontmatter set to `autogeneratedType`. 
-async function getAutogeneratedFiles(targetDirectory, autogeneratedType) { +async function getAutogeneratedFiles( + targetDirectory: string, + autogeneratedType: string | undefined, +): Promise { const files = walk(targetDirectory, { includeBasePath: true, - childDirectories: false, + directories: false, globs: ['**/*.md'], ignore: ['**/README.md', '**/index.md'], }) return ( await Promise.all( - files.map(async (file) => { + files.map(async (file: string) => { const { data } = matter(await readFile(file, 'utf-8')) if (data.autogenerated === autogeneratedType) { return file } }), ) - ).filter(Boolean) + ).filter(Boolean) as string[] } // The `sourceContent` object contains the new content and target file // path for the Markdown files. Ex: // { : { data: , content: } } -async function updateMarkdownFiles(targetDirectory, sourceContent, frontmatter, indexOrder = {}) { +async function updateMarkdownFiles( + targetDirectory: string, + sourceContent: SourceContent, + frontmatter: FrontmatterData, + indexOrder: IndexOrder = {}, +): Promise { for (const [file, newContent] of Object.entries(sourceContent)) { await updateMarkdownFile(file, newContent.data, newContent.content) } @@ -82,11 +145,11 @@ async function updateMarkdownFiles(targetDirectory, sourceContent, frontmatter, // edit the modifiable content of the file. If the Markdown file doesn't // exists, we create a new Markdown file. async function updateMarkdownFile( - file, - sourceData, - sourceContent, - commentDelimiter = MARKDOWN_COMMENT, -) { + file: string, + sourceData: FrontmatterData, + sourceContent: string, + commentDelimiter: string = MARKDOWN_COMMENT, +): Promise { if (existsSync(file)) { // update only the versions property of the file, assuming // the other properties have already been added and edited @@ -132,10 +195,10 @@ async function updateMarkdownFile( // ensure that the Markdown files have been updated and any files // that need to be deleted have been removed. async function updateDirectory( - directory, - frontmatter, - { rootDirectoryOnly = false, shortTitle = false, indexOrder = {} } = {}, -) { + directory: string, + frontmatter: FrontmatterData, + { rootDirectoryOnly = false, shortTitle = false, indexOrder = {} }: UpdateDirectoryOptions = {}, +): Promise { const initialDirectoryListing = await getDirectoryInfo(directory) // If there are no children on disk, remove the directory if (initialDirectoryListing.directoryContents.length === 0 && !rootDirectoryOnly) { @@ -162,7 +225,7 @@ async function updateDirectory( const { childrenOnDisk, indexChildren } = getChildrenToCompare( indexFile, directoryContents, - data.children, + data.children || [], ) const itemsToAdd = difference(childrenOnDisk, indexChildren) @@ -199,12 +262,16 @@ async function updateDirectory( // Children properties include a leading slash except when the // index.md file is the root index.md file. We also want to // remove the file extension from the files on disk. 
-function getChildrenToCompare(indexFile, directoryContents, fmChildren) { +function getChildrenToCompare( + indexFile: string, + directoryContents: string[], + fmChildren: string[] | undefined, +): ChildrenComparison { if (!fmChildren) { throw new Error(`No children property found in ${indexFile}`) } - const isEarlyAccess = (item) => isRootIndexFile(indexFile) && item === 'early-access' + const isEarlyAccess = (item: string) => isRootIndexFile(indexFile) && item === 'early-access' // Get the list of children from the directory contents const childrenOnDisk = directoryContents @@ -233,18 +300,24 @@ function getChildrenToCompare(indexFile, directoryContents, fmChildren) { // // 3. If the index file is not autogenerated, we leave the ordering // as is and append new children to the end. -function updateIndexChildren(data, childUpdates, indexFile, indexOrder, rootIndex = false) { +function updateIndexChildren( + data: FrontmatterData, + childUpdates: ChildUpdates, + indexFile: string, + indexOrder: IndexOrder, + rootIndex: boolean = false, +): FrontmatterData { const { itemsToAdd, itemsToRemove } = childUpdates const childPrefix = rootIndex ? '' : '/' // Get a new list of children with added and removed items - const children = [...data.children] + const children = [...(data.children || [])] // remove the '/' prefix used in index.md children .map((item) => item.replace(childPrefix, '')) .filter((item) => !itemsToRemove.includes(item)) children.push(...itemsToAdd) - const orderedIndexChildren = [] + const orderedIndexChildren: string[] = [] // Only used for tests. During testing, the content directory is // in a temp directory so the paths are not relative to @@ -280,7 +353,11 @@ function updateIndexChildren(data, childUpdates, indexFile, indexOrder, rootInde // Gets the contents of the index.md file from disk if it exits or // creates a new index.md file with the default frontmatter. -async function getIndexFileContents(indexFile, frontmatter, shortTitle = false) { +async function getIndexFileContents( + indexFile: string, + frontmatter: FrontmatterData, + shortTitle: boolean = false, +): Promise<{ data: FrontmatterData; content: string }> { const directory = path.dirname(indexFile) const indexFileContent = { data: { @@ -300,8 +377,11 @@ async function getIndexFileContents(indexFile, frontmatter, shortTitle = false) // Builds the index.md versions frontmatter by consolidating // the versions from each Markdown file in the directory + the // index.md files in any subdirectories of directory. 
-async function getIndexFileVersions(directory, files) { - const versions = new Set() +async function getIndexFileVersions( + directory: string, + files: string[], +): Promise<{ [key: string]: string }> { + const versions = new Set() await Promise.all( files.map(async (file) => { const filepath = path.join(directory, file) @@ -319,7 +399,7 @@ async function getIndexFileVersions(directory, files) { throw new Error(`Frontmatter in ${filepath} does not contain versions.`) } const fmVersions = getApplicableVersions(data.versions) - fmVersions.forEach((version) => versions.add(version)) + fmVersions.forEach((version: string) => versions.add(version)) }), ) const versionArray = [...versions] @@ -343,9 +423,11 @@ and returns the frontmatter equivalent JSON: ghes: '*' } */ -export async function convertVersionsToFrontmatter(versions) { - const frontmatterVersions = {} - const numberedReleases = {} +export async function convertVersionsToFrontmatter( + versions: string[], +): Promise<{ [key: string]: string }> { + const frontmatterVersions: { [key: string]: string } = {} + const numberedReleases: { [key: string]: { availableReleases: (string | undefined)[] } } = {} // Currently, only GHES is numbered. Number releases have to be // handled differently because they use semantic versioning. @@ -362,7 +444,9 @@ export async function convertVersionsToFrontmatter(versions) { // a release is no longer supported. const i = docsVersion.releases.indexOf(docsVersion.currentRelease) if (!numberedReleases[docsVersion.shortName]) { - const availableReleases = Array(docsVersion.releases.length).fill(undefined) + const availableReleases: (string | undefined)[] = Array(docsVersion.releases.length).fill( + undefined, + ) availableReleases[i] = docsVersion.currentRelease numberedReleases[docsVersion.shortName] = { availableReleases, @@ -388,7 +472,7 @@ export async function convertVersionsToFrontmatter(versions) { .join(' || ') frontmatterVersions[key] = semVer } else { - const semVer = [] + const semVer: string[] = [] if (!availableReleases[availableReleases.length - 1]) { const startVersion = availableReleases.filter(Boolean).pop() semVer.push(`>=${startVersion}`) @@ -402,7 +486,7 @@ export async function convertVersionsToFrontmatter(versions) { }) const sortedFrontmatterVersions = Object.keys(frontmatterVersions) .sort() - .reduce((acc, key) => { + .reduce((acc: { [key: string]: string }, key) => { acc[key] = frontmatterVersions[key] return acc }, {}) @@ -412,7 +496,7 @@ export async function convertVersionsToFrontmatter(versions) { // This is uncommon, but we potentially could have the case where an // article was versioned for say 3.2, not for 3.3, and then again // versioned for 3.4. 
This will result in a custom semantic version range -function checkVersionContinuity(versions) { +function checkVersionContinuity(versions: (string | undefined)[]): boolean { const availableVersions = [...versions] // values at the beginning or end of the array are not gaps but normal @@ -427,18 +511,18 @@ function checkVersionContinuity(versions) { } // Returns true if the indexFile is the root index.md file -function isRootIndexFile(indexFile) { +function isRootIndexFile(indexFile: string): boolean { return indexFile === ROOT_INDEX_FILE } // Creates a new directory if it doesn't exist -async function createDirectory(targetDirectory) { +async function createDirectory(targetDirectory: string): Promise { if (!existsSync(targetDirectory)) { await mkdirp(targetDirectory) } } -async function getDirectoryInfo(directory) { +async function getDirectoryInfo(directory: string): Promise { if (!existsSync(directory)) { throw new Error(`Directory ${directory} did not exist when attempting to get directory info.`) } @@ -454,7 +538,7 @@ async function getDirectoryInfo(directory) { return { directoryContents, directoryFiles, childDirectories } } -function appendVersionComment(stringifiedContent) { +function appendVersionComment(stringifiedContent: string): string { return stringifiedContent.replace( '\nversions:\n', `\nversions: # DO NOT MANUALLY EDIT. CHANGES WILL BE OVERWRITTEN BY A 🤖\n`, diff --git a/src/automated-pipelines/tests/frontmatter-versions.js b/src/automated-pipelines/tests/frontmatter-versions.ts similarity index 100% rename from src/automated-pipelines/tests/frontmatter-versions.js rename to src/automated-pipelines/tests/frontmatter-versions.ts diff --git a/src/automated-pipelines/tests/rendering.js b/src/automated-pipelines/tests/rendering.ts similarity index 74% rename from src/automated-pipelines/tests/rendering.js rename to src/automated-pipelines/tests/rendering.ts index 1f287fa1fe..031a937c47 100644 --- a/src/automated-pipelines/tests/rendering.js +++ b/src/automated-pipelines/tests/rendering.ts @@ -6,20 +6,31 @@ import { describe, expect, test, vi } from 'vitest' import { loadPages } from '#src/frame/lib/page-data.js' import { get } from '#src/tests/helpers/e2etest.js' +// Type definitions for page objects +type Page = { + autogenerated?: boolean + fullPath: string + permalinks: Array<{ href: string }> + versions: { + feature?: any + [key: string]: any + } +} + // Get a list of the autogenerated pages -const pageList = await loadPages(undefined, ['en']) +const pageList: Page[] = await loadPages(undefined, ['en']) describe('autogenerated docs render', () => { vi.setConfig({ testTimeout: 3 * 60 * 1000 }) - const autogeneratedPages = pageList.filter((page) => page.autogenerated) + const autogeneratedPages = pageList.filter((page: Page) => page.autogenerated) test('all automated pages', async () => { // Each page should render with 200 OK. Also, check for duplicate // heading IDs on each page. const errors = ( await Promise.all( - autogeneratedPages.map(async (page) => { + autogeneratedPages.map(async (page: Page) => { const url = page.permalinks[0].href // Some autogenerated pages can be very slow and might fail. // So we allow a few retries to avoid false positives. 
@@ -34,21 +45,25 @@ describe('autogenerated docs render', () => { .map((_, el) => $(el).attr('id')) .get() .sort() - const dupes = headingIDs.filter((item, index) => headingIDs.indexOf(item) !== index) + const dupes = headingIDs.filter( + (item: string, index: number) => headingIDs.indexOf(item) !== index, + ) if (dupes.length) { return `In ${url}, the following duplicate heading IDs were found: ${dupes.join(', ')}` } }), ) - ).filter(Boolean) + ).filter(Boolean) as string[] expect(errors.length, errors.join('\n')).toBe(0) }) - const codeqlCliPath = JSON.parse( + const codeqlCliPath: string = JSON.parse( readFileSync('src/codeql-cli/lib/config.json', 'utf-8'), ).targetDirectory - const restPath = JSON.parse(readFileSync('src/rest/lib/config.json', 'utf-8')).targetDirectory - const ghappsPath = JSON.parse( + const restPath: string = JSON.parse( + readFileSync('src/rest/lib/config.json', 'utf-8'), + ).targetDirectory + const ghappsPath: string = JSON.parse( readFileSync('src/github-apps/lib/config.json', 'utf-8'), ).targetDirectory // Right now only the rest and codeqlcli pages get their frontmatter updated automatically. @@ -56,14 +71,14 @@ describe('autogenerated docs render', () => { // single pages. The apps pages are also nested inside of the rest pages. So we want to filter out only // rest pages and the codeql cli pages for this test. const filesWithAutoUpdatedVersions = autogeneratedPages.filter( - (page) => + (page: Page) => (!page.fullPath.startsWith(ghappsPath) && page.fullPath.startsWith(restPath)) || page.fullPath.startsWith(codeqlCliPath), ) test.each(filesWithAutoUpdatedVersions)( 'autogenerated page $fullPath does not use feature based versioning', - (page) => { + (page: Page) => { expect(page.versions.feature).toBe(undefined) }, ) diff --git a/src/automated-pipelines/tests/update-markdown.js b/src/automated-pipelines/tests/update-markdown.ts similarity index 84% rename from src/automated-pipelines/tests/update-markdown.js rename to src/automated-pipelines/tests/update-markdown.ts index a77feb5ae6..c3db522f54 100644 --- a/src/automated-pipelines/tests/update-markdown.js +++ b/src/automated-pipelines/tests/update-markdown.ts @@ -6,15 +6,36 @@ import path from 'path' import { afterAll, beforeAll, describe, expect, test } from 'vitest' import { mkdirp } from 'mkdirp' import matter from 'gray-matter' +import type { FrontmatterVersions } from '#src/types.js' import { updateContentDirectory } from '../lib/update-markdown.js' -const versions = { +// Type definitions +type ContentItem = { + data: { + title: string + versions: FrontmatterVersions + autogenerated: string + } + content: string +} + +type NewContentData = { + [key: string]: ContentItem +} + +type IndexOrder = { + [key: string]: { + startsWith: string[] + } +} + +const versions: FrontmatterVersions = { fpt: '*', ghec: '*', ghes: '*', } -const newContentData = { +const newContentData: NewContentData = { 'actions/secrets.md': { data: { title: 'Secrets', @@ -41,10 +62,10 @@ const newContentData = { }, } -const tempDirectory = `${tmpdir()}/update-content-directory-test` -const tempContentDirectory = `${tempDirectory}/content` -const targetDirectory = path.join(tempContentDirectory, 'rest') -const indexOrder = { +const tempDirectory: string = `${tmpdir()}/update-content-directory-test` +const tempContentDirectory: string = `${tempDirectory}/content` +const targetDirectory: string = path.join(tempContentDirectory, 'rest') +const indexOrder: IndexOrder = { 'content/rest/index.md': { startsWith: ['overview', 'guides'], }, @@ 
-66,9 +87,9 @@ describe('automated content directory updates', () => { // because outside of testing it only runs in the docs-internal repo. // Because of that, we need to update the content paths to use the // full file path. - const contentDataFullPath = {} + const contentDataFullPath: { [key: string]: ContentItem } = {} Object.keys(newContentData).forEach( - (key) => (contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]), + (key: string) => (contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]), ) // Rewrites the content directory in the operating system's diff --git a/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts b/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts index 8bde91c546..1c5de5dfdd 100644 --- a/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts +++ b/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts @@ -40,12 +40,6 @@ export async function populateIndex( options: Options, ) { console.log(chalk.yellow(`\nIndexing ${chalk.bold(indexName)}`)) - const bulkOperations = records.flatMap((doc) => [{ index: { _index: indexAlias } }, doc]) - - const bulkOptions = { - refresh: false, - timeout: '5m', - } const attempts = options.retries || 0 const sleepTime = options.sleepTime || DEFAULT_SLEEPTIME_SECONDS * 1000 @@ -57,7 +51,15 @@ export async function populateIndex( const t0 = new Date() const bulkResponse = await retryOnErrorTest( (error) => error instanceof errors.ResponseError && error.meta.statusCode === 429, - () => client.bulk({ operations: bulkOperations, ...bulkOptions }), + () => + client.helpers.bulk({ + datasource: records, + onDocument: () => ({ index: { _index: indexAlias } }), + flushBytes: 10_000_000, // stop before breaker trips + concurrency: 2, // back-off a bit + refreshOnCompletion: true, + timeout: '5m', + }), { attempts, sleepTime, diff --git a/src/tools/components/Fields.tsx b/src/tools/components/Fields.tsx index 96f9cc3b08..b589907ed4 100644 --- a/src/tools/components/Fields.tsx +++ b/src/tools/components/Fields.tsx @@ -16,7 +16,7 @@ export const Fields = (fieldProps: { const { open, setOpen, items, onSelect, renderItem } = fieldProps return ( - + {items.map((item, i) => item.divider ? ( @@ -42,6 +42,7 @@ export const Fields = (fieldProps: { textAlign: 'left', }, }} + role={item.extra?.arrow || item.extra?.info ? 'menuitem' : 'menuitemradio'} > {renderItem ? renderItem(item) : item.text}
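The `populateIndex` change above swaps the hand-built `operations` array passed to `client.bulk` for the Elasticsearch client's bulk helper, which batches and throttles the requests itself. For reference, a self-contained sketch of that helper API — the node URL, index name, and records are placeholders, and the 429 retry wrapper used in the real code is omitted:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder cluster

type SearchRecord = { objectID: string; title: string; content: string }
const records: SearchRecord[] = [
  { objectID: '1', title: 'Example page', content: 'Example body text' },
]

// The helper builds the [{ index: ... }, doc, ...] pairs internally, so no
// flatMap over the records is needed.
const stats = await client.helpers.bulk({
  datasource: records,
  onDocument: () => ({ index: { _index: 'github-docs-example' } }), // placeholder index
  flushBytes: 10_000_000, // flush before a single request grows large enough to trip the breaker
  concurrency: 2, // keep at most two bulk requests in flight
  refreshOnCompletion: true, // make the documents searchable once indexing finishes
})

console.log(`indexed ${stats.successful}/${stats.total} documents, ${stats.failed} failed`)
```

Per the inline comments in the change, the `flushBytes` ceiling and low `concurrency` are what keep individual bulk requests under the cluster's circuit-breaker limits while backing off the indexing load.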