Update OpenAPI workflow to use new GitHub directory format (#52578)
src/github-apps/scripts/sync.js
Normal file → Executable file
@@ -6,8 +6,9 @@ import { readFile, writeFile } from 'fs/promises'
 import path from 'path'
 import { slug } from 'github-slugger'
 import yaml from 'js-yaml'
+import walk from 'walk-sync'
 
-import { getContents } from '#src/workflows/git-utils.js'
+import { getContents, getDirectoryContents } from '#src/workflows/git-utils.js'
 import permissionSchema from './permission-list-schema.js'
 import enabledSchema from './enabled-list-schema.js'
 import { validateJson } from '#src/tests/lib/validate-json-schema.js'
@@ -156,22 +157,26 @@ export async function getProgAccessData(progAccessSource, isRest = false) {
   let progAccessDataRaw
   let progActorResources
   const progAccessFilepath = 'config/access_control/programmatic_access.yaml'
-  const progActorFilepath = 'config/locales/programmatic_actor_fine_grained_resources.en.yml'
+  const progActorDirectory =
+    'config/access_control/fine_grained_permissions/programmatic_actor_fine_grained_resources'
 
   if (!useRemoteGitHubFiles) {
     progAccessDataRaw = yaml.load(
       await readFile(path.join(progAccessSource, progAccessFilepath), 'utf8'),
     )
-    progActorResources = yaml.load(
-      await readFile(path.join(progAccessSource, progActorFilepath), 'utf8'),
-    ).en.programmatic_actor_fine_grained_resources
+    progActorResources = await getProgActorResourceContent({
+      localDirectory: path.join(progAccessSource, progActorDirectory),
+    })
   } else {
     progAccessDataRaw = yaml.load(
       await getContents('github', 'github', 'master', progAccessFilepath),
     )
-    progActorResources = yaml.load(
-      await getContents('github', 'github', 'master', progActorFilepath),
-    ).en.programmatic_actor_fine_grained_resources
+    progActorResources = await getProgActorResourceContent({
+      owner: 'github',
+      repo: 'github',
+      branch: 'master',
+      path: progActorDirectory,
+    })
   }
 
   const progAccessData = {}
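The hunk above replaces the single locale file (everything nested under en.programmatic_actor_fine_grained_resources) with a directory of per-resource YAML files. A minimal sketch of the merge this implies, assuming each file holds exactly one top-level key as the new helper enforces; the file names and descriptions below are invented for illustration and are not taken from the github/github repository:

import yaml from 'js-yaml'

// Hypothetical per-resource file contents (assumptions, not real values).
const files = [
  'actions: Read and write access to GitHub Actions artifacts and workflows\n',
  'contents: Read and write access to repository contents\n',
]

// Mirrors the merge getProgActorResourceContent() performs: each file parses
// to an object with a single key, and all keys are folded into one map,
// matching the shape the old en.programmatic_actor_fine_grained_resources
// block used to provide.
const progActorResources = {}
for (const file of files) {
  const parsed = yaml.load(file)
  if (Object.keys(parsed).length !== 1) {
    throw new Error('Each resource file must contain exactly one top-level key')
  }
  Object.assign(progActorResources, parsed)
}

console.log(progActorResources)
// -> { actions: '...', contents: '...' }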
@@ -291,3 +296,49 @@ async function validateAppData(data, pageType) {
     }
   }
 }
+
+// When getting files from the GitHub repo locally (or in a Codespace)
+// you can pass the full or relative path to the `github` repository
+// directory on disk.
+// When the source directory is `rest-api-description` (which is more common)
+// you can pass the `owner`, `repo`, `branch`, and `path` (repository path)
+async function getProgActorResourceContent({
+  owner,
+  repo,
+  branch,
+  path,
+  gitHubSourceDirectory = null,
+}) {
+  // Get files either locally from disk or from the GitHub remote repo
+  let files
+  if (gitHubSourceDirectory) {
+    files = await getProgActorContentFromDisk(gitHubSourceDirectory)
+  } else {
+    files = await getDirectoryContents(owner, repo, branch, path)
+  }
+
+  // We need to format the file content into a single object. Each file
+  // contains a single key and a single value that needs to be added
+  // to the object.
+  const progActorResources = {}
+  for (const file of files) {
+    const fileContent = yaml.load(file)
+    // Each file should only contain a single key and value.
+    if (Object.keys(fileContent).length !== 1) {
+      throw new Error(`Error: The file ${JSON.stringify(fileContent)} must only have one key.`)
+    }
+    Object.entries(fileContent).forEach(([key, value]) => {
+      progActorResources[key] = value
+    })
+  }
+  return progActorResources
+}
+
+async function getProgActorContentFromDisk(directory) {
+  const files = walk(directory, {
+    includeBasePath: true,
+    directories: false,
+  })
+  const promises = files.map(async (file) => await readFile(file, 'utf8'))
+  return await Promise.all(promises)
+}
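For the local path, getProgActorContentFromDisk relies on walk-sync to enumerate every file under the directory. A small sketch of that behavior, using a placeholder directory path: directories: false keeps only files (recursively), and includeBasePath: true returns paths joined with the base directory so they can be passed straight to readFile.

import { readFile } from 'fs/promises'
import walk from 'walk-sync'

// Placeholder path for illustration only.
const dir = 'config/access_control/fine_grained_permissions'
const paths = walk(dir, { includeBasePath: true, directories: false })
const contents = await Promise.all(paths.map((p) => readFile(p, 'utf8')))
console.log(`${contents.length} files read`)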
src/workflows/git-utils.js

@@ -255,3 +255,28 @@ async function secondaryRateLimitRetry(callable, args, maxAttempts = 10, sleepTi
     throw err
   }
 }
+
+// Recursively gets the contents of a directory within a repo. Returns an
+// array of file contents. This function could be modified to return an array
+// of objects that include the path and the content of the file if needed
+// in the future.
+export async function getDirectoryContents(owner, repo, branch, path) {
+  const { data } = await getContent(owner, repo, branch, path)
+  const files = []
+
+  for (const blob of data) {
+    if (blob.type === 'dir') {
+      files.push(...(await getDirectoryContents(owner, repo, branch, blob.path)))
+    } else if (blob.type === 'file') {
+      if (!data.content) {
+        const blobContents = await getContentsForBlob(owner, repo, blob.sha)
+        files.push(blobContents)
+      } else {
+        // decode Base64 encoded contents
+        const decodedContent = Buffer.from(blob.content, 'base64').toString()
+        files.push(decodedContent)
+      }
+    }
+  }
+  return files
+}
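getDirectoryContents builds on getContent and getContentsForBlob, presumably existing helpers elsewhere in git-utils.js that are not shown in this diff. The split exists because the GitHub Contents API returns a directory listing as an array of entries without populated content, so individual files may need to be fetched as Git blobs and Base64-decoded. A rough Octokit sketch of what those helpers are assumed to wrap; the real helpers likely add authentication and the secondaryRateLimitRetry handling referenced above.

import { Octokit } from '@octokit/rest'

// Sketch only: assumed shapes for getContent / getContentsForBlob, not the
// actual implementations from git-utils.js.
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN })

// For a directory path, the Contents API responds with an array of entries
// ({ type, path, sha, ... }); their `content` field is not populated.
async function getContent(owner, repo, ref, path) {
  return await octokit.rest.repos.getContent({ owner, repo, ref, path })
}

// A single blob comes back Base64 encoded and has to be decoded before use.
async function getContentsForBlob(owner, repo, fileSha) {
  const { data } = await octokit.rest.git.getBlob({ owner, repo, file_sha: fileSha })
  return Buffer.from(data.content, 'base64').toString()
}

// Example (owner/repo/path are placeholders):
// const { data } = await getContent('github', 'docs', 'main', 'src/workflows')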