
Remove unused things, mostly Azure-related (#54192)

Co-authored-by: Kevin Heis <heiskr@users.noreply.github.com>
Co-authored-by: Evan Bonsignori <ebonsignori@github.com>
Hector Alfaro authored on 2025-01-30 14:35:57 -05:00; committed by GitHub
parent 7a16634dda
commit 2897713437
18 changed files with 11 additions and 409 deletions

View File

@@ -39,9 +39,6 @@ jobs:
       - uses: github/codeql-action/init@eb055d739abdc2e8de2e5f4ba1a8b246daa779aa # v3.26.0
         with:
           languages: javascript # comma separated list of values from {go, python, javascript, java, cpp, csharp, ruby}
-          config: |
-            paths-ignore:
-              - 'src/open-source/scripts/add-pr-links.js'
       - uses: github/codeql-action/analyze@eb055d739abdc2e8de2e5f4ba1a8b246daa779aa # v3.26.0
         continue-on-error: true

View File

@@ -1,6 +1,5 @@
 data/release-notes/
 src/bookmarklets/
-src/open-source/scripts/add-pr-links.js
 /.next/
 /.coverage

View File

@@ -232,8 +232,6 @@
"!/.*", "!/.*",
"/.next/", "/.next/",
"src/bookmarklets/*", "src/bookmarklets/*",
"src/open-source/scripts/add-pr-links.js",
"src/open-source/scripts/pr-link-source.js",
"rest-api-description/", "rest-api-description/",
"docs-internal-data/", "docs-internal-data/",
"src/code-scanning/scripts/generate-code-scanning-query-list.ts" "src/code-scanning/scripts/generate-code-scanning-query-list.ts"

View File

@@ -39,11 +39,3 @@ The installation requires a few steps:
 1. Paste the path in place of where it says `REPLACE_ME` in line 1 (make sure to leave the single quotes around it).
 1. Change the title to something like `Open in VSC`.
 1. Drag the generated link onto your bookmarks bar.
-
-## Add preview links to PRs
-
-[`src/bookmarklets/add-pr-links.js`](./add-pr-links.js)
-
-This bookmarklet modifies the `Files changed` page of a GitHub pull request that has a current staging deployment. For each Markdown file in the diff view, it adds links to the preview deployment of the file for each version: `FPT / GHEC / GHES / AE`. (Some of these may redirect to another version or 404 if that version of the page doesn't exist.)
-
-Note: readable JavaScript source lives in `src/bookmarklets/pr-link-source.js`. The bookmarklet code was generated via https://chriszarate.github.io/bookmarkleter.

View File

@@ -5,7 +5,7 @@ const router = express.Router()
 /**
  * Returns the healthiness of the service.
- * This may be used by azure app service (forthcoming) to determine whether this
+ * This may be used by Moda to determine whether this
  * instance remains in the pool to handle requests
  * For example: if we have a failing database connection we may return a 500 status here.
  */
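
For context, a minimal sketch of such a healthcheck route — assuming Express and a hypothetical isDatabaseHealthy() dependency, not this repo's exact file — could look like:

import express, { Request, Response } from 'express'

const router = express.Router()

// Hypothetical dependency; swap in whatever downstream checks matter (database, cache, etc.).
async function isDatabaseHealthy(): Promise<boolean> {
  return true
}

// GET /healthcheck — 200 while this instance should stay in the pool,
// 500 so the orchestrator can rotate the instance out.
router.get('/', async (req: Request, res: Response) => {
  if (await isDatabaseHealthy()) {
    res.status(200).send('OK')
  } else {
    res.status(500).send('Unhealthy')
  }
})

export default router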

View File

@@ -115,9 +115,8 @@ export default function (app: Express) {
     app.use(datadog)
   }
 
-  // Put this early to make it as fast as possible because it's used,
-  // and used very often, by the Azure load balancer to check the
-  // health of each node.
+  // Put this early to make it as fast as possible because it's used
+  // to check the health of each cluster.
   app.use('/healthcheck', healthcheck)
 
   // Must appear before static assets and all other requests

View File

@@ -30,10 +30,10 @@ describe('robots.txt', () => {
     ).toBe(true)
   })
 
-  test('disallows indexing of azurecontainer.io domains', async () => {
+  test('disallows indexing of internal domains', async () => {
     const res = await get('/robots.txt', {
       headers: {
-        host: 'docs-internal-preview-12345-asdfz.azurecontainer.io',
+        host: 'docs-internal.github.com',
       },
     })
     expect(res.body).toEqual('User-agent: *\nDisallow: /')
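
The behavior this test expects could come from a tiny middleware along these lines — a sketch under assumed names (INTERNAL_HOSTS), not the repo's actual robots.txt handler:

import { Request, Response } from 'express'

// Hosts that should never be indexed by crawlers (illustrative list).
const INTERNAL_HOSTS = new Set(['docs-internal.github.com'])

export default function robots(req: Request, res: Response) {
  res.type('text/plain')
  if (INTERNAL_HOSTS.has(req.hostname)) {
    // Internal domains: block all crawling.
    res.send('User-agent: *\nDisallow: /')
  } else {
    // Public docs: allow crawling.
    res.send('User-agent: *\nAllow: /')
  }
}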

View File

@@ -26,7 +26,7 @@ Periodically, translators read the `content/**` and `data/**` directories from `
 ## Deployment of translated content
 
-In the deployment workflow, we [checkout](https://github.com/github/docs-internal/blob/a8e52aad1a6b67f41da92d314bd7fd8cd84193a4/.github/workflows/azure-prod-build-deploy.yml#L90-L92) each and every translation repo and put their contents into the `translations/` directory.
+During the build step of our deployment, we checkout every translation repo into the `translations/` directory.
 
 The enabled languages and their source directories are interpreted in [`src/languages/lib/languages.js`](https://github.com/github/docs-internal/blob/a8e52aad1a6b67f41da92d314bd7fd8cd84193a4/src/languages/lib/languages.js), which ensures English and translated content are in the same Docker image we deploy.

View File

@@ -1,74 +0,0 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import walk from 'walk-sync'
import frontmatter from 'src/frame/lib/read-frontmatter.js'
import { loadPages } from 'src/frame/lib/page-data.js'
import patterns from 'src/frame/lib/patterns.js'
import loadRedirects from 'src/redirects/lib/precompile.js'
import { allVersionKeys } from 'src/versions/lib/all-versions.js'
// get all content and data files
const files = ['content', 'data']
.map((dir) => {
return walk(path.join(process.cwd(), dir), {
includeBasePath: true,
directories: false,
}).filter((file) => file.endsWith('.md') && !file.endsWith('README.md'))
})
.flat()
// match [foo](/v3) and [bar](/v4) Markdown links
const linkRegex = /\(\/v[34].*?\)/g
main()
async function main() {
// we need to load the pages so we can get the redirects
const englishPages = (await loadPages()).filter((p) => p.languageCode === 'en')
const redirects = await loadRedirects(englishPages)
for (const file of files) {
const { data, content } = frontmatter(fs.readFileSync(file, 'utf8'))
const links = content?.match(linkRegex)
if (!links) continue
// remove parentheses: (/v3) -> /v3
// also remove trailing slash before closing parens if there is one
const devLinks = links.map((link) => link.replace('(', '').replace(/\/?\)/, ''))
let newContent = content
for (const devLink of devLinks) {
const [link, fragment] = devLink.split(/\/?#/)
let redirect = redirects[link]
if (!redirect) {
console.log(`no redirect found for ${devLink} in ${file}`)
continue
}
// do some cleanup
redirect = redirect
// remove language code segment
.replace(patterns.getLanguageCode, '')
// remove version segment
.replace(new RegExp(`/(${allVersionKeys.join('|')})`), '')
// re-add the fragment after removing any fragment added via the redirect
// otherwise /v3/git/refs/#create-a-reference will become /rest/reference/git#refs#create-a-reference
// we want to preserve the #create-a-reference fragment, not #refs
const newLink = fragment ? redirect.replace(/#.+?$/, '') + '#' + fragment : redirect
// first replace the old link with the new link
// then remove any trailing slashes
newContent = newContent?.replace(new RegExp(`${devLink}/?(?=\\))`), newLink)
}
fs.writeFileSync(file, frontmatter.stringify(newContent || '', data || {}))
}
console.log('Done!')
}

View File

@@ -24,7 +24,6 @@ export default new StatsD({
   // DD_AGENT_HOST and DD_DOGSTATSD_PORT environment variables.
   // If undefined, the host will default to 'localhost' and the port
   // will default to 8125.
-  // Azure docker templates configure DD_AGENT_HOST but not DD_DOGSTATSD_PORT.
   // Moda configuration defines DD_DOGSTATSD_PORT but not DD_AGENT_HOST.
   // For Moda, the host must be set to the Kubernetes node name, which is
   // set in KUBE_NODE_HOSTNAME.
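
Read literally, the comments imply a host/port resolution like the sketch below (assuming the hot-shots client, which exposes a StatsD constructor with host and port options; this is not the file's exact code):

import { StatsD } from 'hot-shots'

// Moda sets DD_DOGSTATSD_PORT and exposes the node via KUBE_NODE_HOSTNAME;
// anything left undefined falls back to the client defaults (localhost:8125).
export default new StatsD({
  host: process.env.KUBE_NODE_HOSTNAME || process.env.DD_AGENT_HOST,
  port: process.env.DD_DOGSTATSD_PORT ? parseInt(process.env.DD_DOGSTATSD_PORT, 10) : undefined,
})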

View File

@@ -1,3 +0,0 @@
# Open source
The open source subject folder contains files that relate to the github/docs repository, so that we can support open source contributors to docs.github.com.

View File

@@ -1 +0,0 @@
javascript:void%20function(){!async%20function(){function%20a(a,b,c){var%20d=document.createElement(%22A%22);return%20d.innerHTML=b,d.href=c,d.target=%22_blank%22,a.appendChild(d),a}const%20b=/https:\/\/github.com\/github\/([^\/]*)\/pull\/\d*\/files/;if(!window.location.href.match(b))return%20void%20window.alert(%22You're%20not%20on%20a%20PR%20'Files%20changed'%20page.%20\uD83D\uDE43%22);let%20c=window.location.href.replace(/files.*/g,%22%22),d=await%20fetch(c).then(function(a){return%20a.text()}).then(function(a){for(var%20b=new%20DOMParser,c=b.parseFromString(a,%22text/html%22),d=c.getElementsByClassName(%22TimelineItem%22),e=0;e%3Cd.length;e++)for(var%20f,g=d[e],h=g.getElementsByTagName(%22a%22),k=0;k%3Ch.length;k++)if(f=h[k],f.innerText.match(/View%20deployment/))var%20l=f.getAttribute(%22href%22);return%20l});if(null==d)return%20void%20window.alert(%22No%20preview%20deployment%20found!%20\uD83D\uDE2D%22);d=d.replace(/\/$/,%22%22);var%20e=d+%22/en%22,f=d+%22/en/enterprise-cloud%40latest%22,g=d+%22/en/enterprise-server%40latest%22,h=d+%22/en/github-ae%40latest%22;const%20j=document.querySelectorAll(%22div.file-info%22);for(var%20k,l=0;l%3Cj.length;l++)if(k=j[l].querySelector(%22a%22).title,0===k.search(%22data/%22))continue;else{var%20m=/\.md$/,n=0%3C=k.search(m);if(n){console.log(%22link:%20%22+k),k=k.replace(m,%22%22),k=k.replace(/^content/,%22%22),k=k.replace(/\/index/,%22%22);var%20o=document.createElement(%22SPAN%22);o.style.fontFamily=%22-apple-system,BlinkMacSystemFont,Segoe%20UI,Helvetica,Arial,sans-serif%22,o.innerHTML=%22%26nbsp;%20View:%20%22,o=a(o,%22FPT%22,e+k),o.innerHTML+=%22%20/%20%22,o=a(o,%22GHEC%22,f+k),o.innerHTML+=%22%20/%20%22,o=a(o,%22GHES%22,g+k),o.innerHTML+=%22%20/%20%22,o=a(o,%22AE%22,h+k),j[l].appendChild(o)}}}()}();

View File

@@ -1,87 +0,0 @@
#!/usr/bin/env node
// [start-readme]
//
// In the .github/workflows, We use...
//
// uses: some/action@95cb08cb2672c73d4ffd2f422e6d11953d2a9c70
//
// But sometimes we fail to update them uniformly. This script
// is for finding these unicorns.
//
// [end-readme]
//
//
import fs from 'fs'
import { program } from 'commander'
import walk from 'walk-sync'
import chalk from 'chalk'
program
.description('Finds action shas that are unusual')
.option('-v, --verbose', 'Verbose outputs')
.parse(process.argv)
main(program.opts(), program.args)
async function main(opts, args) {
const files = walk('.github/workflows', { globs: ['*.yml'], includeBasePath: true })
const counts = {}
const places = {}
for (const file of files) {
const content = fs.readFileSync(file, 'utf-8')
let lineNo = 0
for (const line of content.split(/\n/g)) {
lineNo++
if (line.includes('uses:') && /@[a-f0-9]{40}/.test(line)) {
const match = line.match(/\b(?<name>[\w/-]+)@(?<sha>[a-f0-9]{40})/)
const whole = match[0]
if (!places[whole]) {
places[whole] = []
}
places[whole].push({ file, line, lineNo })
const { name, sha } = match.groups
if (!counts[name]) {
counts[name] = {}
}
counts[name][sha] = 1 + (counts[name][sha] || 0)
}
}
}
const suspects = Object.fromEntries(
Object.entries(counts).filter(([, shas]) => Object.keys(shas).length > 1),
)
const countSuspects = Object.keys(suspects).length
if (countSuspects) {
console.log(chalk.yellow(`Found ${countSuspects} suspect${countSuspects === 1 ? '' : 's'}\n`))
for (const [action, shas] of Object.entries(suspects)) {
const total = Object.values(shas).reduce((a, b) => a + b, 0)
const flat = Object.entries(shas)
.map(([sha, count]) => [count, sha])
.sort((a, b) => b[0] - a[0])
const mostPopular = flat[0]
for (const [count, sha] of flat.slice(1)) {
console.log(chalk.bold('Suspect:'), `${action}@${chalk.yellow(sha)}`)
console.log(
`is only used ${count} time${count === 1 ? '' : 's'} (${((100 * count) / total).toFixed(
1,
)}%) compared to ${mostPopular[1]} (used ${mostPopular[0]} times)`,
)
console.log(chalk.bold(`Consider changing to ${action}@${mostPopular[1]}`))
console.log('in...')
for (const { file, lineNo } of places[`${action}@${sha}`]) {
console.log(`\t${file} (line ${lineNo})`)
}
console.log('\n')
}
}
} else {
console.log(chalk.green('All good! No suspects found 😎'))
}
}

View File

@@ -1,92 +0,0 @@
!(async function () {
const regexp = /https:\/\/github.com\/github\/([^\/]*)\/pull\/\d*\/files/
if (!window.location.href.match(regexp)) {
window.alert("You're not on a PR 'Files changed' page. 🙃")
return
}
let conversation_url = window.location.href.replace(/files.*/g, '')
// get the preview deployment URL by loading the 'Conversation' page, and searching for the 'View deployment' link
let deployment_url = await fetch(conversation_url)
.then(function (response) {
return response.text()
})
.then(function (html) {
// Convert the HTML string into a document object
var parser = new DOMParser()
var doc = parser.parseFromString(html, 'text/html')
var elements = doc.getElementsByClassName('TimelineItem')
// Find the element that is a link that contains the text "View deployment"
for (var i = 0; i < elements.length; i++) {
var element = elements[i]
var links = element.getElementsByTagName('a')
for (var j = 0; j < links.length; j++) {
var link = links[j]
if (link.innerText.match(/View deployment/)) {
// Get the href of the link
var deployment_url = link.getAttribute('href')
}
}
}
// This should return the last link that contains the text "View deployment" (there might be multiple ones if there are multiple deployments)
return deployment_url
})
if (deployment_url == null) {
window.alert('No preview deployment found! 😭')
return
}
// strip any trailing slash from deployment_url
deployment_url = deployment_url.replace(/\/$/, '')
var url_fpt = deployment_url + '/en'
var url_ghec = deployment_url + '/en/enterprise-cloud@latest'
var url_ghes = deployment_url + '/en/enterprise-server@latest'
var url_ae = deployment_url + '/en/github-ae@latest'
var fpt = 'FPT'
var ghes = 'GHES'
var ghec = 'GHEC'
var ae = 'AE'
const file_info = document.querySelectorAll('div.file-info')
for (var i = 0; i < file_info.length; i++) {
var link = file_info[i].querySelector('a').title
if (link.search('data/') === 0) {
continue
} else {
var regex = /\.md$/
var markdownfile = link.search(regex) >= 0
if (markdownfile) {
console.log('link: ' + link)
link = link.replace(regex, '')
link = link.replace(/^content/, '')
link = link.replace(/\/index/, '')
var span = document.createElement('SPAN')
span.style.fontFamily =
'-apple-system,BlinkMacSystemFont,Segoe UI,Helvetica,Arial,sans-serif'
span.innerHTML = '&nbsp; View: '
span = addLink(span, fpt, url_fpt + link)
span.innerHTML += ' / '
span = addLink(span, ghec, url_ghec + link)
span.innerHTML += ' / '
span = addLink(span, ghes, url_ghes + link)
span.innerHTML += ' / '
span = addLink(span, ae, url_ae + link)
file_info[i].appendChild(span)
}
}
}
function addLink(span, link_name, link_href) {
var anchor = document.createElement('A')
anchor.innerHTML = link_name
anchor.href = link_href
anchor.target = '_blank'
span.appendChild(anchor)
return span
}
})()

View File

@@ -23,13 +23,13 @@ At its root, the `src/shielding/frame/middleware/index.js` is injected into our
 Express server. From there, it loads all its individual middleware handlers.
 
 Each middleware is one file that focuses on a single use-case. The
-use-cases are borne from studying log files (CDN and Azure App Service) to
+use-cases are borne from studying log files to
 spot patterns of request abuse.
 
 ## Notes
 
 - The best place to do shielding is as close to the client(s) as possible,
-  i.e. in the CDN or in Azure Frontdoor. Having the code in our own backend
+  i.e. in the CDN. Having the code in our own backend
   has the advantage that it's easier to write custom business logic
   along with end-to-end tests.
 - Some shielding "tricks" appear in other places throughout the code

View File

@@ -19,8 +19,8 @@ export function createRateLimiter(max = MAX) {
     // 1 minute
     windowMs: EXPIRES_IN_AS_SECONDS * 1000,
     // limit each IP to X requests per windowMs
-    // We currently have about 25 instances in production. That's routed
-    // in Azure to spread the requests to each healthy instance.
+    // We currently have about 12 instances in production. That's routed
+    // in Moda to spread the requests to each healthy instance.
     // So, the true rate limit, per `windowMs`, is this number multiplied
     // by the current number of instances.
     max: max,
@@ -32,7 +32,7 @@ export function createRateLimiter(max = MAX) {
     keyGenerator: (req) => {
       let { ip } = req
-      // In our Azure preview environment, with the way the proxying works,
+      // In our previous environments, with the way the proxying works,
       // the `x-forwarded-for` is always the origin IP with a port number
       // attached. E.g. `75.40.90.27:56675, 169.254.129.1`
       // This port number portion changes with every request, so we strip it.
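
The net effect described above: with roughly 12 instances each allowing `max` requests per `windowMs`, the fleet-wide allowance is about 12 * max per minute per client IP. The port stripping the comment mentions could be done with a helper like this hedged sketch (not the repo's actual keyGenerator):

import { Request } from 'express'

// Strip a trailing `:port` from an IPv4 client address so the same client maps
// to the same rate-limit bucket on every request.
export function rateLimitKey(req: Request): string {
  const ip = req.ip ?? ''
  // `75.40.90.27:56675` -> `75.40.90.27`; plain IPv4 and IPv6 pass through unchanged.
  const match = ip.match(/^(\d{1,3}(?:\.\d{1,3}){3}):\d+$/)
  return match ? match[1] : ip
}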

View File

@@ -1,103 +0,0 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"appName": {
"defaultValue": null,
"type": "string",
"minLength": 5,
"maxLength": 63,
"metadata": {
"description": "A unique name for the app"
}
},
"containerImage": {
"type": "string",
"defaultValue": null,
"metadata": {
"description": "Container image to deploy"
}
},
"dockerRegistryUrl": {
"type": "String",
"metadata": {
"description": "Should be a valid host name without protocol"
}
},
"dockerRegistryUsername": {
"type": "String"
},
"dockerRegistryPassword": {
"type": "SecureString"
}
},
"resources": [
{
"type": "Microsoft.ContainerInstance/containerGroups",
"name": "[parameters('appName')]",
"apiVersion": "2021-07-01",
"location": "[resourceGroup().location]",
"properties": {
"containers": [
{
"name": "app",
"properties": {
"image": "[parameters('containerImage')]",
"ports": [
{
"protocol": "TCP",
"port": 4000
}
],
"environmentVariables": [
{
"name": "PORT",
"value": "4000"
},
{
"name": "NODE_ENV",
"value": "production"
},
{
"name": "WEB_CONCURRENCY",
"value": "1"
}
],
"resources": {
"requests": {
"memoryInGB": 4,
"cpu": 1
}
}
}
}
],
"imageRegistryCredentials": [
{
"server": "[parameters('dockerRegistryUrl')]",
"username": "[parameters('dockerRegistryUsername')]",
"password": "[parameters('dockerRegistryPassword')]"
}
],
"restartPolicy": "Always",
"ipAddress": {
"ports": [
{
"protocol": "TCP",
"port": 4000
}
],
"type": "Public",
"dnsNameLabel": "[parameters('appName')]"
},
"osType": "Linux"
}
}
],
"outputs": {
"defaultHostName": {
"value": "[reference(resourceId('Microsoft.ContainerInstance/containerGroups', parameters('appName'))).ipAddress.fqdn]",
"type": "string"
}
}
}

View File

@@ -1,22 +0,0 @@
import got from 'got'
// Will try for up to 20 minutes: (80 retries * 15 seconds) / 60 = 20 minutes
const RETRIES = 80
const DELAY_SECONDS = 15
/*
* Promise resolves once url is healthy or fails if timeout has passed
* @param {string} url - health url, e.g. docs.com/healthcheck
*/
export async function waitUntilUrlIsHealthy(url: string) {
try {
await got.head(url, {
retry: {
limit: RETRIES,
calculateDelay: ({ computedValue }) => Math.min(computedValue, DELAY_SECONDS * 1000),
},
})
return true
} catch {}
return false
}
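
A typical call site for this helper might look like the following (illustrative only — the import path and URL are made up):

// Hypothetical path; adjust to wherever the helper lives.
import { waitUntilUrlIsHealthy } from './wait-until-url-is-healthy'

async function main() {
  const healthy = await waitUntilUrlIsHealthy('https://docs.example.com/healthcheck')
  if (!healthy) {
    console.error('Deployment never became healthy within the retry window')
    process.exit(1)
  }
  console.log('Deployment is healthy')
}

main()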