1
0
mirror of synced 2025-12-19 18:10:59 -05:00

Removing algolia (#20633)

This commit is contained in:
Grace Park
2021-08-05 10:10:51 -07:00
committed by GitHub
parent 13caa0b91f
commit 2dbea3737a
25 changed files with 121 additions and 506 deletions

View File

@@ -1,3 +1 @@
ALGOLIA_API_KEY=
ALGOLIA_APPLICATION_ID=
ALLOW_TRANSLATION_COMMITS=

View File

@@ -19,6 +19,10 @@ jobs:
# Even if it doesn't do anything
- name: Checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
with:
lfs: true
- name: Checkout LFS objects
run: git lfs checkout
- name: Setup Node
uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f

View File

@@ -26,7 +26,5 @@ jobs:
run: npm run build
- name: (Dry run) sync indices
env:
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search-dry-run

View File

@@ -1,38 +1,95 @@
name: Sync search indexes
# **What it does**: This updates our search indexes after each deployment.
# **What it does**: This workflow syncs the Lunr search indexes.
# The search indexes are checked into the lib/search/indexes directory.
# Search indexes are checked directly into the `main` branch on both the
# internal and open-source docs repositories. This workflow should be the
# only mechanism that the search indexes are modified. Because of that,
# repo-sync will not sync the search indexes because it should not detect
# a change.
# **Why we have it**: We want our search indexes kept up to date.
# **Who does it impact**: Anyone using search on docs.
# **Testing**: To test this workflow, use the workflow_dispatch event and trigger
# the workflow from the action tab. Select the branch with the changes to the
# workflow. Set `fetch-depth: 0` as an input to the checkout action to get all
# branches, including your test branch. Otherwise, you'll only get the main
# branch. For git lfs push and git push commands use the --dry-run switch to
# prevent pushes (e.g., git push --dry-run origin main --no-verify and
# git lfs push --dry-run public-docs-repo).
# The dry-run switch does everything but actually send the updates.
on:
workflow_dispatch:
push:
branches:
- main
schedule:
- cron: '53 0/4 * * *' # Run every four hours at 53 minutes past the hour
jobs:
updateIndices:
name: Update indices
updateIndexes:
name: Update indexes
if: github.repository == 'github/docs-internal'
runs-on: ubuntu-latest
steps:
# Check out internal docs repository
- name: checkout
uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
with:
token: ${{ secrets.DOCS_BOT }}
- name: Setup Node
uses: actions/setup-node@38d90ce44d5275ad62cc48384b3d8a58c500bb5f
with:
node-version: 16.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Run build scripts
run: npm run build
- name: sync indices
- name: Update search indexes
env:
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run sync-search
# TODO remove version and language after first successful run to test
run: VERSION=github-ae@latest LANGUAGE=pt npm run sync-search
- name: Update private docs repository search indexes
# Git pre-push hooks push the LFS objects, so if you don't run them and
# don't push the LFS objects manually, the LFS objects won't get
# pushed. That will likely result in the push getting rejected.
# So if you don't use the pre-push hooks or you run with --no-verify
# the LFS objects need to be pushed first.
run: |
echo 'git checkout main'
git checkout main
echo 'git config user.name "GitHub Actions"'
git config user.name "GitHub Actions"
echo 'git config user.email action@github.com'
git config user.email action@github.com
echo 'git commit -am "update search indexes"'
git commit -am "update search indexes"
echo 'git lfs push origin'
git lfs push origin
echo 'git push origin main --no-verify'
git push origin main --no-verify
- name: Update open-source docs repository search indexes
# Git pre-push hooks push the LFS objects, so if you don't run them and
# don't push the LFS objects manually, the LFS objects won't get
# pushed. That will likely result in the push getting rejected.
# So if you don't use the pre-push hooks or you run with --no-verify
# the LFS objects need to be pushed first.
run: |
echo 'git remote add public-docs-repo https://github.com/github/docs.git'
git remote add public-docs-repo https://github.com/github/docs.git
echo 'git lfs push public-docs-repo'
git lfs push public-docs-repo
echo 'git pull public-docs-repo main'
git pull public-docs-repo main
echo 'git push public-docs-repo main --no-verify'
git push public-docs-repo main --no-verify
- name: Send slack notification if workflow run fails
uses: someimportantcompany/github-actions-slack-message@0b470c14b39da4260ed9e3f9a4f1298a74ccdefd
if: failure()

View File

@@ -5,6 +5,7 @@ name: Sync Single English Index
# **Who does it impact**:
on:
workflow_dispatch:
pull_request:
types:
- labeled
@@ -39,9 +40,15 @@ jobs:
env:
VERSION: ${{ steps.getVersion.outputs.versionToSync }}
LANGUAGE: 'en'
ALGOLIA_APPLICATION_ID: ${{ secrets.ALGOLIA_APPLICATION_ID }}
ALGOLIA_API_KEY: ${{ secrets.ALGOLIA_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
npm run build
npm run sync-search
- name: Check in search index
uses: EndBug/add-and-commit@2bdc0a61a03738a1d1bda24d566ad0dbe3083d87
with:
# The arguments for the `git add` command
add: 'lib/search/indexes'
# The message for the commit
message: 'Updated search index for ${{ steps.getVersion.outputs.versionToSync }}'

View File

@@ -16,6 +16,7 @@ on:
- 'lib/redirects/**'
- 'lib/rest/**'
- 'lib/webhooks/**'
- 'lib/search/indexes/**'
- 'scripts/**'
- 'translations/**'
- 'package*.json'
@@ -76,6 +77,7 @@ jobs:
- 'lib/redirects/**'
- 'lib/rest/**'
- 'lib/webhooks/**'
- 'lib/search/indexes/**'
- 'scripts/**'
- 'translations/**'
- 'package*.json'
@@ -101,6 +103,7 @@ jobs:
'lib/redirects/**',
'lib/rest/**',
'lib/webhooks/**',
'lib/search/indexes/**',
'scripts/**',
'translations/**',
'package*.json',

1
.gitignore vendored
View File

@@ -1,4 +1,3 @@
.algolia-cache
.search-cache
.DS_Store
.env

View File

@@ -41,7 +41,7 @@ export function Search({ isStandalone = false, updateSearchParams = true, childr
// If the user shows up with a query in the URL, go ahead and search for it
useEffect(() => {
if (router.query.query) {
if (updateSearchParams && router.query.query) {
/* await */ fetchSearchResults((router.query.query as string).trim())
}
}, [])

View File

@@ -48,7 +48,7 @@ export const Header = () => {
{/* <!-- GitHub.com homepage and 404 page has a stylized search; Enterprise homepages do not --> */}
{relativePath !== 'index.md' && error !== '404' && (
<div className="d-inline-block ml-4">
<Search />
{router.asPath !== '/graphql/overview/explorer' && <Search />}
</div>
)}
</div>
@@ -112,7 +112,7 @@ export const Header = () => {
{/* <!-- GitHub.com homepage and 404 page has a stylized search; Enterprise homepages do not --> */}
{relativePath !== 'index.md' && error !== '404' && (
<div className="pt-3 border-top">
<Search />
{router.asPath !== '/graphql/overview/explorer' && <Search />}
</div>
)}
</div>

View File

@@ -37,17 +37,19 @@ Using the attribute `topics` in your query will only return results that have th
## Production deploys
A [GitHub Actions workflow](.github/workflows/sync-search-indices.yml) triggered by pushes to the `main` branch syncs the search data. This process generates structured data for all pages on the site, compares that data to what's currently on search, then adds, updates, or removes indices based on the diff of the local and remote data, being careful not to create duplicate records and avoiding any unnecessary (and costly) indexing operations.
A [GitHub Actions workflow](.github/workflows/sync-search-indices.yml) that runs every four hours syncs the search data. This process generates structured data for all pages on the site, compares that data to what's currently on search, then adds, updates, or removes indices based on the diff of the local and remote data, being careful not to create duplicate records and avoiding any unnecessary (and costly) indexing operations.
The Actions workflow progress can be viewed (by GitHub employees) in the [Actions tab](https://github.com/github/docs/actions?query=workflow%3Asearch) of the repo.
Because the workflow runs after a branch is merged to `main`, there is a slight delay for search data updates to appear on the site.
## Manually triggering the search index update workflow
## Manual sync from a checkout
You can manually run the workflow to generate the indexes after you push your changes to `main` to speed up the indexing when needed. To run it manually, click "Run workflow" button in the [Actions tab](https://github.com/github/docs-internal/actions/workflows/sync-search-indices.yml).
It is also possible to manually sync the indices from your local checkout of the repo, before your branch is merged to `main`.
## Generating search indexes for your local checkout
**Prerequisite:** Make sure the environment variables `ALGOLIA_APPLICATION_ID` and `ALGOLIA_API_KEY` are set in your `.env` file. You can find these values on [Algolia](https://www.algolia.com/apps/ZI5KPY1HBE/api-keys/all). _Remove this paragraph when we switch to Lunr._
You can locally generate search indexes, but please do not check them into your local branch because they can get out-of-sync with the `main` branch quickly.
To locally generate the English version of the Dotcom search index locally, run `LANGUAGE=en VERSION=free-pro-team@latest npm run sync-search`. See [Build and sync](#build-and-sync) below for more details. To revert those files run `git checkout lib/search/indexes`.
### Build without sync (dry run)
@@ -75,7 +77,7 @@ VERSION=<PLAN@RELEASE> LANGUAGE=<TWO-LETTER-CODE> npm run sync-search
```
You can set `VERSION` and `LANGUAGE` individually, too.
Substitute a currently supported version for `<PLAN@RELEASE>` and a currently supported two-letter language code for `<TWO-LETTER-CODE>`.
Substitute a currently supported version for `<PLAN@RELEASE>` and a currently supported two-letter language code for `<TWO-LETTER-CODE>`. Languages and versions are lowercase. The options for version are currently `free-pro-team`, `github-ae`, and `enterprise-server`.
## Label-triggered Actions workflow
@@ -95,7 +97,7 @@ Why do we need this? For our daily shipping needs, it's tolerable that search up
### Actions workflow files
- [`.github/workflows/sync-search-indices.yml`](.github/workflows/sync-search-indices.yml) - Builds and syncs search indices whenever the `main` branch is pushed to (that is, on production deploys).
- [`.github/workflows/sync-search-indices.yml`](.github/workflows/sync-search-indices.yml) - Builds and syncs search indices on the `main` branch every four hours. Search indices are committed directly to the `main` branch on both the `github/docs-internal` and `github/docs` repositories. It can also be run manually. To run it manually, click "Run workflow" button in the [Actions tab](https://github.com/github/docs-internal/actions/workflows/sync-search-indices.yml).
- [`.github/workflows/dry-run-sync-search-indices.yml`](.github/workflows/dry-run-sync-search-indices.yml) - This workflow can be run manually (via `workflow_dispatch`) to do a dry run build of all the indices. Useful for confirming that the indices can build without erroring out.
- [`.github/workflows/sync-single-english-index.yml`](.github/workflows/sync-single-english-index.yml) - This workflow is run when a label in the right format is applied to a PR. See "[Label-triggered Actions workflow](#label-triggered-actions-workflow)" for details.

View File

@@ -1,31 +0,0 @@
import algoliasearch from 'algoliasearch'
import { get } from 'lodash-es'
import { namePrefix } from './config.js'
// https://www.algolia.com/apps/ZI5KPY1HBE/dashboard
// This API key is public. There's also a private API key for writing to the Algolia API
const searchClient = algoliasearch('ZI5KPY1HBE', '685df617246c3a10abba589b4599288f')
// Query an Algolia search index for a given docs version + language and
// return simplified result objects whose fields carry Algolia's
// <mark>-wrapped highlighting.
export default async function loadAlgoliaResults({ version, language, query, filters, limit }) {
  // Index names follow the `<prefix>-<version>-<language>` convention.
  const index = searchClient.initIndex(`${namePrefix}-${version}-${language}`)

  // advancedSyntax allows "phrase queries" and the "prohibit operator"
  // https://www.algolia.com/doc/api-reference/api-parameters/advancedSyntax/
  const searchOptions = {
    hitsPerPage: limit,
    advancedSyntax: true,
    highlightPreTag: '<mark>',
    highlightPostTag: '</mark>',
    filters,
  }
  const { hits } = await index.search(query, searchOptions)

  // Safely pull the highlighted value (if present) out of a hit's metadata.
  const highlighted = (hit, field) => get(hit, `_highlightResult.${field}.value`)

  return hits.map((hit) => ({
    url: hit.objectID,
    breadcrumbs: highlighted(hit, 'breadcrumbs'),
    heading: highlighted(hit, 'heading'),
    title: highlighted(hit, 'title'),
    content: highlighted(hit, 'content'),
    topics: hit.topics,
  }))
}

View File

@@ -12,7 +12,7 @@ export default function csp(req, res, next) {
const csp = {
directives: {
defaultSrc: ["'none'"],
connectSrc: ["'self'", '*.algolia.net', '*.algolianet.com'],
connectSrc: ["'self'"],
fontSrc: ["'self'", 'data:', AZURE_STORAGE_URL],
imgSrc: [
"'self'",

View File

@@ -2,7 +2,6 @@ import express from 'express'
import libLanguages from '../lib/languages.js'
import searchVersions from '../lib/search/versions.js'
import loadLunrResults from '../lib/search/lunr-search.js'
import loadAlgoliaResults from '../lib/search/algolia-search.js'
const languages = new Set(Object.keys(libLanguages))
const versions = new Set(Object.values(searchVersions))
@@ -24,11 +23,12 @@ router.get('/', async function postSearch(req, res, next) {
}
try {
const results =
process.env.AIRGAP || req.cookies.AIRGAP
? await loadLunrResults({ version, language, query: `${query} ${filters || ''}`, limit })
: await loadAlgoliaResults({ version, language, query, filters, limit })
const results = await loadLunrResults({
version,
language,
query: `${query} ${filters || ''}`,
limit,
})
// Only reply if the headers have not been sent and the request was not aborted...
if (!res.headersSent && !req.aborted) {
return res.status(200).json(results)

273
package-lock.json generated
View File

@@ -15,7 +15,6 @@
"accept-language-parser": "^1.5.0",
"ajv": "^8.4.0",
"ajv-formats": "^2.1.0",
"algoliasearch": "^4.10.3",
"browser-date-formatter": "^3.0.3",
"change-case": "^4.1.2",
"cheerio": "^1.0.0-rc.10",
@@ -205,121 +204,6 @@
"tunnel": "0.0.6"
}
},
"node_modules/@algolia/cache-browser-local-storage": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.10.3.tgz",
"integrity": "sha512-TD1N7zg5lb56/PLjjD4bBl2eccEvVHhC7yfgFu2r9k5tf+gvbGxEZ3NhRZVKu2MObUIcEy2VR4LVLxOQu45Hlg==",
"dependencies": {
"@algolia/cache-common": "4.10.3"
}
},
"node_modules/@algolia/cache-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.10.3.tgz",
"integrity": "sha512-q13cPPUmtf8a2suBC4kySSr97EyulSXuxUkn7l1tZUCX/k1y5KNheMp8npBy8Kc8gPPmHpacxddRSfOncjiKFw=="
},
"node_modules/@algolia/cache-in-memory": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.10.3.tgz",
"integrity": "sha512-JhPajhOXAjUP+TZrZTh6KJpF5VKTKyWK2aR1cD8NtrcVHwfGS7fTyfXfVm5BqBqkD9U0gVvufUt/mVyI80aZww==",
"dependencies": {
"@algolia/cache-common": "4.10.3"
}
},
"node_modules/@algolia/client-account": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.10.3.tgz",
"integrity": "sha512-S/IsJB4s+e1xYctdpW3nAbwrR2y3pjSo9X21fJGoiGeIpTRdvQG7nydgsLkhnhcgAdLnmqBapYyAqMGmlcyOkg==",
"dependencies": {
"@algolia/client-common": "4.10.3",
"@algolia/client-search": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"node_modules/@algolia/client-analytics": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.10.3.tgz",
"integrity": "sha512-vlHTbBqJktRgclh3v7bPQLfZvFIqY4erNFIZA5C7nisCj9oLeTgzefoUrr+R90+I+XjfoLxnmoeigS1Z1yg1vw==",
"dependencies": {
"@algolia/client-common": "4.10.3",
"@algolia/client-search": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"node_modules/@algolia/client-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.10.3.tgz",
"integrity": "sha512-uFyP2Z14jG2hsFRbAoavna6oJf4NTXaSDAZgouZUZlHlBp5elM38sjNeA5HR9/D9J/GjwaB1SgB7iUiIWYBB4w==",
"dependencies": {
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"node_modules/@algolia/client-personalization": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.10.3.tgz",
"integrity": "sha512-NS7Nx8EJ/nduGXT8CFo5z7kLF0jnFehTP3eC+z+GOEESH3rrs7uR12IZHxv5QhQswZa9vl925zCOZDcDVoENCg==",
"dependencies": {
"@algolia/client-common": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"node_modules/@algolia/client-search": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.10.3.tgz",
"integrity": "sha512-Zwnp2G94IrNFKWCG/k7epI5UswRkPvL9FCt7/slXe2bkjP2y/HA37gzRn+9tXoLVRwd7gBzrtOA4jFKIyjrtVw==",
"dependencies": {
"@algolia/client-common": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"node_modules/@algolia/logger-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.10.3.tgz",
"integrity": "sha512-M6xi+qov2bkgg1H9e1Qtvq/E/eKsGcgz8RBbXNzqPIYoDGZNkv+b3b8YMo3dxd4Wd6M24HU1iqF3kmr1LaXndg=="
},
"node_modules/@algolia/logger-console": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.10.3.tgz",
"integrity": "sha512-vVgRI7b4PHjgBdRkv/cRz490twvkLoGdpC4VYzIouSrKj8SIVLRhey3qgXk7oQXi3xoxVAv6NrklHfpO8Bpx0w==",
"dependencies": {
"@algolia/logger-common": "4.10.3"
}
},
"node_modules/@algolia/requester-browser-xhr": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.10.3.tgz",
"integrity": "sha512-4WIk1zreFbc1EF6+gsfBTQvwSNjWc20zJAAExRWql/Jq5yfVHmwOqi/CajA53/cXKFBqo80DAMRvOiwP+hOLYw==",
"dependencies": {
"@algolia/requester-common": "4.10.3"
}
},
"node_modules/@algolia/requester-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.10.3.tgz",
"integrity": "sha512-PNfLHmg0Hujugs3rx55uz/ifv7b9HVdSFQDb2hj0O5xZaBEuQCNOXC6COrXR8+9VEfqp2swpg7zwgtqFxh+BtQ=="
},
"node_modules/@algolia/requester-node-http": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.10.3.tgz",
"integrity": "sha512-A9ZcGfEvgqf0luJApdNcIhsRh6MShn2zn2tbjwjGG1joF81w+HUY+BWuLZn56vGwAA9ZB9n00IoJJpxibbfofg==",
"dependencies": {
"@algolia/requester-common": "4.10.3"
}
},
"node_modules/@algolia/transporter": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.10.3.tgz",
"integrity": "sha512-n1lRyKDbrckbMEgm7QXtj3nEWUuzA3aKLzVQ43/F/RCFib15j4IwtmYhXR6OIBRSc7+T0Hm48S0J6F+HeYCQkw==",
"dependencies": {
"@algolia/cache-common": "4.10.3",
"@algolia/logger-common": "4.10.3",
"@algolia/requester-common": "4.10.3"
}
},
"node_modules/@babel/code-frame": {
"version": "7.12.11",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz",
@@ -5233,27 +5117,6 @@
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="
},
"node_modules/algoliasearch": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.10.3.tgz",
"integrity": "sha512-OLY0AWlPKGLbSaw14ivMB7BT5fPdp8VdzY4L8FtzZnqmLKsyes24cltGlf7/X96ACkYEcT390SReCDt/9SUIRg==",
"dependencies": {
"@algolia/cache-browser-local-storage": "4.10.3",
"@algolia/cache-common": "4.10.3",
"@algolia/cache-in-memory": "4.10.3",
"@algolia/client-account": "4.10.3",
"@algolia/client-analytics": "4.10.3",
"@algolia/client-common": "4.10.3",
"@algolia/client-personalization": "4.10.3",
"@algolia/client-search": "4.10.3",
"@algolia/logger-common": "4.10.3",
"@algolia/logger-console": "4.10.3",
"@algolia/requester-browser-xhr": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/requester-node-http": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"node_modules/anser": {
"version": "1.4.9",
"resolved": "https://registry.npmjs.org/anser/-/anser-1.4.9.tgz",
@@ -25174,121 +25037,6 @@
"tunnel": "0.0.6"
}
},
"@algolia/cache-browser-local-storage": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.10.3.tgz",
"integrity": "sha512-TD1N7zg5lb56/PLjjD4bBl2eccEvVHhC7yfgFu2r9k5tf+gvbGxEZ3NhRZVKu2MObUIcEy2VR4LVLxOQu45Hlg==",
"requires": {
"@algolia/cache-common": "4.10.3"
}
},
"@algolia/cache-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.10.3.tgz",
"integrity": "sha512-q13cPPUmtf8a2suBC4kySSr97EyulSXuxUkn7l1tZUCX/k1y5KNheMp8npBy8Kc8gPPmHpacxddRSfOncjiKFw=="
},
"@algolia/cache-in-memory": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.10.3.tgz",
"integrity": "sha512-JhPajhOXAjUP+TZrZTh6KJpF5VKTKyWK2aR1cD8NtrcVHwfGS7fTyfXfVm5BqBqkD9U0gVvufUt/mVyI80aZww==",
"requires": {
"@algolia/cache-common": "4.10.3"
}
},
"@algolia/client-account": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.10.3.tgz",
"integrity": "sha512-S/IsJB4s+e1xYctdpW3nAbwrR2y3pjSo9X21fJGoiGeIpTRdvQG7nydgsLkhnhcgAdLnmqBapYyAqMGmlcyOkg==",
"requires": {
"@algolia/client-common": "4.10.3",
"@algolia/client-search": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"@algolia/client-analytics": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.10.3.tgz",
"integrity": "sha512-vlHTbBqJktRgclh3v7bPQLfZvFIqY4erNFIZA5C7nisCj9oLeTgzefoUrr+R90+I+XjfoLxnmoeigS1Z1yg1vw==",
"requires": {
"@algolia/client-common": "4.10.3",
"@algolia/client-search": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"@algolia/client-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.10.3.tgz",
"integrity": "sha512-uFyP2Z14jG2hsFRbAoavna6oJf4NTXaSDAZgouZUZlHlBp5elM38sjNeA5HR9/D9J/GjwaB1SgB7iUiIWYBB4w==",
"requires": {
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"@algolia/client-personalization": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.10.3.tgz",
"integrity": "sha512-NS7Nx8EJ/nduGXT8CFo5z7kLF0jnFehTP3eC+z+GOEESH3rrs7uR12IZHxv5QhQswZa9vl925zCOZDcDVoENCg==",
"requires": {
"@algolia/client-common": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"@algolia/client-search": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.10.3.tgz",
"integrity": "sha512-Zwnp2G94IrNFKWCG/k7epI5UswRkPvL9FCt7/slXe2bkjP2y/HA37gzRn+9tXoLVRwd7gBzrtOA4jFKIyjrtVw==",
"requires": {
"@algolia/client-common": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"@algolia/logger-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.10.3.tgz",
"integrity": "sha512-M6xi+qov2bkgg1H9e1Qtvq/E/eKsGcgz8RBbXNzqPIYoDGZNkv+b3b8YMo3dxd4Wd6M24HU1iqF3kmr1LaXndg=="
},
"@algolia/logger-console": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.10.3.tgz",
"integrity": "sha512-vVgRI7b4PHjgBdRkv/cRz490twvkLoGdpC4VYzIouSrKj8SIVLRhey3qgXk7oQXi3xoxVAv6NrklHfpO8Bpx0w==",
"requires": {
"@algolia/logger-common": "4.10.3"
}
},
"@algolia/requester-browser-xhr": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.10.3.tgz",
"integrity": "sha512-4WIk1zreFbc1EF6+gsfBTQvwSNjWc20zJAAExRWql/Jq5yfVHmwOqi/CajA53/cXKFBqo80DAMRvOiwP+hOLYw==",
"requires": {
"@algolia/requester-common": "4.10.3"
}
},
"@algolia/requester-common": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.10.3.tgz",
"integrity": "sha512-PNfLHmg0Hujugs3rx55uz/ifv7b9HVdSFQDb2hj0O5xZaBEuQCNOXC6COrXR8+9VEfqp2swpg7zwgtqFxh+BtQ=="
},
"@algolia/requester-node-http": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.10.3.tgz",
"integrity": "sha512-A9ZcGfEvgqf0luJApdNcIhsRh6MShn2zn2tbjwjGG1joF81w+HUY+BWuLZn56vGwAA9ZB9n00IoJJpxibbfofg==",
"requires": {
"@algolia/requester-common": "4.10.3"
}
},
"@algolia/transporter": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.10.3.tgz",
"integrity": "sha512-n1lRyKDbrckbMEgm7QXtj3nEWUuzA3aKLzVQ43/F/RCFib15j4IwtmYhXR6OIBRSc7+T0Hm48S0J6F+HeYCQkw==",
"requires": {
"@algolia/cache-common": "4.10.3",
"@algolia/logger-common": "4.10.3",
"@algolia/requester-common": "4.10.3"
}
},
"@babel/code-frame": {
"version": "7.12.11",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz",
@@ -29159,27 +28907,6 @@
"integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==",
"requires": {}
},
"algoliasearch": {
"version": "4.10.3",
"resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.10.3.tgz",
"integrity": "sha512-OLY0AWlPKGLbSaw14ivMB7BT5fPdp8VdzY4L8FtzZnqmLKsyes24cltGlf7/X96ACkYEcT390SReCDt/9SUIRg==",
"requires": {
"@algolia/cache-browser-local-storage": "4.10.3",
"@algolia/cache-common": "4.10.3",
"@algolia/cache-in-memory": "4.10.3",
"@algolia/client-account": "4.10.3",
"@algolia/client-analytics": "4.10.3",
"@algolia/client-common": "4.10.3",
"@algolia/client-personalization": "4.10.3",
"@algolia/client-search": "4.10.3",
"@algolia/logger-common": "4.10.3",
"@algolia/logger-console": "4.10.3",
"@algolia/requester-browser-xhr": "4.10.3",
"@algolia/requester-common": "4.10.3",
"@algolia/requester-node-http": "4.10.3",
"@algolia/transporter": "4.10.3"
}
},
"anser": {
"version": "1.4.9",
"resolved": "https://registry.npmjs.org/anser/-/anser-1.4.9.tgz",

View File

@@ -17,7 +17,6 @@
"accept-language-parser": "^1.5.0",
"ajv": "^8.4.0",
"ajv-formats": "^2.1.0",
"algoliasearch": "^4.10.3",
"browser-date-formatter": "^3.0.3",
"change-case": "^4.1.2",
"cheerio": "^1.0.0-rc.10",

View File

@@ -458,7 +458,7 @@ Run this script to standardize frontmatter fields in all content files, per the
### [`sync-search-indices.js`](sync-search-indices.js)
This script is run automatically via GitHub Actions on every push to `main` to generate searchable data. It can also be run manually. For more info see [contributing/search.md](contributing/search.md)
This script is run on a schedule every four hours to generate searchable data. It can also be run manually. To run it manually, click "Run workflow" button in the [Actions tab](https://github.com/github/docs-internal/actions/workflows/sync-search-indices.yml). For more info see [contributing/search.md](contributing/search.md)
---

View File

@@ -1,10 +0,0 @@
#!/usr/bin/env node
import dotenv from 'dotenv'
import algoliasearch from 'algoliasearch'
dotenv.config()
const { ALGOLIA_APPLICATION_ID, ALGOLIA_API_KEY } = process.env
// Build an Algolia client authenticated with the write credentials that
// dotenv loaded from the environment above.
export default function () {
  const appId = ALGOLIA_APPLICATION_ID
  const apiKey = ALGOLIA_API_KEY
  return algoliasearch(appId, apiKey)
}

View File

@@ -1,15 +0,0 @@
#!/usr/bin/env node
import { namePrefix } from '../../lib/search/config.js'
import getAlgoliaClient from './algolia-client.js'
// List the names of the docs search indexes that currently exist on Algolia.
export default async function getRemoteIndexNames() {
  const client = getAlgoliaClient()
  const { items } = await client.listIndices()

  // The Algolia account also hosts unrelated indexes (`helphub-`, etc.);
  // keep only those that match our naming prefix.
  const names = []
  for (const item of items) {
    if (item.name.startsWith(namePrefix)) names.push(item.name)
  }
  return names
}

View File

@@ -1,94 +0,0 @@
#!/usr/bin/env node
import { chain, chunk, difference } from 'lodash-es'
import objectHash from 'object-hash'
import rank from './rank.js'
import validateRecords from './validate-records.js'
import getAlgoliaClient from './algolia-client.js'
// Represents one Algolia search index (one version+language combination)
// and knows how to diff its local records against the remote copy and
// push only the changes.
class AlgoliaIndex {
// name: the Algolia index name; records: the locally built search records.
// NOTE(review): `map` here mutates each record in place by attaching
// `customRanking` — callers share these objects.
constructor(name, records) {
this.name = name
this.records = records.map((record) => {
record.customRanking = rank(record)
return record
})
this.validate()
return this
}
// Throws (via validateRecords) if any record is malformed.
validate() {
return validateRecords(this.name, this.records)
}
// This method consumes Algolia's `browseObjects` event emitter,
// aggregating results into an array of all the records
// https://www.algolia.com/doc/api-client/getting-started/upgrade-guides/javascript/#the-browse-and-browsefrom-methods
async fetchExistingRecords() {
const client = getAlgoliaClient()
// return an empty array if the index does not exist yet
const { items: indices } = await client.listIndices()
if (!indices.find((index) => index.name === this.name)) {
console.log(`index '${this.name}' does not exist!`)
return []
}
const index = client.initIndex(this.name)
let records = []
await index.browseObjects({
batch: (batch) => (records = records.concat(batch)),
})
return records
}
// Diff local records against the remote index, then delete deprecated
// records and upload new/changed ones. Avoids re-uploading records whose
// content is unchanged (costly indexing operations).
async syncWithRemote() {
const client = getAlgoliaClient()
console.log('\n\nsyncing %s with remote', this.name)
this.validate()
const existingRecords = await this.fetchExistingRecords()
// objectIDs present remotely but not locally are deprecated; the
// reverse set is brand new.
const existingIds = chain(existingRecords).map('objectID').value()
const currentIds = chain(this.records).map('objectID').value()
const deprecatedIds = difference(existingIds, currentIds)
const newIds = difference(currentIds, existingIds)
// Create a hash of every existing record, to compare to the new records
// The `object-hash` module is indifferent to object key order by default. :+1:
const existingHashes = existingRecords.map((record) => objectHash(record))
// If a hash is found, that means the existing Algolia record contains the
// same data as new record, and the record doesn't need to be updated.
const recordsToUpdate = this.records.filter((record) => {
return !existingHashes.includes(objectHash(record))
})
console.log('deprecated objectIDs:', deprecatedIds)
console.log('new objectIDs:', newIds)
console.log('total current records:', this.records.length)
console.log('records to update:', recordsToUpdate.length)
const index = client.initIndex(this.name)
// Remove stale records first, then upload in batches of 1000.
if (deprecatedIds.length) {
console.log('deleting %d deprecated record(s)', deprecatedIds.length)
await index.deleteObjects(deprecatedIds)
}
if (recordsToUpdate.length) {
console.log('uploading %d new or modified record(s)', recordsToUpdate.length)
const chunks = chunk(recordsToUpdate, 1000)
for (const batch of chunks) {
// https://www.algolia.com/doc/api-client/getting-started/upgrade-guides/javascript/#the-addobject-and-addobjects-methods
// NOTE(review): saveObjects is intentionally not awaited here —
// uploads proceed in the background; confirm this is deliberate.
index.saveObjects(batch, { autoGenerateObjectIDIfNotExist: true })
}
}
}
}
export default AlgoliaIndex

0
script/search/lunr-search-index.js Normal file → Executable file
View File

View File

@@ -10,16 +10,12 @@ import buildRecords from './build-records.js'
import findIndexablePages from './find-indexable-pages.js'
import { allVersions } from '../../lib/all-versions.js'
import { namePrefix } from '../../lib/search/config.js'
import getRemoteIndexNames from './algolia-get-remote-index-names.js'
import AlgoliaIndex from './algolia-search-index.js'
import LunrIndex from './lunr-search-index.js'
import getLunrIndexNames from './lunr-get-index-names.js'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
const cacheDir = path.join(process.cwd(), './.search-cache')
// Algolia
// Lunr
// Build a search data file for every combination of product version and language
@@ -86,31 +82,22 @@ export default async function syncSearchIndexes(opts = {}) {
// The page version will be the new version, e.g., free-pro-team@latest, enterprise-server@2.22
const records = await buildRecords(indexName, indexablePages, pageVersion, languageCode)
const index = process.env.AIRGAP
? new LunrIndex(indexName, records)
: new AlgoliaIndex(indexName, records)
const index = new LunrIndex(indexName, records)
if (opts.dryRun) {
const cacheFile = path.join(cacheDir, `${indexName}.json`)
fs.writeFileSync(cacheFile, JSON.stringify(index, null, 2))
console.log('wrote dry-run index to disk: ', cacheFile)
} else {
if (process.env.AIRGAP) {
await index.write()
console.log('wrote index to file: ', indexName)
} else {
await index.syncWithRemote()
console.log('synced index with remote: ', indexName)
}
await index.write()
console.log('wrote index to file: ', indexName)
}
}
}
// Fetch a list of index names and cache it for tests
// to ensure that an index exists for every language and GHE version
const remoteIndexNames = process.env.AIRGAP
? await getLunrIndexNames()
: await getRemoteIndexNames()
const remoteIndexNames = await getLunrIndexNames()
const cachedIndexNamesFile = path.join(__dirname, '../../lib/search/cached-index-names.json')
fs.writeFileSync(cachedIndexNamesFile, JSON.stringify(remoteIndexNames, null, 2))

View File

@@ -1,8 +1,9 @@
import sleep from 'await-sleep'
import { jest } from '@jest/globals'
import { latest } from '../../lib/enterprise-server-releases.js'
import languages from '../../lib/languages.js'
jest.useFakeTimers()
/* global page, browser */
describe('homepage', () => {
jest.setTimeout(60 * 1000)
@@ -108,6 +109,8 @@ describe('browser search', () => {
})
describe('survey', () => {
jest.setTimeout(3 * 60 * 1000)
it('sends an event to /events when submitting form', async () => {
// Visit a page that displays the prompt
await page.goto(
@@ -136,8 +139,6 @@ describe('survey', () => {
// When I fill in my email and submit the form
await page.type('[data-testid=survey-form] [type="email"]', 'test@example.com')
await sleep(1000)
await page.click('[data-testid=survey-form] [type="submit"]')
// (sent a PUT request to /events/{id})
// I see the feedback
@@ -271,7 +272,6 @@ describe('tool specific content', () => {
const toolSelector = await page.$$('nav#tool-switcher')
const switches = await page.$$('a.tool-switcher')
const selectedSwitch = await page.$$('a.tool-switcher.selected')
console.log(switches.length)
expect(toolSelector.length).toBeGreaterThan(1)
expect(switches.length).toBeGreaterThan(1)
expect(selectedSwitch.length).toEqual(toolSelector.length)
@@ -437,30 +437,14 @@ describe('language banner', () => {
})
})
// The Explorer in the iFrame will not be accessible on localhost, but we can still
// test the query param handling
// The Explorer in the iFrame will not be accessible on localhost
// There's a url in github.com that uses ?query= for a graphql query instead of a search query, so we're hiding the Search bar on this page
describe('GraphQL Explorer', () => {
it('preserves query strings on the Explorer page without opening search', async () => {
const queryString = `query {
viewer {
foo
}
}`
// Encoded as: query%20%7B%0A%20%20viewer%20%7B%0A%20%20%20%20foo%0A%20%20%7D%0A%7D
const encodedString = encodeURIComponent(queryString)
it('hides search bar on GraphQL Explorer page', async () => {
const explorerUrl = 'http://localhost:4001/en/graphql/overview/explorer'
await page.goto(`${explorerUrl}?query=${encodedString}`)
// On non-Explorer pages, query params handled by search JS get form-encoded using `+` instead of `%20`.
// So on these pages, the following test will be false; but on the Explorer page, it should be true.
expect(page.url().endsWith(encodedString)).toBe(true)
// On non-Explorer pages, query params handled by search JS will populate in the search box and the `js-open`
// class is added. On these pages, the following test will NOT be null; but on the Explorer page, it should be null.
await page.waitForSelector('#search-results-container')
const searchResult = await page.$('#search-results-container.js-open')
expect(searchResult).toBeNull()
await page.goto(`${explorerUrl}`)
const searchBar = await page.$$('[data-testid=site-search-input]')
expect(searchBar.length).toBe(0)
})
})

View File

@@ -47,11 +47,12 @@ describe('check if a GitHub-owned private repository is referenced', () => {
const filenames = walkSync(process.cwd(), {
directories: false,
ignore: [
'.algolia-cache',
'.git',
'.github/actions-scripts/enterprise-server-issue-templates/*.md',
'.github/review-template.md',
'.github/workflows/sync-search-indices.yml',
'.next',
'contributing/search.md',
'dist',
'node_modules',
'translations',
@@ -70,6 +71,7 @@ describe('check if a GitHub-owned private repository is referenced', () => {
'**/*.ico',
'**/*.woff',
'script/deploy',
'script/README.md',
],
})

View File

@@ -141,8 +141,6 @@ describe('server', () => {
expect(csp.get('font-src').includes(AZURE_STORAGE_URL)).toBe(true)
expect(csp.get('connect-src').includes("'self'")).toBe(true)
expect(csp.get('connect-src').includes('*.algolia.net')).toBe(true)
expect(csp.get('connect-src').includes('*.algolianet.com')).toBe(true)
expect(csp.get('img-src').includes("'self'")).toBe(true)
expect(csp.get('img-src').includes(AZURE_STORAGE_URL)).toBe(true)

View File

@@ -188,7 +188,7 @@ describe('JS and CSS assets', () => {
it('returns the expected node_modules', async () => {
const result = await supertest(app)
.get('/node_modules/algoliasearch/dist/algoliasearch.min.js')
.get('/node_modules/instantsearch.js/dist/instantsearch.production.min.js')
.set('Referrer', '/en/enterprise/2.17')
expect(result.statusCode).toBe(200)