Merge pull request #8201 from pruett/pruett.algolia-index-ci

Stub Algolia indexing job via CI
This commit is contained in:
Kevin Pruett 2020-06-24 12:17:47 -04:00 committed by GitHub
commit 96ba670f7d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 1850 additions and 3058 deletions

19
.circleci/config.yml generated
View File

@ -1048,6 +1048,19 @@ jobs:
- GOTESTSUM_JUNITFILE: /tmp/test-reports/results.xml
- NOMAD_SLOW_TEST: 1
- PAGER: cat
algolia_index:
docker:
- image: circleci/buildpack-deps
shell: /usr/bin/env bash -euo pipefail -c
steps:
- checkout
- setup_remote_docker
- run:
command: |
cd website/
npm i
node scripts/index_search_content.js
name: Push content to Algolia Index
test-docker:
machine:
image: circleci/classic:201808-01
@ -1460,4 +1473,10 @@ workflows:
only:
- master
context: static-sites
- algolia_index:
filters:
branches:
only:
- stable-website
context: static-sites
version: 2

View File

@ -0,0 +1,12 @@
docker:
- image: circleci/buildpack-deps
shell: /usr/bin/env bash -euo pipefail -c
steps:
- checkout
- setup_remote_docker
- run:
name: Push content to Algolia Index
command: |
cd website/
npm i
node scripts/index_search_content.js

View File

@ -5,3 +5,10 @@ jobs:
branches:
only:
- master
- algolia_index:
context: static-sites
filters:
branches:
only:
- stable-website

3
website/.env Normal file
View File

@ -0,0 +1,3 @@
NEXT_PUBLIC_ALGOLIA_APP_ID=YY0FFNI7MF
NEXT_PUBLIC_ALGOLIA_INDEX=poc_NOMAD
NEXT_PUBLIC_ALGOLIA_SEARCH_ONLY_API_KEY=5037da4824714676226913c65e961ca0

4
website/.gitignore vendored
View File

@ -3,3 +3,7 @@ node_modules
.next
out
.mdx-data
# As per Next.js conventions (https://nextjs.org/docs/basic-features/environment-variables#default-environment-variables)
.env*.local
!.env*

4761
website/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -27,6 +27,7 @@
"@hashicorp/react-text-split-with-image": "^1.3.0",
"@hashicorp/react-use-cases": "^1.0.4",
"@hashicorp/react-vertical-text-block-list": "^2.0.1",
"algoliasearch": "^4.3.0",
"babel-plugin-import-glob-array": "^0.2.0",
"imagemin-mozjpeg": "^9.0.0",
"imagemin-optipng": "^8.0.0",

View File

@ -0,0 +1,101 @@
require('dotenv').config()
const algoliasearch = require('algoliasearch')
const glob = require('glob')
const path = require('path')
const matter = require('gray-matter')
// In addition to the content of the page,
// define additional front matter attributes that will be search-indexable
const SEARCH_DIMENSIONS = ['page_title', 'description']
// Entry point: collect MDX content and push it to the Algolia index
main()
/**
 * Collect all MDX pages under `../pages`, shape them into Algolia search
 * objects (content + whitelisted front matter + stable objectID), and push
 * them to the index. Exits the process with code 1 on any indexing error.
 */
async function main() {
  const pagesFolder = path.join(__dirname, '../pages')
  // Grab all search-indexable content and format for Algolia
  const searchObjects = glob
    .sync(path.join(pagesFolder, '**/*.mdx'))
    .map((fullPath) => {
      const { content, data } = matter.read(fullPath)
      // Get path relative to `pages`
      const __resourcePath = fullPath.replace(`${pagesFolder}/`, '')
      // Use clean URL for Algolia id
      const objectID = __resourcePath.replace('.mdx', '')
      // Build a plain object of the whitelisted front matter attributes.
      // NOTE: the previous implementation spread an *array* of one-key
      // objects into the result, which produced numeric keys ('0', '1', ...)
      // instead of the intended `page_title`/`description` keys.
      const searchableDimensions = SEARCH_DIMENSIONS.reduce(
        (dimensions, key) => {
          if (key in data) dimensions[key] = data[key]
          return dimensions
        },
        {}
      )
      return {
        ...searchableDimensions,
        content,
        __resourcePath,
        objectID,
      }
    })
  try {
    await indexSearchContent(searchObjects)
  } catch (e) {
    console.error(e)
    process.exit(1)
  }
}
/**
 * Push the given search objects to the Algolia index, then delete any
 * records in the index that are not present in `objects`.
 *
 * Reads NEXT_PUBLIC_ALGOLIA_APP_ID, NEXT_PUBLIC_ALGOLIA_INDEX and
 * ALGOLIA_API_KEY from the environment and throws if any is missing.
 * Exits the process with code 0 on success; errors propagate to the caller.
 *
 * @param {Array<{ objectID: string }>} objects - Algolia records to upsert
 */
async function indexSearchContent(objects) {
  const {
    NEXT_PUBLIC_ALGOLIA_APP_ID: appId,
    NEXT_PUBLIC_ALGOLIA_INDEX: index,
    ALGOLIA_API_KEY: apiKey,
  } = process.env
  if (!apiKey || !appId || !index) {
    // Report which variables are set WITHOUT echoing the secret API key —
    // this message ends up in CI logs.
    throw new Error(
      `[*** Algolia Search Indexing Error ***] Received: ALGOLIA_API_KEY=${
        apiKey ? '<set>' : undefined
      } ALGOLIA_APP_ID=${appId} ALGOLIA_INDEX=${index} \n Please ensure all Algolia Search-related environment vars are set in CI settings.`
    )
  }
  console.log(`updating ${objects.length} indices...`)
  const searchClient = algoliasearch(appId, apiKey)
  const searchIndex = searchClient.initIndex(index)
  // Upsert all records in one batch call
  await searchIndex.partialUpdateObjects(objects, {
    createIfNotExists: true,
  })
  // Remove indices for items that aren't included in the new batch.
  // Use a Set for O(1) membership checks instead of Array.includes (O(n²)).
  const newObjectIds = new Set(objects.map(({ objectID }) => objectID))
  let staleObjects = []
  await searchIndex.browseObjects({
    query: '',
    batch: (batch) => {
      staleObjects = staleObjects.concat(
        batch.filter(({ objectID }) => !newObjectIds.has(objectID))
      )
    },
  })
  const staleIds = staleObjects.map(({ objectID }) => objectID)
  if (staleIds.length > 0) {
    console.log(`deleting ${staleIds.length} stale indices:`)
    console.log(staleIds)
    await searchIndex.deleteObjects(staleIds)
  }
  console.log('done')
  // NOTE: previously errors here were re-wrapped via `throw new Error(error)`,
  // which stringified the original error and discarded its stack. Letting the
  // error propagate preserves it for the caller's catch/logging.
  process.exit(0)
}