Integrate @hashicorp/react-search/tools script
parent 26f3299fb7
commit e5cd5a9360
package-lock.json
@@ -1710,14 +1710,20 @@
       }
     },
     "@hashicorp/react-search": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@hashicorp/react-search/-/react-search-2.0.0.tgz",
-      "integrity": "sha512-nMH0pUPNEOEcuIcQ/NRiqlZOXJkK7XnIs+KcwFoRxX6UyWSUUkhHPUU+TGy4Wgs8T65tFl5y4Ksnv+za+3CdPQ==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@hashicorp/react-search/-/react-search-2.1.0.tgz",
+      "integrity": "sha512-vaTht+2G9ipsVyusK3b3TtUpuy9ccsxj3NMSWXJyGsoT39K1Oovb8aLiIlbUU5Ll72KEi5yq5OS3WAJDdSqW+g==",
       "requires": {
         "@hashicorp/react-inline-svg": "^1.0.2",
         "@hashicorp/remark-plugins": "^3.0.0",
+        "algoliasearch": "^4.4.0",
+        "dotenv": "^8.2.0",
+        "glob": "^7.1.6",
+        "gray-matter": "^4.0.2",
         "react-instantsearch-dom": "^6.7.0",
-        "search-insights": "^1.6.0"
+        "remark": "^12.0.1",
+        "search-insights": "^1.6.0",
+        "unist-util-visit": "^2.0.3"
       },
       "dependencies": {
         "@hashicorp/react-inline-svg": {
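The six packages added under `requires` here (algoliasearch, dotenv, glob, gray-matter, remark, unist-util-visit) are exactly the ones dropped from the site's own dependencies in the next hunk: as of 2.1.0, the indexing tooling and its dependencies ship inside `@hashicorp/react-search` itself. As a rough sketch (an inference from this lockfile entry, not the library's actual source), the new `tools` entry point presumably owns imports like these:

// Hypothetical outline of what @hashicorp/react-search/tools@2.1.0 pulls in,
// inferred from the "requires" entries added above, not the real source.
require('dotenv').config() // loads Algolia credentials; the site no longer does this itself

const algoliasearch = require('algoliasearch') // Algolia v4 client
const glob = require('glob') // locate .mdx content files
const matter = require('gray-matter') // parse front matter dimensions
const remark = require('remark') // process markdown ASTs...
const visit = require('unist-util-visit') // ...and visit heading nodes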
package.json
@@ -13,20 +13,14 @@
     "@hashicorp/react-head": "1.1.1",
     "@hashicorp/react-mega-nav": "4.0.1-2",
     "@hashicorp/react-product-downloader": "4.0.2",
-    "@hashicorp/react-search": "^2.0.0",
+    "@hashicorp/react-search": "^2.1.0",
     "@hashicorp/react-section-header": "2.0.0",
     "@hashicorp/react-subnav": "3.2.3",
     "@hashicorp/react-vertical-text-block-list": "2.0.1",
-    "algoliasearch": "^4.4.0",
     "babel-plugin-import-glob-array": "0.2.0",
-    "dotenv": "8.2.0",
-    "glob": "^7.1.6",
-    "gray-matter": "4.0.2",
     "next": "9.4.4",
     "react": "16.13.1",
-    "react-dom": "16.13.1",
-    "remark": "^12.0.1",
-    "unist-util-visit": "^2.0.3"
+    "react-dom": "16.13.1"
   },
   "devDependencies": {
     "dart-linkcheck": "2.0.15",
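On the site side the upgrade is a one-line version bump while six direct dependencies disappear; they now arrive transitively through `@hashicorp/react-search`, as the lockfile hunk above shows. The final hunk below then replaces the site's standalone indexing script with a single call into the library. Nothing in this diff changes how that script is invoked in CI; a hypothetical `scripts` entry, assuming a path like `scripts/index_search_content.js` (the file path is not visible in this diff):

"scripts": {
  "index-content": "node scripts/index_search_content.js"
}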
@@ -1,129 +1,3 @@
-require('dotenv').config()
+const { indexDocsContent } = require('@hashicorp/react-search/tools')
 
-const algoliasearch = require('algoliasearch')
-const glob = require('glob')
-const matter = require('gray-matter')
-const path = require('path')
-const remark = require('remark')
-const visit = require('unist-util-visit')
-
-// In addition to the content of the page,
-// define additional front matter attributes that will be search-indexable
-const SEARCH_DIMENSIONS = ['page_title', 'description']
-
-main()
-
-async function main() {
-  const pagesFolder = path.join(__dirname, '../pages')
-
-  // Grab all search-indexable content and format for Algolia
-  const searchObjects = await Promise.all(
-    glob
-      .sync(path.join(pagesFolder, '**/*.mdx'), {
-        ignore: path.join(pagesFolder, 'partials/**/*'),
-      })
-      .map(async (fullPath) => {
-        const { content, data } = matter.read(fullPath)
-
-        const searchableDimensions = SEARCH_DIMENSIONS.reduce(
-          (acc, dimension) => {
-            return { ...acc, [dimension]: data[dimension] }
-          },
-          {}
-        )
-
-        const headings = await collectHeadings(content)
-
-        // Get path relative to `pages`
-        const __resourcePath = fullPath.replace(`${pagesFolder}/`, '')
-
-        // Use clean URL for Algolia id
-        const objectID = __resourcePath.replace('.mdx', '')
-
-        return {
-          ...searchableDimensions,
-          headings,
-          objectID,
-        }
-      })
-  )
-
-  try {
-    await indexSearchContent(searchObjects)
-  } catch (e) {
-    console.error(e)
-    process.exit(1)
-  }
-}
-
-async function indexSearchContent(objects) {
-  const {
-    NEXT_PUBLIC_ALGOLIA_APP_ID: appId,
-    NEXT_PUBLIC_ALGOLIA_INDEX: index,
-    ALGOLIA_API_KEY: apiKey,
-  } = process.env
-
-  if (!apiKey || !appId || !index) {
-    throw new Error(
-      `[*** Algolia Search Indexing Error ***] Received: ALGOLIA_API_KEY=${apiKey} ALGOLIA_APP_ID=${appId} ALGOLIA_INDEX=${index} \n Please ensure all Algolia Search-related environment vars are set in CI settings.`
-    )
-  }
-
-  console.log(`updating ${objects.length} indices...`)
-
-  try {
-    const searchClient = algoliasearch(appId, apiKey)
-    const searchIndex = searchClient.initIndex(index)
-
-    const { objectIDs } = await searchIndex.partialUpdateObjects(objects, {
-      createIfNotExists: true,
-    })
-
-    let staleIds = []
-
-    await searchIndex.browseObjects({
-      query: '',
-      batch: (batch) => {
-        staleIds = staleIds.concat(
-          batch
-            .filter(({ objectID }) => !objectIDs.includes(objectID))
-            .map(({ objectID }) => objectID)
-        )
-      },
-    })
-
-    if (staleIds.length > 0) {
-      console.log(`deleting ${staleIds.length} stale indices:`)
-      console.log(staleIds)
-
-      await searchIndex.deleteObjects(staleIds)
-    }
-
-    console.log('done')
-    process.exit(0)
-  } catch (error) {
-    throw new Error(error)
-  }
-}
-
-async function collectHeadings(mdxContent) {
-  const headings = []
-
-  const headingMapper = () => (tree) => {
-    visit(tree, 'heading', (node) => {
-      const title = node.children.reduce((m, n) => {
-        if (n.value) m += n.value
-        return m
-      }, '')
-      // Only include level 1 or level 2 headings
-      if (node.depth < 3) {
-        headings.push(title)
-      }
-    })
-  }
-
-  return remark()
-    .use(headingMapper)
-    .process(mdxContent)
-    .then(() => headings)
-}
+indexDocsContent()
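The deleted 129-line script is, in effect, what `indexDocsContent()` now does on the site's behalf. Assuming the library kept the deleted script's behavior and environment variables (its real implementation and options are not shown in this diff), the core index-sync pattern it encapsulates looks like this:

// Condensed, hypothetical sketch of the sync step. It mirrors the deleted
// script above, not @hashicorp/react-search's actual source.
require('dotenv').config()
const algoliasearch = require('algoliasearch')

async function syncSearchIndex(searchObjects) {
  // Same env vars the deleted script required (set via dotenv or CI settings).
  const {
    NEXT_PUBLIC_ALGOLIA_APP_ID: appId,
    NEXT_PUBLIC_ALGOLIA_INDEX: index,
    ALGOLIA_API_KEY: apiKey,
  } = process.env

  const searchIndex = algoliasearch(appId, apiKey).initIndex(index)

  // Upsert one record per page; createIfNotExists covers brand-new pages.
  const { objectIDs } = await searchIndex.partialUpdateObjects(searchObjects, {
    createIfNotExists: true,
  })

  // Sweep the whole index and collect records no current page produced...
  let staleIds = []
  await searchIndex.browseObjects({
    query: '',
    batch: (batch) => {
      staleIds = staleIds.concat(
        batch
          .filter(({ objectID }) => !objectIDs.includes(objectID))
          .map(({ objectID }) => objectID)
      )
    },
  })

  // ...and delete them, so removed docs pages drop out of search results.
  if (staleIds.length > 0) await searchIndex.deleteObjects(staleIds)
}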
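The value of this upsert-then-sweep pattern is that the index stays consistent without a full rebuild: `partialUpdateObjects` with `createIfNotExists: true` handles new and changed pages in one call, and the `browseObjects` sweep catches records for pages that were deleted, which Algolia would otherwise keep serving indefinitely. Moving that logic into `@hashicorp/react-search/tools` lets any site consuming the package get the same behavior from a three-line script instead of maintaining its own copy.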