website: add refactored remote-plugin-docs utilities

This commit is contained in:
Zach Shilton 2021-03-18 12:21:53 -04:00
parent 3227d3da43
commit 341308c582
No known key found for this signature in database
GPG Key ID: 487BAB946192712E
5 changed files with 418 additions and 0 deletions

View File

@ -0,0 +1,61 @@
const fetch = require('isomorphic-unfetch')
const parseSourceZip = require('./parse-source-zip')
const parseDocsZip = require('./parse-docs-zip')
// Given a repo and tag,
//
// return [null, docsMdxFiles] if docs files
// are successfully fetched and valid,
// where docsMdxFiles is an array of { filePath, fileString } items.
//
// otherwise, return [err, null]
// where err is an error message describing whether the
// docs files were missing or invalid, with a path to resolution
async function fetchDocsFiles({ repo, tag }) {
  // Prefer a prebuilt docs.zip release asset when one is published
  const docsZipUrl = `https://github.com/${repo}/releases/download/${tag}/docs.zip`
  const docsZipResponse = await fetch(docsZipUrl, { method: 'GET' })
  if (docsZipResponse.status === 200) {
    // Note: early return!
    return await parseDocsZip(docsZipResponse)
  }
  // Without docs.zip, the "latest" tag cannot be resolved to a
  // fallback source ZIP unless we resort to calling the GitHub API,
  // which we do not want to do
  if (tag === 'latest') {
    const err = `Failed to fetch. Could not find "docs.zip" at ${docsZipUrl}. To fall back to parsing docs from "source", please provide a specific tag instead of "${tag}".`
    return [err, null]
  }
  // With a specific tag, fall back to parsing docs files
  // from the source archive for that tag
  const sourceZipUrl = `https://github.com/${repo}/archive/${tag}.zip`
  const sourceZipResponse = await fetch(sourceZipUrl, { method: 'GET' })
  if (sourceZipResponse.status !== 200) {
    const err = `Failed to fetch. Could not find "docs.zip" at ${docsZipUrl}, and could not find fallback source ZIP at ${sourceZipUrl}. Please ensure one of these assets is available.`
    return [err, null]
  }
  // Handle parsing from plugin source zip
  return await parseSourceZip(sourceZipResponse)
}
// Fetch and parse remote plugin docs for the given repo and tag.
// Returns an array of { filePath, fileString } docs files,
// or throws if the docs could not be fetched or are invalid.
async function fetchPluginDocs({ repo, tag }) {
  const [err, docsMdxFiles] = await fetchDocsFiles({ repo, tag })
  if (!err) return docsMdxFiles
  throw new Error(`Invalid plugin docs ${repo}, on release ${tag}. ${err}`)
}
// Memoize an async method, keyed by the JSON serialization of its
// arguments. A resolved result is cached for the lifetime of the
// process; a rejected promise is evicted from the cache so that a
// transient failure (e.g. a network hiccup while fetching docs)
// does not permanently poison subsequent calls with the same args.
function memoize(method) {
  const cache = {}
  return async function (...args) {
    const key = JSON.stringify(args)
    if (!cache[key]) {
      cache[key] = Promise.resolve(method.apply(this, args)).catch((err) => {
        // Don't cache failures; allow later calls to retry
        delete cache[key]
        throw err
      })
    }
    return cache[key]
  }
}
// Export a memoized version, so repeated lookups for the same
// { repo, tag } arguments reuse one fetch within this process
module.exports = memoize(fetchPluginDocs)

View File

@ -0,0 +1,44 @@
const path = require('path')
const AdmZip = require('adm-zip')
const validatePluginDocsFiles = require('./validate-plugin-docs-files')
// Given a response from fetching a docs.zip file,
// which is a compressed "docs" folder,
//
// return [null, docsMdxFiles] if docs files
// are successfully fetched and valid,
// where docsMdxFiles is an array of { filePath, fileString } items.
//
// otherwise, return [err, null]
// where err is an error message describing whether the
// docs files were missing or invalid, with a path to resolution
async function parseDocsZip(response) {
  // The file path from the repo root is the same as the zip entryName,
  // which includes the docs directory as the first part of the path
  const responseBuffer = Buffer.from(await response.arrayBuffer())
  const responseZip = new AdmZip(responseBuffer)
  const docsEntries = responseZip.getEntries()
  // Validate the file paths within the "docs" folder;
  // validationError is null when all paths are acceptable
  const docsFilePaths = docsEntries.map((e) => e.entryName)
  const validationError = validatePluginDocsFiles(docsFilePaths)
  if (validationError) return [validationError, null]
  // If valid, filter for MDX files only, and return
  // a { filePath, fileString } object for each mdx file
  const docsMdxFiles = docsEntries
    .filter((e) => {
      return path.extname(e.entryName) === '.mdx'
    })
    .map((e) => {
      const filePath = e.entryName
      const fileString = e.getData().toString()
      return { filePath, fileString }
    })
  return [null, docsMdxFiles]
}
// Exported for use when a release publishes a prebuilt docs.zip asset
module.exports = parseDocsZip

View File

@ -0,0 +1,49 @@
const path = require('path')
const AdmZip = require('adm-zip')
const validatePluginDocsFiles = require('./validate-plugin-docs-files')
// Given a response from fetching a source .zip file,
// which contains a "docs" folder,
//
// return [null, docsMdxFiles] if docs files
// are successfully fetched and valid,
// where docsMdxFiles is an array of { filePath, fileString } items.
//
// otherwise, return [err, null]
// where err is an error message describing whether the
// docs files were missing or invalid, with a path to resolution
async function parseSourceZip(response) {
  const responseBuffer = Buffer.from(await response.arrayBuffer())
  const responseZip = new AdmZip(responseBuffer)
  const sourceEntries = responseZip.getEntries()
  // GitHub source archives nest everything under a single top-level
  // directory, e.g. "packer-plugin-docs-0.0.5/". Paths relative to the
  // repo root are the entryName with that leading directory removed.
  const stripRepoDir = (entryName) => entryName.split('/').slice(1).join('/')
  const docsEntries = sourceEntries.filter((entry) => {
    // filter for zip entries in the docs subfolder only
    const dirs = path.dirname(entry.entryName).split('/')
    return dirs.length > 1 && dirs[1] === 'docs'
  })
  // Validate the file paths within the "docs" folder;
  // validationError is null when all paths are acceptable
  const docsFilePaths = docsEntries.map((e) => stripRepoDir(e.entryName))
  const validationError = validatePluginDocsFiles(docsFilePaths)
  if (validationError) return [validationError, null]
  // If valid, filter for MDX files only, and return
  // a { filePath, fileString } object for each mdx file
  const docsMdxFiles = docsEntries
    .filter((e) => {
      return path.extname(e.entryName) === '.mdx'
    })
    .map((e) => {
      const filePath = stripRepoDir(e.entryName)
      const fileString = e.getData().toString()
      return { filePath, fileString }
    })
  return [null, docsMdxFiles]
}
// Exported as the fallback parser when no docs.zip asset is published
module.exports = parseSourceZip

View File

@ -0,0 +1,218 @@
const fs = require('fs')
const path = require('path')
const grayMatter = require('gray-matter')
const fetchPluginDocs = require('./fetch-plugin-docs')
const validateFilePaths = require('@hashicorp/react-docs-sidenav/utils/validate-file-paths')
const validateRouteStructure = require('@hashicorp/react-docs-sidenav/utils/validate-route-structure')
/**
* Resolves nav-data from file, including optional
* resolution of remote plugin docs entries
*
* @param {string} navDataFile path to the nav-data.json file, relative to the cwd. Example: "data/docs-nav-data.json".
* @param {string} localContentDir path to the content root, relative to the cwd. Example: "content/docs".
* @param {object} options optional configuration object
* @param {string} options.remotePluginsFile path to a remote-plugins.json file, relative to the cwd. Example: "data/docs-remote-plugins.json".
* @returns {array} the resolved navData. This includes NavBranch nodes pulled from remote plugin repositories, as well as filePath properties on all local NavLeaf nodes, and remoteFile properties on all NavLeafRemote nodes.
*/
async function resolveNavData(navDataFile, localContentDir, options = {}) {
  const { remotePluginsFile } = options
  // Read and parse the nav-data file, resolved relative to the cwd
  const navDataPath = path.join(process.cwd(), navDataFile)
  const navData = JSON.parse(fs.readFileSync(navDataPath, 'utf8'))
  // When a remote-plugins config is provided, fetch those plugin docs
  // and merge them in, which yields branches with NavLeafRemote nodes
  const withPlugins = remotePluginsFile
    ? await mergeRemotePlugins(remotePluginsFile, navData)
    : navData
  // Resolve local filePaths for NavLeaf nodes
  const withFilePaths = await validateFilePaths(withPlugins, localContentDir)
  validateRouteStructure(withFilePaths)
  // Return the nav data with:
  // 1. Plugins merged, transformed into navData structures with NavLeafRemote nodes
  // 2. filePaths added to all local NavLeaf nodes
  return withFilePaths
}
// Given a remote plugins config file, and the full tree of docs navData which
// contains top-level branch routes that match plugin component types,
// fetch and parse all remote plugin docs, merge them into the
// broader tree of docs navData, and return the docs navData
// with the merged plugin docs
async function mergeRemotePlugins(remotePluginsFile, navData) {
  // Read in and parse the plugin configuration JSON
  const remotePluginsPath = path.join(process.cwd(), remotePluginsFile)
  const pluginEntries = JSON.parse(fs.readFileSync(remotePluginsPath, 'utf-8'))
  // Add navData for each plugin's component.
  // Note that leaf nodes include a remoteFile property object with the full MDX fileString
  const pluginEntriesWithDocs = await Promise.all(
    pluginEntries.map(resolvePluginEntryDocs)
  )
  // group navData by component type, to prepare to merge plugin docs
  // into the broader tree of navData.
  const pluginDocsByComponent = pluginEntriesWithDocs.reduce(
    (acc, pluginEntry) => {
      const { components } = pluginEntry
      Object.keys(components).forEach((type) => {
        // (renamed from "navData" to avoid shadowing the outer parameter)
        const componentNavData = components[type]
        if (!componentNavData) return
        if (!acc[type]) acc[type] = []
        acc[type].push(componentNavData[0])
      })
      return acc
    },
    {}
  )
  // merge plugin docs, by plugin component type,
  // into the corresponding top-level component NavBranch
  const navDataWithPlugins = navData.slice().map((n) => {
    // we only care about top-level NavBranch nodes
    if (!n.routes) return n
    // find the component type, if any, for which this NavBranch
    // is the parent route (undefined when no route path matches)
    const typeMatch = Object.keys(pluginDocsByComponent).find(
      (componentType) => n.routes.some((r) => r.path === componentType)
    )
    // if this NavBranch does not match a component type slug,
    // then return it unmodified
    if (!typeMatch) return n
    // if there are no matching remote plugin components,
    // then return the navBranch unmodified
    const pluginsOfType = pluginDocsByComponent[typeMatch]
    if (!pluginsOfType || pluginsOfType.length === 0) return n
    // if this NavBranch is the parent route for the type,
    // then append all remote plugins of this type to the
    // NavBranch's child routes
    const routesWithPlugins = n.routes.slice().concat(pluginsOfType)
    // Also, sort the child routes so the order is alphabetical
    routesWithPlugins.sort((a, b) => {
      // (exception: "Overview" comes first)
      if (a.title === 'Overview') return -1
      if (b.title === 'Overview') return 1
      // (exception: "Community-Supported" comes last)
      if (a.title === 'Community-Supported') return 1
      if (b.title === 'Community-Supported') return -1
      // (exception: "Custom" comes second-last)
      if (a.title === 'Custom') return 1
      if (b.title === 'Custom') return -1
      return a.title < b.title ? -1 : a.title > b.title ? 1 : 0
    })
    return { ...n, routes: routesWithPlugins }
  })
  // return the merged navData, which now includes special NavLeaf nodes
  // for plugin docs with { filePath, fileString } remoteFile properties
  return navDataWithPlugins
}
// Fetch remote plugin docs .mdx files, and
// transform each plugin's array of .mdx files into navData.
// Organize this navData by component, add it to the plugin config entry,
// and return the modified entry.
//
// Note that navData leaf nodes have a special remoteFile property,
// which contains { filePath, fileString } data for the remote
// plugin doc .mdx file
async function resolvePluginEntryDocs(pluginConfigEntry) {
  const { title, path: slug, repo, version } = pluginConfigEntry
  const docsMdxFiles = await fetchPluginDocs({ repo, tag: version })
  // We construct a special kind of "NavLeaf" node, with a remoteFile property,
  // consisting of a { filePath, fileString, sourceUrl }, where:
  // - filePath is the path to the source file in the source repo
  // - fileString is a string representing the file source
  // - sourceUrl is a link to the original file in the source repo
  // We also add a pluginTier attribute
  const navNodes = docsMdxFiles.map((mdxFile) => {
    const { filePath, fileString } = mdxFile
    // Process into a NavLeaf, with a remoteFile attribute
    const dirs = path.dirname(filePath).split('/')
    const dirUrl = dirs.slice(2).join('/')
    // Strip the file extension: path.basename(filePath) alone keeps
    // ".mdx", so "index.mdx" would never be detected as an index file,
    // URL paths would carry a ".mdx" suffix, and the title fallback
    // would read e.g. "index.mdx"
    const basename = path.basename(filePath, path.extname(filePath))
    // build urlPath
    // note that this will be prefixed to get to our final path
    const isIndexFile = basename === 'index'
    const urlPath = isIndexFile ? dirUrl : path.join(dirUrl, basename)
    // parse title, either from frontmatter or file name
    const { data: frontmatter } = grayMatter(fileString)
    const { nav_title, sidebar_title } = frontmatter
    const title = nav_title || sidebar_title || basename
    // construct sourceUrl
    const sourceUrl = `https://github.com/${repo}/blob/${version}/${filePath}`
    // determine pluginTier
    const pluginOwner = repo.split('/')[0]
    const pluginTier = pluginOwner === 'hashicorp' ? 'official' : 'community'
    // Construct and return a NavLeafRemote node
    return {
      title,
      path: urlPath,
      remoteFile: { filePath, fileString, sourceUrl },
      pluginTier,
    }
  })
  // Group leaf nodes by their component type, which is the second
  // path segment, e.g. "docs/<componentType>/..."
  const navNodesByComponent = navNodes.reduce((acc, navLeaf) => {
    const componentType = navLeaf.remoteFile.filePath.split('/')[1]
    if (!acc[componentType]) acc[componentType] = []
    acc[componentType].push(navLeaf)
    return acc
  }, {})
  // Build prefixed navData for each component type present
  const components = Object.keys(navNodesByComponent).map((type) => {
    // Plugins may not contain every component type,
    // we return null if this is the case
    const rawNavNodes = navNodesByComponent[type]
    if (!rawNavNodes) return null
    // Avoid unnecessary nesting if there's only a single doc file
    const navData = normalizeNavNodes(title, rawNavNodes)
    // Prefix paths to fit into broader docs nav-data
    const pathPrefix = path.join(type, slug)
    const withPrefixedPaths = visitNavLeaves(navData, (n) => {
      const prefixedPath = path.join(pathPrefix, n.path)
      return { ...n, path: prefixedPath }
    })
    return { type, navData: withPrefixedPaths }
  })
  // Collect into a { [componentType]: navData } object
  const componentsObj = components.reduce((acc, component) => {
    if (!component) return acc
    acc[component.type] = component.navData
    return acc
  }, {})
  return { ...pluginConfigEntry, components: componentsObj }
}
// For components with a single doc file, transform so that
// a single leaf node renders, rather than a nav branch
// For components with a single doc file, transform so that
// a single leaf node renders, rather than a nav branch.
// Returns a new array; the input routes array is not mutated.
function normalizeNavNodes(pluginName, routes) {
  const [firstRoute] = routes
  const hasSingleLeaf =
    routes.length === 1 && typeof firstRoute.path !== 'undefined'
  if (hasSingleLeaf) {
    // Collapse to one leaf node at the component root
    return [{ ...firstRoute, path: '' }]
  }
  // Otherwise wrap all routes in a branch titled with the plugin name
  return [{ title: pluginName, routes }]
}
// Traverse a clone of the given navData,
// modifying any NavLeaf nodes with the provided visitFn
// Traverse a clone of the given navData, returning a new tree in
// which every NavLeaf node (any node with a `path` property) has
// been replaced by the result of visitFn(node). Branch nodes are
// shallow-copied; nodes with neither `path` nor `routes` pass through.
function visitNavLeaves(navData, visitFn) {
  const visitNode = (navNode) => {
    const isLeaf = typeof navNode.path !== 'undefined'
    if (isLeaf) return visitFn(navNode)
    if (navNode.routes) {
      // Recurse into branch children
      return { ...navNode, routes: visitNavLeaves(navNode.routes, visitFn) }
    }
    return navNode
  }
  return navData.slice().map(visitNode)
}
// Export the top-level resolver; helpers above are module-private
module.exports = resolveNavData

View File

@ -0,0 +1,46 @@
const path = require('path')
// Valid top-level component-type subfolders that may appear
// within a plugin's "docs" directory
const COMPONENT_TYPES = [
  'builders',
  'datasources',
  'post-processors',
  'provisioners',
]
// Given an array of file paths within the "docs" folder,
// validate that no unexpected files are being included,
// and that there is at least one component subfolder
// with at least one .mdx file within it.
//
// Returns null when the paths are valid, or an error-message
// string describing the problem and how to resolve it.
function validatePluginDocsFiles(filePaths) {
  // A path is valid if it is the docs root itself, a known
  // component-type subfolder, or an .mdx file within one
  function isValidPath(filePath) {
    const isDocsRoot = filePath === 'docs/'
    const isComponentRoot = COMPONENT_TYPES.some(
      (type) => filePath === `docs/${type}/`
    )
    const isComponentMdx = COMPONENT_TYPES.some((type) =>
      new RegExp(`^docs/${type}/(.*).mdx$`).test(filePath)
    )
    return isDocsRoot || isComponentRoot || isComponentMdx
  }
  const invalidPaths = filePaths.filter((f) => !isValidPath(f))
  if (invalidPaths.length > 0) {
    return `Found invalid files or folders in the docs directory: ${JSON.stringify(
      invalidPaths
    )}. Please ensure the docs folder contains only component subfolders and .mdx files within those subfolders. Valid component types are: ${JSON.stringify(
      COMPONENT_TYPES
    )}.`
  }
  // Past this point every path is valid, so filtering the original
  // list for .mdx files counts exactly the component doc files
  const mdxFiles = filePaths.filter((fp) => path.extname(fp) === '.mdx')
  if (mdxFiles.length === 0) {
    return `Could not find valid .mdx files. Please ensure there is at least one component subfolder in the docs directory, which contains at least one .mdx file. Valid component types are: ${JSON.stringify(
      COMPONENT_TYPES
    )}.`
  }
  return null
}
// Shared by both the docs.zip and source-zip parsers
module.exports = validatePluginDocsFiles