build(docs-infra): store search dictionary as a string (#41447)
Previously, the search index info file contained an array of strings that served as the dictionary of terms in the corpus. Storing this as a single space-separated string reduces the size of the file. PR Close #41447
This commit is contained in:
parent
65cd44e731
commit
14a64acb72
|
@ -22,7 +22,7 @@ interface PageInfo {
|
||||||
}
|
}
|
||||||
|
|
||||||
interface EncodedPages {
|
interface EncodedPages {
|
||||||
dictionary: string[];
|
dictionary: string;
|
||||||
pages: EncodedPage[];
|
pages: EncodedPage[];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -91,11 +91,12 @@ function makeRequest(url: string, callback: (response: any) => void): void {
|
||||||
// Create the search index from the searchInfo which contains the information about each page to be
|
// Create the search index from the searchInfo which contains the information about each page to be
|
||||||
// indexed
|
// indexed
|
||||||
function loadIndex({dictionary, pages}: EncodedPages): IndexLoader {
|
function loadIndex({dictionary, pages}: EncodedPages): IndexLoader {
|
||||||
|
const dictionaryArray = dictionary.split(' ');
|
||||||
return (indexBuilder: lunr.Builder) => {
|
return (indexBuilder: lunr.Builder) => {
|
||||||
// Store the pages data to be used in mapping query results back to pages
|
// Store the pages data to be used in mapping query results back to pages
|
||||||
// Add search terms from each page to the search index
|
// Add search terms from each page to the search index
|
||||||
pages.forEach(encodedPage => {
|
pages.forEach(encodedPage => {
|
||||||
const page = decodePage(encodedPage, dictionary);
|
const page = decodePage(encodedPage, dictionaryArray);
|
||||||
indexBuilder.add(page);
|
indexBuilder.add(page);
|
||||||
pageMap[page.path] = page;
|
pageMap[page.path] = page;
|
||||||
});
|
});
|
||||||
|
|
|
@ -87,7 +87,7 @@ module.exports = function generateKeywordsProcessor(log) {
|
||||||
|
|
||||||
// Now process all the search data and collect it up to be used in creating a new document
|
// Now process all the search data and collect it up to be used in creating a new document
|
||||||
const searchData = {
|
const searchData = {
|
||||||
dictionary: Array.from(dictionary.keys()),
|
dictionary: Array.from(dictionary.keys()).join(' '),
|
||||||
pages: filteredDocs.map(page => {
|
pages: filteredDocs.map(page => {
|
||||||
// Copy the properties from the searchTerms object onto the search data object
|
// Copy the properties from the searchTerms object onto the search data object
|
||||||
const searchObj = {
|
const searchObj = {
|
||||||
|
|
|
@ -73,7 +73,7 @@ describe('generateKeywords processor', () => {
|
||||||
];
|
];
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
expect(docs[docs.length - 1].data).toEqual({
|
expect(docs[docs.length - 1].data).toEqual({
|
||||||
dictionary: [ 'fooclass', 'barinterfac', 'captur', 'content' ],
|
dictionary: 'fooclass barinterfac captur content',
|
||||||
pages: [
|
pages: [
|
||||||
jasmine.objectContaining({ title: 'FooClass', type: 'class', keywords: [0] }),
|
jasmine.objectContaining({ title: 'FooClass', type: 'class', keywords: [0] }),
|
||||||
jasmine.objectContaining({ title: 'BarInterface', type: 'interface', keywords: [1, 2, 3] }),
|
jasmine.objectContaining({ title: 'BarInterface', type: 'interface', keywords: [1, 2, 3] }),
|
||||||
|
@ -93,7 +93,7 @@ describe('generateKeywords processor', () => {
|
||||||
];
|
];
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
expect(docs[docs.length - 1].data).toEqual({
|
expect(docs[docs.length - 1].data).toEqual({
|
||||||
dictionary: ['class', 'fooclass', 'content', 'insid', 'tabl'],
|
dictionary: 'class fooclass content insid tabl',
|
||||||
pages: [
|
pages: [
|
||||||
jasmine.objectContaining({keywords: [0, 1, 2, 3, 4] })
|
jasmine.objectContaining({keywords: [0, 1, 2, 3, 4] })
|
||||||
],
|
],
|
||||||
|
@ -142,7 +142,7 @@ describe('generateKeywords processor', () => {
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
const keywordsDoc = docs[docs.length - 1];
|
const keywordsDoc = docs[docs.length - 1];
|
||||||
expect(keywordsDoc.data).toEqual({
|
expect(keywordsDoc.data).toEqual({
|
||||||
dictionary: ['class', 'publicexport', 'head', 'secondari'],
|
dictionary: 'class publicexport head secondari',
|
||||||
pages: [
|
pages: [
|
||||||
jasmine.objectContaining({ headings: [2, 3, 2] })
|
jasmine.objectContaining({ headings: [2, 3, 2] })
|
||||||
]
|
]
|
||||||
|
@ -175,7 +175,7 @@ describe('generateKeywords processor', () => {
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
const keywordsDoc = docs[docs.length - 1];
|
const keywordsDoc = docs[docs.length - 1];
|
||||||
expect(keywordsDoc.data).toEqual({
|
expect(keywordsDoc.data).toEqual({
|
||||||
dictionary: ['class', 'publicexport', 'content', 'ngclass', 'instancemethoda','instancepropertya','instancemethodb','instancepropertyb','staticmethoda','staticpropertya','staticmethodb','staticpropertyb', 'head'],
|
dictionary: 'class publicexport content ngclass instancemethoda instancepropertya instancemethodb instancepropertyb staticmethoda staticpropertya staticmethodb staticpropertyb head',
|
||||||
pages: [
|
pages: [
|
||||||
jasmine.objectContaining({
|
jasmine.objectContaining({
|
||||||
members: [4, 5, 6, 7, 8, 9, 10, 11]
|
members: [4, 5, 6, 7, 8, 9, 10, 11]
|
||||||
|
@ -220,7 +220,7 @@ describe('generateKeywords processor', () => {
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
const keywordsDoc = docs[docs.length - 1];
|
const keywordsDoc = docs[docs.length - 1];
|
||||||
expect(keywordsDoc.data).toEqual({
|
expect(keywordsDoc.data).toEqual({
|
||||||
dictionary: ['class', 'child', 'childmember1', 'childmember2', 'parentmember1', 'parentmember2', 'parentmember3', 'parentclass', 'interfac', 'parentinterfac'],
|
dictionary: 'class child childmember1 childmember2 parentmember1 parentmember2 parentmember3 parentclass interfac parentinterfac',
|
||||||
pages: [
|
pages: [
|
||||||
jasmine.objectContaining({
|
jasmine.objectContaining({
|
||||||
title: 'Child',
|
title: 'Child',
|
||||||
|
@ -252,7 +252,7 @@ describe('generateKeywords processor', () => {
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
const keywordsDoc = docs[docs.length - 1];
|
const keywordsDoc = docs[docs.length - 1];
|
||||||
expect(keywordsDoc.data).toEqual({
|
expect(keywordsDoc.data).toEqual({
|
||||||
dictionary: ['class', 'publicexport', 'ngcontrol', 'control', 'content', 'ngclass', 'ngmodel', 'model'],
|
dictionary: 'class publicexport ngcontrol control content ngclass ngmodel model',
|
||||||
pages: [
|
pages: [
|
||||||
jasmine.objectContaining({
|
jasmine.objectContaining({
|
||||||
headings: [6, 7],
|
headings: [6, 7],
|
||||||
|
@ -284,7 +284,7 @@ describe('generateKeywords processor', () => {
|
||||||
processor.$process(docs);
|
processor.$process(docs);
|
||||||
const keywordsDoc = docs[docs.length - 1];
|
const keywordsDoc = docs[docs.length - 1];
|
||||||
expect(JSON.parse(keywordsDoc.renderedContent)).toEqual({
|
expect(JSON.parse(keywordsDoc.renderedContent)).toEqual({
|
||||||
dictionary: ['class', 'someclass', 'document', 'api', 'head', 'someclass2', 'descript', 'member1'],
|
dictionary: 'class someclass document api head someclass2 descript member1',
|
||||||
pages: [{
|
pages: [{
|
||||||
'title':'SomeClass',
|
'title':'SomeClass',
|
||||||
'type':'class',
|
'type':'class',
|
||||||
|
|
Loading…
Reference in New Issue