PERF: Improve JS app boot speed by optimizing `customResolve()` (#14990)
Time spent in the 'find module with suffix' portion of our `customResolve` function was adding up to around 100ms-150ms when booting the app. This time is spread over 150+ calls, so it's not immediately obvious in flamegraphs. This commit implements a (reversed) [Trie](https://en.wikipedia.org/wiki/Trie) which enables fast suffix-based lookups on a list of strings. In my tests, this requires < 5ms to initialize, and brings the cumulative 'find module with suffix' time down to `< 5ms`. This corresponds to a ~100ms improvement in LCP metrics in my browser. The only behavior change is to remove support for module filenames which are **not** dasherized. I haven't found any core/theme/plugin modules which are not dasherized in their filenames.
This commit is contained in:
parent
a102673522
commit
135fdd59ed
|
@ -0,0 +1,87 @@
|
|||
// A single node in the (reversed) trie. Each node remembers its parent so
// that a stored value can be reconstructed by walking back up to the root.
class TrieNode {
  constructor(name, parent) {
    this.name = name;
    this.parent = parent;
    // Map of segment name -> child TrieNode
    this.children = new Map();
    // Insertion-order index if a stored value terminates here, else null
    this.leafIndex = null;
  }
}

// Given a set of strings, this class can allow efficient lookups
// based on suffixes.
//
// By default, it will create one Trie node per character. If your data
// has known delimiters (e.g. / in file paths), you can pass a separator
// to the constructor for better performance.
//
// Matching results will be returned in insertion order
export default class SuffixTrie {
  constructor(separator = "") {
    this._trie = new TrieNode();
    this.separator = separator;
    this._nextIndex = 0;
  }

  // Store `value` in the trie. Segments are inserted in reverse order so
  // that the last segment sits directly under the root, which is what
  // makes suffix lookups cheap.
  add(value) {
    const segments = value.split(this.separator);
    let node = this._trie;

    for (let i = segments.length - 1; i >= 0; i--) {
      const segment = segments[i];
      let child = node.children.get(segment);
      if (child === undefined) {
        child = new TrieNode(segment, node);
        node.children.set(segment, child);
      }
      node = child;
    }

    node.leafIndex = this._nextIndex++;
  }

  // Return every stored value ending with `suffix`, in insertion order.
  // When `resultCount` is non-null, at most that many values are returned.
  withSuffix(suffix, resultCount = null) {
    const segments = suffix.split(this.separator);

    // Walk down from the root following the (reversed) suffix. A missing
    // child means nothing was stored with this suffix.
    let node = this._trie;
    for (let i = segments.length - 1; i >= 0; i--) {
      node = node.children.get(segments[i]);
      if (!node) {
        return [];
      }
    }

    // Depth-first search for every leaf at or beneath that node
    const leaves = [];
    const stack = [node];
    while (stack.length > 0) {
      const current = stack.pop();
      if (current.leafIndex !== null) {
        leaves.push(current);
      }
      for (const child of current.children.values()) {
        stack.push(child);
      }
    }

    // Restore insertion order (leafIndex is unique, so ordering is total)
    leaves.sort((a, b) => a.leafIndex - b.leafIndex);

    // If a subset of results has been requested, truncate
    if (resultCount !== null) {
      leaves.splice(resultCount);
    }

    // Rebuild each full value by walking parent pointers up to the root
    // (the root node has no name, which terminates the walk)
    return leaves.map((leaf) => {
      const parts = [leaf.name];

      let ancestor = leaf.parent;
      while (typeof ancestor?.name === "string") {
        parts.push(ancestor.name);
        ancestor = ancestor.parent;
      }

      return parts.join(this.separator);
    });
  }
}
|
|
@ -2,8 +2,10 @@ import { classify, dasherize } from "@ember/string";
|
|||
import deprecated from "discourse-common/lib/deprecated";
|
||||
import { findHelper } from "discourse-common/lib/helpers";
|
||||
import { get } from "@ember/object";
|
||||
import SuffixTrie from "discourse-common/lib/suffix-trie";
|
||||
|
||||
let _options = {};
|
||||
let moduleSuffixTrie = null;
|
||||
|
||||
export function setResolverOption(name, value) {
|
||||
_options[name] = value;
|
||||
|
@ -34,6 +36,18 @@ function parseName(fullName) {
|
|||
};
|
||||
}
|
||||
|
||||
// Lazily build a SuffixTrie over all known requirejs module names
// (excluding templates), then return the first module whose name ends
// with `suffix`, or undefined when there is no match.
function lookupModuleBySuffix(suffix) {
  if (moduleSuffixTrie === null) {
    moduleSuffixTrie = new SuffixTrie("/");
    for (const name of Object.keys(requirejs.entries)) {
      // Template modules are resolved by other means, so skip them
      if (!name.includes("/templates/")) {
        moduleSuffixTrie.add(name);
      }
    }
  }
  return moduleSuffixTrie.withSuffix(suffix, 1)[0];
}
|
||||
|
||||
export function buildResolver(baseName) {
|
||||
return Ember.DefaultResolver.extend({
|
||||
parseName,
|
||||
|
@ -107,13 +121,7 @@ export function buildResolver(baseName) {
|
|||
// If we end with the name we want, use it. This allows us to define components within plugins.
|
||||
const suffix = parsedName.type + "s/" + parsedName.fullNameWithoutType,
|
||||
dashed = dasherize(suffix),
|
||||
moduleName = Object.keys(requirejs.entries).find(function (e) {
|
||||
return (
|
||||
e.indexOf("/templates/") === -1 &&
|
||||
(e.indexOf(suffix, e.length - suffix.length) !== -1 ||
|
||||
e.indexOf(dashed, e.length - dashed.length) !== -1)
|
||||
);
|
||||
});
|
||||
moduleName = lookupModuleBySuffix(dashed);
|
||||
|
||||
let module;
|
||||
if (moduleName) {
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
import { module, test } from "qunit";
|
||||
import SuffixTrie from "discourse-common/lib/suffix-trie";
|
||||
|
||||
module("Unit | SuffixTrie", function () {
  test("SuffixTrie", function (assert) {
    const trie = new SuffixTrie("/");

    // NB: insertion order matters — results come back in the order added
    const values = [
      "a/b/c/d",
      "b/a/c/d",
      "c/b/a/d",
      "d/c/b/a",
      "a/b/c/d/",
      "/a/b/c/d/",
    ];
    for (const value of values) {
      trie.add(value);
    }

    // Each case: [suffix, resultCount (undefined = unlimited), expected]
    const cases = [
      // Simple lookups
      ["d", undefined, ["a/b/c/d", "b/a/c/d", "c/b/a/d"]],
      ["c/d", undefined, ["a/b/c/d", "b/a/c/d"]],
      ["b/c/d", undefined, ["a/b/c/d"]],
      ["a/b/c/d", undefined, ["a/b/c/d"]],
      ["b/a", undefined, ["d/c/b/a"]],

      // With leading/trailing delimiters
      ["c/d/", undefined, ["a/b/c/d/", "/a/b/c/d/"]],
      ["/a/b/c/d/", undefined, ["/a/b/c/d/"]],

      // Limited lookups
      ["d", 1, ["a/b/c/d"]],
      ["d", 2, ["a/b/c/d", "b/a/c/d"]],
    ];
    for (const [suffix, resultCount, expected] of cases) {
      assert.deepEqual(trie.withSuffix(suffix, resultCount), expected);
    }
  });
});
|
Loading…
Reference in New Issue