
Commit 83b8bbd

Add a small LRU cache in front of calls to normalize
We are almost always normalizing the same strings over and over again. Think about iterating over mappings: we reconstruct each mapping's source, but the mappings are in sorted order, so consecutive mappings will usually have the same source. This makes us take 0.09x the time we used to take on the "iterate.already.parsed" benchmark!

Without the LRU cache:

Samples    Total (ms)    Mean (ms)    Standard Deviation (ms)
50         257604.64     5152.09      221.19

With the new LRU cache:

Samples    Total (ms)    Mean (ms)    Standard Deviation (ms)
50         23301.74      466.03       56.14
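A minimal sketch of the access pattern this exploits: because mappings are visited in sorted order, consecutive mappings usually name the same source, so a memoized normalize only does real work when the source changes. The mappings array and counter below are invented for illustration and are not part of the commit.

// Sketch only: count how often consecutive mappings repeat the previous source,
// which is exactly the case the LRU cache in front of normalize() catches.
var fakeMappings = [
  { source: "webpack:///src/a.js", generatedLine: 1 },
  { source: "webpack:///src/a.js", generatedLine: 2 },
  { source: "webpack:///src/a.js", generatedLine: 3 },
  { source: "webpack:///src/b.js", generatedLine: 4 },
  { source: "webpack:///src/b.js", generatedLine: 5 },
];

var repeats = 0;
for (var i = 1; i < fakeMappings.length; i++) {
  if (fakeMappings[i].source === fakeMappings[i - 1].source) {
    repeats++;
  }
}

// 3 of 4 transitions keep the same source, so a memoized normalize()
// would only compute a fresh result twice instead of five times.
console.log(repeats + " of " + (fakeMappings.length - 1) + " lookups repeat the previous source");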
1 parent 57d2dcb commit 83b8bbd

File tree

2 files changed: +78 -4 lines changed

dist/source-map.js

+39 -2

@@ -145,6 +145,43 @@ function urlGenerate(aParsedUrl) {
 }
 exports.urlGenerate = urlGenerate;
 
+const MAX_CACHED_INPUTS = 32;
+
+/**
+ * Takes some function `f(input) -> result` and returns a memoized version of
+ * `f`.
+ *
+ * We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The
+ * memoization is a dumb-simple, linear least-recently-used cache.
+ */
+function lruMemoize(f) {
+  const cache = [];
+
+  return function (input) {
+    for (var i = 0; i < cache.length; i++) {
+      if (cache[i].input === input) {
+        var temp = cache[0];
+        cache[0] = cache[i];
+        cache[i] = temp;
+        return cache[0].result;
+      }
+    }
+
+    var result = f(input);
+
+    cache.unshift({
+      input,
+      result,
+    });
+
+    if (cache.length > MAX_CACHED_INPUTS) {
+      cache.pop();
+    }
+
+    return result;
+  };
+}
+
 /**
  * Normalizes a path, or the path portion of a URL:
  *
@@ -156,7 +193,7 @@ exports.urlGenerate = urlGenerate;
  *
  * @param aPath The path or url to normalize.
  */
-function normalize(aPath) {
+var normalize = lruMemoize(function normalize(aPath) {
   var path = aPath;
   var url = urlParse(aPath);
   if (url) {
@@ -216,7 +253,7 @@ function normalize(aPath) {
     return urlGenerate(url);
   }
   return path;
-}
+});
 exports.normalize = normalize;
 
 /**

lib/util.js

+39 -2

@@ -66,6 +66,43 @@ function urlGenerate(aParsedUrl) {
 }
 exports.urlGenerate = urlGenerate;
 
+const MAX_CACHED_INPUTS = 32;
+
+/**
+ * Takes some function `f(input) -> result` and returns a memoized version of
+ * `f`.
+ *
+ * We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The
+ * memoization is a dumb-simple, linear least-recently-used cache.
+ */
+function lruMemoize(f) {
+  const cache = [];
+
+  return function (input) {
+    for (var i = 0; i < cache.length; i++) {
+      if (cache[i].input === input) {
+        var temp = cache[0];
+        cache[0] = cache[i];
+        cache[i] = temp;
+        return cache[0].result;
+      }
+    }
+
+    var result = f(input);
+
+    cache.unshift({
+      input,
+      result,
+    });
+
+    if (cache.length > MAX_CACHED_INPUTS) {
+      cache.pop();
+    }
+
+    return result;
+  };
+}
+
 /**
  * Normalizes a path, or the path portion of a URL:
  *
@@ -77,7 +114,7 @@ exports.urlGenerate = urlGenerate;
  *
  * @param aPath The path or url to normalize.
  */
-function normalize(aPath) {
+var normalize = lruMemoize(function normalize(aPath) {
   var path = aPath;
   var url = urlParse(aPath);
   if (url) {
@@ -137,7 +174,7 @@ function normalize(aPath) {
     return urlGenerate(url);
   }
   return path;
-}
+});
 exports.normalize = normalize;
 
 /**
