author     hackademix  2019-10-08 11:20:30 +0200
committer  hackademix  2019-10-08 11:21:43 +0200
commit     9769846552a33a8f258bda4c10d534773afe5428 (patch)
tree       99835f8425e98c81bcd6346c0d85e85f90850949 /src
parent     23351415908c19a67ed4fdbe43b8e29f73bc6835 (diff)
Support for splitting sync storage items into chunks, to allow synchronization of big policies across devices.
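As a rough sketch of the key layout introduced below (the key name "policy" and the three-chunk split are illustrative; the 4096-character threshold is the patch's MAX_ITEM_SIZE constant and the "/CHUNKS" suffix comes from its chunksKey helper):

  // Hypothetical browser.storage.sync contents after
  // Storage.set("sync", {policy: bigValue})
  // when JSON.stringify(bigValue).length > 4096:
  //
  //   "policy"        -> "[CHUNKED]"   (placeholder left in the main key)
  //   "policy/CHUNKS" -> 3             (number of chunks)
  //   "policy/0"      -> JSON characters 0..4095
  //   "policy/1"      -> JSON characters 4096..8191
  //   "policy/2"      -> the remainder
  //
  // Storage.get("sync", "policy") fetches the chunks, joins them in order
  // and JSON.parse()s the result back into the original object.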
Diffstat (limited to 'src')
-rw-r--r--  src/common/Storage.js     | 163
-rw-r--r--  src/test/Storage_test.js  |  69
-rw-r--r--  src/test/Test.js          |   9
-rw-r--r--  src/test/run.js           |   1
4 files changed, 206 insertions, 36 deletions
diff --git a/src/common/Storage.js b/src/common/Storage.js
index 0e8181e..c534bfb 100644
--- a/src/common/Storage.js
+++ b/src/common/Storage.js
@@ -1,64 +1,165 @@
-var Storage = {
+"use strict";
+var Storage = (() => {
- async safeOp(op, type, keys) {
+ let chunksKey = k => `${k}/CHUNKS`;
+
+ async function safeOp(op, type, keys) {
let sync = type === "sync";
- if (sync && op === "get") {
- let localFallback = await this.localFallback();
- if (localFallback.size) {
- for (let k of Array.isArray(keys) ? keys : [keys]) {
- if (localFallback.has(k)) {
- type = "local";
- break;
+
+ try {
+ if (sync) {
+ let remove = op === "remove";
+ if (remove || op === "get") {
+ keys = [].concat(keys); // don't touch the passed argument
+ let mergeResults = {};
+ let localFallback = await getLocalFallback();
+ if (localFallback.size) {
+ let localKeys = keys.filter(k => localFallback.has(k));
+ if (localKeys.length) {
+ if (remove) {
+ await browser.storage.local.remove(localKeys);
+ for (let k of localKeys) {
+ localFallback.delete(k);
+ }
+ await setLocalFallback(localFallback);
+ } else {
+ mergeResults = await browser.storage.local.get(localKeys);
+ }
+ keys = keys.filter(k => !localFallback.has(k));
+ }
+ }
+
+ if (keys.length) { // we may not have non-fallback keys anymore
+ let chunkCounts = Object.entries(await browser.storage.sync.get(
+ keys.map(chunksKey)))
+ .map(([k, count]) => [k.split("/")[0], count]);
+ if (chunkCounts.length) {
+ let chunkedKeys = [];
+ for (let [k, count] of chunkCounts) {
+ // prepare to fetch all the chunks at once
+ while (count-- > 0) chunkedKeys.push(`${k}/${count}`);
+ }
+ if (remove) {
+ let doomedKeys = keys
+ .concat(chunkCounts.map(([k, count]) => chunksKey(k)))
+ .concat(chunkedKeys);
+ return await browser.storage.sync.remove(doomedKeys);
+ } else {
+ let chunks = await browser.storage.sync.get(chunkedKeys);
+ for (let [k, count] of chunkCounts) {
+ let orderedChunks = [];
+ for (let j = 0; j < count; j++) {
+ orderedChunks.push(chunks[`${k}/${j}`]);
+ }
+ let whole = orderedChunks.join('');
+ try {
+ mergeResults[k] = JSON.parse(whole);
+ keys.splice(keys.indexOf(k), 1); // remove from "main" keys
+ } catch (e) {
+ error(e, "Could not parse chunked storage key %s (%s).", k, whole);
+ }
+ }
+ }
+ }
+ }
+ return keys.length ?
+ Object.assign(mergeResults, await browser.storage.sync[op](keys))
+ : mergeResults;
+ } else if (op === "set") {
+ keys = Object.assign({}, keys); // don't touch the passed argument
+ const MAX_ITEM_SIZE = 4096;
+ // Firefox Sync's max object byte size is 16384, Chrome's 8192.
+ // Rather than measuring actual bytes, we play it safe by halving the
+ // lowest to cope with escapes / multibyte characters.
+ for (let k of Object.keys(keys)) {
+ let s = JSON.stringify(keys[k]);
+ if (s.length > MAX_ITEM_SIZE) {
+ let count = Math.ceil(s.length / MAX_ITEM_SIZE);
+ let chunksCountKey = chunksKey(k);
+ let oldCount = (await browser.storage.sync.get(chunksCountKey))[chunksCountKey]; // unwrap the count from the result object
+ let chunks = {
+ [chunksCountKey]: count
+ };
+ for(let j = 0, o = 0; j < count; ++j, o += MAX_ITEM_SIZE) {
+ chunks[`${k}/${j}`] = s.substr(o, MAX_ITEM_SIZE);
+ }
+ await browser.storage.sync.set(chunks);
+ keys[k] = "[CHUNKED]";
+ if (oldCount-- > count) {
+ let oldChunks = [];
+ do {
+ oldChunks.push(`${k}/${oldCount}`);
+ } while(oldCount-- > count);
+ await browser.storage.sync.remove(oldChunks);
+ }
+ }
}
}
}
- }
- try {
+
let ret = await browser.storage[type][op](keys);
if (sync && op === "set") {
- let localFallback = await this.localFallback();
+ let localFallback = await getLocalFallback();
let size = localFallback.size;
if (size > 0) {
for (let k of Object.keys(keys)) {
localFallback.delete(k);
}
- if (size > localFallback.size) this.localFallback(localFallback);
+ if (size > localFallback.size) {
+ await setLocalFallback(localFallback);
+ }
}
}
return ret;
} catch (e) {
+ error(e, "%s.%s(%o)", type, op, keys);
if (sync) {
debug("Sync disabled? Falling back to local storage (%s %o)", op, keys);
- let localFallback = await this.localFallback();
+ let localFallback = await getLocalFallback();
let failedKeys = Array.isArray(keys) ? keys
: typeof keys === "string" ? [keys] : Object.keys(keys);
for (let k of failedKeys) {
localFallback.add(k);
}
- await this.localFallback(localFallback);
+ await setLocalFallback(localFallback);
} else {
- error(e);
throw e;
}
}
return await browser.storage.local[op](keys);
- },
+ }
- async get(type, keys) {
- return await this.safeOp("get", type, keys);
- },
+ const LFK_NAME = "__fallbackKeys";
+ async function setLocalFallback(keys) {
+ return await browser.storage.local.set({[LFK_NAME]: [...keys]});
+ }
+ async function getLocalFallback() {
+ let keys = (await browser.storage.local.get(LFK_NAME))[LFK_NAME];
+ return new Set(Array.isArray(keys) ? keys : []);
+ }
- async set(type, keys) {
- return await this.safeOp("set", type, keys);
- },
+ return {
+ async get(type, keys) {
+ return await safeOp("get", type, keys);
+ },
- async localFallback(keys) {
- let name = "__fallbackKeys";
- if (keys) {
- return await browser.storage.local.set({[name]: [...keys]});
+ async set(type, keys) {
+ return await safeOp("set", type, keys);
+ },
+
+ async remove(type, keys) {
+ return await safeOp("remove", type, keys);
+ },
+
+ async hasLocalFallback(key) {
+ return (await getLocalFallback()).has(key);
+ },
+
+ async isChunked(key) {
+ let ccKey = chunksKey(key);
+ let data = await browser.storage.sync.get([key, ccKey]);
+ return data[key] === "[CHUNKED]" && parseInt(data[ccKey]);
}
- let fallbackKeys = (await browser.storage.local.get(name))[name];
- return new Set(Array.isArray(fallbackKeys) ? fallbackKeys : []);
- }
-}
+ };
+})()
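A minimal usage sketch of the public surface returned above (the "policy" key and the object stored under it are made up for illustration):

  (async () => {
    // Large values are chunked transparently; if sync storage is unavailable,
    // the write falls back to local storage and the key is remembered in
    // the __fallbackKeys set.
    await Storage.set("sync", {policy: {sites: ["https://example.com"]}});

    // get() reassembles chunks and merges local-fallback keys back in.
    let {policy} = await Storage.get("sync", "policy");

    // Introspection helpers added by this commit.
    let chunked = await Storage.isChunked("policy");          // truthy if stored as chunks
    let fellBack = await Storage.hasLocalFallback("policy");  // true if it went to local storage

    // remove() also deletes the per-chunk keys and the "/CHUNKS" counter.
    await Storage.remove("sync", "policy");
  })();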
diff --git a/src/test/Storage_test.js b/src/test/Storage_test.js
new file mode 100644
index 0000000..c0a5af8
--- /dev/null
+++ b/src/test/Storage_test.js
@@ -0,0 +1,69 @@
+"use strict";
+{
+ let makeBigObj = propsNum => {
+ let bigObj = {};
+ for (let j = propsNum; j-- > 0;) {
+ let x = "0000".concat(j.toString(16)).slice(-4);
+ bigObj[`k${x}`] = `v${x}`;
+ }
+ log("[TEST] created bigObj %s JSON characters long.", JSON.stringify(bigObj).length)
+ return bigObj;
+ }
+ let HUGE_SIZE = 16000,
+ BIG_SIZE = 1000;
+ let bigObject = makeBigObj(BIG_SIZE);
+ let hugeObject = makeBigObj(HUGE_SIZE);
+ let items = {"small1": {x: 1, y: 2}, bigObject, "small2": {k:3, j: 4}};
+ let keys = Object.keys(items);
+ keys.push("hugeObject");
+
+ let eq = async (key, prop, val) => {
+ let current = (await Storage.get("sync", key))[key];
+ let ok = current[prop] === val;
+ log("[TEST] sync.%s.%s %s %s\n(%o)", key, prop, ok ? "==" : "!=", val, current);
+ return ok;
+ };
+
+ let fallbackOrChunked = async key => {
+ let fallback = await Storage.hasLocalFallback(key);
+ let chunked = await Storage.isChunked(key);
+ log("[TEST] %s fallback: %s, chunked: %s", key, fallback, chunked);
+ return fallback ? !chunked : chunked;
+ }
+
+ let checkSize = async (key, size) =>
+ Object.keys((await Storage.get("sync", key))[key]).length === size;
+
+ let all;
+
+ (async () => {
+ for(let t of [
+ async () => {
+ await Storage.set("sync", items)
+ await Storage.set("sync", {hugeObject}); // fallback to local
+ all = await Storage.get("sync", keys);
+ log("[TEST] Storage:\nsync %o\nlocal %o\nfiltered (%o) %o",
+ await browser.storage.sync.get(),
+ await browser.storage.local.get(),
+ keys, all);
+ return Object.keys(all).length === keys.length;
+ },
+ async () => checkSize("hugeObject", HUGE_SIZE),
+ async () => checkSize("bigObject", BIG_SIZE),
+ async () => await fallbackOrChunked("bigObject"),
+ async () => await fallbackOrChunked("hugeObject"),
+ async () => await eq("small1", "y", 2),
+ async () => await eq("small2", "k", 3),
+ async () => await eq("bigObject", "k0000", "v0000"),
+ async () => await eq("hugeObject", "k0001", "v0001"),
+ async () => {
+ await Storage.remove("sync", keys);
+ let myItems = await Storage.get("sync", keys);
+ return Object.keys(myItems).length === 0;
+ },
+ ]) {
+ await Test.run(t);
+ }
+ Test.report();
+ })();
+}
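For orientation, a rough size estimate behind the two test fixtures (assuming the usual 102,400-byte total browser.storage.sync quota; the per-entry figure just counts the JSON characters of one "kXXXX":"vXXXX" pair):

  // Each property serializes to roughly 16 JSON characters ("k0000":"v0000",).
  // bigObject  (BIG_SIZE  =  1000) -> ~16 KB of JSON: over the 4096-character
  //   per-item threshold, so it gets chunked, but still within the total quota.
  // hugeObject (HUGE_SIZE = 16000) -> ~256 KB of JSON: exceeds the total sync
  //   quota, so sync.set() throws and the key falls back to local storage,
  //   which is what the "fallback to local" comment and the
  //   fallbackOrChunked() check expect.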
diff --git a/src/test/Test.js b/src/test/Test.js
index 22145a6..d76f6e9 100644
--- a/src/test/Test.js
+++ b/src/test/Test.js
@@ -26,17 +26,16 @@ var Test = (() => {
error(e);
}
this[r ? "passed" : "failed"]++;
- log(`${r ? "PASSED" : "FAILED"} ${msg || test}`);
+ log(`[TEST] ${r ? "PASSED" : "FAILED"} ${msg || test}`);
if (typeof callback === "function") try {
- callback(r, test, msg);
+ await callback(r, test, msg);
} catch(e) {
- error(e);
+ error(e, "[TEST]");
}
},
-
report() {
let {passed, failed} = this;
- log(`FAILED: ${failed}, PASSED: ${passed}, TOTAL ${passed + failed}.`);
+ log(`[TESTS] FAILED: ${failed}, PASSED: ${passed}, TOTAL ${passed + failed}.`);
}
};
diff --git a/src/test/run.js b/src/test/run.js
index 4325a40..9fc3165 100644
--- a/src/test/run.js
+++ b/src/test/run.js
@@ -2,6 +2,7 @@
await include("/test/Test.js");
Test.include([
"Policy",
+ "Storage",
"XSS",
"embargoed/XSS",
]);