path: root/worker.js
author     alyx <alyx@aleteoryx.me>        2022-06-02 00:26:49 +0000
committer  Aleteoryx <alyx@aleteoryx.me>   2022-06-02 00:26:49 +0000
commit     c1e64bf0ccdaeb1b676f1a53967b0d0448e1ca44 (patch)
tree       923762a989184bb6a13054d0ca668c85c53db94e /worker.js
parent     b1ec23e3579b1528e6e7eb12f5c9f89793b5390e (diff)
download   RRCUtils-c1e64bf0ccdaeb1b676f1a53967b0d0448e1ca44.tar.gz
           RRCUtils-c1e64bf0ccdaeb1b676f1a53967b0d0448e1ca44.tar.bz2
           RRCUtils-c1e64bf0ccdaeb1b676f1a53967b0d0448e1ca44.zip
basic PWA things, fixed the searcher again
Diffstat (limited to 'worker.js')
-rw-r--r--  worker.js  116
1 files changed, 116 insertions, 0 deletions
diff --git a/worker.js b/worker.js
new file mode 100644
index 0000000..39375e4
--- /dev/null
+++ b/worker.js
@@ -0,0 +1,116 @@
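+// Service worker for the RRCUtils PWA: pre-caches the whole site and serves it cache-first.
+// `resolver` resolves the install-phase waitUntil() promise once the first reload() completes
+// (or the 10s safety timeout fires).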
+var resolver;
+
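+// Cache-first fetch: answer from the "main" cache when possible, otherwise fall back to the network.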
+addEventListener("fetch", e => {
+ console.log("Fetch: ", e)
+ e.waitUntil(async () => {
+ const mc = await caches.open(main);
+ e.respondWith(await mc.match(e.request) ?? await fetch(e.request));
+ console.log(await mc.match(e.request));
+ })
+})
+
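+// Install: stay in the installing phase until the first reload() finishes (or the timeout fires).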
+addEventListener("install", async function(e) {
+ console.log('Install!');
+ e.waitUntil(new Promise((res) => {resolver=res}));
+ //auto halt after 5s
+ setTimeout(resolver, 10000);
+
+ setInterval(reload, 5*60*1000); //reload every hour, just in case.
+ await reload();
+
+ resolver();
+
+});
+
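+// Activate: take control of any already-open pages immediately.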
+addEventListener("activate", clients.claim.bind(clients));
+
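+// Re-reads /pwathings/appver.json; if that version's cache is empty, re-crawls the site
+// and rebuilds the "main" cache from it.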
+async function reload() {
+  const {ver} = await fetch("/pwathings/appver.json").then(v => v.json());
+  const cache = await caches.open(String(ver));
+
+  if ((await cache.keys()).length == 0) {
+    console.log("Reloading all resources...");
+    await recursiveLinkWalker("/", cache);
+    console.log("Done!");
+    console.log("Overwriting main cache.");
+    await caches.delete("main");
+    const mc = await caches.open("main");
+    for (const k of await cache.keys())
+      await mc.put(k, await cache.match(k));
+    console.log("Done!");
+  }
+}
+
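+// Depth-first crawl from `url`: caches each resource on an allowed host, then follows
+// URLs found in CSS, HTML, and whitelisted JSON.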
+async function recursiveLinkWalker(url, cache, visits=[], constrain=["cv2.aleteoryx.me"]) {
+  const urlobj = new URL(url, "https://cv2.aleteoryx.me");
+  console.dir(urlobj, String(urlobj));
+  if (!constrain.includes(urlobj.hostname) || visits.includes(url))
+    return;
+
+  try {
+    const data = await fetch(url);
+    if (!data.ok) throw new Error("Failed to get the page!");
+    const blob = await data.blob();
+    await cache.put(url, new Response(blob));
+    visits.push(url);
+
+    const mime = blob.type;
+    const text = await blob.text();
+    console.log(url, mime);
+
+    switch (mime.split("/").at(-1)) {
+      case 'css':
+        const cssregex = /url\((?:'|"|)([^'")]+)(?:'|"|)\)/gm;
+        var match;
+        while (match = cssregex.exec(text)) {
+          await recursiveLinkWalker(match[1], cache, visits, constrain);
+        }
+        break;
+      case 'html':
+        /*
+        Because the gods hate me, service workers lack the DOM APIs used below.
+        Regex time!
+
+        const doc = new Document();
+        doc.documentElement.innerHTML = text;
+
+        const HREFed = [...(["link", "a"].map(n => Array.from(doc.getElementsByTagName(n))))].flat();
+        const SRCes = [...(["img", "script"].map(n => Array.from(doc.getElementsByTagName(n))))].flat();
+
+        for (const h of HREFed) await recursiveLinkWalker(h.getAttribute('href'), cache, visits, constrain);
+        for (const s of SRCes) await recursiveLinkWalker(s.getAttribute('src'), cache, visits, constrain);
+        */
+        const hreftagre = /<\w+\s+(?:(?!href=)\w+(?:="[^"]*")?\s+)*href="([^"]*)"(?:\s+\w+(?:="[^"]*")?)*\s*\/?>/gm;
+        const srctagre = /<\w+\s+(?:(?!src=)\w+(?:="[^"]*")?\s+)*src="([^"]*)"(?:\s+\w+(?:="[^"]*")?)*\s*\/?>/gm;
+
+        var match;
+        while (match = hreftagre.exec(text))
+          await recursiveLinkWalker(match[1], cache, visits, constrain);
+        while (match = srctagre.exec(text))
+          await recursiveLinkWalker(match[1], cache, visits, constrain);
+        break;
+
+      case 'json':
+        // to make sure we don't go rifling through all the fascinating URLs in 'circuitsv2.json'
+        const whitelist = ["/pwathings/manifest.json"];
+        if (!whitelist.some(m => url.endsWith(m))) break;
+        const walker = async (obj) => {
+          switch (typeof obj) {
+            case 'string':
+              await recursiveLinkWalker(obj, cache, visits, constrain);
+              break;
+            case 'object':
+              for (const k of Object.values(obj))
+                await walker(k);
+              break;
+          }
+        };
+        await walker(JSON.parse(text));
+        break;
+
+    }
+  } catch (e) {
+    console.log(e);
+    return;
+  }
+  console.log("returning naturally", url, visits);
+}
\ No newline at end of file