Skip to content

Commit

Permalink
Getting the public suffix list down to <30k
Browse files Browse the repository at this point in the history
going a bit crazy here. Browsers don't support gzip/deflate data yet
(waiting for the Compression Streams API) and other compression
schemes where reasonable libs are available simply don't cut it
on the compression rate.

In the meantime, PNG is lossless and uses deflate compression —
exactly what we need  :)   So this patch pre-processes the PSL
list for easy lookup (removing a lot of redundant text) and
exports the result as a JSON dictionary.

this is then converted to png by imagemagick.

The browser loads the image, we access the pixel values and end
up with our desired json dict.

GH-68
  • Loading branch information
ttyridal committed Nov 26, 2021
1 parent 747f5a0 commit aca7a77
Show file tree
Hide file tree
Showing 8 changed files with 218 additions and 50 deletions.
70 changes: 70 additions & 0 deletions ext/webextension/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 6 additions & 3 deletions ext/webextension/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,15 @@
"unittest": "node --experimental-vm-modules node_modules/jest/bin/jest.js --silent=false src/*.test.js"
},
"type": "module",
"jest": { "transform": {} },
"jest": {
"transform": {}
},
"devDependencies": {
"eslint": "^8.2.0",
"jest": "^27.3.1",
"jest-puppeteer": "^6.0.0",
"jest-webextension-mock": "^3.7.19",
"puppeteer": "^11.0.0",
"eslint": "^8.2.0"
"pngjs3": "^6.0.1",
"puppeteer": "^11.0.0"
}
}
93 changes: 62 additions & 31 deletions ext/webextension/src/lib/getdomain.js
Original file line number Diff line number Diff line change
@@ -1,36 +1,67 @@
import {tldlookup, tldcommon} from './getdomain_lut.js'

export function getDomain(url) {
//TODO decode Punycodeed urls (RFC 3492 and RFC 5891)
const parts = url.split('.').reverse();
let res = [];
let lut = tldlookup;
let v;

for (v=0; v < parts.length; v++) {
const part = parts[v];
if (!lut) break;
if (part in lut) {
res.push(part);
lut = lut[part]
}
else if ('*' in lut) {
res.push(e);
lut = null;
} else
break;
// Load an image from `url` and resolve with the decoded HTMLImageElement.
// Rejects when the resource fails to load (bad URL, network error, corrupt
// data) instead of leaving the promise pending forever.
function loadImage(url) {
    const img = new Image();
    return new Promise((resolve, reject) => {
        img.onload = () => resolve(img);
        img.onerror = () => reject(new Error("failed to load image: " + url));
        img.src = url;
    });
}

// Decode the image at `url` via an off-screen canvas and return its raw
// RGBA pixel buffer (a Uint8ClampedArray, 4 bytes per pixel).
async function getPixels(url) {
    const image = await loadImage(url);
    const canvas = document.createElement('canvas');
    canvas.width = image.width;
    canvas.height = image.height;
    const ctx = canvas.getContext('2d');
    ctx.drawImage(image, 0, 0);
    return ctx.getImageData(0, 0, image.width, image.height).data;
}


// Recover the JSON text packed into a PNG: the generator stores one text
// byte per pixel, so keep every 4th byte of the RGBA buffer (the red
// channel) and decode the result as UTF-8 text.
// Returns a Promise<string> (Blob.text() is asynchronous).
function pixeldata_to_json(pixeldata) {
    const redChannel = pixeldata.filter((_, i) => i % 4 === 0);
    const blob = new Blob([redChannel], {type: 'text/plain; charset=utf-8'});
    return blob.text();
}

// Public-suffix-list lookup.  The table is a JSON dictionary packed into a
// PNG image (one text byte per pixel); it is loaded asynchronously in the
// constructor and must be awaited via waitTableReady() before getDomain()
// is usable.
export class PslLookup {
    /**
     * @param {Object} [args]
     * @param {function(string): Promise} [args.tableLoader] - resolves the
     *     table image URL to its raw RGBA pixel data (defaults to the
     *     canvas-based getPixels).
     * @param {string} [args.tableurl] - location of the packed table image.
     */
    constructor(args) {
        args = Object.assign(
            {tableLoader: getPixels, tableurl: "./getdomain.json.png"},
            args || {});
        this.psltable = args.tableLoader(args.tableurl)
            .then(pixeldata_to_json)
            .then(JSON.parse)
            .catch(e => {
                // Log and re-throw: silently resolving to undefined would
                // only defer the failure to an opaque TypeError inside
                // getDomain() later.
                console.error("PslLookup: failed to load table", e);
                throw e;
            });
    }

    // Await the pending table load and replace the promise with the parsed
    // dictionary, so getDomain() can then run synchronously.
    async waitTableReady() {
        this.psltable = await this.psltable;
    }

    // Return the registrable domain of `url` (public suffix plus one more
    // label), e.g. "show.amazon.com" -> "amazon.com".
    // Must only be called after waitTableReady() has resolved.
    getDomain(url) {
        let lut = this.psltable;
        const parts = url.split('.').reverse();
        let res = [];
        let v;

        // Walk the suffix trie from the TLD inwards; a leaf stores 0
        // (falsy), which terminates the walk on the next iteration.
        for (v = 0; v < parts.length; v++) {
            const part = parts[v];
            if (!lut) break;
            if (part in lut) {
                res.push(part);
                lut = lut[part];
            } else if ('*' in lut) {
                // Wildcard rule: this label is part of the suffix too.
                res.push(part);
                lut = null;
            } else
                break;
        }
        // Append the first label past the suffix: the registrable name.
        if (v < parts.length)
            res.push(parts[v]);

        return res.reverse().join('.');
    }
}
Binary file added ext/webextension/src/lib/getdomain.json.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
36 changes: 33 additions & 3 deletions ext/webextension/src/lib/getdomain.test.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,38 @@
/* globals global */
"use strict";
import {jest, it, expect, beforeEach} from '@jest/globals'
import {getDomain} from './getdomain.js'
import {it, expect} from '@jest/globals'
import {PslLookup} from './getdomain.js'
import fs from 'fs';
import {PNG} from 'pngjs3'
// import { sync as PNGSync } from 'pngjs3';
import { URL } from 'url';

// Test-side replacement for the browser pixel loader: read the packed
// table PNG from disk (relative to this test file) and resolve with its
// raw RGBA pixel data.  Rejects if the PNG cannot be parsed.
function pngPixels(url) {
    const url_abspath = new URL(url, import.meta.url).pathname;
    const filedata = fs.readFileSync(url_abspath);

    return new Promise((resolve, reject) => {
        new PNG().parse(filedata, function (error, parsed) {
            if (error) reject(error);
            else resolve(parsed.data);
        });
    });
}

// Minimal stand-in for the browser Blob used by pixeldata_to_json:
// decodes the supplied buffer as UTF-8 up front and exposes it through
// an async .text(), mirroring the real Blob API surface we rely on.
class MockBlob {
    constructor(data/*, params*/) {
        const decoded = data.toString("utf8");
        this.text = () => Promise.resolve(decoded);
    }
}
global.Blob = MockBlob;

it('gets the correct domain from url', async () => {


const psl = new PslLookup({tableLoader: pngPixels});
await psl.waitTableReady()
const getDomain = psl.getDomain.bind(psl);

it('gets the correct domain from url', () => {
expect(getDomain('example.com')).toBe('example.com');
expect(getDomain('amazon.com')).toBe('amazon.com');
expect(getDomain('show.amazon.com')).toBe('amazon.com');
Expand Down
2 changes: 0 additions & 2 deletions ext/webextension/src/lib/getdomain_lut.js

This file was deleted.

4 changes: 4 additions & 0 deletions publicsuffixlist/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Regenerate the packed PSL table whenever the generator script or the
# upstream suffix list changes (the original rule had no prerequisites,
# so make never rebuilt an existing target).
../ext/webextension/src/lib/getdomain.json.png: tld.py public_suffix_list.dat
	python tld.py | convert pgm:- -define png:compression-filter=1 ../ext/webextension/src/lib/getdomain.json.png


54 changes: 43 additions & 11 deletions publicsuffixlist/tld.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,11 @@
}
tree = lambda: defaultdict(tree)

def is_ascii(s): return all(ord(c) < 128 for c in s)

others = tree()

#get from https://publicsuffix.org/list/public_suffix_list.dat
for l in open("public_suffix_list.dat"):
l = l.strip()
if not l or l[0] == '/' or '.' not in l: continue
Expand All @@ -29,31 +32,60 @@

if l[0]=='!': continue ## deal with those later

if 'blogspot' in x: ## special case them.. always x.blogspot....
pass
elif len(x) == 2 and x[-2] in common:
common[x[-2]].append(x[-1])
if 0: pass
## if 'blogspot' in x: ## special case them.. always x.blogspot....
## pass
## elif len(x) == 2 and x[-2] in common:
## common[x[-2]].append(x[-1])
else:
x = x[::-1]
d = others
for q in x:
if not is_ascii(q):
q = "xn-"+q.encode('punycode').decode('ascii')
d = d[q]

def walk(d, dst, lvl=0, printer=lambda x, y: None):
    """Copy the nested defaultdict suffix tree `d` into plain dict `dst`.

    Interior nodes become dicts, leaves become 0 (a compact sentinel the
    JS lookup treats as falsy).  `printer(lvl, key)` is invoked for every
    node so callers can emit a side-channel dump; it defaults to a no-op.
    """
    for k, v in d.items():
        if v:
            printer(lvl, k)
            dst[k] = dict()
            walk(v, dst[k], lvl + 1, printer)
        else:
            printer(lvl, k)
            dst[k] = 0


table=dict()
walk(others, table)
## print("export const tldlookup =", json.dumps(table),";")
## print("export const tldcommon =", json.dumps(common),";")
# Flat, indentation-prefixed dump of the suffix tree, one "label?" entry
# per visited node (currently unused: the pgmdump of it is commented out).
caparr = []

def printer(lvl, k):
    """walk() callback: record label `k` at depth `lvl` into caparr.

    Non-ASCII labels are stored in IDNA ACE form.  The ACE prefix is
    "xn--" per RFC 5891 (the original code emitted "xn-"); NOTE(review):
    the table-building loop above still uses the "xn-" prefix and should
    be updated to match.
    """
    if not is_ascii(k):
        k = 'xn--' + k.encode('punycode').decode('ascii')

    caparr.append((" " * lvl) + k + '?')

walk(others, table,0,printer)


def pgmdump(s):
    """Write `s` to stdout as a PGM (P5) image whose pixel bytes are the text.

    The payload is laid out on a grid of at most 4096 characters per row
    and padded with trailing spaces so it fills rows*cols exactly; the
    final print() supplies the newline after the padding.
    """
    # Floor division: int(len(s) / 4096) goes through float and can lose
    # precision for very large inputs; // is exact.
    rows = len(s) // 4096 + 1
    cols = len(s) // rows + 1
    padding = rows * cols - len(s)

    print("P5")
    print(cols)
    print(rows)
    print(255)
    print(s, end='')
    print(" " * padding)

#pgmdump("".join(caparr))

s = json.dumps(table).replace(' ','')
pgmdump(s)

sys.exit(0)
def lookup(url, d):
k = url.split('.')[::-1]
Expand Down

0 comments on commit aca7a77

Please sign in to comment.