rebuild with Zola, YLT and Deno

pull/224/head
koehr 2 years ago
parent 870786af7e
commit 214c05df56

3
.gitignore vendored

@ -1,3 +1,2 @@
node_modules
.svelte
build/*
yltresults/*

@ -6,7 +6,7 @@ Inspired by [Bradley Taunt's 1MB.club](https://1mb.club/).
## But why?
I love the idea of a list of webpages that are still reasonably usable with a slow internet connection. But 1MB is, in my honest opinion, still way too much. Nobody wants to wait 10 seconds — on good days — to load a web site. But a very large chunk of the world population isn't gifted with Gigabit internet connections.
I love the idea of a list of webpages that are still reasonably usable with a slow internet connection. But 1MB is, in my honest opinion, still way too much. Nobody wants to wait 10 seconds — on good days — to load a web site. And a very large chunk of the world population isn't gifted with Gigabit internet connections.
Of course, the absolute size of a website is not a perfect indicator. A page might contain a lot of text or images as an important part of its content. It would be unfair to call it bloated in this case. This is why, in addition to the absolute size, the ratio of visible to invisible content is shown as well.
@ -31,14 +31,25 @@ pretty good already.
## Hacking this page
This page is built with [Svelte](https://svelte.dev). You can clone the repository and run the application in development mode like this:
This page needs three components to work:
### [Deno](https://deno.land/)
The application that updates the page information is built with TypeScript 4.6 and Deno 1.20. It uses no external packages except `encoding/toml` from the standard library.
### [YellowLabTools](https://yellowlab.tools/)
A local (docker) version of YellowLabTools is used for the page analysis. It uses [Phantomas](https://github.com/macbre/phantomas) as well as other tools to create an exhaustive metric.
### [Zola](https://www.getzola.org/)
The page analyser application generates markdown files that are rendered to a static web page by Zola.
```sh
git clone https://git.sr.ht/~koehr/the-250kb-club 250kb-club
# or: git clone https://github.com/nkoehring/250kb-club.git
cd 250kb-club
yarn
yarn dev
```
And build the page with `yarn build`.

@ -0,0 +1,79 @@
import { retryFetch } from './toolkit.ts'
// Endpoints of the locally running YellowLabTools instance:
// POST to STATUS_URL starts a run / GET polls it; RESULT_URL serves finished results.
const STATUS_URL = 'http://localhost:8383/api/runs/'
const RESULT_URL = 'http://localhost:8383/api/results/'
// Analysis parameters sent with every run request (merged with the page URL).
const METRIC_DEFAULTS = {
device: 'desktop',
waitForResponse: false,
screenshot: true,
}
// Starts a YellowLabTools analysis run for the given URL.
// Resolves with the runId on success, or null when the API rejects the request.
export async function requestMetricsRun (url:string): Promise<string|null> {
  const payload = { url, ...METRIC_DEFAULTS }
  const response = await fetch(STATUS_URL, {
    method: "POST",
    headers: [
      ["Content-Type", "application/json"],
      ["User-Agent", "250kb-club"]
    ],
    body: JSON.stringify(payload),
  })
  if (!response.ok) {
    const err = await response.text()
    console.error(`Failed to request metrics run for ${url}: ${err}`)
    return null
  }
  const json: { runId: string } = await response.json()
  return json.runId
}
// Polls the status of a metrics run.
// Returns a failed Status with an empty url when the run cannot be fetched.
// `retries` is forwarded to retryFetch so transient fetch errors are retried.
export async function checkStatus (runId: string, retries = 3): Promise<Status> {
  // BUG FIX: `retries` was accepted but never passed on, so it had no effect.
  const json = await retryFetch(`${STATUS_URL}${runId}`, retries)
  if (!json) return { url: '', status: 'failed' }
  const url = json.params.url
  const status = json.status.statusCode
  return { url, status }
}
// Fetches the finished YLT result for a run and maps it onto our Metric shape.
// Returns null when the result could not be fetched.
export async function retrieveMetrics (runId: string): Promise<Metric|null> {
  const json = await retryFetch(`${RESULT_URL}${runId}`)
  if (!json) return null
  // shortcuts into the deeply nested YLT result object
  const generic = json.scoreProfiles.generic
  const cat = generic.categories
  const m = json.toolsResults.phantomas.metrics
  return {
    scores: {
      pageWeight: cat.pageWeight.categoryScore,
      requests: cat.requests.categoryScore,
      domComplexity: cat.domComplexity.categoryScore,
      javascriptComplexity: cat.javascriptComplexity.categoryScore,
      badJavascript: cat.badJavascript.categoryScore,
      jQuery: cat.jQuery.categoryScore,
      cssComplexity: cat.cssComplexity.categoryScore,
      badCSS: cat.badCSS.categoryScore,
      fonts: cat.fonts.categoryScore,
      serverConfig: cat.serverConfig.categoryScore,
      globalScore: generic.globalScore,
    },
    metrics: {
      requests: m.requests,
      bodySize: m.bodySize,
      contentLength: m.contentLength,
      htmlSize: m.htmlSize,
      cssSize: m.cssSize,
      jsSize: m.jsSize,
      jsonSize: m.jsonSize,
      imageSize: m.imageSize,
      videoSize: m.videoSize,
      webfontSize: m.webfontSize,
      base64Size: m.base64Size,
      otherSize: m.otherSize,
    }
  }
}

@ -0,0 +1,74 @@
import { parse as tomlParse, stringify as tomlStringify } from "https://deno.land/std@0.130.0/encoding/toml.ts"
// Matches Zola TOML frontmatter: capture 1 = TOML between the `+++` fences, capture 2 = page body.
const reFrontmatter = /^\+\+\+([\s\S]*)^\+\+\+$([\s\S]*)/m;
// Turns an URL into a display title by stripping the scheme and any
// trailing slash, e.g. "https://foo.bar/" becomes "foo.bar".
export function url2title (url: string): string {
  const withoutScheme = url.replace(/^https?:\/\//, '')
  return withoutScheme.replace(/\/$/, '')
}
// Gets an URL like https://foo.bar and returns ./content/foo_bar.md
function url2filepath (url: string, output_path: string): string {
  const filename = url2title(url)
    .replaceAll(/[\.\/]/g, '_') // replace dots and slashes with underscores
  // BUG FIX: the computed filename was never interpolated into the path
  // (the template contained `$(unknown)` instead of `${filename}`).
  return `${output_path}/${filename}.md`
}
// deprecated in deno std, but also simple to replicate
// see: https://deno.land/std@0.130.0/fs/exists.ts
async function exists (path: string): Promise<boolean> {
  try {
    // lstat throws NotFound when the path is missing
    await Deno.lstat(path)
    return true
  } catch (err) {
    if (err instanceof Deno.errors.NotFound) return false
    throw err
  }
}
// Reads the existing markdown file for an URL and returns its parsed TOML
// frontmatter as a PageRecord, or null when no (valid) record exists yet.
export async function getPageRecord (url: string, output_path: string): Promise<PageRecord|null> {
  const path = url2filepath(url, output_path)
  if (!(await exists(path))) return null
  const contents = await Deno.readTextFile(path)
  const match = contents.match(reFrontmatter)
  if (match === null) return null // that should never happen but who knows
  return tomlParse(match[1].trim()) as PageRecord
}
// Serializes a PageRecord as TOML frontmatter and writes it to the page file.
// Resolves to true on success, false when writing failed.
export async function writeRecord (record: PageRecord, url: string, output_path: string): Promise<boolean> {
  const target = url2filepath(url, output_path)
  const frontmatter = `+++\n${tomlStringify(record)}+++\n`
  try {
    await Deno.writeTextFile(target, frontmatter)
    return true
  } catch {
    return false
  }
}
// Resolves after roughly `ms` milliseconds; used to space out fetch retries.
function delay (ms: number): Promise<unknown> {
  return new Promise(done => setTimeout(done, ms));
}
// Fetches `url` and returns the parsed JSON body, retrying up to `retries`
// times (waiting `msDelay` between attempts) when the fetch itself throws.
// Returns false on a non-ok response or once all retries are exhausted.
export async function retryFetch (url: string, retries=3, msDelay=1000): Promise<any> {
  for (;;) {
    try {
      const response = await fetch(url)
      if (!response.ok) return false
      return await response.json()
    } catch (err) {
      if (retries <= 0) {
        console.error(`Fetching ${url} failed too often. Giving up.`)
        return false
      }
      console.warn(`Failed to fetch ${url}, retrying in ${msDelay}ms.`)
      await delay(msDelay)
      retries -= 1
    }
  }
}

@ -1,4 +0,0 @@
#main-footer.svelte-12uobvg{border-top:1px solid lightgrey;margin:3rem auto 0;font-size:85%}/*# sourceMappingURL=MainFooter.css.map */
#info-toggle.svelte-nu5fg0.svelte-nu5fg0.svelte-nu5fg0{display:none}#info-toggle.svelte-nu5fg0~label.svelte-nu5fg0.svelte-nu5fg0{text-align:center}#info-toggle.svelte-nu5fg0~label.svelte-nu5fg0>.info-close.svelte-nu5fg0{display:none}#info-toggle.svelte-nu5fg0:checked~label.svelte-nu5fg0>.info-close.svelte-nu5fg0{display:inline}#info-toggle.svelte-nu5fg0:checked~label.svelte-nu5fg0>.info-text.svelte-nu5fg0{display:none}#info-popup.svelte-nu5fg0.svelte-nu5fg0.svelte-nu5fg0{display:none;position:absolute;top:2.5em;left:-1em;width:calc(720px - 2em - 6px);max-width:calc(100vw - 2em - 6px);padding:0 1em;background:#FFF;border:3px solid #DDD}#info-toggle.svelte-nu5fg0:checked~#info-popup.svelte-nu5fg0.svelte-nu5fg0{display:block}@media(prefers-color-scheme: dark){#info-popup.svelte-nu5fg0.svelte-nu5fg0.svelte-nu5fg0{background:#000;border-color:#444}}/*# sourceMappingURL=InfoPopup.css.map */
.entry.svelte-2ysuep.svelte-2ysuep{display:flex;flex-flow:row nowrap;justify-content:space-between;padding:.5em .5em 0;height:2em;line-height:2em;font-size:1.3em}.entry.svelte-2ysuep>.url.svelte-2ysuep{flex:1 1 auto;width:60%;overflow:hidden;text-overflow:ellipsis}.entry.svelte-2ysuep>.size.svelte-2ysuep,.entry.svelte-2ysuep>.ratio.svelte-2ysuep{flex:0 0 auto;width:20%;text-align:right}.entry-size-bar.svelte-2ysuep.svelte-2ysuep,.entry-ratio-bar.svelte-2ysuep.svelte-2ysuep{height:0;margin-bottom:2px;border-bottom:2px solid}.entry-size-bar.highlighted.svelte-2ysuep.svelte-2ysuep,.entry-ratio-bar.highlighted.svelte-2ysuep.svelte-2ysuep{border-bottom-width:4px}.entry-size-bar.svelte-2ysuep.svelte-2ysuep{border-bottom-color:#E44;width:calc(var(--size)/250 * 100%)}.entry-ratio-bar.svelte-2ysuep.svelte-2ysuep{border-bottom-color:#56B;width:var(--ratio)}/*# sourceMappingURL=PageList.css.map */
/*# sourceMappingURL=index-edfdf87b.css.map */

@ -1 +0,0 @@
{"version":3,"file":"index-edfdf87b.css","sources":["","",""],"sourcesContent":["","",""],"names":[],"mappings":";;"}

@ -1,2 +0,0 @@
function e(e){return Promise.all(e.map((function(e){return new Promise((function(t,n){var r=new URL(e,import.meta.url),l=document.baseURI;if(!l){var o=document.getElementsByTagName("base");l=o.length?o[0].href:document.URL}var u=(""+r).substring(l.length),a=document.querySelector('link[rel=stylesheet][href="'+u+'"]')||document.querySelector('link[rel=stylesheet][href="'+r+'"]');a||((a=document.createElement("link")).rel="stylesheet",a.href=r,document.head.appendChild(a)),a.sheet?t():(a.onload=function(){return t()},a.onerror=n)}))})))}export default e;
//# sourceMappingURL=inject_styles-cd877ae9.js.map

@ -1 +0,0 @@
{"version":3,"file":"inject_styles-cd877ae9.js","sources":["../../../../inject_styles.js"],"sourcesContent":["export default function(files) {\n\treturn Promise.all(files.map(function(file) { return new Promise(function(fulfil, reject) {\n\t\tvar href = new URL(file, import.meta.url);\n\t\tvar baseURI = document.baseURI;\n\t\tif (!baseURI) {\n\t\t\tvar baseTags = document.getElementsByTagName('base');\n\t\t\tbaseURI = baseTags.length ? baseTags[0].href : document.URL;\n\t\t}\n\t\tvar relative = ('' + href).substring(baseURI.length);\n\t\tvar link = document.querySelector('link[rel=stylesheet][href=\"' + relative + '\"]')\n\t\t\t|| document.querySelector('link[rel=stylesheet][href=\"' + href + '\"]');\n\t\tif (!link) {\n\t\t\tlink = document.createElement('link');\n\t\t\tlink.rel = 'stylesheet';\n\t\t\tlink.href = href;\n\t\t\tdocument.head.appendChild(link);\n\t\t}\n\t\tif (link.sheet) {\n\t\t\tfulfil();\n\t\t} else {\n\t\t\tlink.onload = function() { return fulfil() };\n\t\t\tlink.onerror = reject;\n\t\t}\n\t})}));\n};"],"names":["files","Promise","all","map","file","fulfil","reject","href","URL","import","meta","url","baseURI","document","baseTags","getElementsByTagName","length","relative","substring","link","querySelector","createElement","rel","head","appendChild","sheet","onload","onerror"],"mappings":"AAAe,WAASA,GACvB,OAAOC,QAAQC,IAAIF,EAAMG,KAAI,SAASC,GAAQ,OAAO,IAAIH,SAAQ,SAASI,EAAQC,GACjF,IAAIC,EAAO,IAAIC,IAAIJ,EAAMK,OAAOC,KAAKC,KACjCC,EAAUC,SAASD,QACvB,IAAKA,EAAS,CACb,IAAIE,EAAWD,SAASE,qBAAqB,QAC7CH,EAAUE,EAASE,OAASF,EAAS,GAAGP,KAAOM,SAASL,IAEzD,IAAIS,GAAY,GAAKV,GAAMW,UAAUN,EAAQI,QACzCG,EAAON,SAASO,cAAc,8BAAgCH,EAAW,OACzEJ,SAASO,cAAc,8BAAgCb,EAAO,MAC7DY,KACJA,EAAON,SAASQ,cAAc,SACzBC,IAAM,aACXH,EAAKZ,KAAOA,EACZM,SAASU,KAAKC,YAAYL,IAEvBA,EAAKM,MACRpB,KAEAc,EAAKO,OAAS,WAAa,OAAOrB,KAClCc,EAAKQ,QAAUrB"}

File diff suppressed because it is too large Load Diff

@ -1,82 +0,0 @@
import fs from 'fs'
import chalk from 'chalk'
import phantomas from 'phantomas'
import pageData from './src/components/pages.mjs'
const INPUT_FILE = './pages.txt' // one URL per line
const OUTPUT_FILE = './src/components/pages.mjs' // generated module consumed by the UI
const RECHECK_THRESHOLD = 60*60*24*7*1000 // recheck pages older than 1 week
const REJECT_THRESHOLD = 256000 // pages above this many bytes are flagged as oversized
// Colored level tags prepended to every log line.
const LOGGING_PREFIXES = {
info: `[${chalk.bold.white('II')}]`,
warn: `[${chalk.bold.yellow('WW')}]`,
error: `[${chalk.bold.red('EE')}]`,
debug: `[${chalk.bold.white('DD')}]`,
}
// Prints `args` to the console behind the colored prefix for `level`
// (one of: info, warn, error, debug).
function log (level='info', ...args) {
  // idiom: rest parameter instead of slicing the `arguments` object
  const prefix = LOGGING_PREFIXES[level]
  console.log(prefix, ...args)
}
// Convenience wrappers around log() for each level.
function info (...args) { log('info', ...args) }
function warn (...args) { log('warn', ...args) }
function error (...args) { log('error', ...args) }
function debug (...args) { log('debug', ...args) }
// Splits phantomas byte metrics into "content" (text/images/video the visitor
// came for) and "extra" (css/js/fonts/other overhead), warning when the page
// exceeds REJECT_THRESHOLD, and stamps the result with the current time.
function calcWeights (url, m) {
const extraWeight = m.cssSize + m.jsSize + m.webfontSize + m.otherSize
const contentWeight = m.htmlSize + m.jsonSize + m.imageSize + m.base64Size + m.videoSize
// NOTE(review): other code in this repo reads `contentLength` from phantomas
// metrics — confirm `contentSize` is a real metric name and not a typo.
if (m.contentSize > REJECT_THRESHOLD) {
warn(url, 'oversized by', m.contentSize - REJECT_THRESHOLD)
}
return { url, contentWeight, extraWeight, stamp: Date.now() }
}
// Analyzes every URL with phantomas, reusing cached results younger than
// RECHECK_THRESHOLD, and writes the combined list to OUTPUT_FILE.
async function generateMetrics (urls) {
  debug('Checking', urls)
  const metricsList = []
  // index existing page data by URL to decide whether a recheck is due
  const keyedPageData = pageData.reduce((acc, page) => {
    acc[page.url] = page
    return acc
  }, {})
  const now = Date.now()
  for (const url of urls) {
    // idiom: direct keyed lookup instead of scanning Object.keys with indexOf
    const known = keyedPageData[url]
    if (known && now - known.stamp < RECHECK_THRESHOLD) {
      debug('skipping known URL', url)
      metricsList.push(known) // push old data to list
      continue
    }
    try {
      debug('fetching and analyzing', url)
      const results = await phantomas(url)
      const weights = calcWeights(url, results.getMetrics())
      metricsList.push(weights) // TODO: what to do with oversized pages?
    } catch(err) {
      error(`failed to analyze ${url}`, err)
    }
  }
  try {
    // TODO: poor mans JSON to JS converter?
    fs.writeFileSync(OUTPUT_FILE, 'export default ' + JSON.stringify(metricsList))
  } catch (err) {
    error(`failed to write results to ${OUTPUT_FILE}`, err)
  }
}
// Entry point: read the URL list and kick off the analysis.
try {
const rawString = fs.readFileSync(INPUT_FILE, 'utf8')
// keep only lines that look like URLs
const urls = rawString.split('\n').filter(line => line.startsWith('http'))
// NOTE(review): generateMetrics is async and not awaited here, so its
// rejections escape this try/catch — it does handle its errors internally.
generateMetrics(urls)
} catch (err) {
error(`failed to read page list from ${INPUT_FILE}`, err)
}

@ -0,0 +1,28 @@
base_url = "https://250kb.club/"
title = "The 250kb Club"
description = "An exclusive membership for web pages presenting themselves in no more than 250kb."
default_language = "en"
compile_sass = false
build_search_index = true
minify_html = true
generate_feed = true
feed_filename = "rss.xml"
[markdown]
highlight_code = false
render_emoji = false
external_links_target_blank = true
external_links_no_follow = true
external_links_no_referrer = false
smart_punctuation = true
[search]
include_title = true
include_description = false
include_path = false
include_content = true
[extra]
author = "Norman Köhring"
author_homepage = "https://koehr.in"

@ -0,0 +1,11 @@
+++
title = "0xedward.io"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3235
[extra]
source = "https://0xedward.io/"
ratio = 88
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "0xff.nu"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3848
[extra]
source = "https://0xff.nu/"
ratio = 71
size = 4
+++

@ -0,0 +1,11 @@
+++
title = "10kbclub.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 7056
[extra]
source = "https://10kbclub.com"
ratio = 100
size = 7
+++

@ -0,0 +1,11 @@
+++
title = "1mb.club"
date = "2022-03-22"
updated = "2022-03-22"
weight = 13547
[extra]
source = "https://1mb.club/"
ratio = 96
size = 13
+++

@ -0,0 +1,11 @@
+++
title = "250kb.club"
date = "2022-03-22"
updated = "2022-03-22"
weight = 35677
[extra]
source = "https://250kb.club"
ratio = 29
size = 35
+++

@ -0,0 +1,11 @@
+++
title = "512kb.club"
date = "2022-03-22"
updated = "2022-03-22"
weight = 11651
[extra]
source = "https://512kb.club/"
ratio = 80
size = 11
+++

@ -0,0 +1,5 @@
+++
title = "koehr learned"
paginate_by = 100
sort_by = "weight"
+++

@ -0,0 +1,11 @@
+++
title = "ache.one"
date = "2022-03-22"
updated = "2022-03-22"
weight = 145041
[extra]
source = "https://ache.one/"
ratio = 4
size = 142
+++

@ -0,0 +1,11 @@
+++
title = "alexanderobenauer.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 98346
[extra]
source = "https://alexanderobenauer.com"
ratio = 8
size = 96
+++

@ -0,0 +1,11 @@
+++
title = "alexschroeder.ch"
date = "2022-03-22"
updated = "2022-03-22"
weight = 33997
[extra]
source = "https://alexschroeder.ch/"
ratio = 87
size = 33
+++

@ -0,0 +1,11 @@
+++
title = "allien.work"
date = "2022-03-22"
updated = "2022-03-22"
weight = 110055
[extra]
source = "https://allien.work/"
ratio = 7
size = 107
+++

@ -0,0 +1,11 @@
+++
title = "anabeatriz.dev"
date = "2022-03-22"
updated = "2022-03-22"
weight = 75336
[extra]
source = "https://anabeatriz.dev/"
ratio = 1
size = 74
+++

@ -0,0 +1,11 @@
+++
title = "antranigv.am"
date = "2022-03-22"
updated = "2022-03-22"
weight = 16216
[extra]
source = "https://antranigv.am/"
ratio = 44
size = 16
+++

@ -0,0 +1,11 @@
+++
title = "arfer.net"
date = "2022-03-22"
updated = "2022-03-22"
weight = 7495
[extra]
source = "https://arfer.net/"
ratio = 14
size = 7
+++

@ -0,0 +1,11 @@
+++
title = "armaanb.net"
date = "2022-03-22"
updated = "2022-03-22"
weight = 23493
[extra]
source = "https://armaanb.net/"
ratio = 9
size = 23
+++

@ -0,0 +1,11 @@
+++
title = "artemislena.eu"
date = "2022-03-22"
updated = "2022-03-22"
weight = 4761
[extra]
source = "https://artemislena.eu/"
ratio = 80
size = 5
+++

@ -0,0 +1,11 @@
+++
title = "bcachefs.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 8381
[extra]
source = "https://bcachefs.org/"
ratio = 40
size = 8
+++

@ -0,0 +1,11 @@
+++
title = "bduck.xyz"
date = "2022-03-22"
updated = "2022-03-22"
weight = 150549
[extra]
source = "https://bduck.xyz/"
ratio = 5
size = 147
+++

@ -0,0 +1,11 @@
+++
title = "beh.uk"
date = "2022-03-22"
updated = "2022-03-22"
weight = 68732
[extra]
source = "https://beh.uk/"
ratio = 8
size = 67
+++

@ -0,0 +1,11 @@
+++
title = "benharr.is"
date = "2022-03-22"
updated = "2022-03-22"
weight = 4192
[extra]
source = "https://benharr.is/"
ratio = 100
size = 4
+++

@ -0,0 +1,11 @@
+++
title = "benovermyer.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 148616
[extra]
source = "https://benovermyer.com/"
ratio = 1
size = 145
+++

@ -0,0 +1,11 @@
+++
title = "berkshirehathaway.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 65468
[extra]
source = "https://berkshirehathaway.com"
ratio = 9
size = 64
+++

@ -0,0 +1,11 @@
+++
title = "bernsteinbear.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 31344
[extra]
source = "https://bernsteinbear.com/"
ratio = 10
size = 31
+++

@ -0,0 +1,11 @@
+++
title = "bestmotherfucking.website"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3395
[extra]
source = "https://bestmotherfucking.website/"
ratio = 100
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "bettermotherfuckingwebsite.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 23701
[extra]
source = "http://bettermotherfuckingwebsite.com/"
ratio = 10
size = 23
+++

@ -0,0 +1,11 @@
+++
title = "binyam.in"
date = "2022-03-22"
updated = "2022-03-22"
weight = 43489
[extra]
source = "https://binyam.in/"
ratio = 5
size = 42
+++

@ -0,0 +1,11 @@
+++
title = "blakehawkins.com/blog"
date = "2022-03-22"
updated = "2022-03-22"
weight = 56806
[extra]
source = "https://blakehawkins.com/blog"
ratio = 6
size = 55
+++

@ -0,0 +1,11 @@
+++
title = "blmayer.dev"
date = "2022-03-22"
updated = "2022-03-22"
weight = 5667
[extra]
source = "https://blmayer.dev/"
ratio = 100
size = 6
+++

@ -0,0 +1,11 @@
+++
title = "blog.bshah.in"
date = "2022-03-22"
updated = "2022-03-22"
weight = 10417
[extra]
source = "https://blog.bshah.in/"
ratio = 32
size = 10
+++

@ -0,0 +1,11 @@
+++
title = "blog.circuitsofimagination.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 156892
[extra]
source = "https://blog.circuitsofimagination.com/"
ratio = 2
size = 153
+++

@ -0,0 +1,11 @@
+++
title = "blog.fefe.de"
date = "2022-03-22"
updated = "2022-03-22"
weight = 7047
[extra]
source = "https://blog.fefe.de"
ratio = 100
size = 7
+++

@ -0,0 +1,11 @@
+++
title = "blog.fossterer.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 16034
[extra]
source = "https://blog.fossterer.com/"
ratio = 9
size = 16
+++

@ -0,0 +1,11 @@
+++
title = "bnolet.me"
date = "2022-03-22"
updated = "2022-03-22"
weight = 157689
[extra]
source = "https://bnolet.me"
ratio = 3
size = 154
+++

@ -0,0 +1,11 @@
+++
title = "boehs.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 80644
[extra]
source = "https://boehs.org/"
ratio = 2
size = 79
+++

@ -0,0 +1,11 @@
+++
title = "box.matto.nl"
date = "2022-03-22"
updated = "2022-03-22"
weight = 6945
[extra]
source = "https://box.matto.nl"
ratio = 60
size = 7
+++

@ -0,0 +1,11 @@
+++
title = "bridge.simplefin.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 7677
[extra]
source = "https://bridge.simplefin.org"
ratio = 15
size = 7
+++

@ -0,0 +1,11 @@
+++
title = "buchh.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 40043
[extra]
source = "https://buchh.org/"
ratio = 5
size = 39
+++

@ -0,0 +1,11 @@
+++
title = "bvnf.space"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3234
[extra]
source = "https://bvnf.space/"
ratio = 78
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "cat-v.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 10115
[extra]
source = "http://cat-v.org/"
ratio = 66
size = 10
+++

@ -0,0 +1,11 @@
+++
title = "ccsleep.net"
date = "2022-03-22"
updated = "2022-03-22"
weight = 2564
[extra]
source = "https://ccsleep.net/"
ratio = 58
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "chad.hirsch.host"
date = "2022-03-22"
updated = "2022-03-22"
weight = 26701
[extra]
source = "https://chad.hirsch.host"
ratio = 26
size = 26
+++

@ -0,0 +1,11 @@
+++
title = "chrisportela.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 197441
[extra]
source = "https://chrisportela.com"
ratio = 1
size = 193
+++

@ -0,0 +1,11 @@
+++
title = "christine.website"
date = "2022-03-22"
updated = "2022-03-22"
weight = 43476
[extra]
source = "https://christine.website/"
ratio = 7
size = 42
+++

@ -0,0 +1,11 @@
+++
title = "cnx.srht.site"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3398
[extra]
source = "https://cnx.srht.site/"
ratio = 48
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "codelayer.de"
date = "2022-03-22"
updated = "2022-03-22"
weight = 139124
[extra]
source = "https://codelayer.de"
ratio = 5
size = 136
+++

@ -0,0 +1,11 @@
+++
title = "codevoid.de"
date = "2022-03-22"
updated = "2022-03-22"
weight = 9427
[extra]
source = "https://codevoid.de/"
ratio = 92
size = 9
+++

@ -0,0 +1,11 @@
+++
title = "codingbobby.xyz"
date = "2022-03-22"
updated = "2022-03-22"
weight = 152899
[extra]
source = "https://codingbobby.xyz/"
ratio = 2
size = 149
+++

@ -0,0 +1,11 @@
+++
title = "codingotaku.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 9989
[extra]
source = "https://codingotaku.com/"
ratio = 37
size = 10
+++

@ -0,0 +1,11 @@
+++
title = "concise-encoding.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 20755
[extra]
source = "https://concise-encoding.org/"
ratio = 18
size = 20
+++

@ -0,0 +1,11 @@
+++
title = "consoom.soy"
date = "2022-03-22"
updated = "2022-03-22"
weight = 5257
[extra]
source = "https://consoom.soy/"
ratio = 32
size = 5
+++

@ -0,0 +1,11 @@
+++
title = "coolmathgames.tech"
date = "2022-03-22"
updated = "2022-03-22"
weight = 58380
[extra]
source = "http://coolmathgames.tech/"
ratio = 8
size = 57
+++

@ -0,0 +1,11 @@
+++
title = "cosmo.red"
date = "2022-03-22"
updated = "2022-03-22"
weight = 1047
[extra]
source = "https://cosmo.red"
ratio = 100
size = 1
+++

@ -0,0 +1,11 @@
+++
title = "crackle.dev"
date = "2022-03-22"
updated = "2022-03-22"
weight = 1447
[extra]
source = "https://crackle.dev/"
ratio = 51
size = 1
+++

@ -0,0 +1,11 @@
+++
title = "cronokirby.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 227397
[extra]
source = "https://cronokirby.com"
ratio = 2
size = 222
+++

@ -0,0 +1,11 @@
+++
title = "customformats.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 243154
[extra]
source = "https://customformats.com/"
ratio = 2
size = 237
+++

@ -0,0 +1,11 @@
+++
title = "cycloneblaze.net"
date = "2022-03-22"
updated = "2022-03-22"
weight = 23824
[extra]
source = "https://cycloneblaze.net/"
ratio = 7
size = 23
+++

@ -0,0 +1,11 @@
+++
title = "daniel-siepmann.de"
date = "2022-03-22"
updated = "2022-03-22"
weight = 16464
[extra]
source = "https://daniel-siepmann.de/"
ratio = 73
size = 16
+++

@ -0,0 +1,11 @@
+++
title = "danielcuttridge.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 1614
[extra]
source = "https://danielcuttridge.com/"
ratio = 100
size = 2
+++

@ -0,0 +1,11 @@
+++
title = "danielsada.tech"
date = "2022-03-22"
updated = "2022-03-22"
weight = 139152
[extra]
source = "https://danielsada.tech/"
ratio = 3
size = 136
+++

@ -0,0 +1,11 @@
+++
title = "danluu.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 4896
[extra]
source = "https://danluu.com"
ratio = 100
size = 5
+++

@ -0,0 +1,11 @@
+++
title = "decentnet.github.io"
date = "2022-03-22"
updated = "2022-03-22"
weight = 10680
[extra]
source = "https://decentnet.github.io/"
ratio = 78
size = 10
+++

@ -0,0 +1,11 @@
+++
title = "dotfilehub.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 2728
[extra]
source = "https://dotfilehub.com"
ratio = 37
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "dpldocs.info/this-week-in-d/Blog.html"
date = "2022-03-22"
updated = "2022-03-22"
weight = 100505
[extra]
source = "http://dpldocs.info/this-week-in-d/Blog.html"
ratio = 72
size = 98
+++

@ -0,0 +1,11 @@
+++
title = "drewdevault.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 29948
[extra]
source = "https://drewdevault.com/"
ratio = 51
size = 29
+++

@ -0,0 +1,11 @@
+++
title = "dusanmitrovic.xyz"
date = "2022-03-22"
updated = "2022-03-22"
weight = 10993
[extra]
source = "https://dusanmitrovic.xyz/"
ratio = 23
size = 11
+++

@ -0,0 +1,11 @@
+++
title = "dyremyhr.no"
date = "2022-03-22"
updated = "2022-03-22"
weight = 109108
[extra]
source = "https://dyremyhr.no/"
ratio = 3
size = 107
+++

@ -0,0 +1,11 @@
+++
title = "editions-du-26-octobre.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 121539
[extra]
source = "https://editions-du-26-octobre.com/"
ratio = 11
size = 119
+++

@ -0,0 +1,11 @@
+++
title = "emersion.fr"
date = "2022-03-22"
updated = "2022-03-22"
weight = 185330
[extra]
source = "https://emersion.fr/"
ratio = 1
size = 181
+++

@ -0,0 +1,11 @@
+++
title = "fabioartuso.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 1643
[extra]
source = "https://fabioartuso.com/"
ratio = 100
size = 2
+++

@ -0,0 +1,11 @@
+++
title = "fanael.github.io"
date = "2022-03-22"
updated = "2022-03-22"
weight = 8888
[extra]
source = "https://fanael.github.io/"
ratio = 60
size = 9
+++

@ -0,0 +1,11 @@
+++
title = "featyre.xyz"
date = "2022-03-22"
updated = "2022-03-22"
weight = 16960
[extra]
source = "https://featyre.xyz/"
ratio = 9
size = 17
+++

@ -0,0 +1,11 @@
+++
title = "felt.dev"
date = "2022-03-22"
updated = "2022-03-22"
weight = 94379
[extra]
source = "https://felt.dev/"
ratio = 2
size = 92
+++

@ -0,0 +1,11 @@
+++
title = "flatpackapps.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 19260
[extra]
source = "https://flatpackapps.com"
ratio = 14
size = 19
+++

@ -0,0 +1,11 @@
+++
title = "fmarier.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 111140
[extra]
source = "https://fmarier.org/"
ratio = 2
size = 109
+++

@ -0,0 +1,11 @@
+++
title = "fossdd.codeberg.page"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3393
[extra]
source = "https://fossdd.codeberg.page/"
ratio = 39
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "foxwells.garden"
date = "2022-03-22"
updated = "2022-03-22"
weight = 38194
[extra]
source = "https://foxwells.garden/"
ratio = 12
size = 37
+++

@ -0,0 +1,11 @@
+++
title = "free.mg"
date = "2022-03-22"
updated = "2022-03-22"
weight = 9024
[extra]
source = "https://free.mg/"
ratio = 34
size = 9
+++

@ -0,0 +1,11 @@
+++
title = "freesolitaire.win"
date = "2022-03-22"
updated = "2022-03-22"
weight = 88903
[extra]
source = "https://freesolitaire.win/"
ratio = 23
size = 87
+++

@ -0,0 +1,11 @@
+++
title = "frontaid.io"
date = "2022-03-22"
updated = "2022-03-22"
weight = 147074
[extra]
source = "https://frontaid.io"
ratio = 2
size = 144
+++

@ -0,0 +1,11 @@
+++
title = "fullstackpython.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 26402
[extra]
source = "https://fullstackpython.com"
ratio = 22
size = 26
+++

@ -0,0 +1,11 @@
+++
title = "funnylookinhat.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 3572
[extra]
source = "https://funnylookinhat.com/"
ratio = 75
size = 3
+++

@ -0,0 +1,11 @@
+++
title = "gabnotes.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 30328
[extra]
source = "https://gabnotes.org/"
ratio = 12
size = 30
+++

@ -0,0 +1,11 @@
+++
title = "gallant.dev"
date = "2022-03-22"
updated = "2022-03-22"
weight = 39130
[extra]
source = "https://gallant.dev/"
ratio = 31
size = 38
+++

@ -0,0 +1,11 @@
+++
title = "gennext.net.au"
date = "2022-03-22"
updated = "2022-03-22"
weight = 85656
[extra]
source = "https://gennext.net.au/"
ratio = 26
size = 84
+++

@ -0,0 +1,11 @@
+++
title = "gerikson.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 2283
[extra]
source = "http://gerikson.com/"
ratio = 39
size = 2
+++

@ -0,0 +1,11 @@
+++
title = "gerikson.com/hnlo"
date = "2022-03-22"
updated = "2022-03-22"
weight = 33104
[extra]
source = "http://gerikson.com/hnlo/"
ratio = 81
size = 32
+++

@ -0,0 +1,11 @@
+++
title = "getindiekit.com"
date = "2022-03-22"
updated = "2022-03-22"
weight = 125607
[extra]
source = "https://getindiekit.com/"
ratio = 3
size = 123
+++

@ -0,0 +1,11 @@
+++
title = "grapheneos.org"
date = "2022-03-22"
updated = "2022-03-22"
weight = 5464
[extra]
source = "https://grapheneos.org/"
ratio = 80
size = 5
+++

@ -0,0 +1,11 @@
+++
title = "gtrr.artemislena.eu"
date = "2022-03-22"
updated = "2022-03-22"
weight = 2531
[extra]
source = "https://gtrr.artemislena.eu/"
ratio = 82
size = 2
+++

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save