remove js fork backup

Wouter Groeneveld 2021-04-09 18:00:15 +02:00
parent 7f2c540448
commit f74c0c6e5b
28 changed files with 0 additions and 7880 deletions

BIN
jsfork/.DS_Store vendored

Binary file not shown.


@@ -1,34 +0,0 @@
{
"name": "serve-my-jams",
"version": "1.0.0",
"repository": {
"url": "https://github.com/wgroeneveld/serve-my-jams",
"type": "git"
},
"author": "Wouter Groeneveld",
"license": "MIT",
"main": "src/serve.js",
"scripts": {
"test": "jest",
"dev": "node src/serve.js | pino-pretty",
"jam": "node src/serve.js"
},
"devDependencies": {
"jest": "^26.6.3",
"mockdate": "^3.0.2"
},
"dependencies": {
"dayjs": "^1.10.4",
"fast-xml-parser": "^3.19.0",
"got": "^11.8.2",
"koa": "^2.13.1",
"koa-body": "^4.2.0",
"koa-helmet": "^6.0.0",
"koa-pino-logger": "^3.0.0",
"koa-router": "^10.0.0",
"koa2-ratelimit": "^0.9.0",
"md5": "^2.3.0",
"microformats-parser": "^1.4.0",
"pino": "^6.11.2"
}
}

BIN
jsfork/src/.DS_Store vendored

Binary file not shown.


@@ -1,37 +0,0 @@
const { existsSync, mkdirSync } = require('fs')
const allowedWebmentionSources = [
"brainbaking.com",
"jefklakscodex.com"
]
// do NOT send out webmentions to any of these domains.
const disallowedWebmentionDomains = [
"youtube.com"
]
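// make sure every allowed domain has its own data/ subdirectory to store incoming mentions in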
function setupDataDirs() {
allowedWebmentionSources.forEach(domain => {
const dir = `data/${domain}`
console.log(` -- configured for ${domain}`)
if(!existsSync(dir)) {
mkdirSync(dir, {
recursive: true
})
}
})
}
module.exports = {
port: process.env.PORT || 1337,
host: "localhost",
token: process.env.TOKEN || "miauwkes",
utcOffset: 60,
allowedWebmentionSources,
disallowedWebmentionDomains,
setupDataDirs
}


@@ -1,57 +0,0 @@
const got = require('got')
const { mf2 } = require("microformats-parser")
const log = require('pino')()
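// reduce a URL to scheme + host (e.g. https://example.org/post/1 -> https://example.org), used as the mf2 baseUrl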
const baseUrlOf = (url) => {
if(url.match(/\//g).length <= 2) {
return url
}
const split = url.split('/')
return split[0] + '//' + split[2]
}
const buildWebmentionHeaderLink = (link) => {
// e.g. Link: <http://aaronpk.example/webmention-endpoint>; rel="webmention"
return link
.split(";")[0]
.replace("<" ,"")
.replace(">", "")
}
// see https://www.w3.org/TR/webmention/#sender-discovers-receiver-webmention-endpoint
async function discover(target) {
try {
const endpoint = await got(target)
if(endpoint.headers.link?.indexOf("webmention") >= 0) {
return {
link: buildWebmentionHeaderLink(endpoint.headers.link),
type: "webmention"
}
} else if(endpoint.headers["x-pingback"] || endpoint.headers["X-Pingback"]) {
return {
// node lowercases incoming header names, so check the lowercased key first; keep the original casing as a fallback for mocked headers
link: endpoint.headers["x-pingback"] || endpoint.headers["X-Pingback"],
type: "pingback"
}
}
const format = mf2(endpoint.body, {
// this also complies with w3.org regulations: relative endpoint could be possible
baseUrl: baseUrlOf(target)
})
const webmention = format.rels?.webmention?.[0]
const pingback = format.rels?.pingback?.[0]
return {
link: webmention ? webmention : (pingback ? pingback : ""),
type: webmention ? "webmention" : (pingback ? "pingback" : "unknown")
}
} catch(err) {
log.warn(err, ` -- whoops, failed to discover ${target}`)
return { type: "unknown" }
}
}
module.exports = {
discover
}


@@ -1,65 +0,0 @@
const webmentionReceiver = require('./../webmention/receive')
const config = require('./../config')
const parser = require("fast-xml-parser")
const log = require('pino')()
/**
See https://www.hixie.ch/specs/pingback/pingback#refsXMLRPC
---
Sample XML:
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://brainbaking.com/kristien.html</string></value>
</param>
<param>
<value><string>https://kristienthoelen.be/2021/03/22/de-stadia-van-een-burn-out-in-welk-stadium-zit-jij/</string></value>
</param>
</params>
</methodCall>
*/
const isValidDomain = (url) => {
return config.allowedWebmentionSources.some(domain => {
return url.indexOf(domain) !== -1
})
}
function xmlparse(body) {
try {
return parser.parse(body)
} catch(e) {
log.error('%s %s', 'fast-xml-parser was unable to parse the following body:', body)
throw e
}
}
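// a pingback is only accepted when it is a well-formed pingback.ping call with exactly two http(s) string params and a trusted target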
function validate(body) {
const xml = xmlparse(body)
if(!xml) return false
if(!xml.methodCall || xml.methodCall.methodName !== "pingback.ping") return false
if(!xml.methodCall.params || !xml.methodCall.params.param || xml.methodCall.params.param.length !== 2) return false
if(!xml.methodCall.params.param.every(param => param?.value?.string?.startsWith('http'))) return false
if(!isValidDomain(xml.methodCall.params.param[1].value.string)) return false
return true
}
// we treat a pingback as a webmention.
// Wordpress pingback processing source: https://developer.wordpress.org/reference/classes/wp_xmlrpc_server/pingback_ping/
async function receive(body) {
const xml = xmlparse(body)
const webmentionBody = {
source: xml.methodCall.params.param[0].value.string,
target: xml.methodCall.params.param[1].value.string
}
log.info('%s %o', 'OK: looks like a valid pingback', webmentionBody)
await webmentionReceiver.receive(webmentionBody)
}
module.exports = {
receive,
validate
}


@@ -1,75 +0,0 @@
const pingbackReceiver = require('./receive')
const log = require('pino')()
function success(msg) {
return `<?xml version="1.0" encoding="UTF-8"?>
<methodResponse>
<params>
<param>
<value>
<string>
${msg}
</string>
</value>
</param>
</params>
</methodResponse>
`
}
function err(e) {
log.error(e, 'pingback receive went wrong')
return `<?xml version="1.0" encoding="UTF-8"?>
<methodResponse>
<fault>
<value>
<struct>
<member>
<name>
faultCode
</name>
<value>
<int>
0
</int>
</value>
</member>
<member>
<name>
faultString
</name>
<value>
<string>
Sorry pal. Malformed request? Or something else, who knows...
</string>
</value>
</member>
</struct>
</value>
</fault>
</methodResponse>`
}
function route(router) {
router.post("pingback receive endpoint", "/pingback", async (ctx) => {
try {
if(!pingbackReceiver.validate(ctx.request.body)) {
throw "malformed pingback request"
}
// we do NOT await this on purpose.
pingbackReceiver.receive(ctx.request.body)
ctx.status = 200
ctx.body = success("Thanks, bro. Will process this pingback soon, pinky swear!")
} catch(e) {
ctx.status = 200
ctx.body = err(e)
}
});
}
module.exports = {
route
}


@@ -1,31 +0,0 @@
const got = require('got')
const log = require('pino')()
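// POST a hand-built XML-RPC pingback.ping call to the discovered endpoint, retrying up to 5 times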
async function sendPingbackToEndpoint(endpoint, source, target) {
const body = `<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>${source}</string></value>
</param>
<param>
<value><string>${target}</string></value>
</param>
</params>
</methodCall>`
await got.post(endpoint, {
contentType: "text/xml",
body,
retry: {
limit: 5,
methods: ["POST"]
}
})
log.info(` OK: pingback@${endpoint}, sent: source ${source}, target ${target}`)
}
module.exports = {
sendPingbackToEndpoint
}


@@ -1,40 +0,0 @@
"use strict";
const Koa = require("koa");
const pino = require('koa-pino-logger')()
const log = require('pino')()
const bodyParser = require('koa-body');
const koaRouter = require("koa-router");
const helmet = require("koa-helmet");
const { RateLimit } = require('koa2-ratelimit');
// koa docs: https://koajs.com/#application
const app = new Koa();
const router = new koaRouter();
// see https://www.npmjs.com/package/koa2-ratelimit, simple brute-force with helmet will suffice.
app.use(RateLimit.middleware({
interval: { min: 15 },
max: 100
}));
app.use(helmet());
app.use(pino);
// enable ctx.request.body parsing for x-www-form-urlencoded webmentions etc
app.use(bodyParser({
multipart: true,
urlencoded: true
}));
// route docs: https://github.com/koajs/router/blob/HEAD/API.md#module_koa-router--Router+get%7Cput%7Cpost%7Cpatch%7Cdelete%7Cdel
require("./webmention/route").route(router);
require("./pingback/route").route(router);
const config = require("./config");
config.setupDataDirs();
app.use(router.routes()).use(router.allowedMethods());
app.listen(config.port, config.host, () => {
log.info(`Started server at http://${config.host}:${config.port}`)
});


@@ -1,24 +0,0 @@
const config = require("../config")
const fsp = require('fs').promises
function validate(params) {
return params.token === config.token &&
config.allowedWebmentionSources.includes(params.domain)
}
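// read every stored webmention JSON file for the given domain and return the parsed contents as an array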
async function load(domain) {
const fileEntries = await fsp.readdir(`data/${domain}`, { withFileTypes: true });
const files = await Promise.all(fileEntries.map(async (file) => {
const contents = await fsp.readFile(`data/${domain}/${file.name}`, 'utf-8')
return JSON.parse(contents)
}));
return files
}
module.exports = {
validate,
load
}


@@ -1,135 +0,0 @@
const got = require('got')
const config = require('./../config')
const fsp = require('fs').promises
const md5 = require('md5')
const { mf2 } = require("microformats-parser");
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
dayjs.extend(utc)
const log = require('pino')()
function validate(request) {
// only accept form-encoded requests carrying two different http(s) URLs, where the target points to one of our own domains
// (behaviour is pinned down by the "webmention receiver validate tests")
if(request.type !== "application/x-www-form-urlencoded" || !request.body) return false
const { source, target } = request.body
if(!source || !target || source === target) return false
if(!source.startsWith("http") || !target.startsWith("http")) return false
return config.allowedWebmentionSources.some(domain => target.indexOf(domain) !== -1)
}
async function isValidTargetUrl(target) {
try {
await got(target)
return true
} catch(unknownTarget) {
}
log.warn(` ABORT: invalid target url`)
return false
}
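// a mention is stored as data/<target domain>/<md5 of "source=...,target=...">.json, so a re-sent mention overwrites the old file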
function asPath(source, target) {
const filename = md5(`source=${source},target=${target}`)
const domain = config.allowedWebmentionSources.find(d => target.indexOf(d) >= 0)
return `data/${domain}/${filename}.json`
}
async function deletePossibleOlderWebmention(source, target) {
try {
await fsp.unlink(asPath(source, target))
} catch(e) {
// does not matter, file not there.
}
}
async function saveWebmentionToDisk(source, target, mentiondata) {
await fsp.writeFile(asPath(source, target), mentiondata, 'utf-8')
}
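// fallback publication timestamp in the configured UTC offset, used when the source does not provide a published date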
function publishedNow() {
return dayjs.utc().utcOffset(config.utcOffset).format("YYYY-MM-DDTHH:mm:ss")
}
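// map the first h-entry found in the source page onto our own webmention JSON structure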
function parseBodyAsIndiewebSite(source, target, hEntry) {
function shorten(txt) {
if(!txt || txt.length <= 250) return txt
return txt.substring(0, 250) + "..."
}
const name = hEntry.properties?.name?.[0]
const authorPropName = hEntry.properties?.author?.[0]?.properties?.name?.[0]
const authorValue = hEntry.properties?.author?.[0]?.value
const picture = hEntry.properties?.author?.[0]?.properties?.photo?.[0]
const summary = hEntry.properties?.summary?.[0]
const contentEntry = hEntry.properties?.content?.[0]?.value
const bridgyTwitterContent = hEntry.properties?.["bridgy-twitter-content"]?.[0]
const publishedDate = hEntry.properties?.published?.[0]
const uid = hEntry.properties?.uid?.[0]
const url = hEntry.properties?.url?.[0]
const type = hEntry.properties?.["like-of"]?.length ? "like" : (hEntry.properties?.["bookmark-of"]?.length ? "bookmark" : "mention" )
return {
author: {
name: authorPropName ? authorPropName : authorValue,
picture: picture?.value ? picture?.value : picture
},
name: name,
content: bridgyTwitterContent ? shorten(bridgyTwitterContent) : (summary ? shorten(summary) : shorten(contentEntry)),
published: publishedDate ? publishedDate : publishedNow(),
type,
// Mastodon uids start with "tag:server", but we do want indieweb uids from other sources
url: uid && uid.startsWith("http") ? uid : (url ? url : source),
source,
target
}
}
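// no microformats found: fall back to the page <title> for name and content, and to the source URL for the author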
function parseBodyAsNonIndiewebSite(source, target, body) {
const title = body.match(/<title>(.*?)<\/title>/)?.splice(1, 1)[0]
return {
author: {
name: source
},
name: title,
content: title,
published: publishedNow(),
url: source,
type: "mention",
source,
target
}
}
async function processSourceBody(body, source, target) {
if(body.indexOf(target) === -1) {
log.warn(` ABORT: no mention of ${target} found in html src of source`)
return
}
// fiddle: https://aimee-gm.github.io/microformats-parser/
const microformat = mf2(body, {
// WHY? crashes on relative URL, should be injected using Jest. Don't care.
baseUrl: source.startsWith("http") ? source : `http://localhost/${source}`
})
const hEntry = microformat.items.filter(itm => itm?.type?.includes("h-entry"))?.[0]
const data = hEntry ? parseBodyAsIndiewebSite(source, target, hEntry) : parseBodyAsNonIndiewebSite(source, target, body)
await saveWebmentionToDisk(source, target, JSON.stringify(data))
log.info(` OK: webmention processed`)
}
async function receive(body) {
if(!(await isValidTargetUrl(body.target))) return
let src = { body: "" }
try {
src = await got(body.source)
} catch(unknownSource) {
log.warn(` ABORT: invalid source url: ` + unknownSource)
await deletePossibleOlderWebmention(body.source, body.target)
return
}
await processSourceBody(src.body, body.source, body.target)
}
module.exports = {
receive,
validate
}


@@ -1,53 +0,0 @@
const webmentionReceiver = require('./receive')
const webmentionLoader = require('./loader')
const webmentionSender = require('./send')
const log = require('pino')()
function route(router) {
router.post("webmention receive endpoint", "/webmention", async (ctx) => {
if(!webmentionReceiver.validate(ctx.request)) {
ctx.throw(400, "malformed webmention request")
}
log.info('%s %o', 'OK: looks like a valid webmention', ctx.request.body)
// we do NOT await this on purpose.
webmentionReceiver.receive(ctx.request.body)
ctx.body = "Thanks, bro. Will process this webmention soon, pinky swear!"
ctx.status = 202
});
router.put("webmention send endpoint", "/webmention/:domain/:token", async (ctx) => {
if(!webmentionLoader.validate(ctx.params)) {
ctx.throw(403, "access denied")
}
const since = ctx.request.query?.since
log.info(` OK: someone wants to send mentions from domain ${ctx.params.domain} since ${since}`)
// we do NOT await this on purpose.
webmentionSender.send(ctx.params.domain, since)
ctx.body = "Thanks, bro. Will send these webmentions soon, pinky swear!"
ctx.status = 202
})
router.get("webmention get endpoint", "/webmention/:domain/:token", async (ctx) => {
if(!webmentionLoader.validate(ctx.params)) {
ctx.throw(403, "access denied")
}
log.info(` OK: someone wants a list of mentions at domain ${ctx.params.domain}`)
const result = await webmentionLoader.load(ctx.params.domain)
ctx.body = {
status: 'success',
json: result
}
})
}
module.exports = {
route
}


@@ -1,89 +0,0 @@
const parser = require("fast-xml-parser")
const config = require('./../config')
const dayjs = require('dayjs')
const customParseFormat = require('dayjs/plugin/customParseFormat')
dayjs.extend(customParseFormat)
const parseOpts = {
ignoreAttributes: false
}
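// extract outbound hrefs from an RSS item description, dropping images/archives and blocked domains, de-duplicated via a Set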
function collectHrefsFromDescription(description) {
// first thought: use parser.parse() and traverse recursively. turned out to be way too slow.
const linksMatch = description.match(/href="([^"]*")/g)
if(!linksMatch) return []
const links = linksMatch
.map(match => match.replace("href=", "").replace(/\"/g, ""))
.filter(match => !(/\.(gif|zip|rar|bz2|gz|7z|jpe?g|tiff?|png|webp|bmp)$/i).test(match))
.filter(match => !config.disallowedWebmentionDomains.some(domain => match.indexOf(domain) >= 0))
return [...new Set(links)]
}
/**
* a typical RSS item looks like this:
-- if <time/> found in body, assume it's a lastmod update timestamp!
{
title: '@celia @kev I have read both you and Kev&#39;s post on...',
link: 'https://brainbaking.com/notes/2021/03/16h17m07s14/',
comments: 'https://brainbaking.com/notes/2021/03/16h17m07s14/#commento',
pubDate: 'Tue, 16 Mar 2021 17:07:14 +0000',
author: 'Wouter Groeneveld',
guid: {
'#text': 'https://brainbaking.com/notes/2021/03/16h17m07s14/',
'@_isPermaLink': 'true'
},
description: ' \n' +
' \n' +
'\n' +
' <p><span class="h-card"><a class="u-url mention" data-user="A5GVjIHI6MH82H6iLQ" href="https://fosstodon.org/@celia" rel="ugc">@<span>celia</span></a></span> <span class="h-card"><a class="u-url mention" data-user="A54b8g0RBaIgjzczMu" href="https://fosstodon.org/@kev" rel="ugc">@<span>kev</span></a></span> I have read both you and Kev&rsquo;s post on this and agree on some points indeed! But I&rsquo;m not yet ready to give up webmentions. As an academic, the idea of citing/mentioning each other is very alluring 🤓. Plus, I needed an excuse to fiddle some more with JS&hellip; <br><br>As much as I loved using Wordpress before, I can&rsquo;t imagine going back to writing stuff in there instead of in markdown. Gotta keep the workflow short, though. Hope it helps you focus on what matters - content!</p>\n' +
'\n' +
'\n' +
' <p>\n' +
' By <a href="/about">Wouter Groeneveld</a> on <time datetime='2021-03-20'>20 March 2021</time>.\n' +
' </p>\n' +
' '
}
**/
function collect(xml, since = '') {
const root = parser.parse(xml, parseOpts).rss.channel
const sinceDate = dayjs(since)
const enrichWithDateProperties = (item) => {
// example pubDate format: Tue, 16 Mar 2021 17:07:14 +0000
const rawpub = item.pubDate?.split(", ")?.[1]
item.pubDate = rawpub ? dayjs(rawpub, "DD MMM YYYY HH:mm:ss ZZ") : dayjs()
if(!item.pubDate.isValid()) item.pubDate = dayjs()
const dateTimeMatch = item.description.match(/datetime="([^"]*")/g)
// Selecting the first - could be dangerous. Living on the edge. Don't care. etc.
const rawlastmod = dateTimeMatch?.[0]?.replace("datetime=", "")?.replace(/\"/g, "")
item.lastmodDate = rawlastmod ? dayjs(rawlastmod) : dayjs(0)
return item
}
const sincePublicationDate = (item) => {
if(!sinceDate.isValid()) return true
return sinceDate < (item.lastmodDate > item.pubDate ? item.lastmodDate : item.pubDate)
}
const entries = root.item.filter ? root.item : [root.item]
return entries
.map(enrichWithDateProperties)
.filter(sincePublicationDate)
.map(item => {
return {
link: item.link,
hrefs: collectHrefsFromDescription(item.description)
}
})
}
module.exports = {
collect
}


@@ -1,60 +0,0 @@
const got = require('got')
const { collect } = require('./rsslinkcollector')
const { discover } = require('./../linkdiscoverer')
const { sendPingbackToEndpoint } = require('./../pingback/send')
const log = require('pino')()
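// POST a form-encoded webmention (source & target) to the discovered endpoint, retrying up to 5 times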
async function sendWebmentionToEndpoint(endpoint, source, target) {
await got.post(endpoint, {
contentType: "x-www-form-urlencoded",
form: {
source,
target
},
retry: {
limit: 5,
methods: ["POST"]
}
})
log.info(` OK: webmention@${endpoint}, sent: source ${source}, target ${target}`)
}
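// discover what the target supports and dispatch to the matching sender; unknown endpoints are silently skipped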
async function mention(opts) {
const { source, target } = opts
const endpoint = await discover(target)
const sendMention = {
"webmention": sendWebmentionToEndpoint,
"pingback": sendPingbackToEndpoint,
"unknown": async function() {}
}
await sendMention[endpoint.type](endpoint.link, source, target)
}
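// turn each collected RSS item into (source = own post, target = outbound link) pairs and mention them all in parallel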
async function parseRssFeed(xml, since) {
const linksToMention = collect(xml, since)
.map(el => el.hrefs
// this strips relative URLs; could be a feature to also send these to own domain?
.filter(href => href.startsWith('http'))
.map(href => {
return {
// SOURCE is own domain this time, TARGET = outbound
target: href,
source: el.link
}
}))
.flat()
await Promise.all(linksToMention.map(mention))
}
async function send(domain, since) {
const feed = await got(`https://${domain}/index.xml`)
await parseRssFeed(feed.body, since)
}
module.exports = {
send
}

BIN
jsfork/test/.DS_Store vendored

Binary file not shown.


@@ -1,25 +0,0 @@
const fs = require('fs').promises
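// manual Jest mock for got: resolves URLs to local fixture files under test/__mocks__ instead of doing real HTTP, with optional *-headers.json companions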
async function got(url) {
const relativeUrl = url.replace('https://brainbaking.com/', '')
const body = (await fs.readFile(`./test/__mocks__/${relativeUrl}`, 'utf8')).toString()
let headers = {}
try {
const headerFile = await fs.readFile(`./test/__mocks__/${relativeUrl.replace(".html", "")}-headers.json`, 'utf8')
headers = JSON.parse(headerFile.toString())
} catch {
}
return {
headers,
body
}
}
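// outgoing posts are a no-op by default; the send tests overwrite got.post themselves to capture calls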
async function gotPostMock(url, opts) {
}
got.post = gotPostMock
module.exports = got


@@ -1,24 +0,0 @@
describe("e2e tests", () => {
jest.disableAutomock()
jest.unmock('got')
const { mf2 } = require("microformats-parser");
const got = require("got");
test.skip("microformat fiddling for non-indieweb sites", async () => {
const html = (await got("https://kristienthoelen.be/2021/03/22/de-stadia-van-een-burn-out-in-welk-stadium-zit-jij/")).body
const mf = mf2(html, {
baseUrl: "https://kristienthoelen.be/"
})
//console.log(mf)
const url = "https://kristienthoelen.be/wp-content/uploads/2021/03/burnoutbarometer.jpg"
const occ = html.indexOf(url)
const len = 100
console.log(html.substring(occ - len, occ + url.length + len))
})
})


@@ -1,73 +0,0 @@
const { discover } = require('../src/linkdiscoverer')
describe("link discoverer", () => {
test("discover 'unknown' if no link is present", async() => {
const result = await discover("https://brainbaking.com/link-discover-test-none.html")
expect(result).toEqual({
type: "unknown"
})
})
test("prefer webmentions over pingbacks if both links are present", async () => {
const result = await discover("https://brainbaking.com/link-discover-bothtypes.html")
expect(result).toEqual({
link: "http://aaronpk.example/webmention-endpoint",
type: "webmention"
})
})
describe("discovers pingback links", () => {
test("discover link if present in header", async () => {
const result = await discover("https://brainbaking.com/pingback-discover-test.html")
expect(result).toEqual({
link: "http://aaronpk.example/pingback-endpoint",
type: "pingback"
})
})
test("discover link if sole entry somewhere in html", async () => {
const result = await discover("https://brainbaking.com/pingback-discover-test-single.html")
expect(result).toEqual({
link: "http://aaronpk.example/pingback-endpoint-body",
type: "pingback"
})
})
test("use link in header if multiple present in html", async () => {
const result = await discover("https://brainbaking.com/pingback-discover-test-multiple.html")
expect(result).toEqual({
link: "http://aaronpk.example/pingback-endpoint-header",
type: "pingback"
})
})
})
describe("discovers webmention links", () => {
test("discover link if present in header", async () => {
const result = await discover("https://brainbaking.com/link-discover-test.html")
expect(result).toEqual({
link: "http://aaronpk.example/webmention-endpoint",
type: "webmention"
})
})
test("discover link if sole entry somewhere in html", async () => {
const result = await discover("https://brainbaking.com/link-discover-test-single.html")
expect(result).toEqual({
link: "http://aaronpk.example/webmention-endpoint-body",
type: "webmention"
})
})
test("use link in header if multiple present in html", async () => {
const result = await discover("https://brainbaking.com/link-discover-test-multiple.html")
expect(result).toEqual({
link: "http://aaronpk.example/webmention-endpoint-header",
type: "webmention"
})
})
})
})


@@ -1,52 +0,0 @@
const fs = require('fs');
const fsp = require('fs').promises;
const { rmdir } = require('./../utils')
jest.mock('got');
const md5 = require('md5')
const { receive } = require('../../src/pingback/receive')
const dumpdir = 'data/brainbaking.com'
describe("receive pingback process tests happy path", () => {
beforeEach(() => {
if(fs.existsSync(dumpdir)) {
rmdir(dumpdir)
}
fs.mkdirSync(dumpdir, {
recursive: true
})
})
function asFilename(body) {
return `${dumpdir}/` + md5(`source=${body.source},target=${body.target}`)
}
test("receive a pingback processes it just like a webmention", async () => {
const body = {
source: "https://brainbaking.com/valid-bridgy-twitter-source.html",
target: "https://brainbaking.com/post/2021/03/the-indieweb-mixed-bag"
}
await receive(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>${body.source}</string></value>
</param>
<param>
<value><string>${body.target}</string></value>
</param>
</params>
</methodCall>
`)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data.content).toContain("Recommended read:")
})
})


@@ -1,143 +0,0 @@
describe("pingback receive validation tests", () => {
const { validate } = require('../../src/pingback/receive')
test("not valid if malformed XML as body", () => {
const result = validate("ola pola")
expect(result).toBe(false)
})
test("not valid if methodName is not pingback.ping", () => {
const result = validate(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>ka.tsjing</methodName>
<params>
<param>
<value><string>https://cool.site</string></value>
</param>
<param>
<value><string>https://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`)
expect(result).toBe(false)
})
test("not valid if less than two parameters", () => {
const result = validate(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`)
expect(result).toBe(false)
})
test("not valid if more than two parameters", () => {
const xml = `<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://cool.site</string></value>
</param>
<param>
<value><string>https://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
<param>
<value><string>https://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`
expect(validate(xml)).toBe(false)
})
test("not valid if target is not in trusted domains from config", () => {
const result = validate(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://cool.site</string></value>
</param>
<param>
<value><string>https://flashballz.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`)
expect(result).toBe(false)
})
test("not valid if target is not http(s)", () => {
const result = validate(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://cool.site</string></value>
</param>
<param>
<value><string>gemini://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`)
expect(result).toBe(false)
})
test("not valid if source is not http(s)", () => {
const result = validate(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>gemini://cool.site</string></value>
</param>
<param>
<value><string>https://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`)
expect(result).toBe(false)
})
test("is valid if pingback.ping and two http(s) parameters of which target is trusted", () => {
const result = validate(`
<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://cool.site</string></value>
</param>
<param>
<value><string>https://brainbaking.com/post/2021/03/cool-ness</string></value>
</param>
</params>
</methodCall>
`)
expect(result).toBe(true)
})
})


@@ -1,29 +0,0 @@
const fs = require('fs');
const path = require('path');
// https://www.codota.com/code/javascript/functions/fs/rmdirSync
// WHY? rmdirSync() does not return on time...
function rmdir(dir) {
if (!fs.existsSync(dir)) {
return null;
}
fs.readdirSync(dir).forEach(f => {
let pathname = path.join(dir, f);
if (!fs.existsSync(pathname)) {
return fs.unlinkSync(pathname);
}
if (fs.statSync(pathname).isDirectory()) {
return rmdir(pathname);
} else {
return fs.unlinkSync(pathname);
}
});
return fs.rmdirSync(dir);
}
module.exports = {
rmdir
}


@@ -1,47 +0,0 @@
const { load } = require('../../src/webmention/loader')
const fs = require('fs');
const fsp = require('fs').promises;
const { rmdir } = require('./../utils')
const dumpdir = 'data/brainbaking.com'
const exampleWebmention = {
author: {
name: "Wouter Groeneveld",
picture: "https://brainbaking.com//img/avatar.jpg"
},
content: "This is cool, I just found out about valid indieweb target - so cool...",
source: "https://coolness.com",
target: "https://brainbaking.com/notes/2021/03/02h17m18s46/",
published: "2021-03-06T12:41:00"
}
const exampleWebmention2 = {
author: {
name: "Jef Klakveld"
},
content: "Give it to me baby uhuh-uhuh white flies girls etc",
source: "https://darkness.be",
target: "https://brainbaking.com/about",
published: "2021-03-06T12:41:00"
}
describe("webmention loading of existing json files tests", () => {
beforeEach(() => {
if(fs.existsSync(dumpdir)) {
rmdir(dumpdir)
}
fs.mkdirSync(dumpdir, {
recursive: true
})
})
test("return an array of webmentions from domain dir", async () => {
await fsp.writeFile(`${dumpdir}/test.json`, JSON.stringify(exampleWebmention), 'utf-8')
await fsp.writeFile(`${dumpdir}/test2.json`, JSON.stringify(exampleWebmention2), 'utf-8')
const result = await load("brainbaking.com")
expect(result.length).toBe(2)
})
})


@@ -1,33 +0,0 @@
describe("webmention loader validate tests", () => {
const { validate } = require('../../src/webmention/loader')
const config = require('../../src/config')
test("is invalid if token not the same", () => {
const result = validate({
token: "drie roze olifanten hopla in de lucht",
domain: config.allowedWebmentionSources[0]
})
expect(result).toBe(false)
})
test("is invalid if domain not the list of known domains", () => {
const result = validate({
token: config.token,
domain: "woozaas.be"
})
expect(result).toBe(false)
})
test("is valid if domain and token matching", () => {
const result = validate({
token: config.token,
domain: config.allowedWebmentionSources[0]
})
expect(result).toBe(true)
})
})


@@ -1,190 +0,0 @@
const fs = require('fs');
const fsp = require('fs').promises;
const { rmdir } = require('./../utils')
jest.mock('got');
const md5 = require('md5')
const { receive } = require('../../src/webmention/receive')
const dumpdir = 'data/brainbaking.com'
const MockDate = require('mockdate')
describe("receive webmention process tests happy path", () => {
beforeEach(() => {
if(fs.existsSync(dumpdir)) {
rmdir(dumpdir)
}
fs.mkdirSync(dumpdir, {
recursive: true
})
MockDate.set('2020-01-01')
})
function asFilename(body) {
return `${dumpdir}/` + md5(`source=${body.source},target=${body.target}`)
}
test("receive a webmention bookmark via twitter", async () => {
const body = {
source: "https://brainbaking.com/valid-bridgy-twitter-source.html",
target: "https://brainbaking.com/post/2021/03/the-indieweb-mixed-bag"
}
await receive(body)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data.type).toEqual("bookmark");
expect(data.content).toContain("Recommended read:")
})
test("receive a brid.gy webmention like", async () => {
const body = {
source: "https://brainbaking.com/valid-bridgy-like.html",
// wrapped in a a class="u-like-of" tag
target: "https://brainbaking.com/valid-indieweb-target.html"
}
await receive(body)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data).toEqual({
author: {
name: "Stampeding Longhorn",
picture: "https://cdn.social.linux.pizza/v1/AUTH_91eb37814936490c95da7b85993cc2ff/sociallinuxpizza/accounts/avatars/000/185/996/original/9e36da0c093cfc9b.png"
},
url: "https://chat.brainbaking.com/notice/A4nx1rFwKUJYSe4TqK#favorited-by-A4nwg4LYyh4WgrJOXg",
name: "",
type: "like",
source: body.source,
target: body.target,
// no dates in bridgy-to-mastodon likes...
published: "2020-01-01T01:00:00"
})
})
test("receive a brid.gy webmention that has a url and photo without value", async () => {
const body = {
source: "https://brainbaking.com/valid-bridgy-source.html",
target: "https://brainbaking.com/valid-indieweb-target.html"
}
await receive(body)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data).toEqual({
author: {
name: "Stampeding Longhorn",
picture: "https://cdn.social.linux.pizza/v1/AUTH_91eb37814936490c95da7b85993cc2ff/sociallinuxpizza/accounts/avatars/000/185/996/original/9e36da0c093cfc9b.png"
},
url: "https://social.linux.pizza/@StampedingLonghorn/105821099684887793",
content: "@wouter The cat pictures are awesome. for jest tests!",
name: "@wouter The cat pictures are awesome. for jest tests!",
type: "mention",
source: body.source,
target: body.target,
published: "2021-03-02T16:17:18.000Z"
})
})
test("receive saves a JSON file of indieweb-metadata if all is valid", async () => {
const body = {
source: "https://brainbaking.com/valid-indieweb-source.html",
target: "https://brainbaking.com/valid-indieweb-target.html"
}
await receive(body)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data).toEqual({
author: {
name: "Wouter Groeneveld",
picture: "https://brainbaking.com//img/avatar.jpg"
},
url: "https://brainbaking.com/notes/2021/03/06h12m41s48/",
content: "This is cool, I just found out about valid indieweb target - so cool",
name: "I just learned about https://www.inklestudios.com/...",
type: "mention",
source: body.source,
target: body.target,
published: "2021-03-06T12:41:00"
})
})
test("receive saves a JSON file of indieweb-metadata with summary as content if present", async () => {
const body = {
source: "https://brainbaking.com/valid-indieweb-source-with-summary.html",
target: "https://brainbaking.com/valid-indieweb-target.html"
}
await receive(body)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data).toEqual({
author: {
name: "Wouter Groeneveld",
picture: "https://brainbaking.com//img/avatar.jpg"
},
url: "https://brainbaking.com/notes/2021/03/06h12m41s48/",
name: "I just learned about https://www.inklestudios.com/...",
content: "This is cool, this is a summary!",
type: "mention",
source: body.source,
target: body.target,
published: "2021-03-06T12:41:00"
})
})
test("receive saves a JSON file of non-indieweb-data such as title if all is valid", async () => {
const body = {
source: "https://brainbaking.com/valid-nonindieweb-source.html",
target: "https://brainbaking.com/valid-indieweb-target.html"
}
await receive(body)
const result = await fsp.readFile(`${asFilename(body)}.json`, 'utf-8')
const data = JSON.parse(result)
expect(data).toEqual({
author: {
name: "https://brainbaking.com/valid-nonindieweb-source.html",
},
content: "Diablo 2 Twenty Years Later: A Retrospective | Jefklaks Codex",
name: "Diablo 2 Twenty Years Later: A Retrospective | Jefklaks Codex",
type: "mention",
url: body.source,
source: body.source,
target: body.target,
published: "2020-01-01T01:00:00"
})
})
test("receive a target that does not point to the source does nothing", async () => {
const body = {
source: "https://brainbaking.com/valid-indieweb-source.html",
target: "https://brainbaking.com/valid-indieweb-source.html"
}
await receive(body)
const data = fs.readdirSync(dumpdir)
expect(data.length).toBe(0)
})
test("receive a source that does not exist should also delete older webmention files", async () => {
const body = {
source: "https://wubanga2001.boom/lolz",
target: "https://brainbaking.com/valid-indieweb-source.html"
}
await fsp.writeFile(`${asFilename(body)}.json`, JSON.stringify({ lolz: "aha" }), 'utf-8')
await receive(body)
const data = fs.readdirSync(dumpdir)
expect(data.length).toBe(0)
})
})


@@ -1,117 +0,0 @@
describe("webmention receiver validate tests", () => {
const validhttpurl = "http://brainbaking.com/bla"
const validhttpsurl = "https://brainbaking.com/blie"
const urlfrominvaliddomain = "http://brainthe.bake/jup"
const invalidurl = "lolzw"
const { validate } = require('../../src/webmention/receive')
test("is valid if source and target https urls", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: validhttpsurl + "1",
target: validhttpsurl + "2"
}
})
expect(result).toBe(true)
})
test("is NOT valid if target is a valid url but not form valid domain", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: validhttpsurl + "2",
target: urlfrominvaliddomain
}
})
expect(result).toBe(false)
})
test("is NOT valid if source and target are the same urls", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: validhttpsurl,
target: validhttpsurl
}
})
expect(result).toBe(false)
})
test("is valid if source and target http urls", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: validhttpurl + "1",
target: validhttpurl + "2"
}
})
expect(result).toBe(true)
})
test("is NOT valid if source is not a valid url", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: invalidurl,
target: validhttpurl
}
})
expect(result).toBe(false)
})
test("is NOT valid if target is not a valid url", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: validhttpurl,
target: invalidurl
}
})
expect(result).toBe(false)
})
test("is NOT valid if source is missing", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
target: validhttpurl
}
})
expect(result).toBe(false)
})
test("is NOT valid if target is missing", () => {
const result = validate({
type: "application/x-www-form-urlencoded",
body: {
source: validhttpurl
}
})
expect(result).toBe(false)
})
test("is NOT valid if no valid encoded form", () => {
const result = validate({
type: "ow-mai-got",
body: {
source: validhttpurl,
target: validhttpurl
}
})
expect(result).toBe(false)
})
test("is NOT valid if body is missing", () => {
const result = validate({
type: "application/x-www-form-urlencoded"
})
expect(result).toBe(false)
})
})


@@ -1,86 +0,0 @@
const { collect } = require('../../src/webmention/rsslinkcollector')
const fs = require('fs').promises
const dayjs = require('dayjs')
describe("collect RSS links of articles since certain period", () => {
let xml = ''
beforeEach(async () => {
xml = (await fs.readFile('./test/__mocks__/samplerss.xml')).toString()
})
test("collect should not contain hrefs from blocked domains", () => {
const collected = collect(xml, dayjs('2021-03-10T00:00:00.000Z').toDate())
// test case:
// contains youtube.com/cool link
const last = collected[collected.length - 1]
expect(last.hrefs).toEqual([
"https://dog.estate/@eli_oat",
"https://twitter.com/olesovhcom/status/1369478732247932929",
"/about"
])
})
test("collect should not contain hrefs that point to images", () => {
const collected = collect(xml, dayjs('2021-03-14T00:00:00.000Z').toDate())
// test case:
// contains e.g. https://chat.brainbaking.com/media/6f8b72ca-9bfb-460b-9609-c4298a8cab2b/EuropeBattle%202021-03-14%2016-20-36-87.jpg
const last = collected[collected.length - 1]
expect(last.hrefs).toEqual([
"/about"
])
})
test("collects if time tag found in content that acts as an update stamp", async () => {
// sample item: pubDate 2021-03-16, timestamp updated: 2021-03-20
xml = (await fs.readFile('./test/__mocks__/samplerss-updated-timestamp.xml')).toString()
const collected = collect(xml, dayjs('2021-03-19').toDate())
expect(collected.length).toBe(1)
})
test("does not collect if time tag found in content but still older than since", async () => {
// sample item: pubDate 2021-03-16, timestamp updated: 2021-03-20
xml = (await fs.readFile('./test/__mocks__/samplerss-updated-timestamp.xml')).toString()
const collected = collect(xml, dayjs('2021-03-21').toDate())
expect(collected.length).toBe(0)
})
test("collects nothing if date in future and since nothing new in feed", () => {
const collected = collect(xml, dayjs().add(7, 'day').toDate())
expect(collected.length).toEqual(0)
})
test("collect latest x links when a since parameter is provided", () => {
const collected = collect(xml, dayjs('2021-03-15T00:00:00.000Z').toDate())
expect(collected.length).toEqual(3)
const last = collected[collected.length - 1]
expect(last.link).toBe("https://brainbaking.com/notes/2021/03/15h14m43s49/")
expect(last.hrefs).toEqual([
"http://replit.com",
"http://codepen.io",
"https://kuleuven-diepenbeek.github.io/osc-course/ch1-c/intro/",
"/about"
])
})
test("collect every external link without a valid since date", () => {
const collected = collect(xml)
expect(collected.length).toEqual(141)
const first = collected[0]
expect(first.link).toBe("https://brainbaking.com/notes/2021/03/16h17m07s14/")
expect(first.hrefs).toEqual([
"https://fosstodon.org/@celia",
"https://fosstodon.org/@kev",
"/about"
])
})
})


@@ -1,62 +0,0 @@
const got = require('got')
const { send } = require('../../src/webmention/send')
describe("webmention send scenarios", () => {
test("webmention send integration test that can send both webmentions and pingbacks", async () => {
// jest.fn() gives unpredictable and unreadable output with unordered calledWith assertions... DIY!
let posts = {}
got.post = function(url, opts) {
posts[url] = opts
}
// fetches index.xml
await send("brainbaking.com", '2021-03-16T16:00:00.000Z')
expect(Object.keys(posts).length).toBe(3)
expect(posts["http://aaronpk.example/webmention-endpoint-header"]).toEqual({
contentType: "x-www-form-urlencoded",
form: {
source: "https://brainbaking.com/notes/2021/03/16h17m07s14/",
target: "https://brainbaking.com/link-discover-test-multiple.html"
},
retry: {
limit: 5,
methods: ["POST"]
}
})
expect(posts["http://aaronpk.example/pingback-endpoint-body"]).toEqual({
contentType: "text/xml",
body: `<?xml version="1.0" encoding="UTF-8"?>
<methodCall>
<methodName>pingback.ping</methodName>
<params>
<param>
<value><string>https://brainbaking.com/notes/2021/03/16h17m07s14/</string></value>
</param>
<param>
<value><string>https://brainbaking.com/pingback-discover-test-single.html</string></value>
</param>
</params>
</methodCall>`,
retry: {
limit: 5,
methods: ["POST"]
}
})
expect(posts["http://aaronpk.example/webmention-endpoint-body"]).toEqual({
contentType: "x-www-form-urlencoded",
form: {
source: "https://brainbaking.com/notes/2021/03/16h17m07s14/",
target: "https://brainbaking.com/link-discover-test-single.html"
},
retry: {
limit: 5,
methods: ["POST"]
}
})
})
})

File diff suppressed because it is too large