116 lines
3.2 KiB
JavaScript
Executable File
116 lines
3.2 KiB
JavaScript
Executable File
#!/usr/bin/env -S npx nodejsscript
|
|
/* jshint esversion: 11,-W097, -W040, module: true, node: true, expr: true, undef: true *//* global echo, $, pipe, s, fetch, cyclicLoop */
|
|
import { JSDOM } from "jsdom";
|
|
const url_drops= "https://pagenotfound.cz/drop/";
|
|
const { name, version, description }= s.cat("package.json").xargs(JSON.parse);
|
|
/**
|
|
* @typedef {Object} Article
|
|
* @property {string} title
|
|
* @property {string} perex
|
|
* @property {string} author
|
|
* @property {string} loc
|
|
* @property {string} drop
|
|
* */
|
|
/**
|
|
* @typedef {Object} Drop
|
|
* @property {string} drop
|
|
* @property {string} date
|
|
* */
|
|
/**
|
|
* @typedef {Object} Sitemap
|
|
* @property {Article[]} articles
|
|
* @property {Drop[]} drops
|
|
* */
|
|
|
|
// CLI wiring: the single `pull` command refreshes the article list and
// regenerates the RSS feed.
$.api()
	.version(version)
	.describe(description)
	.command("pull", "Update article list")
	.action(pull)
	.parse();

/** Sync `sitemap.json` with the site, then rewrite `rss.xml` from it. */
async function pull(){
	const json= await sitemap();
	toRSS(json);
	$.exit(0);
}
|
|
|
|
/**
 * Serializes the sitemap into an RSS 2.0 feed and writes it to `rss.xml`.
 *
 * Fixes over the previous version:
 * - All interpolated text is XML-escaped, so titles/perexes containing
 *   `&`, `<`, `>` or quotes can no longer produce an invalid feed.
 * - `<pubDate>` is emitted in RFC 822 form (`Date.prototype.toUTCString`),
 *   as the RSS 2.0 spec requires, instead of the raw ISO string stored in
 *   the sitemap.
 * - An article referencing an unknown drop no longer throws (optional
 *   chaining with an empty fallback).
 * - The channel carries the mandatory `<description>` element (taken from
 *   the module-level `description` read from package.json).
 *
 * @param {Sitemap} json
 * @returns {Promise<void>} resolves once the file has been written
 */
async function toRSS(json){
	const path= "rss.xml";
	const host= "https://pagenotfound.cz";
	/** Escape the five XML-special characters in text content. */
	const esc= t=> String(t).replace(/[&<>"']/g,
		c=> ({ "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;", "'": "&apos;" }[c]));

	const articles= json.articles.map(function({ title, perex, author, loc, drop }){
		// Look up the drop's publication date; tolerate a missing drop record.
		const iso= json.drops.find(d=> d.drop === drop)?.date;
		const pubDate= iso ? new Date(iso).toUTCString() : "";
		return [
			"<item>",
			...[
				`<title>${esc(title)}</title>`,
				`<link>${host+loc}</link>`,
				`<description>${esc(perex)}</description>`,
				`<author>${esc(author)}</author>`,
				`<pubDate>${pubDate}</pubDate>`,
				`<category>${esc(drop)}</category>`,
			].map(l=> "\t"+l),
			"</item>"
		].map(l=> "\t"+l).join("\n");
	});

	s.echo([
		`<?xml version="1.0" encoding="UTF-8" ?>`,
		`<rss version="2.0">`,
		"<channel>",
		`\t<title>Pagenotfound.cz</title>`,
		`\t<link>${host}</link>`,
		`\t<description>${esc(description)}</description>`,
		...articles,
		"</channel>",
		"</rss>"
	].join("\n")).to(path);
}
|
|
/**
 * Loads (or initializes) `sitemap.json`, syncs newly published drops via
 * `syncDrops`, and scrapes the article tiles of the newest drop when they
 * are not yet recorded. The updated state is persisted back to disk.
 *
 * BUGFIX: previously returned `undefined` when the drop page responded with
 * a non-200 status, which made the caller crash in `toRSS(json)`. It now
 * always returns the current (possibly unchanged) sitemap. It also no longer
 * throws on a fresh run where `json.drops` is still empty.
 *
 * @returns {Promise<Sitemap>} the up-to-date sitemap, never `undefined`
 */
async function sitemap(){
	const path= "sitemap.json";

	/** @type {Sitemap} */
	const json= s.test("-f", path) ? s.cat(path).xargs(JSON.parse) : { drops: [], articles: [] };
	const [ article_last= { drop: "" } ]= json.articles;
	await syncDrops(json);
	// Fresh state with no known drops — nothing to scrape yet.
	if(json.drops.length === 0) return json;
	const [ { drop: drop_last } ]= json.drops;
	// Newest drop already has its articles recorded → nothing to do.
	if(drop_last === article_last.drop) return json;

	const res= await fetch(url_drops+drop_last);
	if(res.status !== 200) return json;
	const dom= new JSDOM(await res.text());
	const diff= [];
	for(const article of dom.window.document.querySelectorAll("article")){
		// Selectors target the CSS-module class prefixes of the article tiles;
		// NOTE(review): brittle against site redesigns — verify occasionally.
		diff.push({
			title: article.querySelector("h2").textContent.trim(),
			perex: article.querySelector("[class^=ArticleTile_perex]").textContent.trim(),
			author: article.querySelector("[class^=ArticleTile_author]").textContent.trim(),
			loc: article.querySelector("a").href,
			drop: drop_last,
		});
	}

	json.articles.unshift(...diff);
	s.echo(JSON.stringify(json, null, "\t")).to(path);
	return json;
}
|
|
/**
 * Probes whether the next drop has been published and, if so, prepends it to
 * `json.drops`. The candidate name is derived from the current newest drop by
 * incrementing its numeric suffix (e.g. "drop07" → "drop08", preserving the
 * zero-padded width); its publication date is taken from the HTTP `Date`
 * response header of a HEAD request.
 *
 * BUGFIX: destructuring `[{ drop }]` out of an empty `json.drops` threw a
 * TypeError on a fresh run (no sitemap.json yet). With no previous drop there
 * is nothing to extrapolate the next name from, so we bail out instead.
 * TODO(review): consider seeding an initial drop name for first runs.
 *
 * @param {Sitemap} json mutated in place
 * @returns {Promise<Sitemap>} the same object, for chaining
 */
async function syncDrops(json){
	if(json.drops.length === 0) return json;
	const [ { drop: drop_last } ]= json.drops;
	// Split "<prefix><number>" at the first digit, e.g. "drop07" → "drop"+"07".
	const i_index= drop_last.search(/\d/);
	if(i_index === -1) return json; // no numeric suffix — cannot increment
	const pre= drop_last.slice(0, i_index);
	// Increment the suffix, keeping the original zero-padded width.
	const index= String(Number(drop_last.slice(i_index)) + 1)
		.padStart(drop_last.length - i_index, "0");

	const drop= pre+index;
	const res= await fetch(url_drops+drop, { method: "HEAD" });
	if(res.status !== 200) return json;

	// The HTTP `Date` header is RFC 7231 formatted, which `new Date()` parses.
	const date= new Date(res.headers.get("date")).toISOString();
	json.drops.unshift({ drop, date });
	return json;
}
|