#!/usr/bin/env -S npx nodejsscript
/* jshint esversion: 11,-W097, -W040, module: true, node: true, expr: true, undef: true *//* global echo, $, pipe, s, fetch, cyclicLoop */
// Base URL under which individual "drops" (issues) are published.
const url_drops= "https://pagenotfound.cz/drop/";

// Reuse package metadata for the CLI's --version/--help output.
const { version, description }= s.cat("package.json").xargs(JSON.parse);

// Files maintained by this script (read/written in the working directory).
const paths= {
	sitemap: "sitemap.json",
	rss: "rss.xml",
};
/**
 * @typedef {Object} Article
 * @property {string} title
 * @property {string} perex - Short lead paragraph shown in listings.
 * @property {string} author
 * @property {string} loc - Article path relative to the site host.
 * @property {string} drop - Identifier of the drop the article belongs to.
 */
/**
 * @typedef {Object} Drop
 * @property {string} drop - Drop identifier (text prefix + zero-padded number).
 * @property {string} date - Publication date as an ISO 8601 string.
 */
/**
 * @typedef {Object} Sitemap
 * @property {Article[]} articles
 * @property {Drop[]} drops
 */

// CLI definition (nodejsscript `$.api`): single `pull` command that refreshes
// the local sitemap + RSS feed, optionally committing the result to git.
$.api()
.version(version)
.describe(description)
.command("pull", "Update article list")
.option("--git", "Update git repository")
.action(async function pull({ git: is_git= false }){
	const json= await sitemap();
	// NOTE(review): `toRSS` is async but not awaited; it currently contains no
	// `await`, so it completes before `$.exit` — confirm if awaits are added later.
	toRSS(json);
	if(is_git)
		gitCommit(Object.values(paths));
	$.exit(0);
})
.parse();
/**
 * Commits and pushes the given files when the working tree has changes.
 * Temporarily configures a bot git identity and removes it again afterwards.
 * @param {string[]} files - Paths to stage (`git add`).
 * @param {string} [des="not specified"] - Suffix appended to the commit message.
 * @returns {void}
 */
function gitCommit(files, des= "not specified"){
	// Empty diff → nothing to commit; bail out early.
	if(!s.run`git diff --numstat`.trim())
		return echo("Nothing to do"); // fixed typo: was "Nothig todo"

	echo("Diff to save");
	s.run`git config user.name "Bot"`;
	// E-mail is stored reversed in the source so crawlers can't harvest it.
	s.run`git config user.email "${"zc.murtnec@naj.elrdna".split("").reverse().join("")}"`;
	s.run`git add ${files}`;
	s.run`git commit -m "Updated by bot – ${des}"`;
	s.run`git push`;
	// Clean up: don't leave the bot identity in the repo config.
	s.run`git config --remove-section user`;
}
/**
 * Renders the sitemap into an RSS 2.0 feed and writes it to `paths.rss`.
 * Scraped text is XML-escaped so characters like `&` / `<` can't break the feed.
 * @param {Sitemap} json - Parsed sitemap with articles and drops.
 * @returns {Promise<void>}
 */
async function toRSS(json){
	const path= paths.rss;
	const host= "https://pagenotfound.cz";
	// Escape the five XML-reserved characters (titles/perexes come from scraped HTML).
	const esc= t=> t.replace(/[<>&'"]/g, c=> ({ "<": "&lt;", ">": "&gt;", "&": "&amp;", "'": "&apos;", '"': "&quot;" })[c]);
	const articles= json.articles.map(function({ title, perex, author, loc, drop }){
		// An article's drop may be missing from `json.drops` (e.g. hand-edited
		// sitemap) — emit an empty pubDate instead of crashing on `undefined.date`.
		const date= json.drops.find(d=> d.drop === drop)?.date ?? "";
		return [
			"<item>",
			...[
				`<title>${esc(title)}</title>`,
				`<link>${host+loc}</link>`,
				`<description>${esc(perex)}</description>`,
				`<author>${esc(author)}</author>`,
				`<pubDate>${date}</pubDate>`,
				`<category>${esc(drop)}</category>`,
			].map(l=> "\t"+l),
			"</item>"
		].map(l=> "\t"+l).join("\n");
	});

	s.echo([
		`<?xml version="1.0" encoding="UTF-8" ?>`,
		`<rss version="2.0">`,
		"<channel>",
		`	<title>Pagenotfound.cz</title>`,
		`	<link>${host}</link>`,
		...articles,
		"</channel>",
		"</rss>"
	].join("\n")).to(path);
}
import { JSDOM } from "jsdom";
/**
 * Loads (or initializes) the local sitemap file, syncs the drop list, and
 * scrapes any articles from the newest drop not yet present in `articles`.
 * The updated sitemap is persisted back to `paths.sitemap`.
 * @returns {Promise<Sitemap>} The up-to-date sitemap object.
 */
async function sitemap(){
	const path= paths.sitemap;

	/** @type {Sitemap} */
	const json= s.test("-f", path) ? s.cat(path).xargs(JSON.parse) : { drops: [], articles: [] };
	const [ article_last= { drop: "" } ]= json.articles;
	await syncDrops(json);
	// A fresh sitemap may still have no drops (first run, next drop unpublished)
	// — the bare destructuring used to throw a TypeError here.
	const [ { drop: drop_last } = {} ]= json.drops;
	if(!drop_last || drop_last === article_last.drop) return json;

	const res= await fetch(url_drops+drop_last);
	// Bugfix: used to `return;` (undefined), crashing `toRSS(json)` in the caller.
	if(res.status !== 200) return json;
	const dom= new JSDOM(await res.text());
	const diff= [];
	for(const article of dom.window.document.querySelectorAll("article")){
		diff.push({
			title: article.querySelector("h2").textContent.trim(),
			perex: article.querySelector("[class^=ArticleTile_perex]").textContent.trim(),
			author: article.querySelector("[class^=ArticleTile_author]").textContent.trim(),
			loc: article.querySelector("a").href,
			drop: drop_last,
		});
	}

	json.articles.unshift(...diff);
	s.echo(JSON.stringify(json, null, "\t")).to(path);
	return json;
}
/**
 * Probes the server for the next drop (current numeric index + 1, zero-padded
 * to the same width) and, when it exists, prepends it to `json.drops`.
 * @param {Sitemap} json - Mutated in place when a new drop is found.
 * @returns {Promise<Sitemap>} The same `json` object (found or not).
 */
async function syncDrops(json){
	// Guard: a fresh sitemap has `drops: []` — nothing to increment from.
	// The bare destructuring used to throw a TypeError on the empty array.
	const [ { drop: drop_last } = {} ]= json.drops;
	if(!drop_last) return json;
	const i_index= drop_last.search(/\d/);
	const pre= drop_last.slice(0, i_index);
	const index= pipe(
		Number,
		i=> i+1,
		// Keep the numeric part zero-padded to its original width (e.g. "09" → "10").
		i=> i.toString().padStart(drop_last.length - i_index, "0"),
	)(drop_last.slice(i_index));

	const drop= pre+index;
	// HEAD request: we only need to know whether the next drop page exists yet.
	const res= await fetch(url_drops+drop, { method: "HEAD" });
	if(res.status !== 200) return json;

	const date= pipe(
		d=> new Date(d),
		d=> d.toISOString(),
	)(res.headers.get("date"));
	json.drops.unshift({ drop, date });
	return json;
}