//! rainbowrss — fetch a list of RSS/Atom feeds and render their entries
//! into a single static HTML page.
use std::fmt::Write;

use anyhow::{bail, Context, Result};
use clap::Parser;
use feed_rs::{
    model::{Entry, Feed},
    parser::parse,
};
use reqwest::{IntoUrl, StatusCode, Url};
|
|
|
|
/// Command-line arguments, parsed by clap's derive API.
#[derive(Parser)]
#[command(version, about, long_about = None)]
struct Cli {
    /// Path to file containing feeds
    #[arg(long, default_value_t = String::from("feeds.txt"))]
    feeds: String,

    /// Path to output file
    #[arg(long, default_value_t = String::from("index.html"))]
    out: String,
}
|
|
|
|
/// A single feed entry, flattened to just what the HTML output needs.
struct FeedItem {
    // Target URL of the entry (taken from the entry's first link).
    link: String,
    // Human-readable entry title.
    title: String,
    // Published timestamp (falls back to the updated timestamp); used to
    // sort entries newest-first before rendering.
    time: chrono::DateTime<chrono::Utc>,
}
|
|
|
|
fn fetch_feed(url: impl IntoUrl) -> Result<Feed> {
|
|
let r = reqwest::blocking::get(url)
|
|
.context("Failed to fetch feed")?;
|
|
|
|
if r.status() != StatusCode::OK {
|
|
bail!("Fetched feed returned unuseable status code {}", r.status());
|
|
}
|
|
|
|
let content = r.bytes()
|
|
.context("Failed to read feed contents")?;
|
|
|
|
let feed = parse(&content[..])
|
|
.context("Failed to parse feed")?;
|
|
|
|
Ok(feed)
|
|
}
|
|
|
|
/// Parsed contents of a feeds file: a display name plus the URLs to fetch.
struct FeedFile {
    // Display name for the generated page; defaults to the file path and
    // can be overridden by a "# name: " line in the file.
    name: String,
    // Feed URLs listed in the file (blank and comment lines skipped).
    urls: Vec<Url>,
}
|
|
|
|
fn read_feed_file(path: &str) -> Result<FeedFile> {
|
|
let mut name = String::from(path);
|
|
let mut urls = Vec::new();
|
|
|
|
for line in std::fs::read_to_string(path)?.lines() {
|
|
let line = line.trim().to_string();
|
|
if line == "" {
|
|
continue;
|
|
}
|
|
if line.starts_with("# name: ") {
|
|
name = String::from(&line[8..]);
|
|
continue;
|
|
}
|
|
if line.starts_with("#") {
|
|
continue;
|
|
}
|
|
let url = Url::parse(&line)?;
|
|
|
|
urls.push(url);
|
|
}
|
|
|
|
Ok(FeedFile{
|
|
name: name,
|
|
urls: urls,
|
|
})
|
|
}
|
|
|
|
fn make_feed_item(entry: Entry) -> Result<FeedItem> {
|
|
Ok(FeedItem {
|
|
link: entry.links
|
|
.first()
|
|
.context("Unable to retrieve link from feed entry")?
|
|
.href
|
|
.clone(),
|
|
title: entry.title
|
|
.context("Unable to retrieve title from feed entry")?
|
|
.content
|
|
.clone(),
|
|
time: match entry.published {
|
|
Some(time) => time,
|
|
None => entry.updated
|
|
.context("Unable to retrieve publishing time from feed entry")?,
|
|
},
|
|
})
|
|
}
|
|
|
|
fn main() -> Result<()> {
|
|
let cli = Cli::parse();
|
|
|
|
let feed_file = read_feed_file(&cli.feeds)?;
|
|
|
|
let mut items: Vec<FeedItem> = Vec::new();
|
|
|
|
for feed_url in feed_file.urls {
|
|
println!("Loading feed {}", feed_url);
|
|
|
|
let feed_entries = match fetch_feed(feed_url) {
|
|
Ok(feed) => feed.entries,
|
|
Err(e) => {
|
|
println!("\t{}, skipping", e);
|
|
continue;
|
|
},
|
|
};
|
|
|
|
for entry in feed_entries {
|
|
match make_feed_item(entry) {
|
|
Ok(item) => {
|
|
items.push(item);
|
|
},
|
|
Err(e) => {
|
|
println!("\t{}, skipping", e);
|
|
},
|
|
}
|
|
}
|
|
}
|
|
|
|
items.sort_by(|a, b| a.time.cmp(&b.time).reverse());
|
|
|
|
let mut out = String::new();
|
|
|
|
out.push_str("<!DOCTYPE html>\n");
|
|
out.push_str("<html>\n");
|
|
out.push_str("<head>\n");
|
|
out.push_str(&format!("<title>{} - rainbowrss</title>", feed_file.name));
|
|
out.push_str("</head>\n");
|
|
out.push_str("<body>\n");
|
|
out.push_str(&format!("<h1>{}</h1>", feed_file.name));
|
|
|
|
out.push_str("<ul>\n");
|
|
|
|
for item in items {
|
|
out.push_str(&format!("<li><a href=\"{}\">[l]</a> {} {}</li>\n", item.link, item.title, item.time));
|
|
}
|
|
|
|
out.push_str("</ul>\n");
|
|
|
|
out.push_str(&format!("<small>Last updated: {}</small>", chrono::Utc::now()));
|
|
|
|
out.push_str("</body>\n");
|
|
out.push_str("</html>\n");
|
|
|
|
std::fs::write(&cli.out, out)
|
|
.context("Failed to write output to file")?;
|
|
|
|
Ok(())
|
|
}
|