commit 1ee20288546a55a0757534d864c89b5ffd942d14
parent 252f5b90fefecae52bdc4383ecd27ce47822dd90
Author: lash <dev@holbrook.no>
Date: Sun, 29 Sep 2024 01:53:04 +0100
Settable title and author, valid HTML content
Diffstat:
4 files changed, 69 insertions(+), 26 deletions(-)
diff --git a/crier-lib/Cargo.toml b/crier-lib/Cargo.toml
@@ -5,6 +5,7 @@ edition = "2021"
rust-version = "1.79"
description = "Chronologically aggregate atom and rss feeds into a single atom feed."
documentation = "https://defalsify.org/pub/doc/crier/0.0.1/crier"
+homepage = "https://defalsify.org/git/crier"
authors = ["Louis Holbrook <dev@holbrook.no>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
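The new homepage key is not just registry metadata: Cargo exposes manifest keys to the crate as compile-time environment variables, and the lib.rs hunk below reads it back through env!(). A minimal sketch of that mechanism (the function name is illustrative):

    // Cargo derives CARGO_PKG_HOMEPAGE and CARGO_PKG_VERSION from Cargo.toml
    // and sets them for the build, so env!() bakes the strings into the binary.
    fn package_info() -> (&'static str, &'static str) {
        (env!("CARGO_PKG_HOMEPAGE"), env!("CARGO_PKG_VERSION"))
    }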
diff --git a/crier-lib/src/lib.rs b/crier-lib/src/lib.rs
@@ -7,6 +7,8 @@ use std::fmt::Debug;
use std::io::BufWriter;
use std::str::FromStr;
+use log::error;
+
use rs_sha512::Sha512Hasher;
use chrono::Local;
use atom_syndication::Feed as Feed;
@@ -18,6 +20,7 @@ use atom_syndication::Person as OutPerson;
use atom_syndication::Category as OutCategory;
use atom_syndication::FixedDateTime;
use atom_syndication::Person;
+use atom_syndication::Generator;
use itertools::Itertools;
pub mod io;
@@ -139,6 +142,13 @@ impl<'a> Sequencer<'a> {
feed.set_id("urn:uuid:60a76c80-d399-11d9-b91C-0003939e0af6");
feed.set_updated(Local::now().to_utc());
+ let g = Generator{
+ value: String::from("Crier"),
+ uri: Some(String::from(env!("CARGO_PKG_HOMEPAGE"))),
+ version: Some(String::from(env!("CARGO_PKG_VERSION"))),
+ };
+ feed.set_generator(g);
+
match self.metadata.apply(&mut feed) {
Err(_v) => {
return Err(Error::WriteError);
@@ -153,7 +163,7 @@ impl<'a> Sequencer<'a> {
b = std::str::from_utf8(v.as_slice()).unwrap();
match Entry::from_str(b) {
Err(e) => {
- println!("fromstrerr {:?}", e);
+ error!("fromstrerr {:?}", e);
return Err(Error::CacheError);
},
Ok(o) => {
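With homepage and version available at build time, the sequencer now stamps the output feed with a generator element, and parse failures for cached entries are reported through the log crate instead of println!. A minimal sketch of the generator step, assuming the atom_syndication Generator fields used above (value, uri, version); it serializes as <generator uri="..." version="...">Crier</generator>:

    use atom_syndication::{Feed, Generator};

    fn tag_with_generator(feed: &mut Feed) {
        // uri and version come from the Cargo.toml keys added in this commit.
        feed.set_generator(Generator {
            value: String::from("Crier"),
            uri: Some(String::from(env!("CARGO_PKG_HOMEPAGE"))),
            version: Some(String::from(env!("CARGO_PKG_VERSION"))),
        });
    }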
diff --git a/crier-lib/src/rss.rs b/crier-lib/src/rss.rs
@@ -6,6 +6,7 @@ use crate::Error;
use log::info;
use log::debug;
+use log::error;
use rss::Channel;
use rss::Item;
@@ -113,25 +114,28 @@ fn translate_item(ipt: Item) -> Result<Entry, Error> {
match ipt.description {
Some(v) => {
- opt.set_summary(Some(Text::plain(v)));
+ opt.set_summary(Some(Text::xhtml(v)));
},
- _ => {},
- };
-
- match ipt.content {
- Some(v) => {
- let mut r = Content::default();
- r.set_content_type(Some(String::from("text/html")));
- r.set_value(Some(v));
- match ipt.source {
+ _ => {
+ match ipt.content {
Some(v) => {
- r.set_src(v.url);
+ let mut r = Content::default();
+ r.set_content_type(Some(String::from("text/html")));
+ r.set_value(Some(v));
+ match ipt.source {
+ Some(v) => {
+ r.set_src(v.url);
+ },
+ _ => {},
+ }
+ opt.set_content(Some(r));
},
- _ => {},
- }
- opt.set_content(Some(r));
+ _ => {
+ error!("have neither summary nor content");
+ return Err(Error::IncompleteError);
+ },
+ };
},
- _ => {},
};
match ipt.guid {
@@ -207,6 +211,7 @@ pub fn from_file(fp: &str, allow_entry_fail: bool) -> Result<Feed, Error> {
match Feed::read_from(b) {
Ok(v) => {
+ debug!("have atom feed");
return Ok(v);
},
Err(e) => {},
@@ -217,6 +222,7 @@ pub fn from_file(fp: &str, allow_entry_fail: bool) -> Result<Feed, Error> {
match Channel::read_from(b) {
Ok(v) => {
+ debug!("have RSS feed");
o = v;
},
Err(e) => {
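The translate_item rewrite changes how an RSS item's body is mapped: the description, when present, becomes the Atom summary (now typed as XHTML rather than plain text), content:encoded is only consulted as a fallback, and items carrying neither are rejected instead of passing through empty. The from_file hunks additionally log which of the two feed formats matched. A sketch of the new decision flow against the same crate APIs (the unit error stands in for the crate's Error::IncompleteError):

    use atom_syndication::{Content, Entry, Text};
    use rss::Item;

    fn body_from_rss(ipt: Item, opt: &mut Entry) -> Result<(), ()> {
        if let Some(v) = ipt.description {
            // Preferred: map the RSS description to the entry summary as XHTML.
            opt.set_summary(Some(Text::xhtml(v)));
        } else if let Some(v) = ipt.content {
            // Fallback: carry content:encoded over as text/html entry content.
            let mut r = Content::default();
            r.set_content_type(Some(String::from("text/html")));
            r.set_value(Some(v));
            if let Some(src) = ipt.source {
                r.set_src(src.url);
            }
            opt.set_content(Some(r));
        } else {
            // Neither field present: the item is now treated as incomplete.
            return Err(());
        }
        Ok(())
    }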
diff --git a/src/main.rs b/src/main.rs
@@ -20,26 +20,53 @@ use crier::Error;
struct Config {
urls: Vec<String>,
+ author: String,
+ title: String,
}
impl Config {
- fn new(urls: Vec<String>) -> Config {
+ fn new(title: String, author: String, urls: Vec<String>) -> Config {
Config {
urls: urls,
+ title: title,
+ author: author,
}
}
}
fn parse() -> Config {
- let m = App::new("crier")
+ let mut o = App::new("crier")
.version(env!("CARGO_PKG_VERSION"))
- .author(env!("CARGO_PKG_AUTHORS"))
- .arg(Arg::with_name("URLS")
+ .author(env!("CARGO_PKG_AUTHORS"));
+
+ o = o.arg(
+ Arg::with_name("title")
+ .long("title")
+ .short("t")
+ .value_name("Aggregated feed title")
+ .takes_value(true)
+ .required(true)
+ );
+
+ o = o.arg(
+ Arg::with_name("author")
+ .long("author")
+ .short("a")
+ .value_name("Aggregated feed author")
+ .takes_value(true)
+ .required(true)
+ );
+
+ o = o.arg(Arg::with_name("URLS")
.multiple(true)
- .help("list of uris to merge"))
- .get_matches();
+ .help("list of uris to merge"));
- Config::new(m.values_of("URLS").unwrap().map(|v| String::from(v)).collect())
+ let m = o.get_matches();
+
+ Config::new(
+ String::from(m.value_of("title").unwrap()),
+ String::from(m.value_of("author").unwrap()),
+ m.values_of("URLS").unwrap().map(|v| String::from(v)).collect())
}
fn add_feed(seq: &mut Sequencer, getter: impl FeedGet, uri: String) -> Result<i64, Error> {
@@ -85,9 +112,8 @@ fn main() {
let mut cache = MemCache::new();
let mut seq = Sequencer::new().with_cache(&mut cache);
- seq.set_title("my new feed");
- seq.set_author("Foo Bar");
-
+ seq.set_title(cfg.title.as_str());
+ seq.set_author(cfg.author.as_str());
env_logger::init();
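On the CLI side, the previously hard-coded feed title and author become required clap 2.x options, so an invocation now looks like (illustrative URL): crier --title "my new feed" --author "Foo Bar" https://example.org/feed.xml. A minimal sketch of how the values travel from the matches into Config, mirroring parse() above:

    // unwrap() on title and author is safe because both are declared required(true);
    // URLS handling mirrors parse() and still expects at least one URL.
    fn config_from_matches(m: &clap::ArgMatches<'_>) -> Config {
        Config::new(
            String::from(m.value_of("title").unwrap()),
            String::from(m.value_of("author").unwrap()),
            m.values_of("URLS").unwrap().map(String::from).collect(),
        )
    }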