Add bulk of the bot
commit e34d343daf

.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
/target

Cargo.lock (generated, new file, 2044 lines)
File diff suppressed because it is too large.

Cargo.toml (new file, 17 lines)
@@ -0,0 +1,17 @@
[package]
name = "miniflux-discord"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
hyper = { version = "1", features = ["server", "http1", "http2"] }
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
http-body-util = "0.1"
hyper-util = { version = "0.1", features = ["tokio"] }
serde = {version = "1.0", features = ["derive"]}
serde_json = "1.0"
# time = { version = "0.3.34", features = ["serde-well-known"] }
chrono = { version = "0.4.35", features = ["serde"] }
serenity = "0.12"

src/config.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Config {
    discord_token: String,
    miniflux_base_url: String,
    payload_max_size: u64,
    whitelisted_user_ids: Vec<u64>,
}
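
The Config struct above is defined but not read anywhere yet; main.rs still hardcodes its settings. A minimal sketch of a loader it could grow, assuming JSON on disk and the serde_json dependency already in Cargo.toml (the load_config name and the config.json path are illustrative, not part of this commit):

use std::{error::Error, fs, path::Path};

// Hypothetical loader for the Config struct defined above; both this function
// and Config would need `pub` before main.rs could call it.
fn load_config(path: &Path) -> Result<Config, Box<dyn Error>> {
    let raw = fs::read_to_string(path)?;
    // serde_json is already listed in Cargo.toml.
    let config: Config = serde_json::from_str(&raw)?;
    Ok(config)
}

Usage would be something like `let config = load_config(Path::new("config.json"))?;`, which would let the hardcoded DISCORD_TOKEN in main.rs move into configuration.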

src/main.rs (new file, 188 lines)
@@ -0,0 +1,188 @@
use std::{net::SocketAddr, path::PathBuf};

use http_body_util::{combinators::BoxBody, BodyExt, Full};
use hyper::{
    body::{self, Body, Bytes},
    server::conn::http1,
    service::service_fn,
    Error, Request, Response,
};
use hyper_util::rt::TokioIo;
use miniflux_requests::{Entry, Feed, MinifluxEvent, NewEntries};
use serenity::{
    all::{
        CacheHttp, Color, CreateButton, CreateEmbed, CreateEmbedAuthor, CreateEmbedFooter,
        CreateMessage, EmbedMessageBuilding, GatewayIntents, MessageBuilder, User, UserId,
    },
    Client,
};
use tokio::net::TcpListener;

mod config;
mod miniflux_requests;

const DISCORD_TOKEN: &'static str =
    "MTIxODMxMTU3NDM1MjAzOTk3Nw.G0ev0l.kZKr02qqgOiHNxO5bccVtYidnozufoKyAeXijQ";

const MINIFLUX: &'static str = "";

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Create a new instance of the Client, logging in as a bot.
    let client = Client::builder(&DISCORD_TOKEN, GatewayIntents::empty())
        .await
        .expect("Err creating client");

    let addr = SocketAddr::from(([0, 0, 0, 0], 8080));

    // We create a TcpListener and bind it to 0.0.0.0:8080
    let listener = TcpListener::bind(addr).await?;

    // We start a loop to continuously accept incoming connections

    loop {
        let (stream, _) = listener.accept().await?;

        // Use an adapter to access something implementing `tokio::io` traits as if they implement
        // `hyper::rt` IO traits.
        let io = TokioIo::new(stream);

        let cache = client.cache.clone();
        let http = client.http.clone();

        // Spawn a tokio task to serve multiple connections concurrently
        tokio::task::spawn(async move {
            // Finally, we bind the incoming connection to our `hello` service
            if let Err(err) = http1::Builder::new()
                // `service_fn` converts our function into a `Service`
                .serve_connection(
                    io,
                    service_fn(|req: Request<body::Incoming>| {
                        let cache = cache.clone();
                        let http = http.clone();
                        async move { hello(req, (&cache, http.as_ref())).await }
                    }),
                )
                .await
            {
                println!("Error serving connection: {:?}", err);
            }
        });
    }
}

async fn hello(
    req: Request<body::Incoming>,
    ctx: impl CacheHttp + Copy,
) -> Result<Response<BoxBody<hyper::body::Bytes, hyper::Error>>, Error> {
    // todo check method
    let _method = req.method();
    let headers = req.headers();

    // todo fix unwrap
    let userid = req
        .uri()
        .path()
        .split("/")
        .nth(1)
        .unwrap()
        .parse::<u64>()
        .unwrap();

    let user = ctx.http().get_user(UserId::new(userid)).await.unwrap();

    // Todo remove expect
    // Todo make sure contents match signature
    let signature = headers
        .get("x-miniflux-signature")
        .expect("expected signature");
    let event_type = headers
        .get("x-miniflux-event-type")
        .expect("expected event type");

    let upper = req.body().size_hint().upper().unwrap_or(u64::MAX);
    if upper > 1024 * 1024 * 10 {
        let mut resp = Response::new(full("Body too big"));
        *resp.status_mut() = hyper::StatusCode::PAYLOAD_TOO_LARGE;
        dbg!("Got message, too big!");
        return Ok(resp);
    }

    let whole_body = req.collect().await?.to_bytes();
    let bytes = whole_body.iter().cloned().collect::<Vec<u8>>();
    let event: MinifluxEvent = serde_json::from_slice(&bytes).unwrap();

    send(user, ctx, event).await;

    Ok(Response::new(full(vec![])))
}

fn full<T: Into<Bytes>>(chunk: T) -> BoxBody<Bytes, hyper::Error> {
    Full::new(chunk.into())
        .map_err(|never| match never {})
        .boxed()
}

async fn send(user: User, ctx: impl CacheHttp + Copy, event: MinifluxEvent) {
    match event {
        MinifluxEvent::New(NewEntries { feed, entries }) => {
            for entry in entries {
                user.direct_message(ctx, message_from_entry(&entry, &feed))
                    .await
                    .unwrap();
            }
        }
        _ => {}
    }
}

fn message_from_entry(entry: &Entry, feed: &Feed) -> CreateMessage {
    let content = MessageBuilder::new()
        .push("New article from feed ")
        .push_named_link(&feed.title, &feed.site_url)
        .push(" published!")
        .build();

    let author = CreateEmbedAuthor::new(&feed.title).url(&feed.site_url);
    let footer = CreateEmbedFooter::new(format!("{} minutes", entry.reading_time.to_string()));

    let minreq_url = format!("{}/feed/{}/entry/{}", MINIFLUX, feed.id, entry.id);

    let mut embed = CreateEmbed::new()
        .title(&entry.title)
        .url(&entry.url)
        .footer(footer)
        .timestamp(entry.published_at)
        .author(author)
        .color(Color::from_rgb(
            (feed.id % 256) as u8,
            ((feed.id * feed.id) % 256) as u8,
            ((feed.id * feed.id * feed.id) % 256) as u8,
        ))
        .description(&entry.content.chars().take(200).collect::<String>());

    if entry.tags.len() > 0 {
        embed = embed.field("Tags", entry.tags.join(","), true)
    }

    if let Some(enclosure) = entry
        .enclosures
        .iter()
        .find(|e| e.mime_type.starts_with("image/"))
    {
        embed = embed.image(&enclosure.url);
    }

    let external_button = CreateButton::new_link(&entry.url)
        .label("external")
        .emoji('📤');
    let minreq_button = CreateButton::new_link(minreq_url)
        .label("minreq")
        .emoji('📩');

    CreateMessage::new()
        .content(content)
        .embed(embed)
        .button(external_button)
        .button(minreq_button)
}
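
The hello handler above leaves its signature TODO open: the x-miniflux-signature header is read but never checked against the body. Miniflux signs webhook payloads with an HMAC-SHA256 of the raw body keyed by the webhook secret, sent as a hex digest; a hedged sketch of that check, assuming that scheme and the hmac, sha2, and hex crates (none of which are in this commit's Cargo.toml), with the secret supplied by configuration:

use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

// Hypothetical check for the "make sure contents match signature" TODO.
// `secret` would come from configuration, e.g. the Config struct in config.rs.
fn signature_matches(secret: &[u8], body: &[u8], signature_header: &str) -> bool {
    // The header carries a hex-encoded digest; reject anything that is not hex.
    let Ok(expected) = hex::decode(signature_header) else {
        return false;
    };
    let mut mac = HmacSha256::new_from_slice(secret).expect("HMAC accepts keys of any length");
    mac.update(body);
    // verify_slice performs a constant-time comparison.
    mac.verify_slice(&expected).is_ok()
}

This would run after `req.collect()` in hello, over the same bytes that are handed to serde_json, returning an error response when the check fails.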

src/miniflux_requests.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
use chrono::{DateTime, Utc};
use serde::Deserialize;

type UserId = u8;

#[derive(Deserialize, Debug)]
#[serde(tag = "event_type")]
pub enum MinifluxEvent {
    #[serde(rename = "new_entries")]
    New(NewEntries),
    #[serde(rename = "save_entry")]
    Save(SaveEntry),
}

#[derive(Deserialize, Debug)]
pub struct NewEntries {
    pub feed: Feed,
    pub entries: Vec<Entry>,
}

#[derive(Deserialize, Debug)]
pub struct SaveEntry {
    pub entry: Entry,
    pub tags: Vec<String>,
    pub feed: Feed,
}

#[derive(Deserialize, Debug)]
pub struct Feed {
    pub id: u32,
    pub user_id: UserId,
    pub feed_url: String,
    pub site_url: String,
    pub title: String,
    pub checked_at: DateTime<Utc>,
}

#[derive(Deserialize, Debug)]
pub struct Entry {
    pub id: u32,
    pub user_id: UserId,
    pub feed_id: u32,
    pub hash: String,
    pub title: String,
    pub url: String,
    pub comments_url: String,
    pub published_at: DateTime<Utc>,
    pub created_at: DateTime<Utc>,
    pub content: String,
    pub share_code: String,
    pub starred: bool,
    pub reading_time: u16,
    pub enclosures: Vec<Enclosure>,
    pub tags: Vec<String>,
}

#[derive(Deserialize, Debug)]
pub struct Enclosure {
    pub id: u32,
    pub user_id: UserId,
    pub entry_id: u32,
    pub url: String,
    pub mime_type: String,
    pub size: u32,
    pub media_progression: u32,
}
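
MinifluxEvent uses serde's internally tagged representation: the event_type field in the webhook JSON selects the enum variant. A hedged sketch of a test exercising that dispatch; the payload values are invented for illustration, not captured from a real Miniflux instance:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn new_entries_payload_selects_the_new_variant() {
        // Hand-written payload in the shape these structs expect.
        let payload = r#"{
            "event_type": "new_entries",
            "feed": {
                "id": 2, "user_id": 1,
                "feed_url": "https://example.org/feed.xml",
                "site_url": "https://example.org",
                "title": "Example Feed",
                "checked_at": "2024-03-15T12:00:00Z"
            },
            "entries": [{
                "id": 7, "user_id": 1, "feed_id": 2, "hash": "abc123",
                "title": "Hello", "url": "https://example.org/hello",
                "comments_url": "", "published_at": "2024-03-15T11:30:00Z",
                "created_at": "2024-03-15T12:00:00Z",
                "content": "<p>Hello world</p>", "share_code": "",
                "starred": false, "reading_time": 3,
                "enclosures": [], "tags": []
            }]
        }"#;

        let event: MinifluxEvent = serde_json::from_str(payload).unwrap();
        assert!(matches!(event, MinifluxEvent::New(_)));
    }
}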