Add additional info logging (#98)

Daniel Sockwell 2020-03-19 20:54:23 -04:00 committed by GitHub
parent 8c6b8e87f5
commit eda52c20b1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 39 additions and 50 deletions

View File

@@ -1,7 +1,7 @@
 [package]
 name = "flodgatt"
 description = "A blazingly fast drop-in replacement for the Mastodon streaming api server"
-version = "0.6.2"
+version = "0.6.3"
 authors = ["Daniel Long Sockwell <daniel@codesections.com", "Julian Laubstein <contact@julianlaubstein.de>"]
 edition = "2018"

View File

@@ -25,7 +25,7 @@ impl DeploymentConfig<'_> {
             cors: Cors::default(),
         };
         cfg.env = cfg.env.maybe_update(env.get("RUST_ENV"));
-        log::warn!("Using deployment configuration:\n {:#?}", &cfg);
+        log::info!("Using deployment configuration:\n {:#?}", &cfg);
         cfg
     }
 }
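For context on the warn-to-info changes made throughout this commit: in the `log` crate the severity order is Error > Warn > Info > Debug > Trace, so a routine configuration dump fits Info, while Warn stays reserved for actual problems. A minimal sketch (illustrative only, not code from this repository):

    fn report_config(cfg: &impl std::fmt::Debug) {
        // Routine status: visible only when the operator opts into Info-level output.
        log::info!("Using deployment configuration:\n {:#?}", cfg);
        // A genuine problem: still visible at the quieter Warn level.
        log::warn!("Deployment configuration looks inconsistent: {:#?}", cfg);
    }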

View File

@@ -56,9 +56,7 @@ impl PostgresConfig {
             port: PgPort::default().maybe_update(env.get("DB_PORT")),
             ssl_mode: PgSslMode::default().maybe_update(env.get("DB_SSLMODE")),
         };
-        log::warn!("Postgres configuration:\n{:#?}", &cfg);
+        log::info!("Postgres configuration:\n{:#?}", &cfg);
         cfg
     }

View File

@@ -43,8 +43,7 @@ impl EnvVar {
 impl RedisConfig {
     const USER_SET_WARNING: &'static str =
         "Redis user specified, but Redis did not ask for a username. Ignoring it.";
-    const DB_SET_WARNING: &'static str =
-        r"Redis database specified, but PubSub connections do not use databases.
+    const DB_SET_WARNING: &'static str = r"Redis database specified, but PubSub connections do not use databases.
 For similar functionality, you may wish to set a REDIS_NAMESPACE";
     pub fn from_env(env: EnvVar) -> Self {
@@ -69,7 +68,7 @@ For similar functionality, you may wish to set a REDIS_NAMESPACE";
         if cfg.user.is_some() {
             log::warn!("{}", Self::USER_SET_WARNING);
         }
-        log::warn!("Redis configuration:\n{:#?},", &cfg);
+        log::info!("Redis configuration:\n{:#?},", &cfg);
         cfg
     }
 }
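The `DB_SET_WARNING` change above is purely stylistic: a raw string literal keeps every embedded newline and space, so merging the `=` and the opening `r"` onto one line does not change the logged message. A small sketch of that behaviour (constant name reused for illustration):

    // Shown indented here for readability; in the real file the second line of the
    // literal starts at column 0 so the logged message carries no leading spaces.
    const DB_SET_WARNING: &str = r"Redis database specified, but PubSub connections do not use databases.
    For similar functionality, you may wish to set a REDIS_NAMESPACE";

    fn main() {
        // Prints the message on two lines, as written in the literal.
        println!("{}", DB_SET_WARNING);
    }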

View File

@@ -1,6 +1,6 @@
 use flodgatt::{
     config, err,
-    parse_client_request::{sse, user, ws},
+    parse_client_request::{sse, subscription, ws},
     redis_to_client_stream::{self, ClientAgent},
 };
 use std::{collections::HashMap, env, fs, net, os::unix::fs::PermissionsExt};
@@ -18,7 +18,7 @@ fn main() {
     let env_vars = config::EnvVar::new(env_vars_map);
     pretty_env_logger::init();
-    log::warn!(
+    log::info!(
         "Flodgatt recognized the following environmental variables:{}",
         env_vars.clone()
     );
@@ -26,24 +26,25 @@ fn main() {
     let cfg = config::DeploymentConfig::from_env(env_vars.clone());
     let postgres_cfg = config::PostgresConfig::from_env(env_vars.clone());
-    let pg_pool = user::PgPool::new(postgres_cfg);
+    let pg_pool = subscription::PgPool::new(postgres_cfg);
     let client_agent_sse = ClientAgent::blank(redis_cfg, pg_pool.clone());
     let client_agent_ws = client_agent_sse.clone_with_shared_receiver();
-    log::warn!("Streaming server initialized and ready to accept connections");
+    log::info!("Streaming server initialized and ready to accept connections");
     // Server Sent Events
     let sse_update_interval = *cfg.ws_interval;
     let sse_routes = sse::extract_user_or_reject(pg_pool.clone())
         .and(warp::sse())
         .map(
-            move |user: user::Subscription, sse_connection_to_client: warp::sse::Sse| {
-                log::info!("Incoming SSE request");
+            move |subscription: subscription::Subscription,
+                  sse_connection_to_client: warp::sse::Sse| {
+                log::info!("Incoming SSE request for {:?}", subscription.timeline);
                 // Create a new ClientAgent
                 let mut client_agent = client_agent_sse.clone_with_shared_receiver();
                 // Assign ClientAgent to generate stream of updates for the user/timeline pair
-                client_agent.init_for_user(user);
+                client_agent.init_for_user(subscription);
                 // send the updates through the SSE connection
                 redis_to_client_stream::send_updates_to_sse(
                     client_agent,
@@ -60,12 +61,12 @@ fn main() {
     let websocket_routes = ws::extract_user_and_token_or_reject(pg_pool.clone())
         .and(warp::ws::ws2())
         .map(
-            move |user: user::Subscription, token: Option<String>, ws: Ws2| {
-                log::info!("Incoming websocket request");
+            move |subscription: subscription::Subscription, token: Option<String>, ws: Ws2| {
+                log::info!("Incoming websocket request for {:?}", subscription.timeline);
                 // Create a new ClientAgent
                 let mut client_agent = client_agent_ws.clone_with_shared_receiver();
                 // Assign that agent to generate a stream of updates for the user/timeline pair
-                client_agent.init_for_user(user);
+                client_agent.init_for_user(subscription);
                 // send the updates through the WS connection (along with the User's access_token
                 // which is sent for security)
@@ -91,7 +92,7 @@ fn main() {
     let health = warp::path!("api" / "v1" / "streaming" / "health").map(|| "OK");
     if let Some(socket) = &*cfg.unix_socket {
-        log::warn!("Using Unix socket {}", socket);
+        log::info!("Using Unix socket {}", socket);
         fs::remove_file(socket).unwrap_or_default();
         let incoming = UnixListener::bind(socket).unwrap().incoming();
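A note on how the new Info-level output surfaces at runtime: `pretty_env_logger::init()` configures filtering from the `RUST_LOG` environment variable, so these startup messages appear when Flodgatt runs with `RUST_LOG=info` (or something more verbose) and stay hidden at `RUST_LOG=warn`. A minimal, self-contained sketch (not code from this repository):

    fn main() {
        // Normally the operator sets this in the environment rather than in code.
        std::env::set_var("RUST_LOG", "info");
        pretty_env_logger::init();

        log::info!("Streaming server initialized and ready to accept connections"); // shown
        log::debug!("Only visible at RUST_LOG=debug or RUST_LOG=trace"); // hidden here
    }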

View File

@@ -1,5 +1,5 @@
 //! Parse the client request and return a (possibly authenticated) `User`
 pub mod query;
 pub mod sse;
-pub mod user;
+pub mod subscription;
 pub mod ws;

View File

@@ -1,7 +1,7 @@
 //! Filters for all the endpoints accessible for Server Sent Event updates
 use super::{
     query::{self, Query},
-    user::{PgPool, Subscription},
+    subscription::{PgPool, Subscription},
 };
 use warp::{filters::BoxedFilter, path, Filter};
 #[allow(dead_code)]

View File

@@ -135,7 +135,7 @@ impl Timeline {
                 false => Err(custom("Error: Missing access token"))?,
             },
             other => {
-                log::warn!("Client attempted to subscribe to: `{}`", other);
+                log::warn!("Request for nonexistent endpoint: `{}`", other);
                 Err(custom("Error: Nonexistent endpoint"))?
             }
         })

View File

@@ -1,7 +1,7 @@
 //! Postgres queries
 use crate::{
     config,
-    parse_client_request::user::{Scope, UserData},
+    parse_client_request::subscription::{Scope, UserData},
 };
 use ::postgres;
 use r2d2_postgres::PostgresConnectionManager;

View File

@@ -1,7 +1,7 @@
 //! Filters for the WebSocket endpoint
 use super::{
     query::{self, Query},
-    user::{PgPool, Subscription},
+    subscription::{PgPool, Subscription},
 };
 use warp::{filters::BoxedFilter, path, Filter};

View File

@@ -18,13 +18,12 @@
 use super::{message::Message, receiver::Receiver};
 use crate::{
     config,
-    parse_client_request::user::{PgPool, Stream::Public, Subscription, Timeline},
+    parse_client_request::subscription::{PgPool, Stream::Public, Subscription, Timeline},
 };
 use futures::{
     Async::{self, NotReady, Ready},
     Poll,
 };
 use std::sync;
 use tokio::io::Error;
 use uuid::Uuid;
@@ -34,7 +33,7 @@ use uuid::Uuid;
 pub struct ClientAgent {
     receiver: sync::Arc<sync::Mutex<Receiver>>,
     id: uuid::Uuid,
-    subscription: Subscription,
+    pub subscription: Subscription,
 }
 impl ClientAgent {
@@ -97,6 +96,7 @@ impl futures::stream::Stream for ClientAgent {
         };
         if start_time.elapsed().as_millis() > 1 {
             log::warn!("Polling the Receiver took: {:?}", start_time.elapsed());
+            log::info!("Longer polling yielded: {:#?}", &result);
         };
         let allowed_langs = &self.subscription.allowed_langs;
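The `subscription` field becomes `pub` because code outside this module (the WebSocket sender further down) now reads `client_agent.subscription.timeline` to label its log lines; with a private field that access would not compile. A simplified sketch with hypothetical stand-in types:

    mod client_agent {
        #[derive(Debug, Clone, Copy)]
        pub struct Timeline(pub u64);

        pub struct Subscription {
            pub timeline: Timeline,
        }

        pub struct ClientAgent {
            // Must be `pub` (along with the nested field) to be readable from other modules.
            pub subscription: Subscription,
        }
    }

    fn main() {
        let agent = client_agent::ClientAgent {
            subscription: client_agent::Subscription {
                timeline: client_agent::Timeline(1),
            },
        };
        // With a private `subscription` field this line would fail with error E0616.
        println!("Incoming request for {:?}", agent.subscription.timeline);
    }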

View File

@@ -1,5 +1,4 @@
 use crate::log_fatal;
-use log::{log_enabled, Level};
 use serde_json::Value;
 use std::{collections::HashSet, string::String};
 use strum_macros::Display;
@@ -50,11 +49,8 @@ impl Message {
                     .unwrap_or_else(|| log_fatal!("Could not process `payload` in {:?}", json))
                     .to_string(),
             )),
-            unexpected_event => {
-                log::warn!(
-                    "Received an unexpected `event` type from Redis: {}",
-                    unexpected_event
-                );
+            other => {
+                log::warn!("Received unexpected `event` from Redis: {}", other);
                 Self::UnknownEvent(event.to_string(), json["payload"].clone())
             }
         }
@@ -96,14 +92,9 @@ impl Status {
         const REJECT: bool = true;
         let reject_and_maybe_log = |toot_language| {
-            if log_enabled!(Level::Info) {
-                log::info!(
-                    "Language `{toot_language}` is not in list `{allowed_langs:?}`",
-                    toot_language = toot_language,
-                    allowed_langs = allowed_langs
-                );
-                log::info!("Filtering out toot from `{}`", &self.0["account"]["acct"],);
-            }
+            log::info!("Filtering out toot from `{}`", &self.0["account"]["acct"]);
+            log::info!("Toot language: `{}`", toot_language);
+            log::info!("Recipient's allowed languages: `{:?}`", allowed_langs);
             REJECT
         };
         if allowed_langs.is_empty() {
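One reason the explicit `log_enabled!(Level::Info)` guard could be dropped: the `log` crate's macros already check the active level before formatting their arguments, so a plain `info!` call costs almost nothing when Info output is disabled. A small illustrative sketch (simplified signature, not the method above):

    use std::collections::HashSet;

    fn reject_and_log(toot_language: &str, allowed_langs: &HashSet<String>) -> bool {
        // Each call is skipped entirely (arguments left unformatted) unless the
        // logger is configured to emit Info-level records.
        log::info!("Toot language: `{}`", toot_language);
        log::info!("Recipient's allowed languages: `{:?}`", allowed_langs);
        true // REJECT
    }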

View File

@@ -3,7 +3,6 @@ pub mod client_agent;
 pub mod message;
 pub mod receiver;
 pub mod redis;
 pub use client_agent::ClientAgent;
 use futures::{future::Future, stream::Stream, Async};
 use log;
@@ -40,6 +39,7 @@ pub fn send_updates_to_ws(
     update_interval: time::Duration,
 ) -> impl futures::future::Future<Item = (), Error = ()> {
     let (ws_tx, mut ws_rx) = socket.split();
+    let timeline = client_agent.subscription.timeline;
     // Create a pipe
     let (tx, rx) = futures::sync::mpsc::unbounded();
@@ -62,16 +62,16 @@ pub fn send_updates_to_ws(
             Ok(Async::NotReady) | Ok(Async::Ready(Some(_))) => futures::future::ok(true),
             Ok(Async::Ready(None)) => {
                 // TODO: consider whether we should manually drop closed connections here
-                log::info!("Client closed WebSocket connection");
+                log::info!("Client closed WebSocket connection for {:?}", timeline);
                 futures::future::ok(false)
             }
             Err(e) if e.to_string() == "IO error: Broken pipe (os error 32)" => {
                 // no err, just closed Unix socket
-                log::info!("Client closed WebSocket connection");
+                log::info!("Client closed WebSocket connection for {:?}", timeline);
                 futures::future::ok(false)
             }
             Err(e) => {
-                log::warn!("Error in TL {}", e);
+                log::warn!("Error in {:?}: {}", timeline, e);
                 futures::future::ok(false)
             }
         });
@@ -98,8 +98,8 @@ pub fn send_updates_to_ws(
         })
         .then(move |result| {
             // TODO: consider whether we should manually drop closed connections here
-            log::info!("WebSocket connection closed.");
+            log::info!("WebSocket connection for {:?} closed.", timeline);
             result
         })
-        .map_err(move |e| log::warn!("Error sending to user: {}", e))
+        .map_err(move |e| log::warn!("Error sending to {:?}: {}", timeline, e))
 }
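Copying the timeline out of the `ClientAgent` up front lets each of the `move` closures above carry its own copy for logging without taking ownership of the whole agent; this relies on the timeline value being cheap to copy (it appears to be `Copy` here, though that is an assumption of this sketch):

    #[derive(Debug, Clone, Copy)]
    struct Timeline(u64); // hypothetical stand-in for the real Timeline type

    fn main() {
        let timeline = Timeline(7);

        // Each `move` closure captures its own copy of `timeline`.
        let on_close = move || println!("WebSocket connection for {:?} closed.", timeline);
        let on_error = move |e: &str| println!("Error in {:?}: {}", timeline, e);

        on_error("Broken pipe (os error 32)");
        on_close();
    }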

View File

@@ -1,4 +1,4 @@
-use crate::parse_client_request::user::Timeline;
+use crate::parse_client_request::subscription::Timeline;
 use serde_json::Value;
 use std::{collections, fmt, time};
 use uuid::Uuid;

View File

@@ -5,7 +5,7 @@ mod message_queues;
 use crate::{
     config::{self, RedisInterval},
     log_fatal,
-    parse_client_request::user::{self, postgres, PgPool, Timeline},
+    parse_client_request::subscription::{self, postgres, PgPool, Timeline},
     pubsub_cmd,
     redis_to_client_stream::redis::{redis_cmd, RedisConn, RedisStream},
 };
@@ -92,7 +92,7 @@ impl Receiver {
     }
     fn if_hashtag_timeline_get_hashtag_name(&mut self, timeline: Timeline) -> Option<String> {
-        use user::Stream::*;
+        use subscription::Stream::*;
         if let Timeline(Hashtag(id), _, _) = timeline {
             let cached_tag = self.cache.id_to_hashtag.get(&id).map(String::from);
             let tag = match cached_tag {