Fix logger

This commit is contained in:
Adrian Woźniak 2020-04-03 14:29:30 +02:00
parent 33a8881cef
commit 900a539da2
7 changed files with 41 additions and 21 deletions

2
.env
View File

@@ -1,5 +1,5 @@
DATABASE_URL=postgres://postgres@localhost:5432/jirs DATABASE_URL=postgres://postgres@localhost:5432/jirs
RUST_LOG=actix_web=info,diesel=info RUST_LOG=debug
JIRS_CLIENT_PORT=7000 JIRS_CLIENT_PORT=7000
JIRS_CLIENT_BIND=0.0.0.0 JIRS_CLIENT_BIND=0.0.0.0
JIRS_SERVER_PORT=5000 JIRS_SERVER_PORT=5000

12
Cargo.lock generated
View File

@@ -1081,11 +1081,13 @@ dependencies = [
"ipnetwork", "ipnetwork",
"jirs-data", "jirs-data",
"libc", "libc",
"log 0.4.8",
"num-bigint", "num-bigint",
"num-integer", "num-integer",
"num-traits", "num-traits",
"percent-encoding 2.1.0", "percent-encoding 2.1.0",
"pq-sys", "pq-sys",
"pretty_env_logger",
"quickcheck", "quickcheck",
"r2d2", "r2d2",
"serde", "serde",
@@ -1398,6 +1400,16 @@ dependencies = [
"vcpkg", "vcpkg",
] ]
[[package]]
name = "pretty_env_logger"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "926d36b9553851b8b0005f1275891b392ee4d2d833852c417ed025477350fb9d"
dependencies = [
"env_logger 0.7.1",
"log 0.4.8",
]
[[package]] [[package]]
name = "proc-macro-hack" name = "proc-macro-hack"
version = "0.5.14" version = "0.5.14"

View File

@@ -37,6 +37,8 @@ num-integer = { version = "0.1.32" }
bigdecimal = { version = ">= 0.0.10, <= 0.1.0" } bigdecimal = { version = ">= 0.0.10, <= 0.1.0" }
bitflags = { version = "1.0" } bitflags = { version = "1.0" }
r2d2 = { version = ">= 0.8, < 0.9" } r2d2 = { version = ">= 0.8, < 0.9" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7" env_logger = "0.7"
futures = { version = "*" } futures = { version = "*" }

View File

@@ -1,10 +1,11 @@
use crate::db::{DbExecutor, DbPool, SyncQuery};
use crate::errors::ServiceErrors;
use crate::models::{Token, User};
use actix::{Handler, Message}; use actix::{Handler, Message};
use diesel::prelude::*; use diesel::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::db::{DbExecutor, DbPool, SyncQuery};
use crate::errors::ServiceErrors;
use crate::models::{Token, User};
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct AuthorizeUser { pub struct AuthorizeUser {
pub access_token: uuid::Uuid, pub access_token: uuid::Uuid,
@@ -28,11 +29,13 @@ impl Handler<AuthorizeUser> for DbExecutor {
let token = tokens let token = tokens
.filter(access_token.eq(msg.access_token)) .filter(access_token.eq(msg.access_token))
.first::<Token>(conn) .first::<Token>(conn)
.map_err(|_e| ServiceErrors::RecordNotFound("Token".to_string()))?; .map_err(|_e| {
ServiceErrors::RecordNotFound(format!("token for {}", msg.access_token))
})?;
let user = users let user = users
.filter(id.eq(token.user_id)) .filter(id.eq(token.user_id))
.first::<User>(conn) .first::<User>(conn)
.map_err(|_e| ServiceErrors::RecordNotFound("User".to_string()))?; .map_err(|_e| ServiceErrors::RecordNotFound(format!("user {}", token.user_id)))?;
Ok(user) Ok(user)
} }
} }

View File

@@ -57,9 +57,12 @@ impl Handler<LoadProjectIssues> for DbExecutor {
.0 .0
.get() .get()
.map_err(|_| ServiceErrors::DatabaseConnectionLost)?; .map_err(|_| ServiceErrors::DatabaseConnectionLost)?;
let vec = issues let chain = issues.filter(project_id.eq(msg.project_id)).distinct();
.filter(project_id.eq(msg.project_id)) debug!(
.distinct() "{}",
diesel::debug_query::<diesel::pg::Pg, _>(&chain).to_string()
);
let vec = chain
.load::<Issue>(conn) .load::<Issue>(conn)
.map_err(|_| ServiceErrors::RecordNotFound("project issues".to_string()))?; .map_err(|_| ServiceErrors::RecordNotFound("project issues".to_string()))?;
Ok(vec) Ok(vec)
@@ -120,7 +123,10 @@ impl Handler<UpdateIssue> for DbExecutor {
.map(|project_id| dsl::project_id.eq(project_id)), .map(|project_id| dsl::project_id.eq(project_id)),
dsl::updated_at.eq(chrono::Utc::now().naive_utc()), dsl::updated_at.eq(chrono::Utc::now().naive_utc()),
)); ));
diesel::debug_query::<diesel::pg::Pg, _>(&chain); debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&chain).to_string()
);
chain.get_result::<Issue>(conn).map_err(|_| { chain.get_result::<Issue>(conn).map_err(|_| {
ServiceErrors::DatabaseQueryFailed("Failed to update issue".to_string()) ServiceErrors::DatabaseQueryFailed("Failed to update issue".to_string())
})?; })?;

View File

@@ -53,6 +53,7 @@ pub mod dev {
use std::ops::Deref; use std::ops::Deref;
use diesel::connection::{AnsiTransactionManager, SimpleConnection}; use diesel::connection::{AnsiTransactionManager, SimpleConnection};
use diesel::debug_query;
use diesel::deserialize::QueryableByName; use diesel::deserialize::QueryableByName;
use diesel::query_builder::{AsQuery, QueryFragment, QueryId}; use diesel::query_builder::{AsQuery, QueryFragment, QueryId};
use diesel::sql_types::HasSqlType; use diesel::sql_types::HasSqlType;
@@ -72,8 +73,6 @@ pub mod dev {
impl SimpleConnection for VerboseConnection { impl SimpleConnection for VerboseConnection {
fn batch_execute(&self, query: &str) -> QueryResult<()> { fn batch_execute(&self, query: &str) -> QueryResult<()> {
use diesel::debug_query;
debug_query::<diesel::pg::Pg, _>(&query);
self.inner.batch_execute(query) self.inner.batch_execute(query)
} }
} }
@@ -87,8 +86,6 @@ pub mod dev {
} }
fn execute(&self, query: &str) -> QueryResult<usize> { fn execute(&self, query: &str) -> QueryResult<usize> {
use diesel::debug_query;
debug_query::<diesel::pg::Pg, _>(&query);
self.inner.execute(query) self.inner.execute(query)
} }
@@ -99,8 +96,6 @@ pub mod dev {
Self::Backend: HasSqlType<T::SqlType>, Self::Backend: HasSqlType<T::SqlType>,
U: Queryable<T::SqlType, Self::Backend>, U: Queryable<T::SqlType, Self::Backend>,
{ {
use diesel::debug_query;
debug_query::<diesel::pg::Pg, _>(&source);
self.inner.query_by_index(source) self.inner.query_by_index(source)
} }
@@ -109,8 +104,8 @@ pub mod dev {
T: QueryFragment<Self::Backend> + QueryId, T: QueryFragment<Self::Backend> + QueryId,
U: QueryableByName<Self::Backend>, U: QueryableByName<Self::Backend>,
{ {
use diesel::debug_query; let q = debug_query::<Self::Backend, _>(&source).to_string();
debug_query::<diesel::pg::Pg, _>(&source); debug!("{:?}", q);
self.inner.query_by_name(source) self.inner.query_by_name(source)
} }
@@ -118,8 +113,8 @@ pub mod dev {
where where
T: QueryFragment<Self::Backend> + QueryId, T: QueryFragment<Self::Backend> + QueryId,
{ {
use diesel::debug_query; let q = debug_query::<Self::Backend, _>(&source).to_string();
debug_query::<diesel::pg::Pg, _>(&source); debug!("{:?}", q);
self.inner.execute_returning_count(source) self.inner.execute_returning_count(source)
} }

View File

@@ -1,5 +1,7 @@
#[macro_use] #[macro_use]
extern crate diesel; extern crate diesel;
#[macro_use]
extern crate log;
use actix_cors::Cors; use actix_cors::Cors;
use actix_web::{web, App, HttpServer}; use actix_web::{web, App, HttpServer};
@@ -13,8 +15,8 @@ pub mod schema;
#[actix_rt::main] #[actix_rt::main]
async fn main() -> Result<(), String> { async fn main() -> Result<(), String> {
env_logger::init();
dotenv::dotenv().ok(); dotenv::dotenv().ok();
pretty_env_logger::init();
let port = std::env::var("JIRS_SERVER_PORT").unwrap_or_else(|_| "3000".to_string()); let port = std::env::var("JIRS_SERVER_PORT").unwrap_or_else(|_| "3000".to_string());
let bind = std::env::var("JIRS_SERVER_BIND").unwrap_or_else(|_| "0.0.0.0".to_string()); let bind = std::env::var("JIRS_SERVER_BIND").unwrap_or_else(|_| "0.0.0.0".to_string());