Fix S3. Split actors into workspaces. Add highlight actor. Remove empty utils.

Adrian Woźniak 2020-12-24 16:24:04 +01:00
parent ac2ba39795
commit 70ca962f8c
153 changed files with 4078 additions and 3851 deletions

.gitignore
@@ -1,10 +1,16 @@
/target
mail.toml
mail.test.toml
web.toml
web.test.toml
db.toml
db.test.toml
+fs.toml
+fs.test.toml
+highlight.toml
+highlight.test.toml
pkg
jirs-client/pkg
jirs-client/tmp
@@ -15,3 +21,5 @@ jirs-cli/target
jirs-bat/bat
highlight/jirs-highlight/build
+uploads
+config

Cargo.lock (generated, 564 changed lines): diff suppressed because it is too large.

@@ -13,6 +13,14 @@ members = [
"./jirs-cli",
"./jirs-server",
"./jirs-client",
-"./jirs-data",
"./jirs-css",
+"./shared/jirs-config",
+"./shared/jirs-data",
+"./actors/highlight-actor",
+"./actors/database-actor",
+"./actors/database-actor/database_actor-derive",
+"./actors/web-actor",
+"./actors/websocket-actor",
+"./actors/mail-actor",
+"./actors/filesystem-actor"
]

@@ -0,0 +1,59 @@
[package]
name = "database-actor"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "database_actor"
path = "./src/lib.rs"
[dependencies]
serde = "*"
bincode = "*"
toml = { version = "*" }
actix = { version = "0.10.0" }
actix-web = { version = "*" }
futures = { version = "0.3.8" }
openssl-sys = { version = "*", features = ["vendored"] }
libc = { version = "0.2.0", default-features = false }
pq-sys = { version = ">=0.3.0, <0.5.0" }
r2d2 = { version = ">= 0.8, < 0.9" }
dotenv = { version = "*" }
byteorder = "1.0"
chrono = { version = "0.4", features = ["serde"] }
time = { version = "0.1" }
url = { version = "2.1.0" }
percent-encoding = { version = "2.1.0" }
uuid = { version = "0.8.1", features = ["serde", "v4", "v5"] }
ipnetwork = { version = ">=0.12.2, <0.17.0" }
num-bigint = { version = ">=0.1.41, <0.3" }
num-traits = { version = "0.2" }
num-integer = { version = "0.1.32" }
bigdecimal = { version = ">= 0.0.10, <= 0.1.0" }
bitflags = { version = "1.0" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7"
[dependencies.jirs-config]
path = "../../shared/jirs-config"
features = ["database"]
[dependencies.jirs-data]
path = "../../shared/jirs-data"
features = ["backend"]
[dependencies.diesel]
version = "1.4.5"
features = ["unstable", "postgres", "numeric", "extras", "uuidv07"]

@@ -0,0 +1,17 @@
[package]
name = "database-actor-derive"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "database_actor_derive"
path = "./src/lib.rs"
proc-macro = true
[dependencies]

@@ -0,0 +1,27 @@
extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};
#[proc_macro_derive(DbMsg, attributes(query))]
pub fn db_msg(item: TokenStream) -> TokenStream {
let mut it = item.into_iter();
if let Some(TokenTree::Ident(ident)) = it.next() {
if ident.to_string().as_str() != "pub" {
panic!("Expect to find keyword pub but was found {:?}", ident)
}
} else {
panic!("Expect to find keyword pub but nothing was found")
}
if let Some(TokenTree::Ident(ident)) = it.next() {
if ident.to_string().as_str() != "struct" {
panic!("Expect to find keyword struct but was found {:?}", ident)
}
} else {
panic!("Expect to find keyword struct but nothing was found")
}
let _name = it
.next()
.expect("Expect to struct name but nothing was found");
"".parse().unwrap()
}
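
As committed, the DbMsg derive only checks that it is attached to a `pub struct` and expands to an empty token stream; the registered `query` attribute is not consumed yet. A minimal sketch of how such a derive would be attached (the struct and field below are hypothetical, not part of this commit):

// Hypothetical message type; with the derive as committed this adds nothing extra.
#[derive(database_actor_derive::DbMsg)]
pub struct FindIssueById {
    pub issue_id: i32,
}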

@@ -0,0 +1,18 @@
use {
crate::{db_find, tokens::FindAccessToken},
diesel::prelude::*,
jirs_data::User,
};
db_find! {
AuthorizeUser,
msg => conn => users => {
let token = FindAccessToken {
token: msg.access_token,
}
.execute(conn)?;
users.find(token.user_id)
},
User,
access_token => uuid::Uuid
}

@@ -0,0 +1,51 @@
use {
crate::{db_create, db_delete, db_load, db_update},
diesel::prelude::*,
jirs_data::{Comment, CommentId, IssueId, UserId},
};
db_load! {
LoadIssueComments,
msg => comments => comments.distinct_on(id).filter(issue_id.eq(msg.issue_id)),
Comment,
issue_id => IssueId
}
db_create! {
CreateComment,
msg => comments => diesel::insert_into(comments).values((
body.eq(msg.body),
user_id.eq(msg.user_id),
issue_id.eq(msg.issue_id),
)),
Comment,
issue_id => IssueId,
user_id => UserId,
body => String
}
db_update! {
UpdateComment,
msg => comments => diesel::update(
comments
.filter(user_id.eq(msg.user_id))
.find(msg.comment_id),
)
.set(body.eq(msg.body)),
Comment,
comment_id => CommentId,
user_id => UserId,
body => String
}
db_delete! {
DeleteComment,
msg => comments => diesel::delete(
comments
.filter(user_id.eq(msg.user_id))
.find(msg.comment_id),
),
Comment,
comment_id => CommentId,
user_id => UserId
}

@@ -0,0 +1,48 @@
use {
crate::{db_create, db_delete, db_load, db_update},
diesel::prelude::*,
jirs_data::Epic,
};
db_load! {
LoadEpics,
msg => epics => epics.distinct_on(id).filter(project_id.eq(msg.project_id)),
Epic,
project_id => i32
}
db_create! {
CreateEpic,
msg => epics => diesel::insert_into(epics).values((
name.eq(msg.name.as_str()),
user_id.eq(msg.user_id),
project_id.eq(msg.project_id),
)),
Epic,
user_id => i32,
project_id => i32,
name => String
}
db_update! {
UpdateEpic,
msg => epics => diesel::update(
epics
.filter(project_id.eq(msg.project_id))
.find(msg.epic_id),
).set(name.eq(msg.name)),
Epic,
epic_id => i32,
project_id => i32,
name => String
}
db_delete! {
DeleteEpic,
msg => epics => diesel::delete(
epics.filter(user_id.eq(msg.user_id)).find(msg.epic_id)
),
Epic,
user_id => i32,
epic_id => i32
}

@@ -0,0 +1,64 @@
use jirs_data::{EmailString, UsernameString};
#[derive(Debug)]
pub enum OperationError {
LoadCollection,
LoadSingle,
Create,
Update,
Delete,
}
#[derive(Debug)]
pub enum ResourceKind {
Epic,
Invitation,
IssueAssignee,
IssueStatus,
Issue,
Message,
Project,
Token,
UserProject,
User,
Comment,
}
#[derive(Debug)]
pub enum InvitationError {
InvitationRevoked,
}
#[derive(Debug)]
pub enum TokenError {
FailedToDisable,
}
#[derive(Debug)]
pub enum UserError {
TakenPair(UsernameString, EmailString),
InvalidPair(UsernameString, EmailString),
UpdateProfile,
}
#[derive(Debug)]
pub enum IssueError {
BadListPosition,
NoIssueStatuses,
}
#[derive(Debug)]
pub enum UserProjectError {
InviteHimself,
}
#[derive(Debug)]
pub enum DatabaseError {
DatabaseConnectionLost,
GenericFailure(OperationError, ResourceKind),
Invitation(InvitationError),
Token(TokenError),
User(UserError),
Issue(IssueError),
UserProject(UserProjectError),
}

@@ -0,0 +1,171 @@
use {
crate::{
db_create, db_delete, db_find, db_load, db_pool, db_update,
tokens::CreateBindToken,
users::{LookupUser, Register},
DbExecutor, DbPooledConn, InvitationError,
},
actix::{Handler, Message},
diesel::prelude::*,
jirs_data::{
EmailString, Invitation, InvitationId, InvitationState, InvitationToken, ProjectId, Token,
User, UserId, UserRole, UsernameString,
},
};
db_find! {
FindByBindToken,
msg => invitations => invitations.filter(bind_token.eq(msg.token)),
Invitation,
token => InvitationToken
}
db_load! {
ListInvitation,
msg => invitations => invitations
.filter(invited_by_id.eq(msg.user_id))
.filter(state.ne(InvitationState::Accepted))
.order_by(state.asc())
.then_order_by(updated_at.desc()),
Invitation,
user_id => UserId
}
db_create! {
CreateInvitation,
msg => invitations => diesel::insert_into(invitations).values((
name.eq(msg.name),
email.eq(msg.email),
state.eq(InvitationState::Sent),
project_id.eq(msg.project_id),
invited_by_id.eq(msg.user_id),
role.eq(msg.role),
)),
Invitation,
user_id => UserId,
project_id => ProjectId,
email => EmailString,
name => UsernameString,
role => UserRole
}
db_delete! {
DeleteInvitation,
msg => invitations => diesel::delete(invitations).filter(id.eq(msg.id)),
Invitation,
id => InvitationId
}
db_update! {
UpdateInvitationState,
msg => invitations => diesel::update(invitations)
.set((
state.eq(msg.state),
updated_at.eq(chrono::Utc::now().naive_utc()),
))
.filter(id.eq(msg.id)),
Invitation,
id => InvitationId,
state => InvitationState
}
pub struct RevokeInvitation {
pub id: InvitationId,
}
impl Message for RevokeInvitation {
type Result = Result<(), crate::DatabaseError>;
}
impl Handler<RevokeInvitation> for DbExecutor {
type Result = Result<(), crate::DatabaseError>;
fn handle(&mut self, msg: RevokeInvitation, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
UpdateInvitationState {
id: msg.id,
state: InvitationState::Revoked,
}
.execute(conn)?;
Ok(())
}
}
pub struct AcceptInvitation {
pub invitation_token: InvitationToken,
}
impl AcceptInvitation {
pub fn execute(self, conn: &DbPooledConn) -> Result<Token, crate::DatabaseError> {
crate::Guard::new(conn)?.run::<Token, _>(|_guard| {
let invitation = crate::invitations::FindByBindToken {
token: self.invitation_token,
}
.execute(conn)?;
if invitation.state == InvitationState::Revoked {
return Err(crate::DatabaseError::Invitation(
InvitationError::InvitationRevoked,
));
}
crate::invitations::UpdateInvitationState {
id: invitation.id,
state: InvitationState::Accepted,
}
.execute(conn)?;
UpdateInvitationState {
id: invitation.id,
state: InvitationState::Accepted,
}
.execute(conn)?;
match {
Register {
name: invitation.name.clone(),
email: invitation.email.clone(),
project_id: Some(invitation.project_id),
role: UserRole::User,
}
.execute(conn)
} {
Ok(_) => (),
Err(crate::DatabaseError::User(crate::UserError::InvalidPair(..))) => (),
Err(e) => return Err(e),
};
let user: User = LookupUser {
name: invitation.name.clone(),
email: invitation.email.clone(),
}
.execute(conn)?;
CreateBindToken { user_id: user.id }.execute(conn)?;
crate::user_projects::CreateUserProject {
user_id: user.id,
project_id: invitation.project_id,
is_current: false,
is_default: false,
role: invitation.role,
}
.execute(conn)?;
crate::tokens::FindUserId { user_id: user.id }.execute(conn)
})
}
}
impl Message for AcceptInvitation {
type Result = Result<Token, crate::DatabaseError>;
}
impl Handler<AcceptInvitation> for DbExecutor {
type Result = Result<Token, crate::DatabaseError>;
fn handle(&mut self, msg: AcceptInvitation, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}

@@ -0,0 +1,59 @@
use {
crate::{db_create, db_delete, db_load, db_load_field},
diesel::{expression::dsl::not, prelude::*},
jirs_data::{IssueAssignee, IssueId, UserId},
};
db_create! {
AsignMultiple,
msg => issue_assignees => {
use crate::models::CreateIssueAssigneeForm;
let AsignMultiple { user_ids, issue_id: i_id } = msg;
diesel::insert_into(issue_assignees)
.values(user_ids.into_iter().map(|u_id| {
CreateIssueAssigneeForm {
user_id: u_id,
issue_id: i_id
}
}).collect::<Vec<CreateIssueAssigneeForm>>())
},
IssueAssignee,
user_ids => Vec<UserId>,
issue_id => IssueId
}
db_load! {
LoadAssignees,
msg => issue_assignees => issue_assignees
.distinct_on(id)
.filter(issue_id.eq(msg.issue_id)),
IssueAssignee,
issue_id => IssueId
}
db_load_field! {
LoadAssigneesIds,
UserId,
msg => issue_assignees => issue_assignees
.select(user_id)
.filter(issue_id.eq(msg.issue_id)),
IssueAssignee,
issue_id => IssueId
}
db_delete! {
DeleteIssueAssignees,
msg => issue_assignees => diesel::delete(issue_assignees.filter(issue_id.eq(msg.issue_id))),
IssueAssignee,
issue_id => IssueId
}
db_delete! {
DropIssueAssignees,
msg => issue_assignees => diesel::delete(issue_assignees)
.filter(not(user_id.eq_any(msg.user_ids)).and(issue_id.eq(msg.issue_id))),
IssueAssignee,
issue_id => IssueId,
user_ids => Vec<UserId>
}

@@ -0,0 +1,55 @@
use {
crate::{db_create, db_delete, db_load, db_update},
diesel::prelude::*,
jirs_data::{IssueStatus, IssueStatusId, Position, ProjectId, TitleString},
};
db_load! {
LoadIssueStatuses,
msg => issue_statuses => issue_statuses
.distinct_on(id)
.filter(project_id.eq(msg.project_id)),
IssueStatus,
project_id => ProjectId
}
db_create! {
CreateIssueStatus,
msg => issue_statuses => diesel::insert_into(issue_statuses).values((
project_id.eq(msg.project_id),
name.eq(msg.name),
position.eq(msg.position),
)),
IssueStatus,
project_id => ProjectId,
position => i32,
name => TitleString
}
db_delete! {
DeleteIssueStatus,
msg => issue_statuses => diesel::delete(issue_statuses)
.filter(id.eq(msg.issue_status_id))
.filter(project_id.eq(msg.project_id)
),
IssueStatus,
project_id => ProjectId,
issue_status_id => IssueStatusId
}
db_update! {
UpdateIssueStatus,
msg => issue_statuses => diesel::update(issue_statuses)
.set((
name.eq(msg.name),
position.eq(msg.position),
updated_at.eq(chrono::Utc::now().naive_utc()),
))
.filter(id.eq(msg.issue_status_id))
.filter(project_id.eq(msg.project_id)),
IssueStatus,
issue_status_id => IssueStatusId,
project_id => ProjectId,
position => Position,
name => TitleString
}

@@ -0,0 +1,228 @@
use {
crate::{
db_create_with_conn, db_delete_with_conn, db_find, db_load, db_update_with_conn,
models::Issue,
},
diesel::{expression::sql_literal::sql, prelude::*},
jirs_data::{IssueId, IssuePriority, IssueStatusId, IssueType, ProjectId, UserId},
};
db_find! {
LoadIssue,
msg => issues => issues.filter(id.eq(msg.issue_id)).distinct(),
Issue,
issue_id => IssueId
}
db_load! {
LoadProjectIssues,
msg => issues => issues.filter(project_id.eq(msg.project_id)).distinct(),
Issue,
project_id => ProjectId
}
db_update_with_conn! {
UpdateIssue,
msg => conn => issues => {
if let Some(user_ids) = msg.user_ids {
crate::issue_assignees::DropIssueAssignees {
issue_id: msg.issue_id,
user_ids: user_ids.clone(),
}
.execute(conn)?;
let existing: Vec<UserId> = crate::issue_assignees::LoadAssigneesIds {
issue_id: msg.issue_id,
}
.execute(conn)?;
crate::issue_assignees::AsignMultiple {
issue_id: msg.issue_id,
user_ids: user_ids
.into_iter()
.filter(|u_id| !existing.contains(u_id))
.collect::<Vec<UserId>>(),
}
.execute(conn)?;
}
diesel::update(issues.find(msg.issue_id)).set((
msg.title.map(|v| title.eq(v)),
msg.issue_type.map(|v| issue_type.eq(v)),
msg.issue_status_id.map(|v| issue_status_id.eq(v)),
msg.priority.map(|p| priority.eq(p)),
msg.list_position.map(|pos| list_position.eq(pos)),
msg.description.map(|desc| description.eq(desc)),
msg.description_text.map(|t| description_text.eq(t)),
msg.estimate.map(|v| estimate.eq(v)),
msg.time_spent.map(|v| time_spent.eq(v)),
msg.time_remaining.map(|v| time_remaining.eq(v)),
msg.project_id.map(|v| project_id.eq(v)),
msg.reporter_id.map(|v| reporter_id.eq(v)),
msg.epic_id.map(|v| epic_id.eq(v)),
updated_at.eq(chrono::Utc::now().naive_utc()),
))
},
Issue,
issue_id => i32,
title => Option<String>,
issue_type => Option<IssueType>,
priority => Option<IssuePriority>,
list_position => Option<i32>,
description => Option<String>,
description_text => Option<String>,
estimate => Option<i32>,
time_spent => Option<i32>,
time_remaining => Option<i32>,
project_id => Option<i32>,
user_ids => Option<Vec<i32>>,
reporter_id => Option<i32>,
issue_status_id => Option<i32>,
epic_id => Option<Option<i32>>
}
db_delete_with_conn! {
DeleteIssue,
msg => conn => issues => {
crate::issue_assignees::DeleteIssueAssignees { issue_id: msg.issue_id }
.execute(conn)?;
diesel::delete(issues.find(msg.issue_id))
},
Issue,
issue_id => IssueId
}
mod inner {
use {
crate::{db_create, models::Issue},
diesel::prelude::*,
jirs_data::{IssuePriority, IssueStatusId, IssueType},
};
db_create! {
CreateIssue,
msg => issues => diesel::insert_into(issues)
.values((
title.eq(msg.title),
issue_type.eq(msg.issue_type),
issue_status_id.eq(msg.issue_status_id),
priority.eq(msg.priority),
list_position.eq(msg.list_position),
description.eq(msg.description),
description_text.eq(msg.description_text),
estimate.eq(msg.estimate),
time_spent.eq(msg.time_spent),
time_remaining.eq(msg.time_remaining),
reporter_id.eq(msg.reporter_id),
project_id.eq(msg.project_id),
epic_id.eq(msg.epic_id)
))
.on_conflict_do_nothing(),
Issue,
title => String,
list_position => i32,
issue_type => IssueType,
issue_status_id => IssueStatusId,
priority => IssuePriority,
description => Option<String>,
description_text => Option<String>,
estimate => Option<i32>,
time_spent => Option<i32>,
time_remaining => Option<i32>,
project_id => jirs_data::ProjectId,
reporter_id => jirs_data::UserId,
epic_id => Option<jirs_data::EpicId>
}
}
db_create_with_conn! {
CreateIssue,
msg => conn => issues => {
let pos = issues
.select(sql("COALESCE(max(list_position), 0) + 1"))
.get_result::<i32>(conn)
.map_err(|e| {
log::error!("resolve new issue position failed {}", e);
crate::DatabaseError::Issue(crate::IssueError::BadListPosition)
})?;
let i_s_id: IssueStatusId = if msg.issue_status_id == 0 {
crate::issue_statuses::LoadIssueStatuses { project_id: msg.project_id }
.execute(conn)?
.first()
.ok_or_else(|| crate::DatabaseError::Issue(crate::IssueError::NoIssueStatuses))?
.id
} else {
msg.issue_status_id
};
let assign_users = msg.user_ids
.iter()
.cloned()
.filter(|u_id| *u_id != msg.reporter_id)
.collect::<Vec<UserId>>();
let issue = inner::CreateIssue {
title: msg.title,
list_position: pos,
issue_type: msg.issue_type,
issue_status_id: i_s_id,
priority: msg.priority,
description: msg.description,
description_text: msg.description_text,
estimate: msg.estimate,
time_spent: msg.time_spent,
time_remaining: msg.time_remaining,
project_id: msg.project_id,
reporter_id: msg.reporter_id,
epic_id: msg.epic_id,
}.execute(conn)?;
crate::issue_assignees::AsignMultiple {
issue_id: issue.id,
user_ids: assign_users,
};
issues.find(issue.id)
},
Issue,
title => String,
issue_type => IssueType,
issue_status_id => IssueStatusId,
priority => IssuePriority,
description => Option<String>,
description_text => Option<String>,
estimate => Option<i32>,
time_spent => Option<i32>,
time_remaining => Option<i32>,
project_id => jirs_data::ProjectId,
reporter_id => jirs_data::UserId,
user_ids => Vec<jirs_data::UserId>,
epic_id => Option<jirs_data::EpicId>
}
// impl Handler<CreateIssue> for DbExecutor {
// type Result = Result<Issue, crate::DatabaseError>;
//
// fn handle(&mut self, msg: CreateIssue, ctx: &mut Self::Context) -> Self::Result {
// use crate::schema::issue_assignees::dsl;
// use crate::schema::issues::dsl::issues;
//
// let mut values = vec![];
// for user_id in msg.user_ids.iter() {
// values.push(crate::models::CreateIssueAssigneeForm {
// issue_id: issue.id,
// user_id: *user_id,
// });
// }
// if !msg.user_ids.contains(&msg.reporter_id) {
// values.push(crate::models::CreateIssueAssigneeForm {
// issue_id: issue.id,
// user_id: msg.reporter_id,
// });
// }
//
// diesel::insert_into(dsl::issue_assignees)
// .values(values)
// .execute(conn)
// .map_err(|e| {
// log::error!("{:?}", e);
// crate::DatabaseError::DatabaseConnectionLost
// })?;
//
// Ok(issue)
// }
// }

@@ -0,0 +1,109 @@
#![recursion_limit = "256"]
#[macro_use]
extern crate diesel;
pub use errors::*;
use {
actix::{Actor, SyncContext},
diesel::pg::PgConnection,
diesel::r2d2::{self, ConnectionManager},
};
pub mod authorize_user;
pub mod comments;
pub mod epics;
pub mod errors;
pub mod invitations;
pub mod issue_assignees;
pub mod issue_statuses;
pub mod issues;
pub mod messages;
pub mod models;
pub mod prelude;
pub mod projects;
pub mod schema;
pub mod tokens;
pub mod user_projects;
pub mod users;
pub type DbPool = r2d2::Pool<ConnectionManager<PgConnection>>;
pub type DbPooledConn = r2d2::PooledConnection<ConnectionManager<PgConnection>>;
pub struct DbExecutor {
pub pool: DbPool,
pub config: jirs_config::database::Configuration,
}
impl Actor for DbExecutor {
type Context = SyncContext<Self>;
}
impl Default for DbExecutor {
fn default() -> Self {
Self {
pool: build_pool(),
config: jirs_config::database::Configuration::read(),
}
}
}
pub fn build_pool() -> DbPool {
dotenv::dotenv().ok();
let config = jirs_config::database::Configuration::read();
let manager = ConnectionManager::<PgConnection>::new(config.database_url);
r2d2::Pool::builder()
.max_size(config.concurrency as u32)
.build(manager)
.unwrap_or_else(|e| panic!("Failed to create pool. {}", e))
}
pub trait SyncQuery {
type Result;
fn handle(&self, pool: &DbPool) -> Self::Result;
}
pub struct Guard<'l> {
conn: &'l crate::DbPooledConn,
tm: &'l diesel::connection::AnsiTransactionManager,
}
impl<'l> Guard<'l> {
pub fn new(conn: &'l DbPooledConn) -> Result<Self, crate::DatabaseError> {
use diesel::{connection::TransactionManager, prelude::*};
let tm = conn.transaction_manager();
tm.begin_transaction(conn).map_err(|e| {
log::error!("{:?}", e);
crate::DatabaseError::DatabaseConnectionLost
})?;
Ok(Self { conn, tm })
}
pub fn run<R, F: FnOnce(&Guard) -> Result<R, crate::DatabaseError>>(
&self,
f: F,
) -> Result<R, crate::DatabaseError> {
use diesel::connection::TransactionManager;
let r = f(self);
match r {
Ok(r) => {
self.tm.commit_transaction(self.conn).map_err(|e| {
log::error!("{:?}", e);
crate::DatabaseError::DatabaseConnectionLost
})?;
Ok(r)
}
Err(e) => {
log::error!("{:?}", e);
self.tm.rollback_transaction(self.conn).map_err(|e| {
log::error!("{:?}", e);
crate::DatabaseError::DatabaseConnectionLost
})?;
Err(e)
}
}
}
}
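
Guard drives diesel's transaction manager by hand so that several generated queries can run atomically on one pooled connection (AcceptInvitation above uses it this way). A minimal sketch, as if written inside this crate; the closure body is a placeholder:

// Sketch: everything inside `run` executes in one transaction;
// returning Err rolls it back, returning Ok commits it.
fn in_transaction(conn: &DbPooledConn) -> Result<(), DatabaseError> {
    Guard::new(conn)?.run::<(), _>(|_guard| {
        // chain `.execute(conn)?` calls on the generated message structs here
        Ok(())
    })
}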

@@ -0,0 +1,69 @@
use {
crate::{
db_create_with_conn, db_delete, db_load,
users::{FindUser, LookupUser},
},
diesel::prelude::*,
jirs_data::{BindToken, Message, MessageId, MessageType, User, UserId},
};
db_load! {
LoadMessages,
msg => messages => messages.filter(receiver_id.eq(msg.user_id)),
Message,
user_id => UserId
}
db_delete! {
MarkMessageSeen,
msg => messages => diesel::delete(
messages.find(msg.message_id).filter(receiver_id.eq(msg.user_id))
),
Message,
user_id => UserId,
message_id => MessageId
}
#[derive(Debug)]
pub enum CreateMessageReceiver {
Reference(UserId),
Lookup { name: String, email: String },
}
db_create_with_conn! {
CreateMessage,
msg => conn => messages => {
let user: User = match msg.receiver {
CreateMessageReceiver::Lookup { name, email } => {
LookupUser { name, email }.execute(conn)?
}
CreateMessageReceiver::Reference(user_id) => FindUser { user_id }.execute(conn)?,
};
diesel::insert_into(messages).values((
receiver_id.eq(user.id),
sender_id.eq(msg.sender_id),
summary.eq(msg.summary),
description.eq(msg.description),
message_type.eq(msg.message_type),
hyper_link.eq(msg.hyper_link),
))
},
Message,
receiver => CreateMessageReceiver,
sender_id => UserId,
summary => String,
description => String,
message_type => MessageType,
hyper_link => String
}
db_load! {
LookupMessagesByToken,
msg => messages => messages.filter(
hyper_link.eq(format!("#{}", msg.token)).and(receiver_id.eq(msg.user_id)),
),
Message,
token => BindToken,
user_id => UserId
}

@@ -1,126 +1,126 @@
-use chrono::NaiveDateTime;
-use serde::{Deserialize, Serialize};
-use uuid::Uuid;
-use jirs_data::{
-EpicId, InvitationState, IssuePriority, IssueStatusId, IssueType, ProjectCategory, ProjectId,
-TimeTracking, UserId,
-};
-use crate::schema::*;
+use {
+crate::schema::*,
+chrono::NaiveDateTime,
+jirs_data::{
+EpicId, InvitationState, IssuePriority, IssueStatusId, IssueType, ProjectCategory,
+ProjectId, TimeTracking, UserId,
+},
+serde::{Deserialize, Serialize},
+uuid::Uuid,
+};
#[derive(Debug, Serialize, Deserialize, Queryable)]
pub struct Issue {
pub id: i32,
pub title: String,
pub issue_type: IssueType,
pub priority: IssuePriority,
pub list_position: i32,
pub description: Option<String>,
pub description_text: Option<String>,
pub estimate: Option<i32>,
pub time_spent: Option<i32>,
pub time_remaining: Option<i32>,
pub reporter_id: i32,
pub project_id: i32,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
pub issue_status_id: IssueStatusId,
pub epic_id: Option<EpicId>,
}
impl Into<jirs_data::Issue> for Issue {
fn into(self) -> jirs_data::Issue {
jirs_data::Issue {
id: self.id,
title: self.title,
issue_type: self.issue_type,
priority: self.priority,
list_position: self.list_position,
description: self.description,
description_text: self.description_text,
estimate: self.estimate,
time_spent: self.time_spent,
time_remaining: self.time_remaining,
reporter_id: self.reporter_id,
project_id: self.project_id,
created_at: self.created_at,
updated_at: self.updated_at,
issue_status_id: self.issue_status_id,
epic_id: self.epic_id,
user_ids: vec![],
}
}
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "issues"]
pub struct CreateIssueForm {
pub title: String,
pub issue_type: IssueType,
pub priority: IssuePriority,
pub list_position: i32,
pub description: Option<String>,
pub description_text: Option<String>,
pub estimate: Option<i32>,
pub time_spent: Option<i32>,
pub time_remaining: Option<i32>,
pub reporter_id: UserId,
pub project_id: ProjectId,
pub issue_status_id: IssueStatusId,
pub epic_id: Option<EpicId>,
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "issue_assignees"]
pub struct CreateIssueAssigneeForm {
pub issue_id: i32,
pub user_id: i32,
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "projects"]
pub struct UpdateProjectForm {
pub name: Option<String>,
pub url: Option<String>,
pub description: Option<String>,
pub category: Option<ProjectCategory>,
pub time_tracking: Option<TimeTracking>,
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "projects"]
pub struct CreateProjectForm {
pub name: String,
pub url: String,
pub description: String,
pub category: ProjectCategory,
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "users"]
pub struct UserForm {
pub name: String,
pub email: String,
pub avatar_url: Option<String>,
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "tokens"]
pub struct TokenForm {
pub user_id: i32,
pub access_token: Uuid,
pub refresh_token: Uuid,
pub bind_token: Option<Uuid>,
}
#[derive(Debug, Serialize, Deserialize, Insertable)]
#[table_name = "invitations"]
pub struct InvitationForm {
pub name: String,
pub email: String,
pub state: InvitationState,
pub project_id: i32,
pub invited_by_id: i32,
}

@@ -0,0 +1,284 @@
#[macro_export]
macro_rules! db_pool {
($self: expr) => {
&$self.pool.get().map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::DatabaseConnectionLost
})?
};
($self: expr, $pool: expr) => {
&$pool.get().map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::DatabaseConnectionLost
})?
};
}
#[macro_export]
macro_rules! q {
($q: expr) => {{
let q = $q;
log::debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&q).to_string()
);
q
}};
}
#[macro_export]
macro_rules! db_find {
($action: ident, $self: ident => $conn: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
pub struct $action {
$(pub $field : $ty),+
}
impl $action {
pub fn execute(self, $conn: &$crate::DbPooledConn) -> Result<$resource, crate::DatabaseError> {
use crate::schema:: $schema ::dsl::*;
let $self = self;
$crate::q!($q)
.first($conn)
.map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::GenericFailure(
$crate::OperationError::LoadCollection,
$crate::ResourceKind::$resource,
)
})
}
}
impl actix::Message for $action {
type Result = Result<$resource, $crate::DatabaseError>;
}
impl actix::Handler<$action> for $crate::DbExecutor {
type Result = Result<$resource, $crate::DatabaseError>;
fn handle(&mut self, msg: $action, _ctx: &mut Self::Context) -> Self::Result {
let $conn = $crate::db_pool!(self);
msg.execute($conn)
}
}
};
($action: ident, $self: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
$crate::db_find! { $action, $self => conn => $schema => $q, $resource, $($field => $ty),+ }
};
}
#[macro_export]
macro_rules! db_load {
($action: ident, $self: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
pub struct $action {
$(pub $field : $ty),+
}
impl $action {
pub fn execute(self, conn: &$crate::DbPooledConn) -> Result<Vec<$resource>, $crate::DatabaseError> {
use crate::schema:: $schema ::dsl::*;
let $self = self;
$crate::q!($q)
.load(conn)
.map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::GenericFailure(
$crate::OperationError::LoadCollection,
$crate::ResourceKind::$resource,
)
})
}
}
impl actix::Message for $action {
type Result = Result<Vec<$resource>, $crate::DatabaseError>;
}
impl actix::Handler<$action> for $crate::DbExecutor {
type Result = Result<Vec<$resource>, $crate::DatabaseError>;
fn handle(&mut self, msg: $action, _ctx: &mut Self::Context) -> Self::Result {
let conn = $crate::db_pool!(self);
msg.execute(conn)
}
}
};
}
#[macro_export]
macro_rules! db_load_field {
($action: ident, $return_type: ident, $self: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
pub struct $action {
$(pub $field : $ty),+
}
impl $action {
pub fn execute(self, conn: &$crate::DbPooledConn) -> Result<Vec<$return_type>, $crate::DatabaseError> {
use crate::schema:: $schema ::dsl::*;
let $self = self;
$crate::q!($q)
.load(conn)
.map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::GenericFailure(
$crate::OperationError::LoadCollection,
$crate::ResourceKind::$resource,
)
})
}
}
impl actix::Message for $action {
type Result = Result<Vec<$return_type>, $crate::DatabaseError>;
}
impl actix::Handler<$action> for $crate::DbExecutor {
type Result = Result<Vec<$return_type>, $crate::DatabaseError>;
fn handle(&mut self, msg: $action, _ctx: &mut Self::Context) -> Self::Result {
let conn = $crate::db_pool!(self);
msg.execute(conn)
}
}
};
}
#[macro_export]
macro_rules! db_create {
($action: ident, $self: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
$crate::db_create_with_conn! { $action, $self => conn => $schema => $q, $resource, $($field => $ty),+ }
}
}
#[macro_export]
macro_rules! db_create_with_conn {
($action: ident, $self: ident => $conn: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
pub struct $action {
$(pub $field : $ty),+
}
impl $action {
pub fn execute(self, $conn: &$crate::DbPooledConn) -> Result<$resource, crate::DatabaseError> {
crate::Guard::new($conn)?.run(|_guard| {
use crate::schema:: $schema ::dsl::*;
let $self = self;
$crate::q!($q)
.get_result::<$resource>($conn)
.map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::GenericFailure(
$crate::OperationError::Create,
$crate::ResourceKind::$resource,
)
})
})
}
}
impl actix::Message for $action {
type Result = Result<$resource, $crate::DatabaseError>;
}
impl actix::Handler<$action> for $crate::DbExecutor {
type Result = Result<$resource, $crate::DatabaseError>;
fn handle(&mut self, msg: $action, _ctx: &mut Self::Context) -> Self::Result {
let $conn = $crate::db_pool!(self);
msg.execute($conn)
}
}
};
}
#[macro_export]
macro_rules! db_update {
($action: ident, $self: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
$crate::db_update_with_conn! { $action, $self => conn => $schema => $q, $resource, $($field => $ty),+ }
};
}
#[macro_export]
macro_rules! db_update_with_conn {
($action: ident, $self: ident => $conn: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
pub struct $action {
$(pub $field : $ty),+
}
impl $action {
pub fn execute(self, $conn: &$crate::DbPooledConn) -> Result<$resource, crate::DatabaseError> {
use crate::schema:: $schema ::dsl::*;
let $self = self;
$crate::q!($q)
.get_result::<$resource>($conn)
.map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::GenericFailure(
$crate::OperationError::Update,
$crate::ResourceKind::$resource,
)
})
}
}
impl actix::Message for $action {
type Result = Result<$resource, $crate::DatabaseError>;
}
impl actix::Handler<$action> for $crate::DbExecutor {
type Result = Result<$resource, $crate::DatabaseError>;
fn handle(&mut self, msg: $action, _ctx: &mut Self::Context) -> Self::Result {
let $conn = $crate::db_pool!(self);
msg.execute ( $conn )
}
}
};
}
#[macro_export]
macro_rules! db_delete {
($action: ident, $self: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
$crate::db_delete_with_conn! { $action, $self => conn => $schema => $q, $resource, $($field => $ty),+ }
};
}
#[macro_export]
macro_rules! db_delete_with_conn {
($action: ident, $self: ident => $conn: ident => $schema: ident => $q: expr, $resource: ident, $($field: ident => $ty: ty),+) => {
pub struct $action {
$(pub $field : $ty),+
}
impl $action {
pub fn execute(self, $conn: &$crate::DbPooledConn) -> Result<usize, $crate::DatabaseError> {
use $crate::schema:: $schema ::dsl::*;
let $self = self;
$crate::q!($q)
.execute($conn)
.map_err(|e| {
log::error!("{:?}", e);
$crate::DatabaseError::GenericFailure(
$crate::OperationError::Delete,
$crate::ResourceKind::$resource,
)
})
}
}
impl actix::Message for $action {
type Result = Result<usize, $crate::DatabaseError>;
}
impl actix::Handler<$action> for $crate::DbExecutor {
type Result = Result<usize, $crate::DatabaseError>;
fn handle(&mut self, msg: $action, _ctx: &mut Self::Context) -> Self::Result {
let $conn = $crate::db_pool!(self);
msg.execute($conn)
}
}
};
}
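
Each of these macros expands to a message struct with an `execute(conn)` method plus an `actix::Handler` on DbExecutor, so a query can either be run directly on a pooled connection or sent to the actor. A minimal sketch of the actor path, using the FindUser message defined in users.rs (mailbox-error handling is collapsed to an expect for brevity):

// Sketch: send a macro-generated message to a running DbExecutor address.
async fn load_user(
    db: actix::Addr<database_actor::DbExecutor>,
    user_id: jirs_data::UserId,
) -> Result<jirs_data::User, database_actor::DatabaseError> {
    db.send(database_actor::users::FindUser { user_id })
        .await
        .expect("DbExecutor mailbox closed")
}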

@@ -0,0 +1,97 @@
use {
crate::{db_create_with_conn, db_find, db_load, db_update},
diesel::prelude::*,
jirs_data::{NameString, Project, ProjectCategory, ProjectId, TimeTracking, UserId},
};
db_find! {
LoadCurrentProject,
msg => projects => projects.find(msg.project_id),
Project,
project_id => ProjectId
}
mod inner {
use {
crate::db_create,
diesel::prelude::*,
jirs_data::{NameString, Project, ProjectCategory, TimeTracking},
};
db_create! {
CreateProject,
msg => projects => diesel::insert_into(projects)
.values((
name.eq(msg.name),
msg.url.map(|v| url.eq(v)),
msg.description.map(|v| description.eq(v)),
msg.category.map(|v| category.eq(v)),
msg.time_tracking.map(|v| time_tracking.eq(v)),
))
.returning(crate::schema::projects::all_columns),
Project,
name => NameString,
url => Option<String>,
description => Option<String>,
category => Option<ProjectCategory>,
time_tracking => Option<TimeTracking>
}
}
db_create_with_conn! {
CreateProject,
msg => conn => projects => {
let p = inner::CreateProject {
name: msg.name,
url: msg.url,
description: msg.description,
category: msg.category,
time_tracking: msg.time_tracking,
}.execute(conn)?;
crate::issue_statuses::CreateIssueStatus {
project_id: p.id,
position: 0,
name: "TODO".to_string(),
}
.execute(conn)?;
projects.find(p.id)
},
Project,
name => NameString,
url => Option<String>,
description => Option<String>,
category => Option<ProjectCategory>,
time_tracking => Option<TimeTracking>
}
db_update! {
UpdateProject,
msg => projects => diesel::update(projects.find(msg.project_id)).set((
msg.name.map(|v| name.eq(v)),
msg.url.map(|v| url.eq(v)),
msg.description.map(|v| description.eq(v)),
msg.category.map(|v| category.eq(v)),
msg.time_tracking.map(|v| time_tracking.eq(v)),
)),
Project,
project_id => ProjectId,
name => Option<NameString>,
url => Option<String>,
description => Option<String>,
category => Option<ProjectCategory>,
time_tracking => Option<TimeTracking>
}
db_load! {
LoadProjects,
msg => projects => {
use crate::schema::user_projects::{dsl::{user_projects, user_id, project_id}};
projects
.inner_join(user_projects.on(project_id.eq(id)))
.filter(user_id.eq(msg.user_id))
.distinct_on(id)
.select(crate::schema::projects::all_columns)
},
Project,
user_id => UserId
}

@@ -0,0 +1,48 @@
use {
crate::{db_create, db_find, db_update_with_conn},
diesel::prelude::*,
jirs_data::{Token, UserId},
};
db_find! {
FindUserId,
msg => tokens => tokens.filter(user_id.eq(msg.user_id)).order_by(id.desc()),
Token,
user_id => UserId
}
db_find! {
FindBindToken,
msg => tokens => tokens.filter(bind_token.eq(Some(msg.token))),
Token,
token => uuid::Uuid
}
db_update_with_conn! {
UseBindToken,
msg => conn => tokens => {
let token = FindBindToken { token: msg.token }.execute(conn)?;
diesel::update(tokens.find(token.id)).set(bind_token.eq(None as Option<uuid::Uuid>))
},
Token,
token => uuid::Uuid
}
db_find! {
FindAccessToken,
msg => tokens => tokens.filter(access_token.eq(msg.token)),
Token,
token => uuid::Uuid
}
db_create! {
CreateBindToken,
msg => tokens => diesel::insert_into(tokens).values((
user_id.eq(msg.user_id),
access_token.eq(uuid::Uuid::new_v4()),
refresh_token.eq(uuid::Uuid::new_v4()),
bind_token.eq(Some(uuid::Uuid::new_v4())),
)),
Token,
user_id => UserId
}

@@ -0,0 +1,134 @@
use {
crate::{db_create, db_delete_with_conn, db_find, db_load, db_update_with_conn},
diesel::prelude::*,
jirs_data::{ProjectId, UserId, UserProject, UserProjectId, UserRole},
};
db_find! {
CurrentUserProject,
msg => user_projects => user_projects.filter(user_id.eq(msg.user_id).and(is_current.eq(true))),
UserProject,
user_id => UserId
}
db_find! {
FindUserProject,
msg => user_projects => user_projects.filter(id.eq(msg.id).and(user_id.eq(msg.user_id))),
UserProject,
id => UserProjectId,
user_id => UserId
}
db_load! {
LoadUserProjects,
msg => user_projects => user_projects.filter(user_id.eq(msg.user_id)),
UserProject,
user_id => UserId
}
mod inner {
use {
crate::db_update,
diesel::prelude::*,
jirs_data::{UserId, UserProject, UserProjectId},
};
db_update! {
ChangeProjectIsCurrent,
msg => user_projects => {
match msg.id {
Some(v) => diesel::update(user_projects.filter(user_id.eq(msg.user_id).and(id.eq(v)))).set(is_current.eq(msg.is_current)).into_boxed(),
_ => diesel::update(user_projects.filter(user_id.eq(msg.user_id))).set(is_current.eq(msg.is_current)).into_boxed(),
}
},
UserProject,
id => Option<UserProjectId>,
user_id => UserId,
is_current => bool
}
}
db_update_with_conn! {
ChangeCurrentUserProject,
msg => conn => user_projects => {
FindUserProject {
id: msg.id,
user_id: msg.user_id,
}
.execute(conn)?;
inner::ChangeProjectIsCurrent {
id: None,
user_id: msg.user_id,
is_current: false,
}
.execute(conn)?;
inner::ChangeProjectIsCurrent {
id: Some(msg.id),
user_id: msg.user_id,
is_current: false,
}
.execute(conn)?;
user_projects.find(msg.id)
},
UserProject,
id => UserProjectId,
user_id => UserId
}
db_find! {
FindByRole,
msg => user_projects => user_projects
.filter(user_id.eq(msg.user_id)
.and(project_id.eq(msg.project_id))
.and(role.eq(msg.role)
)
),
UserProject,
user_id => UserId,
project_id => ProjectId,
role => UserRole
}
db_delete_with_conn! {
RemoveInvitedUser,
msg => conn => user_projects => {
if msg.invited_id == msg.inviter_id {
return Err(crate::DatabaseError::UserProject(crate::UserProjectError::InviteHimself));
}
FindByRole {
user_id: msg.inviter_id,
project_id: msg.project_id,
role: UserRole::Owner,
}
.execute(conn)?;
diesel::delete(user_projects)
.filter(
user_id.eq(msg.invited_id)
.and(project_id.eq(msg.project_id)
)
)
},
UserProject,
invited_id => UserId,
inviter_id => UserId,
project_id => ProjectId
}
db_create! {
CreateUserProject,
msg => user_projects => diesel::insert_into(user_projects).values((
user_id.eq(msg.user_id),
project_id.eq(msg.project_id),
is_current.eq(msg.is_current),
is_default.eq(msg.is_default),
role.eq(msg.role),
)),
UserProject,
user_id => UserId,
project_id => ProjectId,
is_current => bool,
is_default => bool,
role => UserRole
}

@@ -0,0 +1,277 @@
use {
crate::{
db_create, db_create_with_conn, db_find, db_load, db_update, projects::CreateProject, q,
user_projects::CreateUserProject, DbPooledConn,
},
diesel::prelude::*,
jirs_data::{EmailString, IssueId, ProjectId, User, UserId, UserRole, UsernameString},
};
db_find! {
FindUser,
msg => users => users.find(msg.user_id),
User,
user_id => UserId
}
db_find! {
LookupUser,
msg => users => users
.distinct_on(id)
.filter(email.eq(msg.email.as_str()))
.filter(name.eq(msg.name.as_str())),
User,
name => UsernameString,
email => EmailString
}
db_load! {
LoadProjectUsers,
msg => users => {
use crate::schema::user_projects::dsl::{project_id, user_id, user_projects};
use crate::schema::users::all_columns;
users
.distinct_on(id)
.inner_join(user_projects.on(user_id.eq(id)))
.filter(project_id.eq(msg.project_id))
.select(all_columns)
},
User,
project_id => ProjectId
}
db_load! {
LoadIssueAssignees,
msg => users => {
use crate::schema::issue_assignees::dsl::{issue_assignees, issue_id, user_id};
users
.distinct_on(id)
.inner_join(issue_assignees.on(user_id.eq(id)))
.filter(issue_id.eq(msg.issue_id))
.select(users::all_columns())
},
User,
issue_id => IssueId
}
db_create! {
CreateUser,
msg => users => diesel::insert_into(users)
.values((name.eq(msg.name.as_str()), email.eq(msg.email.as_str()))),
User,
name => UsernameString,
email => EmailString
}
/*impl CreateUser {
pub fn execute(self, conn: &DbPooledConn) -> Result<User, crate::DatabaseError> {
use crate::schema::users::dsl::*;
q!(diesel::insert_into(users)
.values((name.eq(self.name.as_str()), email.eq(self.email.as_str()))))
.get_result(conn)
.map_err(|e| {
log::error!("{:?}", e);
let ws = match e {
Error::InvalidCString(_) => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::DatabaseError(diesel::result::DatabaseErrorKind::UniqueViolation, _) => {
crate::DatabaseError::User(UserError::TakenPair(self.name, self.email))
}
Error::DatabaseError(_, _) => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::NotFound => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::QueryBuilderError(_) => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::DeserializationError(_) => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::SerializationError(_) => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::RollbackTransaction => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::AlreadyInTransaction => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
Error::__Nonexhaustive => {
crate::DatabaseError::User(UserError::InvalidPair(self.name, self.email))
}
};
crate::DatabaseError::Error(ws)
})
}
}*/
db_create_with_conn! {
Register,
msg => conn => users => {
if count_matching_users(msg.name.as_str(), msg.email.as_str(), conn) > 0 {
return Err(crate::DatabaseError::User(crate::UserError::InvalidPair(msg.name, msg.email)));
}
let current_project_id: ProjectId = match msg.project_id {
Some(current_project_id) => current_project_id,
_ => {
CreateProject {
name: "initial".to_string(),
url: None,
description: None,
category: None,
time_tracking: None,
}
.execute(conn)?
.id
}
};
let user: User = CreateUser {
name: msg.name,
email: msg.email,
}
.execute(conn)?;
CreateUserProject {
user_id: user.id,
project_id: current_project_id,
is_current: true,
is_default: true,
role: msg.role,
}
.execute(conn)?;
users.find(user.id)
},
User,
name => UsernameString,
email => EmailString,
project_id => Option<ProjectId>,
role => UserRole
}
db_load! {
LoadInvitedUsers,
msg => users => {
use crate::schema::invitations::dsl::{email as i_email, invitations, invited_by_id};
users
.inner_join(invitations.on(i_email.eq(email)))
.filter(invited_by_id.eq(msg.user_id))
.select(users::all_columns())
},
User,
user_id => UserId
}
fn count_matching_users(name: &str, email: &str, conn: &DbPooledConn) -> i64 {
use crate::schema::users::dsl;
q!(dsl::users
.filter(dsl::email.eq(email).and(dsl::name.ne(name)))
.or_filter(dsl::email.ne(email).and(dsl::name.eq(name)))
.or_filter(dsl::email.eq(email).and(dsl::name.eq(name)))
.count())
.get_result::<i64>(conn)
.unwrap_or(1)
}
db_update! {
UpdateAvatarUrl,
msg => users => diesel::update(users.find(msg.user_id))
.set(avatar_url.eq(msg.avatar_url)),
User,
user_id => UserId,
avatar_url => Option<String>
}
db_update! {
ProfileUpdate,
msg => users => diesel::update(users.find(msg.user_id))
.set((email.eq(msg.email), name.eq(msg.name))),
User,
user_id => UserId,
name => String,
email => String
}
#[cfg(test)]
mod tests {
use diesel::connection::TransactionManager;
use jirs_data::{Project, ProjectCategory};
use crate::build_pool;
use super::*;
#[test]
fn check_collision() {
use crate::schema::projects::dsl::projects;
use crate::schema::user_projects::dsl::user_projects;
use crate::schema::users::dsl::users;
let pool = build_pool();
let conn = &pool.get().unwrap();
let tm = conn.transaction_manager();
tm.begin_transaction(conn).unwrap();
diesel::delete(user_projects).execute(conn).unwrap();
diesel::delete(users).execute(conn).unwrap();
diesel::delete(projects).execute(conn).unwrap();
let project: Project = {
use crate::schema::projects::dsl::*;
diesel::insert_into(projects)
.values((
name.eq("baz".to_string()),
url.eq("/uz".to_string()),
description.eq("None".to_string()),
category.eq(ProjectCategory::Software),
))
.get_result::<Project>(conn)
.unwrap()
};
let user: User = {
use crate::schema::users::dsl::*;
diesel::insert_into(users)
.values((
name.eq("Foo".to_string()),
email.eq("foo@example.com".to_string()),
))
.get_result(conn)
.unwrap()
};
{
use crate::schema::user_projects::dsl::*;
diesel::insert_into(user_projects)
.values((
user_id.eq(user.id),
project_id.eq(project.id),
is_current.eq(true),
is_default.eq(true),
))
.execute(conn)
.unwrap();
}
let res1 = count_matching_users("Foo", "bar@example.com", conn);
let res2 = count_matching_users("Bar", "foo@example.com", conn);
let res3 = count_matching_users("Foo", "foo@example.com", conn);
tm.rollback_transaction(conn).unwrap();
assert_eq!(res1, 1);
assert_eq!(res2, 1);
assert_eq!(res3, 1);
}
}

@@ -0,0 +1,36 @@
[package]
name = "filesystem-actor"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "filesystem_actor"
path = "./src/lib.rs"
[dependencies]
actix = { version = "0.10.0" }
futures = { version = "0.3.8" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7"
bytes = { version = "0.5.6" }
# Local storage
[dependencies.actix-files]
version = "*"
[dependencies.jirs-config]
path = "../../shared/jirs-config"
features = ["local-storage"]
[dependencies.tokio]
version = "0.2.23"
features = ["dns"]

@@ -0,0 +1,81 @@
use {
actix::SyncContext,
actix_files::{self, Files},
jirs_config::fs::Configuration,
std::{io::Write, path::PathBuf},
};
#[derive(Debug)]
pub enum FsError {
CopyFailed,
UnableToRemove,
CreateFile,
WriteFile,
}
pub struct FileSystemExecutor {
config: Configuration,
}
impl FileSystemExecutor {
pub fn client_path(&self) -> &str {
self.config.client_path.as_str()
}
pub fn tmp_path(&self) -> &str {
self.config.tmp_path.as_str()
}
}
impl Default for FileSystemExecutor {
fn default() -> Self {
Self {
config: Configuration::read(),
}
}
}
impl actix::Actor for FileSystemExecutor {
type Context = SyncContext<Self>;
}
#[derive(actix::Message)]
#[rtype(result = "Result<usize, FsError>")]
pub struct CreateFile {
pub source: tokio::sync::broadcast::Receiver<bytes::Bytes>,
pub file_name: String,
}
impl actix::Handler<CreateFile> for FileSystemExecutor {
type Result = Result<usize, FsError>;
fn handle(&mut self, msg: CreateFile, _ctx: &mut Self::Context) -> Self::Result {
let Configuration { store_path, .. } = &self.config;
let CreateFile {
mut source,
file_name,
} = msg;
let target = PathBuf::new().join(store_path).join(file_name);
let _ = std::fs::remove_file(&target);
let mut f = std::fs::File::create(target).map_err(|_| FsError::CreateFile)?;
let count = futures::executor::block_on(async move {
let mut mem = 0;
while let Ok(b) = source.recv().await {
mem += f.write(&b).unwrap_or_default();
}
mem
});
Ok(count)
}
}
pub fn service() -> Files {
let Configuration {
store_path,
client_path,
..
} = Configuration::read();
Files::new(client_path.as_str(), store_path.as_str())
}
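
CreateFile drains a tokio broadcast channel and writes the received chunks under the configured store_path. A minimal sketch of the sending side (channel capacity and the file name are arbitrary, and an actix System is assumed to be running):

// Sketch: stream one Bytes chunk into the filesystem actor.
async fn store_bytes(
    fs: actix::Addr<filesystem_actor::FileSystemExecutor>,
    payload: bytes::Bytes,
) -> Result<usize, filesystem_actor::FsError> {
    let (tx, rx) = tokio::sync::broadcast::channel(16);
    let write = fs.send(filesystem_actor::CreateFile {
        source: rx,
        file_name: "avatar.png".to_string(), // arbitrary example name
    });
    tx.send(payload).ok();
    drop(tx); // closing the channel ends the actor's recv() loop
    write.await.expect("FileSystemExecutor mailbox closed")
}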

@@ -0,0 +1,36 @@
[package]
name = "highlight-actor"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "highlight_actor"
path = "./src/lib.rs"
[dependencies]
serde = "*"
bincode = "*"
toml = { version = "*" }
actix = { version = "0.10.0" }
flate2 = { version = "*" }
syntect = { version = "*" }
lazy_static = { version = "*" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7"
[dependencies.jirs-config]
path = "../../shared/jirs-config"
features = ["hi"]
[dependencies.jirs-data]
path = "../../shared/jirs-data"
features = ["backend"]

@@ -0,0 +1,61 @@
use {
actix::{Actor, Handler, SyncContext},
std::sync::Arc,
syntect::{
easy::HighlightLines,
highlighting::{Style, ThemeSet},
parsing::SyntaxSet,
},
};
mod load;
lazy_static::lazy_static! {
pub static ref THEME_SET: Arc<ThemeSet> = Arc::new(load::integrated_themeset());
pub static ref SYNTAX_SET: Arc<SyntaxSet> = Arc::new(load::integrated_syntaxset());
}
#[derive(Debug)]
pub enum HighlightError {
UnknownLanguage,
UnknownTheme,
ResultUnserializable,
}
fn hi<'l>(code: &'l str, lang: &'l str) -> Result<Vec<(Style, &'l str)>, HighlightError> {
let set = SYNTAX_SET
.as_ref()
.find_syntax_by_name(lang)
.ok_or_else(|| HighlightError::UnknownLanguage)?;
let theme: &syntect::highlighting::Theme = THEME_SET
.as_ref()
.themes
.get("GitHub")
.ok_or_else(|| HighlightError::UnknownTheme)?;
let mut hi = HighlightLines::new(set, theme);
Ok(hi.highlight(code, SYNTAX_SET.as_ref()))
}
#[derive(Debug, Default)]
pub struct HighlightActor {}
impl Actor for HighlightActor {
type Context = SyncContext<Self>;
}
#[derive(actix::Message)]
#[rtype(result = "Result<Vec<u8>, HighlightError>")]
pub struct HighlightCode {
pub code: String,
pub lang: String,
}
impl Handler<HighlightCode> for HighlightActor {
type Result = Result<Vec<u8>, HighlightError>;
fn handle(&mut self, msg: HighlightCode, _ctx: &mut Self::Context) -> Self::Result {
let res = hi(&msg.code, &msg.lang)?;
bincode::serialize(&res).map_err(|_| HighlightError::ResultUnserializable)
}
}
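
HighlightActor does blocking syntect work, so it is meant to run on a SyncArbiter; the reply is the bincode-serialized Vec<(Style, &str)>. A minimal usage sketch (thread count and the "Rust" syntax name are illustrative, and an actix System is assumed to be running):

// Sketch: highlight a snippet through the actor and get bincode bytes back.
async fn highlight_snippet() -> Result<Vec<u8>, highlight_actor::HighlightError> {
    let addr = actix::SyncArbiter::start(1, highlight_actor::HighlightActor::default);
    addr.send(highlight_actor::HighlightCode {
        code: "fn main() {}".to_string(),
        lang: "Rust".to_string(),
    })
    .await
    .expect("HighlightActor mailbox closed")
}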

@@ -0,0 +1,38 @@
[package]
name = "mail-actor"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "mail_actor"
path = "./src/lib.rs"
[dependencies]
actix = { version = "0.10.0" }
serde = "*"
toml = { version = "*" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7"
dotenv = { version = "*" }
uuid = { version = "0.8.1", features = ["serde", "v4", "v5"] }
futures = { version = "*" }
openssl-sys = { version = "*", features = ["vendored"] }
libc = { version = "0.2.0", default-features = false }
lettre = { version = "*" }
lettre_email = { version = "*" }
[dependencies.jirs-config]
path = "../../shared/jirs-config"
features = ["mail", "web"]

@@ -3,7 +3,7 @@ use actix::{Handler, Message};
// use lettre_email;
use uuid::Uuid;
-use crate::mail::MailExecutor;
+use crate::MailExecutor;
#[derive(Debug)]
pub struct Invite {
@@ -23,7 +23,7 @@ impl Handler<Invite> for MailExecutor {
use lettre::Transport;
let transport = &mut self.transport;
let from = self.config.from.as_str();
-let addr = crate::web::Configuration::read().full_addr();
+let addr = jirs_config::web::Configuration::read().full_addr();
let html = format!(
r#"

@@ -0,0 +1,46 @@
use actix::{Actor, SyncContext};
// use lettre;
pub mod invite;
pub mod welcome;
pub type MailTransport = lettre::SmtpTransport;
pub struct MailExecutor {
pub transport: MailTransport,
pub config: jirs_config::mail::Configuration,
}
impl Actor for MailExecutor {
type Context = SyncContext<Self>;
}
impl Default for MailExecutor {
fn default() -> Self {
let config = jirs_config::mail::Configuration::read();
Self {
transport: mail_transport(&config),
config,
}
}
}
fn mail_client(config: &jirs_config::mail::Configuration) -> lettre::SmtpClient {
let mail_user = config.user.as_str();
let mail_pass = config.pass.as_str();
let mail_host = config.host.as_str();
lettre::SmtpClient::new_simple(mail_host)
.expect("Failed to init SMTP client")
.credentials(lettre::smtp::authentication::Credentials::new(
mail_user.to_string(),
mail_pass.to_string(),
))
.connection_reuse(lettre::smtp::ConnectionReuseParameters::ReuseUnlimited)
.smtp_utf8(true)
}
fn mail_transport(config: &jirs_config::mail::Configuration) -> MailTransport {
mail_client(config).transport()
}

@@ -3,7 +3,7 @@ use actix::{Handler, Message};
// use lettre_email;
use uuid::Uuid;
-use crate::mail::MailExecutor;
+use crate::MailExecutor;
#[derive(Debug)]
pub struct Welcome {

@@ -0,0 +1,85 @@
[package]
name = "web-actor"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "web_actor"
path = "./src/lib.rs"
[features]
local-storage = ["filesystem-actor"]
aws-s3 = ["rusoto_s3", "rusoto_core"]
default = ["local-storage", "aws-s3"]
[dependencies]
serde = "*"
bincode = "*"
toml = { version = "*" }
actix = { version = "0.10.0" }
actix-web = { version = "*" }
actix-cors = { version = "*" }
actix-service = { version = "*" }
actix-rt = "1"
actix-web-actors = "*"
actix-multipart = "*"
bytes = { version = "0.5.6" }
futures = { version = "0.3.8" }
openssl-sys = { version = "*", features = ["vendored"] }
libc = { version = "0.2.0", default-features = false }
flate2 = { version = "*" }
syntect = { version = "*" }
lazy_static = { version = "*" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7"
uuid = { version = "0.8.1", features = ["serde", "v4", "v5"] }
[dependencies.jirs-config]
path = "../../shared/jirs-config"
features = ["mail", "web", "local-storage"]
[dependencies.jirs-data]
path = "../../shared/jirs-data"
features = ["backend"]
[dependencies.database-actor]
path = "../database-actor"
[dependencies.mail-actor]
path = "../mail-actor"
[dependencies.websocket-actor]
path = "../websocket-actor"
[dependencies.filesystem-actor]
path = "../filesystem-actor"
optional = true
# Amazon S3
[dependencies.rusoto_s3]
optional = true
version = "0.45.0"
[dependencies.rusoto_core]
optional = true
version = "0.45.0"
[dependencies.rusoto_signature]
optional = true
version = "0.45.0"
[dependencies.tokio]
version = "0.2.23"
features = ["dns"]
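
The local-storage / aws-s3 features declared above are resolved at compile time with cfg gates rather than runtime switches; a condensed, hypothetical illustration of the pattern used in upload_avatar_image.rs (storage_backends is a made-up helper, not part of this commit):

// Exactly one of these bodies is compiled in, depending on which
// storage features were enabled for web-actor.
#[cfg(all(feature = "local-storage", feature = "aws-s3"))]
fn storage_backends() -> &'static str { "filesystem + S3" }

#[cfg(all(feature = "local-storage", not(feature = "aws-s3")))]
fn storage_backends() -> &'static str { "filesystem only" }

#[cfg(all(not(feature = "local-storage"), feature = "aws-s3"))]
fn storage_backends() -> &'static str { "S3 only" }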

View File

@ -0,0 +1,127 @@
use std::io::Write;
#[cfg(feature = "local-storage")]
use filesystem_actor;
use {
actix::Addr,
actix_multipart::{Field, Multipart},
actix_web::{http::header::ContentDisposition, post, web, web::Data, Error, HttpResponse},
database_actor::{
authorize_user::AuthorizeUser, user_projects::CurrentUserProject, users::UpdateAvatarUrl,
DbExecutor,
},
futures::{executor::block_on, StreamExt, TryStreamExt},
jirs_data::{User, UserId, WsMsg},
websocket_actor::server::{InnerMsg::BroadcastToChannel, WsServer},
};
#[post("/")]
pub async fn upload(
mut payload: Multipart,
db: Data<Addr<DbExecutor>>,
ws: Data<Addr<WsServer>>,
fs: Data<Addr<filesystem_actor::FileSystemExecutor>>,
) -> Result<HttpResponse, Error> {
let mut user_id: Option<UserId> = None;
let mut avatar_url: Option<String> = None;
while let Ok(Some(field)) = payload.try_next().await {
let disposition: ContentDisposition = match field.content_disposition() {
Some(d) => d,
_ => continue,
};
if !disposition.is_form_data() {
return Ok(HttpResponse::BadRequest().finish());
}
match disposition.get_name() {
Some("token") => {
user_id = Some(handle_token(field, db.clone()).await?);
}
Some("avatar") => {
let id = user_id.ok_or_else(|| HttpResponse::Unauthorized().finish())?;
avatar_url = Some(
crate::handlers::upload_avatar_image::handle_image(
id,
field,
disposition,
fs.clone(),
)
.await?,
);
}
_ => continue,
};
}
let user_id = match user_id {
Some(id) => id,
_ => return Ok(HttpResponse::Unauthorized().finish()),
};
let project_id = match block_on(db.send(CurrentUserProject { user_id })) {
Ok(Ok(user_project)) => user_project.project_id,
_ => return Ok(HttpResponse::UnprocessableEntity().finish()),
};
match (user_id, avatar_url) {
(user_id, Some(avatar_url)) => {
let user = update_user_avatar(user_id, avatar_url.clone(), db).await?;
ws.send(BroadcastToChannel(
project_id,
WsMsg::AvatarUrlChanged(user.id, avatar_url),
))
.await
.map_err(|_| HttpResponse::UnprocessableEntity().finish())?;
Ok(HttpResponse::NoContent().finish())
}
_ => Ok(HttpResponse::UnprocessableEntity().finish()),
}
}
async fn update_user_avatar(
user_id: UserId,
new_url: String,
db: Data<Addr<DbExecutor>>,
) -> Result<User, Error> {
match db
.send(UpdateAvatarUrl {
user_id,
avatar_url: Some(new_url),
})
.await
{
Ok(Ok(user)) => Ok(user),
Ok(Err(e)) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
Err(e) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
}
}
async fn handle_token(mut field: Field, db: Data<Addr<DbExecutor>>) -> Result<UserId, Error> {
let mut f: Vec<u8> = vec![];
while let Some(chunk) = field.next().await {
let data = chunk.unwrap();
f = web::block(move || f.write_all(&data).map(|_| f)).await?;
}
let access_token = String::from_utf8(f)
.unwrap_or_default()
.parse::<uuid::Uuid>()
.map_err(|_| HttpResponse::Unauthorized().finish())?;
match db.send(AuthorizeUser { access_token }).await {
Ok(Ok(user)) => Ok(user.id),
Ok(Err(e)) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
Err(e) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
}
}
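
A hedged sketch, not part of this commit, of how the upload endpoint above might be mounted in the server binary. The "/avatar" scope, the bind address and the *_addr parameters are assumptions; only the Data registrations mirror the extractor types used by the handler.

use actix_web::{web, App, HttpServer};

async fn serve(
    db_addr: actix::Addr<database_actor::DbExecutor>,
    ws_addr: actix::Addr<websocket_actor::server::WsServer>,
    fs_addr: actix::Addr<filesystem_actor::FileSystemExecutor>,
) -> std::io::Result<()> {
    HttpServer::new(move || {
        App::new()
            // Shared actor addresses, matching the Data<Addr<..>> extractors above.
            .data(db_addr.clone())
            .data(ws_addr.clone())
            .data(fs_addr.clone())
            .service(web::scope("/avatar").service(web_actor::avatar::upload))
    })
    .bind("0.0.0.0:5000")?
    .run()
    .await
}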

View File

@ -0,0 +1,74 @@
use actix_web::HttpResponse;
use jirs_data::{msg::WsError, ErrorResponse};
const TOKEN_NOT_FOUND: &str = "Token not found";
const DATABASE_CONNECTION_FAILED: &str = "Database connection failed";
#[derive(Debug)]
pub enum HighlightError {
UnknownLanguage,
UnknownTheme,
ResultUnserializable,
}
#[derive(Debug)]
pub enum ServiceError {
Unauthorized,
DatabaseConnectionLost,
DatabaseQueryFailed(String),
RecordNotFound(String),
RegisterCollision,
Error(WsError),
Highlight(HighlightError),
}
impl ServiceError {
pub fn into_http_response(self) -> HttpResponse {
self.into()
}
}
impl Into<HttpResponse> for ServiceError {
fn into(self) -> HttpResponse {
match self {
ServiceError::Unauthorized => HttpResponse::Unauthorized().json(ErrorResponse {
errors: vec![TOKEN_NOT_FOUND.to_owned()],
}),
ServiceError::DatabaseConnectionLost => {
HttpResponse::InternalServerError().json(ErrorResponse {
errors: vec![DATABASE_CONNECTION_FAILED.to_owned()],
})
}
ServiceError::DatabaseQueryFailed(error) => {
HttpResponse::BadRequest().json(ErrorResponse {
errors: vec![error],
})
}
ServiceError::RecordNotFound(resource_name) => {
HttpResponse::BadRequest().json(ErrorResponse {
errors: vec![format!("Resource not found: {}", resource_name)],
})
}
ServiceError::RegisterCollision => HttpResponse::Unauthorized().json(ErrorResponse {
errors: vec!["Register collision".to_string()],
}),
ServiceError::Error(error) => HttpResponse::BadRequest().json(ErrorResponse {
errors: vec![error.to_str().to_string()],
}),
ServiceError::Highlight(HighlightError::UnknownTheme) => HttpResponse::BadRequest()
.json(ErrorResponse::single(
"Code highlight Failed. Unexpected theme",
)),
ServiceError::Highlight(HighlightError::UnknownLanguage) => HttpResponse::BadRequest()
.json(ErrorResponse::single(
"Can't highlight in given language. It's unknown",
)),
ServiceError::Highlight(HighlightError::ResultUnserializable) => {
HttpResponse::BadRequest().json(ErrorResponse::single(
"Highlight succeed but result can't be send",
))
}
}
}
}
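
A small usage sketch under the assumption that ErrorResponse serializes to a JSON object with an errors array; guard is a hypothetical helper, the point is only that the Into<HttpResponse> impl above chooses the status code and body.

use actix_web::HttpResponse;

// Hypothetical helper: turn an authorization check into a response.
fn guard(authorized: bool) -> HttpResponse {
    if authorized {
        HttpResponse::NoContent().finish()
    } else {
        // Roughly: 401 with a JSON body listing "Token not found".
        ServiceError::Unauthorized.into_http_response()
    }
}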

View File

@ -0,0 +1 @@
pub mod upload_avatar_image;

View File

@ -0,0 +1,214 @@
#[cfg(feature = "local-storage")]
use filesystem_actor::FileSystemExecutor;
use {
actix::Addr,
actix_multipart::Field,
actix_web::{http::header::ContentDisposition, web::Data, Error},
futures::{StreamExt, TryStreamExt},
jirs_data::UserId,
rusoto_core::ByteStream,
tokio::sync::broadcast::{Receiver, Sender},
};
#[cfg(feature = "aws-s3")]
use {
jirs_config::web::AmazonS3Storage,
rusoto_s3::{PutObjectRequest, S3Client, S3},
};
#[cfg(all(feature = "local-storage", feature = "aws-s3"))]
pub(crate) async fn handle_image(
user_id: UserId,
mut field: Field,
disposition: ContentDisposition,
fs: Data<Addr<FileSystemExecutor>>,
) -> Result<String, Error> {
let filename = disposition.get_filename().unwrap();
let system_file_name = format!("{}-{}", user_id, filename);
let (sender, receiver) = tokio::sync::broadcast::channel(4);
let fs_fut = tokio::task::spawn(local_storage_write(
system_file_name.clone(),
fs.clone(),
user_id,
sender.subscribe(),
));
// Upload to AWS S3
let aws_fut = tokio::task::spawn(aws_s3(system_file_name, receiver));
read_form_data(&mut field, sender).await;
let mut new_link = None;
if let Ok(url) = fs_fut.await {
new_link = url;
}
if let Ok(url) = aws_fut.await {
new_link = url;
}
Ok(new_link.unwrap_or_default())
}
#[cfg(all(not(feature = "local-storage"), feature = "aws-s3"))]
pub(crate) async fn handle_image(
user_id: UserId,
mut field: Field,
disposition: ContentDisposition,
fs: Data<Addr<FileSystemExecutor>>,
) -> Result<String, Error> {
let filename = disposition.get_filename().unwrap();
let system_file_name = format!("{}-{}", user_id, filename);
let (sender, receiver) = tokio::sync::broadcast::channel(4);
// Upload to AWS S3
let aws_fut = aws_s3(system_file_name, receiver);
read_form_data(&mut field, sender).await;
let new_link = tokio::select! {
b = aws_fut => b,
};
{
use filesystem_actor::RemoveTmpFile;
let _ = fs
.send(RemoveTmpFile {
file_name: format!("{}-{}", user_id, filename),
})
.await
.ok();
};
Ok(new_link.unwrap_or_default())
}
#[cfg(all(feature = "local-storage", not(feature = "aws-s3")))]
pub(crate) async fn handle_image(
user_id: UserId,
mut field: Field,
disposition: ContentDisposition,
fs: Data<Addr<FileSystemExecutor>>,
) -> Result<String, Error> {
let filename = disposition.get_filename().unwrap();
let system_file_name = format!("{}-{}", user_id, filename);
let (sender, receiver) = tokio::sync::broadcast::channel(4);
let fs_fut = local_storage_write(
system_file_name.clone(),
fs.clone(),
user_id,
sender.subscribe(),
);
read_form_data(&mut field, sender).await;
let new_link = tokio::select! {
a = fs_fut => a,
};
{
use filesystem_actor::RemoveTmpFile;
let _ = fs
.send(RemoveTmpFile {
file_name: format!("{}-{}", user_id, filename),
})
.await
.ok();
};
Ok(new_link.unwrap_or_default())
}
/// Read the uploaded file from the client and broadcast each chunk to the storage writers
async fn read_form_data(field: &mut Field, sender: Sender<bytes::Bytes>) {
while let Some(chunk) = field.next().await {
let data = chunk.unwrap();
if let Err(err) = sender.send(data) {
log::error!("{:?}", err);
}
}
}
/// Stream bytes directly to AWS S3 Service
#[cfg(feature = "aws-s3")]
async fn aws_s3(system_file_name: String, mut receiver: Receiver<bytes::Bytes>) -> Option<String> {
let web_config = jirs_config::web::Configuration::read();
let s3 = &web_config.s3;
if !s3.active {
return None;
}
s3.set_variables();
log::debug!("{:?}", s3);
let mut v: Vec<u8> = vec![];
use bytes::Buf;
while let Ok(b) = receiver.recv().await {
v.extend_from_slice(b.bytes())
}
// let stream = receiver.into_stream();
// let stream = stream.map_err(|_e| std::io::Error::from_raw_os_error(1));
let client = S3Client::new(s3.region());
let put_object = PutObjectRequest {
bucket: s3.bucket.clone(),
key: system_file_name.clone(),
// body: Some(ByteStream::new(stream)),
body: Some(v.into()),
..Default::default()
};
let id = match client.put_object(put_object).await {
Ok(obj) => obj,
Err(e) => {
log::error!("{}", e);
return None;
}
};
log::debug!("{:?}", id);
Some(aws_s3_url(system_file_name.as_str(), s3))
}
/// Write the uploaded file to local storage and return the public URL it will be served under
#[cfg(feature = "local-storage")]
async fn local_storage_write(
system_file_name: String,
fs: Data<Addr<filesystem_actor::FileSystemExecutor>>,
user_id: jirs_data::UserId,
receiver: Receiver<bytes::Bytes>,
) -> Option<String> {
let web_config = jirs_config::web::Configuration::read();
let fs_config = jirs_config::fs::Configuration::read();
let _ = fs
.send(filesystem_actor::CreateFile {
source: receiver,
file_name: system_file_name.clone(),
})
.await;
Some(format!(
"{proto}://{bind}{port}{client_path}/{user_id}-{filename}",
proto = if web_config.ssl { "https" } else { "http" },
bind = web_config.bind,
port = match web_config.port.as_str() {
"80" | "443" => "".to_string(),
p => format!(":{}", p),
},
client_path = fs_config.client_path,
user_id = user_id,
filename = system_file_name
))
}
#[cfg(feature = "aws-s3")]
fn aws_s3_url(key: &str, config: &AmazonS3Storage) -> String {
format!(
"https://{bucket}.s3.{region}.amazonaws.com/{key}",
bucket = config.bucket,
region = config.region_name,
key = key
)
}
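
The core trick above is a tokio 0.2 broadcast channel fanning one multipart stream out to two writers. In miniature, with arbitrary example capacity and payloads:

use tokio::sync::broadcast;

async fn fan_out_demo() {
    // One producer, two independent consumers; each receiver sees every chunk.
    let (tx, mut s3_rx) = broadcast::channel::<bytes::Bytes>(4);
    let mut fs_rx = tx.subscribe();

    let s3 = tokio::task::spawn(async move {
        while let Ok(chunk) = s3_rx.recv().await {
            let _ = chunk; // would buffer / upload the chunk here
        }
    });
    let fs = tokio::task::spawn(async move {
        while let Ok(chunk) = fs_rx.recv().await {
            let _ = chunk; // would append the chunk to the temp file here
        }
    });

    let _ = tx.send(bytes::Bytes::from_static(b"chunk-1"));
    let _ = tx.send(bytes::Bytes::from_static(b"chunk-2"));
    drop(tx); // closing the sender lets both receive loops end

    let _ = s3.await;
    let _ = fs.await;
}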

View File

@ -0,0 +1,36 @@
#[macro_use]
extern crate log;
pub use errors::*;
use {
crate::middleware::authorize::token_from_headers,
actix::Addr,
actix_web::{web::Data, HttpRequest, HttpResponse},
database_actor::{authorize_user::AuthorizeUser, DbExecutor},
jirs_data::User,
};
pub mod avatar;
pub mod errors;
pub mod handlers;
pub mod middleware;
pub async fn user_from_request(
req: HttpRequest,
db: &Data<Addr<DbExecutor>>,
) -> Result<User, HttpResponse> {
let token = match token_from_headers(req.headers()) {
Ok(uuid) => uuid,
_ => return Err(ServiceError::Unauthorized.into_http_response()),
};
match db
.send(AuthorizeUser {
access_token: token,
})
.await
{
Ok(Ok(user)) => Ok(user),
Ok(Err(_e)) => Err(HttpResponse::InternalServerError().body("Critical database error")),
_ => Err(ServiceError::Unauthorized.into_http_response()),
}
}
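
A hypothetical endpoint built on the helper above; the /me path is made up and User is assumed to implement Serialize since it already travels over the websocket. It reuses the imports at the top of this file plus actix_web::get.

use actix_web::get;

// Returns the authenticated user's profile, or the error response
// produced by user_from_request.
#[get("/me")]
pub async fn me(req: HttpRequest, db: Data<Addr<DbExecutor>>) -> HttpResponse {
    match user_from_request(req, &db).await {
        Ok(user) => HttpResponse::Ok().json(user),
        Err(response) => response,
    }
}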

View File

@ -1,16 +1,17 @@
use std::task::{Context, Poll}; use {
actix_service::{Service, Transform},
actix_web::{
dev::{ServiceRequest, ServiceResponse},
http::header::{self},
http::HeaderMap,
Error,
},
futures::future::{ok, FutureExt, LocalBoxFuture, Ready},
jirs_data::User,
std::task::{Context, Poll},
};
use actix_service::{Service, Transform}; type Db = actix_web::web::Data<database_actor::DbPool>;
use actix_web::http::header::{self};
use actix_web::http::HeaderMap;
use actix_web::{dev::ServiceRequest, dev::ServiceResponse, Error};
use futures::future::{ok, FutureExt, LocalBoxFuture, Ready};
use jirs_data::User;
use crate::db::SyncQuery;
type Db = actix_web::web::Data<crate::db::DbPool>;
#[derive(Default)] #[derive(Default)]
pub struct Authorize; pub struct Authorize;
@ -97,8 +98,13 @@ fn check_token(
pool: Db, pool: Db,
) -> std::result::Result<User, crate::errors::ServiceError> { ) -> std::result::Result<User, crate::errors::ServiceError> {
token_from_headers(headers).and_then(|access_token| { token_from_headers(headers).and_then(|access_token| {
use crate::db::authorize_user::AuthorizeUser; use database_actor::authorize_user::AuthorizeUser;
AuthorizeUser { access_token }.handle(&pool) let conn = pool
.get()
.map_err(|_| crate::errors::ServiceError::DatabaseConnectionLost)?;
AuthorizeUser { access_token }
.execute(&conn)
.map_err(|_| crate::errors::ServiceError::Unauthorized)
}) })
} }

View File

@ -0,0 +1,50 @@
[package]
name = "websocket-actor"
version = "0.1.0"
authors = ["Adrian Wozniak <adrian.wozniak@ita-prog.pl>"]
edition = "2018"
description = "JIRS (Simplified JIRA in Rust) shared data types"
repository = "https://gitlab.com/adrian.wozniak/jirs"
license = "MPL-2.0"
#license-file = "../LICENSE"
[lib]
name = "websocket_actor"
path = "./src/lib.rs"
[dependencies]
serde = "*"
bincode = "*"
toml = { version = "*" }
actix = { version = "0.10.0" }
actix-web = { version = "*" }
actix-web-actors = "*"
futures = { version = "0.3.8" }
openssl-sys = { version = "*", features = ["vendored"] }
libc = { version = "0.2.0", default-features = false }
flate2 = { version = "*" }
syntect = { version = "*" }
lazy_static = { version = "*" }
log = "0.4"
pretty_env_logger = "0.4"
env_logger = "0.7"
uuid = { version = "0.8.1", features = ["serde", "v4", "v5"] }
[dependencies.jirs-config]
path = "../../shared/jirs-config"
features = ["websocket"]
[dependencies.jirs-data]
path = "../../shared/jirs-data"
features = ["backend"]
[dependencies.database-actor]
path = "../database-actor"
[dependencies.mail-actor]
path = "../mail-actor"

View File

@ -1,13 +1,15 @@
use actix::AsyncContext; use {
use futures::executor::block_on; crate::{WebSocketActor, WsHandler, WsResult},
actix::AsyncContext,
use jirs_data::{Token, WsMsg}; database_actor::{
authorize_user::AuthorizeUser,
use crate::db::authorize_user::AuthorizeUser; tokens::{CreateBindToken, FindBindToken},
use crate::db::tokens::{CreateBindToken, FindBindToken}; users::LookupUser,
use crate::db::users::LookupUser; },
use crate::mail::welcome::Welcome; futures::executor::block_on,
use crate::ws::{WebSocketActor, WsHandler, WsResult}; jirs_data::{Token, WsMsg},
mail_actor::welcome::Welcome,
};
pub struct Authenticate { pub struct Authenticate {
pub name: String, pub name: String,
@ -21,22 +23,22 @@ impl WsHandler<Authenticate> for WebSocketActor {
let user = match block_on(self.db.send(LookupUser { name, email })) { let user = match block_on(self.db.send(LookupUser { name, email })) {
Ok(Ok(user)) => user, Ok(Ok(user)) => user,
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
}; };
let token = match block_on(self.db.send(CreateBindToken { user_id: user.id })) { let token = match block_on(self.db.send(CreateBindToken { user_id: user.id })) {
Ok(Ok(token)) => token, Ok(Ok(token)) => token,
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
}; };
@ -47,11 +49,11 @@ impl WsHandler<Authenticate> for WebSocketActor {
})) { })) {
Ok(Ok(_)) => (), Ok(Ok(_)) => (),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); return Ok(None);
} }
} }

View File

@ -2,7 +2,7 @@ use futures::executor::block_on;
use jirs_data::{CommentId, CreateCommentPayload, IssueId, UpdateCommentPayload, WsMsg}; use jirs_data::{CommentId, CreateCommentPayload, IssueId, UpdateCommentPayload, WsMsg};
use crate::ws::{WebSocketActor, WsHandler, WsResult}; use crate::{WebSocketActor, WsHandler, WsResult};
pub struct LoadIssueComments { pub struct LoadIssueComments {
pub issue_id: IssueId, pub issue_id: IssueId,
@ -12,16 +12,16 @@ impl WsHandler<LoadIssueComments> for WebSocketActor {
fn handle_msg(&mut self, msg: LoadIssueComments, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: LoadIssueComments, _ctx: &mut Self::Context) -> WsResult {
self.require_user()?; self.require_user()?;
let comments = match block_on(self.db.send(crate::db::comments::LoadIssueComments { let comments = match block_on(self.db.send(database_actor::comments::LoadIssueComments {
issue_id: msg.issue_id, issue_id: msg.issue_id,
})) { })) {
Ok(Ok(comments)) => comments, Ok(Ok(comments)) => comments,
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); return Ok(None);
} }
}; };
@ -32,7 +32,7 @@ impl WsHandler<LoadIssueComments> for WebSocketActor {
impl WsHandler<CreateCommentPayload> for WebSocketActor { impl WsHandler<CreateCommentPayload> for WebSocketActor {
fn handle_msg(&mut self, mut msg: CreateCommentPayload, ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, mut msg: CreateCommentPayload, ctx: &mut Self::Context) -> WsResult {
use crate::db::comments::CreateComment; use database_actor::comments::CreateComment;
let user_id = self.require_user()?.id; let user_id = self.require_user()?.id;
if msg.user_id.is_none() { if msg.user_id.is_none() {
@ -46,11 +46,11 @@ impl WsHandler<CreateCommentPayload> for WebSocketActor {
})) { })) {
Ok(Ok(_)) => (), Ok(Ok(_)) => (),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); return Ok(None);
} }
}; };
@ -60,7 +60,7 @@ impl WsHandler<CreateCommentPayload> for WebSocketActor {
impl WsHandler<UpdateCommentPayload> for WebSocketActor { impl WsHandler<UpdateCommentPayload> for WebSocketActor {
fn handle_msg(&mut self, msg: UpdateCommentPayload, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: UpdateCommentPayload, _ctx: &mut Self::Context) -> WsResult {
use crate::db::comments::UpdateComment; use database_actor::comments::UpdateComment;
let user_id = self.require_user()?.id; let user_id = self.require_user()?.id;
@ -76,11 +76,11 @@ impl WsHandler<UpdateCommentPayload> for WebSocketActor {
})) { })) {
Ok(Ok(comment)) => comment, Ok(Ok(comment)) => comment,
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); return Ok(None);
} }
}; };
@ -95,7 +95,7 @@ pub struct DeleteComment {
impl WsHandler<DeleteComment> for WebSocketActor { impl WsHandler<DeleteComment> for WebSocketActor {
fn handle_msg(&mut self, msg: DeleteComment, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: DeleteComment, _ctx: &mut Self::Context) -> WsResult {
use crate::db::comments::DeleteComment; use database_actor::comments::DeleteComment;
let user_id = self.require_user()?.id; let user_id = self.require_user()?.id;
@ -104,17 +104,15 @@ impl WsHandler<DeleteComment> for WebSocketActor {
user_id, user_id,
}; };
match block_on(self.db.send(m)) { match block_on(self.db.send(m)) {
Ok(Ok(_)) => (), Ok(Ok(n)) => Ok(Some(WsMsg::CommentDeleted(msg.comment_id, n))),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); Ok(None)
} }
Err(e) => { Err(e) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); Ok(None)
} }
}; }
Ok(Some(WsMsg::CommentDeleted(msg.comment_id)))
} }
} }

View File

@ -2,14 +2,15 @@ use futures::executor::block_on;
use jirs_data::{EpicId, NameString, UserProject, WsMsg}; use jirs_data::{EpicId, NameString, UserProject, WsMsg};
use crate::ws::{WebSocketActor, WsHandler, WsResult}; use crate::{WebSocketActor, WsHandler, WsResult};
pub struct LoadEpics; pub struct LoadEpics;
impl WsHandler<LoadEpics> for WebSocketActor { impl WsHandler<LoadEpics> for WebSocketActor {
fn handle_msg(&mut self, _msg: LoadEpics, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, _msg: LoadEpics, _ctx: &mut Self::Context) -> WsResult {
let project_id = self.require_user_project()?.project_id; let project_id = self.require_user_project()?.project_id;
let epics = query_db_or_print!(self, crate::db::epics::LoadEpics { project_id }); let epics =
crate::query_db_or_print!(self, database_actor::epics::LoadEpics { project_id });
Ok(Some(WsMsg::EpicsLoaded(epics))) Ok(Some(WsMsg::EpicsLoaded(epics)))
} }
} }
@ -26,9 +27,9 @@ impl WsHandler<CreateEpic> for WebSocketActor {
project_id, project_id,
.. ..
} = self.require_user_project()?; } = self.require_user_project()?;
let epic = query_db_or_print!( let epic = crate::query_db_or_print!(
self, self,
crate::db::epics::CreateEpic { database_actor::epics::CreateEpic {
user_id: *user_id, user_id: *user_id,
project_id: *project_id, project_id: *project_id,
name, name,
@ -47,9 +48,9 @@ impl WsHandler<UpdateEpic> for WebSocketActor {
fn handle_msg(&mut self, msg: UpdateEpic, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: UpdateEpic, _ctx: &mut Self::Context) -> WsResult {
let UpdateEpic { epic_id, name } = msg; let UpdateEpic { epic_id, name } = msg;
let UserProject { project_id, .. } = self.require_user_project()?; let UserProject { project_id, .. } = self.require_user_project()?;
let epic = query_db_or_print!( let epic = crate::query_db_or_print!(
self, self,
crate::db::epics::UpdateEpic { database_actor::epics::UpdateEpic {
project_id: *project_id, project_id: *project_id,
epic_id: epic_id, epic_id: epic_id,
name: name.clone(), name: name.clone(),
@ -67,13 +68,13 @@ impl WsHandler<DeleteEpic> for WebSocketActor {
fn handle_msg(&mut self, msg: DeleteEpic, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: DeleteEpic, _ctx: &mut Self::Context) -> WsResult {
let DeleteEpic { epic_id } = msg; let DeleteEpic { epic_id } = msg;
let UserProject { user_id, .. } = self.require_user_project()?; let UserProject { user_id, .. } = self.require_user_project()?;
query_db_or_print!( let n = crate::query_db_or_print!(
self, self,
crate::db::epics::DeleteEpic { database_actor::epics::DeleteEpic {
user_id: *user_id, user_id: *user_id,
epic_id: epic_id, epic_id: epic_id,
} }
); );
Ok(Some(WsMsg::EpicDeleted(epic_id))) Ok(Some(WsMsg::EpicDeleted(epic_id, n)))
} }
} }

View File

@ -1,13 +1,12 @@
use futures::executor::block_on; use {
crate::{server::InnerMsg, WebSocketActor, WsHandler, WsMessageSender, WsResult},
use jirs_data::{ database_actor::{invitations, messages::CreateMessageReceiver},
EmailString, InvitationId, InvitationToken, MessageType, UserRole, UsernameString, WsMsg, futures::executor::block_on,
jirs_data::{
EmailString, InvitationId, InvitationToken, MessageType, UserRole, UsernameString, WsMsg,
},
}; };
use crate::db::invitations;
use crate::db::messages::CreateMessageReceiver;
use crate::ws::{InnerMsg, WebSocketActor, WsHandler, WsMessageSender, WsResult};
pub struct ListInvitation; pub struct ListInvitation;
impl WsHandler<ListInvitation> for WebSocketActor { impl WsHandler<ListInvitation> for WebSocketActor {
@ -19,11 +18,11 @@ impl WsHandler<ListInvitation> for WebSocketActor {
let res = match block_on(self.db.send(invitations::ListInvitation { user_id })) { let res = match block_on(self.db.send(invitations::ListInvitation { user_id })) {
Ok(Ok(v)) => Some(WsMsg::InvitationListLoaded(v)), Ok(Ok(v)) => Some(WsMsg::InvitationListLoaded(v)),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); log::error!("{:?}", e);
return Ok(None); return Ok(None);
} }
Err(e) => { Err(e) => {
error!("{}", e); log::error!("{}", e);
return Ok(None); return Ok(None);
} }
}; };
@ -46,24 +45,25 @@ impl WsHandler<CreateInvitation> for WebSocketActor {
let (user_id, inviter_name) = self.require_user().map(|u| (u.id, u.name.clone()))?; let (user_id, inviter_name) = self.require_user().map(|u| (u.id, u.name.clone()))?;
let CreateInvitation { email, name, role } = msg; let CreateInvitation { email, name, role } = msg;
let invitation = match block_on(self.db.send(crate::db::invitations::CreateInvitation { let invitation =
user_id, match block_on(self.db.send(database_actor::invitations::CreateInvitation {
project_id, user_id,
email: email.clone(), project_id,
name: name.clone(), email: email.clone(),
role, name: name.clone(),
})) { role,
Ok(Ok(invitation)) => invitation, })) {
Ok(Err(e)) => { Ok(Ok(invitation)) => invitation,
error!("{:?}", e); Ok(Err(e)) => {
return Ok(Some(WsMsg::InvitationSendFailure)); error!("{:?}", e);
} return Ok(Some(WsMsg::InvitationSendFailure));
Err(e) => { }
error!("{}", e); Err(e) => {
return Ok(Some(WsMsg::InvitationSendFailure)); error!("{}", e);
} return Ok(Some(WsMsg::InvitationSendFailure));
}; }
match block_on(self.mail.send(crate::mail::invite::Invite { };
match block_on(self.mail.send(mail_actor::invite::Invite {
bind_token: invitation.bind_token, bind_token: invitation.bind_token,
email: invitation.email, email: invitation.email,
inviter_name, inviter_name,
@ -80,7 +80,7 @@ impl WsHandler<CreateInvitation> for WebSocketActor {
} }
// If user exists then send message to him // If user exists then send message to him
if let Ok(Ok(message)) = block_on(self.db.send(crate::db::messages::CreateMessage { if let Ok(Ok(message)) = block_on(self.db.send(database_actor::messages::CreateMessage {
receiver: CreateMessageReceiver::Lookup { name, email }, receiver: CreateMessageReceiver::Lookup { name, email },
sender_id: user_id, sender_id: user_id,
summary: "You have been invited to project".to_string(), summary: "You have been invited to project".to_string(),
@ -166,19 +166,22 @@ impl WsHandler<AcceptInvitation> for WebSocketActor {
} }
}; };
for message in block_on(self.db.send(crate::db::messages::LookupMessagesByToken { for message in block_on(
token: invitation_token, self.db
user_id: token.user_id, .send(database_actor::messages::LookupMessagesByToken {
})) token: invitation_token,
.unwrap_or_else(|_| Ok(vec![])) user_id: token.user_id,
.unwrap_or_default() }),
)
.unwrap_or_else(|_| Ok(vec![]))
.unwrap_or_default()
{ {
match block_on(self.db.send(crate::db::messages::MarkMessageSeen { match block_on(self.db.send(database_actor::messages::MarkMessageSeen {
user_id: token.user_id, user_id: token.user_id,
message_id: message.id, message_id: message.id,
})) { })) {
Ok(Ok(id)) => { Ok(Ok(n)) => {
ctx.send_msg(&WsMsg::MessageMarkedSeen(id)); ctx.send_msg(&WsMsg::MessageMarkedSeen(message.id, n));
} }
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); error!("{:?}", e);

View File

@ -1,9 +1,9 @@
use futures::executor::block_on; use futures::executor::block_on;
use database_actor::issue_statuses;
use jirs_data::{IssueStatusId, Position, TitleString, WsMsg}; use jirs_data::{IssueStatusId, Position, TitleString, WsMsg};
use crate::db::issue_statuses; use crate::{WebSocketActor, WsHandler, WsResult};
use crate::ws::{WebSocketActor, WsHandler, WsResult};
pub struct LoadIssueStatuses; pub struct LoadIssueStatuses;
@ -71,7 +71,7 @@ impl WsHandler<DeleteIssueStatus> for WebSocketActor {
issue_status_id, issue_status_id,
project_id, project_id,
})) { })) {
Ok(Ok(is)) => Some(WsMsg::IssueStatusDeleted(is)), Ok(Ok(n)) => Some(WsMsg::IssueStatusDeleted(msg.issue_status_id, n)),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); error!("{:?}", e);
return Ok(None); return Ok(None);

View File

@ -1,12 +1,13 @@
use std::collections::HashMap; use {
crate::{WebSocketActor, WsHandler, WsResult},
use futures::executor::block_on; database_actor::{
issue_assignees::LoadAssignees,
use jirs_data::{CreateIssuePayload, IssueAssignee, IssueFieldId, IssueId, PayloadVariant, WsMsg}; issues::{LoadProjectIssues, UpdateIssue},
},
use crate::db::issue_assignees::LoadAssignees; futures::executor::block_on,
use crate::db::issues::{LoadProjectIssues, UpdateIssue}; jirs_data::{CreateIssuePayload, IssueAssignee, IssueFieldId, IssueId, PayloadVariant, WsMsg},
use crate::ws::{WebSocketActor, WsHandler, WsResult}; std::collections::HashMap,
};
pub struct UpdateIssueHandler { pub struct UpdateIssueHandler {
pub id: i32, pub id: i32,
@ -24,8 +25,23 @@ impl WsHandler<UpdateIssueHandler> for WebSocketActor {
payload, payload,
} = msg; } = msg;
let mut msg = UpdateIssue::default(); let mut msg = UpdateIssue {
msg.issue_id = id; issue_id: id,
title: None,
issue_type: None,
priority: None,
list_position: None,
description: None,
description_text: None,
estimate: None,
time_spent: None,
time_remaining: None,
project_id: None,
user_ids: None,
reporter_id: None,
issue_status_id: None,
epic_id: None,
};
match (field_id, payload) { match (field_id, payload) {
(IssueFieldId::Type, PayloadVariant::IssueType(t)) => { (IssueFieldId::Type, PayloadVariant::IssueType(t)) => {
msg.issue_type = Some(t); msg.issue_type = Some(t);
@ -96,7 +112,7 @@ impl WsHandler<UpdateIssueHandler> for WebSocketActor {
impl WsHandler<CreateIssuePayload> for WebSocketActor { impl WsHandler<CreateIssuePayload> for WebSocketActor {
fn handle_msg(&mut self, msg: CreateIssuePayload, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: CreateIssuePayload, _ctx: &mut Self::Context) -> WsResult {
self.require_user()?; self.require_user()?;
let msg = crate::db::issues::CreateIssue { let msg = database_actor::issues::CreateIssue {
title: msg.title, title: msg.title,
issue_type: msg.issue_type, issue_type: msg.issue_type,
issue_status_id: msg.issue_status_id, issue_status_id: msg.issue_status_id,
@ -135,9 +151,9 @@ impl WsHandler<DeleteIssue> for WebSocketActor {
self.require_user()?; self.require_user()?;
let m = match block_on( let m = match block_on(
self.db self.db
.send(crate::db::issues::DeleteIssue { issue_id: msg.id }), .send(database_actor::issues::DeleteIssue { issue_id: msg.id }),
) { ) {
Ok(Ok(_)) => Some(WsMsg::IssueDeleted(msg.id)), Ok(Ok(n)) => Some(WsMsg::IssueDeleted(msg.id, n)),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); error!("{:?}", e);
return Ok(None); return Ok(None);

View File

@ -1,9 +1,9 @@
use futures::executor::block_on; use futures::executor::block_on;
use database_actor::messages;
use jirs_data::{MessageId, WsMsg}; use jirs_data::{MessageId, WsMsg};
use crate::db::messages; use crate::{WebSocketActor, WsHandler, WsResult};
use crate::ws::{WebSocketActor, WsHandler, WsResult};
pub struct LoadMessages; pub struct LoadMessages;
@ -35,7 +35,7 @@ impl WsHandler<MarkMessageSeen> for WebSocketActor {
message_id: msg.id, message_id: msg.id,
user_id, user_id,
})) { })) {
Ok(Ok(id)) => Ok(Some(WsMsg::MessageMarkedSeen(id))), Ok(Ok(count)) => Ok(Some(WsMsg::MessageMarkedSeen(msg.id, count))),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); error!("{:?}", e);
Ok(None) Ok(None)

View File

@ -0,0 +1,15 @@
pub use {
auth::*, comments::*, epics::*, invitations::*, issue_statuses::*, issues::*, messages::*,
projects::*, user_projects::*, users::*,
};
pub mod auth;
pub mod comments;
pub mod epics;
pub mod invitations;
pub mod issue_statuses;
pub mod issues;
pub mod messages;
pub mod projects;
pub mod user_projects;
pub mod users;

View File

@ -1,9 +1,9 @@
use futures::executor::block_on; use futures::executor::block_on;
use database_actor as db;
use jirs_data::{UpdateProjectPayload, UserProject, WsMsg}; use jirs_data::{UpdateProjectPayload, UserProject, WsMsg};
use crate::db; use crate::{WebSocketActor, WsHandler, WsResult};
use crate::ws::{WebSocketActor, WsHandler, WsResult};
impl WsHandler<UpdateProjectPayload> for WebSocketActor { impl WsHandler<UpdateProjectPayload> for WebSocketActor {
fn handle_msg(&mut self, msg: UpdateProjectPayload, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, msg: UpdateProjectPayload, _ctx: &mut Self::Context) -> WsResult {
@ -12,7 +12,7 @@ impl WsHandler<UpdateProjectPayload> for WebSocketActor {
project_id, project_id,
.. ..
} = self.require_user_project()?; } = self.require_user_project()?;
match block_on(self.db.send(crate::db::projects::UpdateProject { match block_on(self.db.send(database_actor::projects::UpdateProject {
project_id: *project_id, project_id: *project_id,
name: msg.name, name: msg.name,
url: msg.url, url: msg.url,
@ -32,7 +32,7 @@ impl WsHandler<UpdateProjectPayload> for WebSocketActor {
}; };
let projects = match block_on( let projects = match block_on(
self.db self.db
.send(crate::db::projects::LoadProjects { user_id: *user_id }), .send(database_actor::projects::LoadProjects { user_id: *user_id }),
) { ) {
Ok(Ok(projects)) => projects, Ok(Ok(projects)) => projects,
Ok(Err(e)) => { Ok(Err(e)) => {

View File

@ -1,9 +1,9 @@
use futures::executor::block_on; use futures::executor::block_on;
use database_actor as db;
use jirs_data::{UserProjectId, WsMsg}; use jirs_data::{UserProjectId, WsMsg};
use crate::db; use crate::{WebSocketActor, WsHandler, WsResult};
use crate::ws::{WebSocketActor, WsHandler, WsResult};
pub struct LoadUserProjects; pub struct LoadUserProjects;

View File

@ -1,17 +1,16 @@
use futures::executor::block_on; use futures::executor::block_on;
use jirs_data::{UserId, UserProject, UserRole, WsMsg}; use jirs_data::{UserId, UserProject, UserRole, WsMsg};
use {
use crate::{ crate::{handlers::auth::Authenticate, WebSocketActor, WsHandler, WsResult},
db::{self, users::Register as DbRegister}, database_actor::{self, users::Register as DbRegister},
ws::{auth::Authenticate, WebSocketActor, WsHandler, WsResult},
}; };
pub struct LoadProjectUsers; pub struct LoadProjectUsers;
impl WsHandler<LoadProjectUsers> for WebSocketActor { impl WsHandler<LoadProjectUsers> for WebSocketActor {
fn handle_msg(&mut self, _msg: LoadProjectUsers, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, _msg: LoadProjectUsers, _ctx: &mut Self::Context) -> WsResult {
use crate::db::users::LoadProjectUsers as Msg; use database_actor::users::LoadProjectUsers as Msg;
let project_id = self.require_user_project()?.project_id; let project_id = self.require_user_project()?.project_id;
let m = match block_on(self.db.send(Msg { project_id })) { let m = match block_on(self.db.send(Msg { project_id })) {
@ -66,7 +65,10 @@ impl WsHandler<LoadInvitedUsers> for WebSocketActor {
fn handle_msg(&mut self, _msg: LoadInvitedUsers, _ctx: &mut Self::Context) -> WsResult { fn handle_msg(&mut self, _msg: LoadInvitedUsers, _ctx: &mut Self::Context) -> WsResult {
let user_id = self.require_user()?.id; let user_id = self.require_user()?.id;
let users = match block_on(self.db.send(crate::db::users::LoadInvitedUsers { user_id })) { let users = match block_on(
self.db
.send(database_actor::users::LoadInvitedUsers { user_id }),
) {
Ok(Ok(users)) => users, Ok(Ok(users)) => users,
_ => return Ok(None), _ => return Ok(None),
}; };
@ -85,7 +87,7 @@ impl WsHandler<ProfileUpdate> for WebSocketActor {
let user_id = self.require_user()?.id; let user_id = self.require_user()?.id;
let ProfileUpdate { name, email } = msg; let ProfileUpdate { name, email } = msg;
match block_on(self.db.send(crate::db::users::ProfileUpdate { match block_on(self.db.send(database_actor::users::ProfileUpdate {
user_id, user_id,
name, name,
email, email,
@ -119,11 +121,14 @@ impl WsHandler<RemoveInvitedUser> for WebSocketActor {
project_id, project_id,
.. ..
} = self.require_user_project()?.clone(); } = self.require_user_project()?.clone();
match block_on(self.db.send(db::user_projects::RemoveInvitedUser { match block_on(
invited_id, self.db
inviter_id, .send(database_actor::user_projects::RemoveInvitedUser {
project_id, invited_id,
})) { inviter_id,
project_id,
}),
) {
Ok(Ok(_users)) => Ok(Some(WsMsg::InvitedUserRemoveSuccess(invited_id))), Ok(Ok(_users)) => Ok(Some(WsMsg::InvitedUserRemoveSuccess(invited_id))),
Ok(Err(e)) => { Ok(Err(e)) => {
error!("{:?}", e); error!("{:?}", e);

View File

@ -1,58 +1,28 @@
use std::collections::HashMap; #[macro_use]
extern crate log;
use actix::{ use {
Actor, ActorContext, Addr, AsyncContext, Context, Handler, Message, Recipient, StreamHandler, crate::{
}; handlers::*,
use actix_web::{ server::{InnerMsg, WsServer},
get, },
web::{self, Data}, actix::{Actor, ActorContext, Addr, AsyncContext, Handler, Recipient, StreamHandler},
Error, HttpRequest, HttpResponse, actix_web::{
}; get,
use actix_web_actors::ws; web::{self, Data},
use futures::executor::block_on; Error, HttpRequest, HttpResponse,
},
use jirs_data::{Project, ProjectId, User, UserId, UserProject, WsMsg}; actix_web_actors::ws,
database_actor::{projects::LoadCurrentProject, user_projects::CurrentUserProject, DbExecutor},
use crate::db::{projects::LoadCurrentProject, user_projects::CurrentUserProject, DbExecutor}; futures::executor::block_on,
use crate::mail::MailExecutor; jirs_data::{Project, User, UserProject, WsMsg},
use crate::ws::{ log::*,
auth::*, mail_actor::MailExecutor,
comments::*,
invitations::*,
issue_statuses::*,
issues::*,
messages::*,
projects::*,
user_projects::{LoadUserProjects, SetCurrentUserProject},
users::*,
}; };
macro_rules! query_db_or_print { pub mod handlers;
($s:expr,$msg:expr) => { pub mod prelude;
match block_on($s.db.send($msg)) { pub mod server;
Ok(Ok(r)) => r,
Ok(Err(e)) => {
error!("{:?}", e);
return Ok(None);
}
Err(e) => {
error!("{}", e);
return Ok(None);
}
}
};
}
pub mod auth;
pub mod comments;
pub mod epics;
pub mod invitations;
pub mod issue_statuses;
pub mod issues;
pub mod messages;
pub mod projects;
pub mod user_projects;
pub mod users;
pub type WsResult = std::result::Result<Option<WsMsg>, WsMsg>; pub type WsResult = std::result::Result<Option<WsMsg>, WsMsg>;
@ -346,120 +316,6 @@ where
fn handle_msg(&mut self, msg: Message, _ctx: &mut <Self as Actor>::Context) -> WsResult; fn handle_msg(&mut self, msg: Message, _ctx: &mut <Self as Actor>::Context) -> WsResult;
} }
#[derive(Message, Debug)]
#[rtype(result = "()")]
pub enum InnerMsg {
Join(ProjectId, UserId, Recipient<InnerMsg>),
Leave(ProjectId, UserId, Recipient<InnerMsg>),
BroadcastToChannel(ProjectId, WsMsg),
SendToUser(UserId, WsMsg),
Transfer(WsMsg),
}
pub struct WsServer {
sessions: HashMap<UserId, Vec<Recipient<InnerMsg>>>,
rooms: HashMap<ProjectId, HashMap<UserId, i32>>,
}
impl Default for WsServer {
fn default() -> Self {
Self {
sessions: HashMap::new(),
rooms: HashMap::new(),
}
}
}
impl Message for WsServer {
type Result = ();
}
impl Actor for WsServer {
type Context = Context<Self>;
}
impl Handler<InnerMsg> for WsServer {
type Result = ();
fn handle(&mut self, msg: InnerMsg, _ctx: &mut <Self as Actor>::Context) -> Self::Result {
debug!("receive {:?}", msg);
match msg {
InnerMsg::Join(project_id, user_id, recipient) => {
let v = self
.sessions
.entry(user_id)
.or_insert_with(Default::default);
v.push(recipient);
self.ensure_room(project_id);
if let Some(room) = self.rooms.get_mut(&project_id) {
let n = *room.entry(user_id).or_insert(0);
room.insert(user_id, n + 1);
}
}
InnerMsg::Leave(project_id, user_id, recipient) => {
self.ensure_room(project_id);
let room = match self.rooms.get_mut(&project_id) {
Some(room) => room,
None => return,
};
let n = *room.entry(user_id).or_insert(0);
if n <= 1 {
room.remove(&user_id);
self.sessions.remove(&user_id);
} else {
let v = self.sessions.entry(user_id).or_insert_with(Vec::new);
let mut old = vec![];
std::mem::swap(&mut old, v);
for r in old {
if r != recipient {
v.push(r);
}
}
}
}
InnerMsg::SendToUser(user_id, msg) => {
if let Some(v) = self.sessions.get(&user_id) {
self.send_to_recipients(v, &msg);
}
}
InnerMsg::BroadcastToChannel(project_id, msg) => {
debug!("Begin broadcast to channel {} msg {:?}", project_id, msg);
let set = match self.rooms.get(&project_id) {
Some(s) => s,
_ => return debug!(" channel not found, aborting..."),
};
for r in set.keys() {
let v = match self.sessions.get(r) {
Some(v) => v,
_ => {
debug!("recipient is dead, skipping...");
continue;
}
};
self.send_to_recipients(v, &msg);
}
}
_ => (),
}
}
}
impl WsServer {
pub fn ensure_room(&mut self, room: i32) {
self.rooms.entry(room).or_insert_with(HashMap::new);
}
fn send_to_recipients(&self, recipients: &[Recipient<InnerMsg>], msg: &WsMsg) {
for recipient in recipients.iter() {
match recipient.do_send(InnerMsg::Transfer(msg.clone())) {
Ok(_) => debug!("msg sent"),
Err(e) => error!("{}", e),
};
}
}
}
#[get("/ws/")] #[get("/ws/")]
pub async fn index( pub async fn index(
req: HttpRequest, req: HttpRequest,

View File

@ -0,0 +1,16 @@
#[macro_export]
macro_rules! query_db_or_print {
($s:expr,$msg:expr) => {
match block_on($s.db.send($msg)) {
Ok(Ok(r)) => r,
Ok(Err(e)) => {
log::error!("{:?}", e);
return Ok(None);
}
Err(e) => {
log::error!("{}", e);
return Ok(None);
}
}
};
}
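
Note that the exported macro calls block_on unqualified, so every call site still needs futures::executor::block_on in scope. A typical expansion site, mirroring the epics handler above:

// Inside a WsHandler impl, with `use futures::executor::block_on;` at the
// top of the module and `self.db` being the Addr<DbExecutor>:
let epics = crate::query_db_or_print!(self, database_actor::epics::LoadEpics { project_id });
return Ok(Some(jirs_data::WsMsg::EpicsLoaded(epics)));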

View File

@ -0,0 +1,119 @@
use std::collections::HashMap;
use actix::{Actor, Context, Recipient};
use jirs_data::{ProjectId, UserId, WsMsg};
#[derive(actix::Message, Debug)]
#[rtype(result = "()")]
pub enum InnerMsg {
Join(ProjectId, UserId, Recipient<InnerMsg>),
Leave(ProjectId, UserId, Recipient<InnerMsg>),
BroadcastToChannel(ProjectId, WsMsg),
SendToUser(UserId, WsMsg),
Transfer(WsMsg),
}
pub struct WsServer {
sessions: HashMap<UserId, Vec<Recipient<InnerMsg>>>,
rooms: HashMap<ProjectId, HashMap<UserId, i32>>,
}
impl Default for WsServer {
fn default() -> Self {
Self {
sessions: HashMap::new(),
rooms: HashMap::new(),
}
}
}
impl actix::Message for WsServer {
type Result = ();
}
impl actix::Actor for WsServer {
type Context = Context<Self>;
}
impl actix::Handler<InnerMsg> for WsServer {
type Result = ();
fn handle(&mut self, msg: InnerMsg, _ctx: &mut <Self as Actor>::Context) -> Self::Result {
debug!("receive {:?}", msg);
match msg {
InnerMsg::Join(project_id, user_id, recipient) => {
let v = self
.sessions
.entry(user_id)
.or_insert_with(Default::default);
v.push(recipient);
self.ensure_room(project_id);
if let Some(room) = self.rooms.get_mut(&project_id) {
let n = *room.entry(user_id).or_insert(0);
room.insert(user_id, n + 1);
}
}
InnerMsg::Leave(project_id, user_id, recipient) => {
self.ensure_room(project_id);
let room = match self.rooms.get_mut(&project_id) {
Some(room) => room,
None => return,
};
let n = *room.entry(user_id).or_insert(0);
if n <= 1 {
room.remove(&user_id);
self.sessions.remove(&user_id);
} else {
let v = self.sessions.entry(user_id).or_insert_with(Vec::new);
let mut old = vec![];
std::mem::swap(&mut old, v);
for r in old {
if r != recipient {
v.push(r);
}
}
}
}
InnerMsg::SendToUser(user_id, msg) => {
if let Some(v) = self.sessions.get(&user_id) {
self.send_to_recipients(v, &msg);
}
}
InnerMsg::BroadcastToChannel(project_id, msg) => {
debug!("Begin broadcast to channel {} msg {:?}", project_id, msg);
let set = match self.rooms.get(&project_id) {
Some(s) => s,
_ => return debug!(" channel not found, aborting..."),
};
for r in set.keys() {
let v = match self.sessions.get(r) {
Some(v) => v,
_ => {
debug!("recipient is dead, skipping...");
continue;
}
};
self.send_to_recipients(v, &msg);
}
}
_ => (),
}
}
}
impl WsServer {
pub fn ensure_room(&mut self, room: i32) {
self.rooms.entry(room).or_insert_with(HashMap::new);
}
fn send_to_recipients(&self, recipients: &[Recipient<InnerMsg>], msg: &WsMsg) {
for recipient in recipients.iter() {
match recipient.do_send(InnerMsg::Transfer(msg.clone())) {
Ok(_) => debug!("msg sent"),
Err(e) => error!("{}", e),
};
}
}
}
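
The hub is meant to be started once per process and shared as app data so that both the /ws/ route and plain HTTP endpoints (such as the avatar upload) can push InnerMsg into it; a minimal sketch, assuming an actix System is already running:

use actix::Actor;

fn start_hub() -> actix::Addr<WsServer> {
    // One hub per server process; HTTP handlers receive it as
    // Data<Addr<WsServer>> and send InnerMsg::BroadcastToChannel /
    // InnerMsg::SendToUser through it.
    WsServer::default().start()
}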

View File

@ -2,7 +2,7 @@
# see diesel.rs/guides/configuring-diesel-cli # see diesel.rs/guides/configuring-diesel-cli
[print_schema] [print_schema]
file = "src/schema.rs" file = "database-actor/src/schema.rs"
import_types = ["diesel::sql_types::*", "jirs_data::sql::*"] import_types = ["diesel::sql_types::*", "jirs_data::sql::*"]
with_docs = true with_docs = true
patch_file = "./src/schema.patch" patch_file = "./database-actor/src/schema.patch"

View File

@ -14,7 +14,7 @@ name = "jirs_client"
path = "src/lib.rs" path = "src/lib.rs"
[dependencies] [dependencies]
jirs-data = { path = "../jirs-data", features = ["frontend"] } jirs-data = { path = "../shared/jirs-data", features = ["frontend"] }
wee_alloc = "*" wee_alloc = "*"

View File

@ -292,7 +292,7 @@ fn after_mount(url: Url, orders: &mut impl Orders<Msg>) -> AfterMount<Model> {
HOST_URL = "".to_string(); HOST_URL = "".to_string();
WS_URL = "".to_string(); WS_URL = "".to_string();
} }
model.page = resolve_page(url).unwrap_or_else(|| Page::Project); model.page = resolve_page(url).unwrap_or(Page::Project);
open_socket(&mut model, orders); open_socket(&mut model, orders);
AfterMount::new(model).url_handling(UrlHandling::PassToRoutes) AfterMount::new(model).url_handling(UrlHandling::PassToRoutes)
} }

View File

@ -25,7 +25,10 @@ pub fn update(msg: &Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
orders, orders,
); );
} }
Msg::WebSocketChange(WebSocketChanged::WsMsg(WsMsg::IssueStatusDeleted(_))) => { Msg::WebSocketChange(WebSocketChanged::WsMsg(WsMsg::IssueStatusDeleted(
_id,
_n_deleted,
))) => {
orders.skip().send_msg(Msg::ModalDropped); orders.skip().send_msg(Msg::ModalDropped);
} }
_ => (), _ => (),

View File

@ -45,16 +45,18 @@ pub fn update(msg: Msg, model: &mut crate::model::Model, orders: &mut impl Order
} }
} }
} }
Msg::WebSocketChange(WebSocketChanged::WsMsg(WsMsg::IssueDeleted(id))) => { Msg::WebSocketChange(WebSocketChanged::WsMsg(WsMsg::IssueDeleted(id, count)))
let mut old: Vec<Issue> = vec![]; if count > 0 =>
std::mem::swap(&mut old, &mut model.issues); {
for is in old { let mut old: Vec<Issue> = vec![];
if is.id != id { std::mem::swap(&mut old, &mut model.issues);
model.issues.push(is); for is in old {
if is.id != id {
model.issues.push(is);
}
} }
orders.skip().send_msg(Msg::ModalDropped);
} }
orders.skip().send_msg(Msg::ModalDropped);
}
Msg::StyledSelectChanged( Msg::StyledSelectChanged(
FieldId::EditIssueModal(EditIssueModalSection::Issue(IssueFieldId::Type)), FieldId::EditIssueModal(EditIssueModalSection::Issue(IssueFieldId::Type)),
StyledSelectChanged::Text(text), StyledSelectChanged::Text(text),

View File

@ -160,10 +160,10 @@ pub fn update(msg: &WsMsg, model: &mut Model, orders: &mut impl Orders<Msg>) {
} }
} }
model model
.issue_statuses .issue_statuses
.sort_by(|a, b| a.position.cmp(&b.position)); .sort_by(|a, b| a.position.cmp(&b.position));
} }
WsMsg::IssueStatusDeleted(dropped_id) => { WsMsg::IssueStatusDeleted(dropped_id, _count) => {
let mut old = vec![]; let mut old = vec![];
std::mem::swap(&mut model.issue_statuses, &mut old); std::mem::swap(&mut model.issue_statuses, &mut old);
for is in old { for is in old {
@ -172,10 +172,10 @@ pub fn update(msg: &WsMsg, model: &mut Model, orders: &mut impl Orders<Msg>) {
} }
} }
model model
.issue_statuses .issue_statuses
.sort_by(|a, b| a.position.cmp(&b.position)); .sort_by(|a, b| a.position.cmp(&b.position));
} }
WsMsg::IssueDeleted(id) => { WsMsg::IssueDeleted(id, _count) => {
let mut old = vec![]; let mut old = vec![];
std::mem::swap(&mut model.issue_statuses, &mut old); std::mem::swap(&mut model.issue_statuses, &mut old);
for is in old { for is in old {
@ -185,7 +185,7 @@ pub fn update(msg: &WsMsg, model: &mut Model, orders: &mut impl Orders<Msg>) {
model.issue_statuses.push(is); model.issue_statuses.push(is);
} }
model model
.issue_statuses .issue_statuses
.sort_by(|a, b| a.position.cmp(&b.position)); .sort_by(|a, b| a.position.cmp(&b.position));
} }
// users // users
@ -216,7 +216,7 @@ pub fn update(msg: &WsMsg, model: &mut Model, orders: &mut impl Orders<Msg>) {
} }
} }
} }
WsMsg::CommentDeleted(comment_id) => { WsMsg::CommentDeleted(comment_id, _count) => {
let mut old = vec![]; let mut old = vec![];
std::mem::swap(&mut model.comments, &mut old); std::mem::swap(&mut model.comments, &mut old);
for comment in old.into_iter() { for comment in old.into_iter() {
@ -254,7 +254,7 @@ pub fn update(msg: &WsMsg, model: &mut Model, orders: &mut impl Orders<Msg>) {
model.messages = v.clone(); model.messages = v.clone();
model.messages.sort_by(|a, b| a.id.cmp(&b.id)); model.messages.sort_by(|a, b| a.id.cmp(&b.id));
} }
WsMsg::MessageMarkedSeen(id) => { WsMsg::MessageMarkedSeen(id, _count) => {
let mut old = vec![]; let mut old = vec![];
std::mem::swap(&mut old, &mut model.messages); std::mem::swap(&mut old, &mut model.messages);
for m in old { for m in old {
@ -285,7 +285,7 @@ pub fn update(msg: &WsMsg, model: &mut Model, orders: &mut impl Orders<Msg>) {
} }
model.epics.sort_by(|a, b| a.id.cmp(&b.id)); model.epics.sort_by(|a, b| a.id.cmp(&b.id));
} }
WsMsg::EpicDeleted(id) => { WsMsg::EpicDeleted(id, _count) => {
let mut old = vec![]; let mut old = vec![];
std::mem::swap(&mut old, &mut model.epics); std::mem::swap(&mut old, &mut model.epics);
for current in old { for current in old {

View File

@ -13,29 +13,20 @@ name = "jirs_server"
path = "./src/main.rs" path = "./src/main.rs"
[features] [features]
aws-s3 = [ aws-s3 = []
"rusoto_s3", local-storage = []
"rusoto_core"
]
local-storage = [
"actix-files"
]
default = [ default = [
"aws-s3", "aws-s3",
"local-storage", "local-storage",
] ]
[dependencies] [dependencies]
actix = { version = "*" } actix = { version = "0.10.0" }
actix-web = { version = "*" } actix-web = { version = "*" }
actix-cors = { version = "*" } actix-cors = { version = "*" }
actix-service = { version = "*" } actix-service = { version = "*" }
actix-rt = "1" actix-rt = "1"
actix-web-actors = "*" actix-web-actors = "*"
actix-multipart = { version = "*" }
pq-sys = { version = ">=0.3.0, <0.5.0" }
r2d2 = { version = ">= 0.8, < 0.9" }
dotenv = { version = "*" } dotenv = { version = "*" }
@ -67,34 +58,45 @@ futures = { version = "*" }
openssl-sys = { version = "*", features = ["vendored"] } openssl-sys = { version = "*", features = ["vendored"] }
libc = { version = "0.2.0", default-features = false } libc = { version = "0.2.0", default-features = false }
lettre = { version = "*" } [dependencies.jirs-config]
lettre_email = { version = "*" } path = "../shared/jirs-config"
features = ["web", "websocket", "local-storage", "hi", "database"]
flate2 = { version = "*" }
syntect = { version = "*" }
lazy_static = { version = "*" }
[dependencies.diesel]
version = "1.4.5"
features = ["unstable", "postgres", "numeric", "extras", "uuidv07"]
[dependencies.jirs-data] [dependencies.jirs-data]
path = "../jirs-data" path = "../shared/jirs-data"
features = ["backend"] features = ["backend"]
# Amazon S3 [dependencies.highlight-actor]
[dependencies.rusoto_s3] path = "../actors/highlight-actor"
optional = true
version = "0.43.0"
[dependencies.rusoto_core] [dependencies.database-actor]
optional = true path = "../actors/database-actor"
version = "0.43.0"
[dependencies.web-actor]
path = "../actors/web-actor"
[dependencies.websocket-actor]
path = "../actors/websocket-actor"
[dependencies.mail-actor]
path = "../actors/mail-actor"
[dependencies.filesystem-actor]
path = "../actors/filesystem-actor"
# Amazon S3
#[dependencies.rusoto_s3]
#optional = true
#version = "0.43.0"
#
#[dependencies.rusoto_core]
#optional = true
#version = "0.43.0"
# Local storage # Local storage
[dependencies.actix-files] #[dependencies.actix-files]
optional = true #optional = true
version = "*" #version = "*"
[dependencies.tokio] [dependencies.tokio]
version = "0.2.23" version = "0.2.23"

View File

@ -1,52 +0,0 @@
use actix::{Handler, Message};
use jirs_data::User;
use crate::{
db::{tokens::FindAccessToken, DbExecutor, DbPool, DbPooledConn, SyncQuery},
db_pool,
errors::ServiceError,
};
pub struct AuthorizeUser {
pub access_token: uuid::Uuid,
}
impl Message for AuthorizeUser {
type Result = Result<User, ServiceError>;
}
impl AuthorizeUser {
pub fn execute(&self, conn: &DbPooledConn) -> Result<User, ServiceError> {
let token = FindAccessToken {
token: self.access_token,
}
.execute(conn)?;
crate::db::users::FindUser {
user_id: token.user_id,
}
.execute(conn)
}
}
impl Handler<AuthorizeUser> for DbExecutor {
type Result = Result<User, ServiceError>;
fn handle(&mut self, msg: AuthorizeUser, _: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
impl SyncQuery for AuthorizeUser {
type Result = std::result::Result<User, crate::errors::ServiceError>;
fn handle(&self, pool: &DbPool) -> Self::Result {
let conn = pool.get().map_err(|e| {
error!("{:?}", e);
crate::errors::ServiceError::DatabaseConnectionLost
})?;
self.execute(&conn)
}
}

View File

@ -1,148 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use jirs_data::{msg::WsError, Comment};
use crate::{
db::{DbExecutor, DbPooledConn},
db_pool,
errors::ServiceError,
q,
};
pub struct LoadIssueComments {
pub issue_id: i32,
}
impl LoadIssueComments {
pub fn execute(self, conn: &DbPooledConn) -> Result<Vec<Comment>, ServiceError> {
use crate::schema::comments::dsl::*;
q!(comments.distinct_on(id).filter(issue_id.eq(self.issue_id)))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToLoadComments)
})
}
}
impl Message for LoadIssueComments {
type Result = Result<Vec<Comment>, ServiceError>;
}
impl Handler<LoadIssueComments> for DbExecutor {
type Result = Result<Vec<Comment>, ServiceError>;
fn handle(&mut self, msg: LoadIssueComments, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct CreateComment {
pub user_id: i32,
pub issue_id: i32,
pub body: String,
}
impl CreateComment {
pub fn execute(self, conn: &DbPooledConn) -> Result<Comment, ServiceError> {
use crate::schema::comments::dsl::*;
q!(diesel::insert_into(comments).values((
body.eq(self.body),
user_id.eq(self.user_id),
issue_id.eq(self.issue_id),
)))
.get_result::<Comment>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::InvalidComment)
})
}
}
impl Message for CreateComment {
type Result = Result<Comment, ServiceError>;
}
impl Handler<CreateComment> for DbExecutor {
type Result = Result<Comment, ServiceError>;
fn handle(&mut self, msg: CreateComment, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct UpdateComment {
pub comment_id: i32,
pub user_id: i32,
pub body: String,
}
impl UpdateComment {
pub fn execute(self, conn: &DbPooledConn) -> Result<Comment, ServiceError> {
use crate::schema::comments::dsl::*;
q!(diesel::update(
comments
.filter(user_id.eq(self.user_id))
.find(self.comment_id),
)
.set(body.eq(self.body)))
.get_result::<Comment>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToUpdateComment)
})
}
}
impl Message for UpdateComment {
type Result = Result<Comment, ServiceError>;
}
impl Handler<UpdateComment> for DbExecutor {
type Result = Result<Comment, ServiceError>;
fn handle(&mut self, msg: UpdateComment, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct DeleteComment {
pub comment_id: i32,
pub user_id: i32,
}
impl DeleteComment {
pub fn execute(self, conn: &DbPooledConn) -> Result<usize, ServiceError> {
use crate::schema::comments::dsl::*;
q!(diesel::delete(
comments
.filter(user_id.eq(self.user_id))
.find(self.comment_id),
))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::UnableToDeleteComment)
})
}
}
impl Message for DeleteComment {
type Result = Result<(), ServiceError>;
}
impl Handler<DeleteComment> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: DeleteComment, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)?;
Ok(())
}
}

View File

@ -1,124 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use jirs_data::{msg::WsError, Epic};
use crate::{db::DbExecutor, db_pool, errors::ServiceError, q};
pub struct LoadEpics {
pub project_id: i32,
}
impl Message for LoadEpics {
type Result = Result<Vec<Epic>, ServiceError>;
}
impl Handler<LoadEpics> for DbExecutor {
type Result = Result<Vec<Epic>, ServiceError>;
fn handle(&mut self, msg: LoadEpics, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::epics::dsl::*;
let conn = db_pool!(self);
q!(epics.distinct_on(id).filter(project_id.eq(msg.project_id)))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToLoadEpics)
})
}
}
pub struct CreateEpic {
pub user_id: i32,
pub project_id: i32,
pub name: String,
}
impl Message for CreateEpic {
type Result = Result<Epic, ServiceError>;
}
impl Handler<CreateEpic> for DbExecutor {
type Result = Result<Epic, ServiceError>;
fn handle(&mut self, msg: CreateEpic, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::epics::dsl::*;
let conn = db_pool!(self);
q!(diesel::insert_into(epics).values((
name.eq(msg.name.as_str()),
user_id.eq(msg.user_id),
project_id.eq(msg.project_id),
)))
.get_result::<Epic>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::InvalidEpic)
})
}
}
pub struct UpdateEpic {
pub epic_id: i32,
pub project_id: i32,
pub name: String,
}
impl Message for UpdateEpic {
type Result = Result<Epic, ServiceError>;
}
impl Handler<UpdateEpic> for DbExecutor {
type Result = Result<Epic, ServiceError>;
fn handle(&mut self, msg: UpdateEpic, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::epics::dsl::*;
let conn = db_pool!(self);
q!(diesel::update(
epics
.filter(project_id.eq(msg.project_id))
.find(msg.epic_id),
)
.set(name.eq(msg.name)))
.get_result::<Epic>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToUpdateEpic)
})
}
}
pub struct DeleteEpic {
pub epic_id: i32,
pub user_id: i32,
}
impl Message for DeleteEpic {
type Result = Result<(), ServiceError>;
}
impl Handler<DeleteEpic> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: DeleteEpic, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::epics::dsl::*;
let conn = db_pool!(self);
q!(diesel::delete(
epics.filter(user_id.eq(msg.user_id)).find(msg.epic_id)
))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::UnableToDeleteEpic)
})?;
Ok(())
}
}

View File

@ -1,300 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use jirs_data::{
msg::WsError, EmailString, Invitation, InvitationId, InvitationState, InvitationToken,
ProjectId, Token, User, UserId, UserRole, UsernameString,
};
use crate::db::DbPooledConn;
use crate::{
db::{
tokens::CreateBindToken,
users::{LookupUser, Register},
DbExecutor,
},
db_pool,
errors::ServiceError,
q,
};
pub struct FindByBindToken {
pub token: InvitationToken,
}
impl FindByBindToken {
pub fn execute(self, conn: &DbPooledConn) -> Result<Invitation, ServiceError> {
use crate::schema::invitations::dsl::*;
q!(invitations.filter(bind_token.eq(self.token)))
.first(conn)
.map_err(|e| ServiceError::DatabaseQueryFailed(format!("{}", e)))
}
}
impl Message for FindByBindToken {
type Result = Result<Invitation, ServiceError>;
}
impl Handler<FindByBindToken> for DbExecutor {
type Result = Result<Invitation, ServiceError>;
fn handle(&mut self, msg: FindByBindToken, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct ListInvitation {
pub user_id: UserId,
}
impl Message for ListInvitation {
type Result = Result<Vec<Invitation>, ServiceError>;
}
impl Handler<ListInvitation> for DbExecutor {
type Result = Result<Vec<Invitation>, ServiceError>;
fn handle(&mut self, msg: ListInvitation, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::invitations::dsl::*;
let conn = db_pool!(self);
q!(invitations
.filter(invited_by_id.eq(msg.user_id))
.filter(state.ne(InvitationState::Accepted))
.order_by(state.asc())
.then_order_by(updated_at.desc()))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToLoadInvitations)
})
}
}
pub struct CreateInvitation {
pub user_id: UserId,
pub project_id: ProjectId,
pub email: EmailString,
pub name: UsernameString,
pub role: UserRole,
}
impl CreateInvitation {
pub fn execute(self, conn: &DbPooledConn) -> Result<Invitation, ServiceError> {
use crate::schema::invitations::dsl::*;
q!(diesel::insert_into(invitations).values((
name.eq(self.name),
email.eq(self.email),
state.eq(InvitationState::Sent),
project_id.eq(self.project_id),
invited_by_id.eq(self.user_id),
role.eq(self.role),
)))
.get_result(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::InvalidInvitation)
})
}
}
impl Message for CreateInvitation {
type Result = Result<Invitation, ServiceError>;
}
impl Handler<CreateInvitation> for DbExecutor {
type Result = Result<Invitation, ServiceError>;
fn handle(&mut self, msg: CreateInvitation, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct DeleteInvitation {
pub id: InvitationId,
}
impl DeleteInvitation {
pub fn execute(self, conn: &DbPooledConn) -> Result<usize, ServiceError> {
use crate::schema::invitations::dsl::*;
q!(diesel::delete(invitations).filter(id.eq(self.id)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::UnableToDeleteInvitation)
})
}
}
impl Message for DeleteInvitation {
type Result = Result<(), ServiceError>;
}
impl Handler<DeleteInvitation> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: DeleteInvitation, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)?;
Ok(())
}
}
struct UpdateInvitationState {
pub id: InvitationId,
pub state: InvitationState,
}
impl UpdateInvitationState {
pub fn execute(self, conn: &DbPooledConn) -> Result<usize, ServiceError> {
use crate::schema::invitations::dsl::*;
q!(diesel::update(invitations)
.set((
state.eq(self.state),
updated_at.eq(chrono::Utc::now().naive_utc()),
))
.filter(id.eq(self.id)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToUpdateInvitation)
})
}
}
impl Message for UpdateInvitationState {
type Result = Result<(), ServiceError>;
}
impl Handler<UpdateInvitationState> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: UpdateInvitationState, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)?;
Ok(())
}
}
pub struct RevokeInvitation {
pub id: InvitationId,
}
impl Message for RevokeInvitation {
type Result = Result<(), ServiceError>;
}
impl Handler<RevokeInvitation> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: RevokeInvitation, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
UpdateInvitationState {
id: msg.id,
state: InvitationState::Revoked,
}
.execute(conn)?;
Ok(())
}
}
pub struct AcceptInvitation {
pub invitation_token: InvitationToken,
}
impl AcceptInvitation {
pub fn execute(self, conn: &DbPooledConn) -> Result<Token, ServiceError> {
use crate::schema::invitations::dsl::*;
crate::db::Guard::new(conn)?.run::<Token, _>(|_guard| {
let invitation = crate::db::invitations::FindByBindToken {
token: self.invitation_token,
}
.execute(conn)?;
if invitation.state == InvitationState::Revoked {
return Err(ServiceError::Error(WsError::InvitationRevoked));
}
crate::db::invitations::UpdateInvitationState {
id: invitation.id,
state: InvitationState::Accepted,
}
.execute(conn)?;
q!(diesel::update(invitations)
.set((
state.eq(InvitationState::Accepted),
updated_at.eq(chrono::Utc::now().naive_utc()),
))
.filter(id.eq(invitation.id))
.filter(state.eq(InvitationState::Sent)))
.execute(conn)
.map_err(|e| {
ServiceError::DatabaseQueryFailed(format!(
"update invitation {} {}",
invitation.id, e
))
})?;
match {
Register {
name: invitation.name.clone(),
email: invitation.email.clone(),
project_id: Some(invitation.project_id),
role: UserRole::User,
}
.execute(conn)
} {
Ok(_) => (),
Err(ServiceError::Error(WsError::InvalidPair(..))) => (),
Err(e) => return Err(e),
};
let user: User = LookupUser {
name: invitation.name.clone(),
email: invitation.email.clone(),
}
.execute(conn)?;
CreateBindToken { user_id: user.id }.execute(conn)?;
self.bind_to_default_project(conn, &invitation, &user)?;
crate::db::tokens::FindUserId { user_id: user.id }.execute(conn)
})
}
fn bind_to_default_project(
&self,
conn: &DbPooledConn,
invitation: &Invitation,
user: &User,
) -> Result<usize, ServiceError> {
crate::db::user_projects::CreateUserProject {
user_id: user.id,
project_id: invitation.project_id,
is_current: false,
is_default: false,
role: invitation.role,
}
.execute(conn)
}
}
impl Message for AcceptInvitation {
type Result = Result<Token, ServiceError>;
}
impl Handler<AcceptInvitation> for DbExecutor {
type Result = Result<Token, ServiceError>;
fn handle(&mut self, msg: AcceptInvitation, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}

View File

@ -1,44 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use jirs_data::IssueAssignee;
use crate::{
db::{DbExecutor, DbPooledConn},
db_pool,
errors::ServiceError,
q,
};
pub struct LoadAssignees {
pub issue_id: i32,
}
impl LoadAssignees {
pub fn execute(self, conn: &DbPooledConn) -> Result<Vec<IssueAssignee>, ServiceError> {
use crate::schema::issue_assignees::dsl::*;
q!(issue_assignees
.distinct_on(id)
.filter(issue_id.eq(self.issue_id)))
.load::<IssueAssignee>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("issue users".to_string())
})
}
}
impl Message for LoadAssignees {
type Result = Result<Vec<IssueAssignee>, ServiceError>;
}
impl Handler<LoadAssignees> for DbExecutor {
type Result = Result<Vec<IssueAssignee>, ServiceError>;
fn handle(&mut self, msg: LoadAssignees, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}

View File

@ -1,142 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use jirs_data::{IssueStatus, IssueStatusId, Position, ProjectId, TitleString};
use crate::db::DbPooledConn;
use crate::{db::DbExecutor, db_pool, errors::ServiceError, q};
pub struct LoadIssueStatuses {
pub project_id: ProjectId,
}
impl LoadIssueStatuses {
pub fn execute(self, conn: &DbPooledConn) -> Result<Vec<IssueStatus>, ServiceError> {
use crate::schema::issue_statuses::dsl::{id, issue_statuses, project_id};
q!(issue_statuses
.distinct_on(id)
.filter(project_id.eq(self.project_id)))
.load::<IssueStatus>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("issue users".to_string())
})
}
}
impl Message for LoadIssueStatuses {
type Result = Result<Vec<IssueStatus>, ServiceError>;
}
impl Handler<LoadIssueStatuses> for DbExecutor {
type Result = Result<Vec<IssueStatus>, ServiceError>;
fn handle(&mut self, msg: LoadIssueStatuses, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct CreateIssueStatus {
pub project_id: ProjectId,
pub position: i32,
pub name: TitleString,
}
impl CreateIssueStatus {
pub fn execute(self, conn: &DbPooledConn) -> Result<IssueStatus, ServiceError> {
use crate::schema::issue_statuses::dsl::{issue_statuses, name, position, project_id};
q!(diesel::insert_into(issue_statuses).values((
project_id.eq(self.project_id),
name.eq(self.name),
position.eq(self.position),
)))
.get_result::<IssueStatus>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("issue users".to_string())
})
}
}
impl Message for CreateIssueStatus {
type Result = Result<IssueStatus, ServiceError>;
}
impl Handler<CreateIssueStatus> for DbExecutor {
type Result = Result<IssueStatus, ServiceError>;
fn handle(&mut self, msg: CreateIssueStatus, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct DeleteIssueStatus {
pub project_id: ProjectId,
pub issue_status_id: IssueStatusId,
}
impl Message for DeleteIssueStatus {
type Result = Result<IssueStatusId, ServiceError>;
}
impl Handler<DeleteIssueStatus> for DbExecutor {
type Result = Result<IssueStatusId, ServiceError>;
fn handle(&mut self, msg: DeleteIssueStatus, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issue_statuses::dsl::{id, issue_statuses, project_id};
let conn = db_pool!(self);
q!(diesel::delete(issue_statuses)
.filter(id.eq(msg.issue_status_id))
.filter(project_id.eq(msg.project_id)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("issue users".to_string())
})?;
Ok(msg.issue_status_id)
}
}
pub struct UpdateIssueStatus {
pub issue_status_id: IssueStatusId,
pub project_id: ProjectId,
pub position: Position,
pub name: TitleString,
}
impl Message for UpdateIssueStatus {
type Result = Result<IssueStatus, ServiceError>;
}
impl Handler<UpdateIssueStatus> for DbExecutor {
type Result = Result<IssueStatus, ServiceError>;
fn handle(&mut self, msg: UpdateIssueStatus, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issue_statuses::dsl::{
id, issue_statuses, name, position, project_id, updated_at,
};
let conn = db_pool!(self);
q!(diesel::update(issue_statuses)
.set((
name.eq(msg.name),
position.eq(msg.position),
updated_at.eq(chrono::Utc::now().naive_utc()),
))
.filter(id.eq(msg.issue_status_id))
.filter(project_id.eq(msg.project_id)))
.get_result::<IssueStatus>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("issue users".to_string())
})
}
}

View File

@ -1,318 +0,0 @@
use actix::{Handler, Message};
use diesel::expression::dsl::not;
use diesel::expression::sql_literal::sql;
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use jirs_data::msg::WsError;
use jirs_data::{IssuePriority, IssueStatusId, IssueType};
use crate::{db::DbExecutor, db_pool, errors::ServiceError, models::Issue};
const FAILED_CONNECT_USER_AND_ISSUE: &str = "Failed to create connection between user and issue";
#[derive(Serialize, Deserialize)]
pub struct LoadIssue {
pub issue_id: i32,
}
impl Message for LoadIssue {
type Result = Result<Issue, ServiceError>;
}
impl Handler<LoadIssue> for DbExecutor {
type Result = Result<Issue, ServiceError>;
fn handle(&mut self, msg: LoadIssue, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issues::dsl::{id, issues};
let conn = db_pool!(self);
let query = issues.filter(id.eq(msg.issue_id)).distinct();
debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&query).to_string()
);
query.first::<Issue>(conn).map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("project issues".to_string())
})
}
}
#[derive(Serialize, Deserialize)]
pub struct LoadProjectIssues {
pub project_id: i32,
}
impl Message for LoadProjectIssues {
type Result = Result<Vec<Issue>, ServiceError>;
}
impl Handler<LoadProjectIssues> for DbExecutor {
type Result = Result<Vec<Issue>, ServiceError>;
fn handle(&mut self, msg: LoadProjectIssues, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issues::dsl::{issues, project_id};
let conn = db_pool!(self);
let chain = issues.filter(project_id.eq(msg.project_id)).distinct();
debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&chain).to_string()
);
let vec = chain.load::<Issue>(conn).map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("project issues".to_string())
})?;
Ok(vec)
}
}
#[derive(Serialize, Deserialize, Default)]
pub struct UpdateIssue {
pub issue_id: i32,
pub title: Option<String>,
pub issue_type: Option<IssueType>,
pub priority: Option<IssuePriority>,
pub list_position: Option<i32>,
pub description: Option<String>,
pub description_text: Option<String>,
pub estimate: Option<i32>,
pub time_spent: Option<i32>,
pub time_remaining: Option<i32>,
pub project_id: Option<i32>,
pub user_ids: Option<Vec<i32>>,
pub reporter_id: Option<i32>,
pub issue_status_id: Option<i32>,
pub epic_id: Option<Option<i32>>,
}
impl Message for UpdateIssue {
type Result = Result<Issue, ServiceError>;
}
impl Handler<UpdateIssue> for DbExecutor {
type Result = Result<Issue, ServiceError>;
fn handle(&mut self, msg: UpdateIssue, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issues::dsl::{self, issues};
let conn = db_pool!(self);
let current_issue_id = msg.issue_id;
let chain = diesel::update(issues.find(current_issue_id)).set((
msg.title.map(|title| dsl::title.eq(title)),
msg.issue_type
.map(|issue_type| dsl::issue_type.eq(issue_type)),
msg.issue_status_id.map(|id| dsl::issue_status_id.eq(id)),
msg.priority.map(|priority| dsl::priority.eq(priority)),
msg.list_position
.map(|list_position| dsl::list_position.eq(list_position)),
msg.description
.map(|description| dsl::description.eq(description)),
msg.description_text
.map(|description_text| dsl::description_text.eq(description_text)),
msg.estimate.map(|estimate| dsl::estimate.eq(estimate)),
msg.time_spent
.map(|time_spent| dsl::time_spent.eq(time_spent)),
msg.time_remaining
.map(|time_remaining| dsl::time_remaining.eq(time_remaining)),
msg.project_id
.map(|project_id| dsl::project_id.eq(project_id)),
msg.reporter_id
.map(|reporter_id| dsl::reporter_id.eq(reporter_id)),
msg.epic_id.map(|epic_id| dsl::epic_id.eq(epic_id)),
dsl::updated_at.eq(chrono::Utc::now().naive_utc()),
));
debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&chain).to_string()
);
chain.get_result::<Issue>(conn).map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed("Failed to update issue".to_string())
})?;
if let Some(user_ids) = msg.user_ids.as_ref() {
use crate::schema::issue_assignees::dsl;
diesel::delete(dsl::issue_assignees)
.filter(not(dsl::user_id.eq_any(user_ids)).and(dsl::issue_id.eq(current_issue_id)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?;
let existing: Vec<i32> = dsl::issue_assignees
.select(dsl::user_id)
.filter(dsl::issue_id.eq(current_issue_id))
.get_results::<i32>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?;
let mut values = vec![];
for user_id in user_ids.iter() {
if !existing.contains(user_id) {
values.push(crate::models::CreateIssueAssigneeForm {
issue_id: current_issue_id,
user_id: *user_id,
})
}
}
diesel::insert_into(dsl::issue_assignees)
.values(values)
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed(FAILED_CONNECT_USER_AND_ISSUE.to_string())
})?;
}
issues.find(msg.issue_id).first::<Issue>(conn).map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})
}
}
#[derive(Serialize, Deserialize)]
pub struct DeleteIssue {
pub issue_id: i32,
}
impl Message for DeleteIssue {
type Result = Result<(), ServiceError>;
}
impl Handler<DeleteIssue> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: DeleteIssue, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issue_assignees::dsl::{issue_assignees, issue_id};
use crate::schema::issues::dsl::issues;
let conn = db_pool!(self);
diesel::delete(issue_assignees.filter(issue_id.eq(msg.issue_id)))
.execute(conn)
.map_err(|e| ServiceError::RecordNotFound(format!("issue {}. {}", msg.issue_id, e)))?;
diesel::delete(issues.find(msg.issue_id))
.execute(conn)
.map_err(|e| ServiceError::RecordNotFound(format!("issue {}. {}", msg.issue_id, e)))?;
Ok(())
}
}
#[derive(Serialize, Deserialize)]
pub struct CreateIssue {
pub title: String,
pub issue_type: IssueType,
pub issue_status_id: IssueStatusId,
pub priority: IssuePriority,
pub description: Option<String>,
pub description_text: Option<String>,
pub estimate: Option<i32>,
pub time_spent: Option<i32>,
pub time_remaining: Option<i32>,
pub project_id: jirs_data::ProjectId,
pub reporter_id: jirs_data::UserId,
pub user_ids: Vec<jirs_data::UserId>,
pub epic_id: Option<jirs_data::EpicId>,
}
impl Message for CreateIssue {
type Result = Result<Issue, ServiceError>;
}
impl Handler<CreateIssue> for DbExecutor {
type Result = Result<Issue, ServiceError>;
fn handle(&mut self, msg: CreateIssue, ctx: &mut Self::Context) -> Self::Result {
use crate::schema::issue_assignees::dsl;
use crate::schema::issues::dsl::issues;
let conn = db_pool!(self);
let list_position = issues
// .filter(issue_status_id.eq(IssueStatus::Backlog))
.select(sql("COALESCE(max(list_position), 0) + 1"))
.get_result::<i32>(conn)
.map_err(|e| {
error!("resolve new issue position failed {}", e);
ServiceError::DatabaseConnectionLost
})?;
info!("{:?}", msg.issue_type);
info!("msg.issue_status_id {:?}", msg.issue_status_id);
let issue_status_id = if msg.issue_status_id == 0 {
self.handle(
crate::db::issue_statuses::LoadIssueStatuses {
project_id: msg.project_id,
},
ctx,
)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToFetchIssueStatuses)
})?
.get(0)
.ok_or_else(|| ServiceError::Error(WsError::NoIssueStatuses))?
.id
} else {
msg.issue_status_id
};
let form = crate::models::CreateIssueForm {
title: msg.title,
issue_type: msg.issue_type,
issue_status_id,
priority: msg.priority,
list_position,
description: msg.description,
description_text: msg.description_text,
estimate: msg.estimate,
time_spent: msg.time_spent,
time_remaining: msg.time_remaining,
reporter_id: msg.reporter_id,
project_id: msg.project_id,
epic_id: msg.epic_id,
};
let issue = diesel::insert_into(issues)
.values(form)
.on_conflict_do_nothing()
.get_result::<Issue>(conn)
.map_err(|e| {
error!("{}", e);
ServiceError::DatabaseConnectionLost
})?;
let mut values = vec![];
for user_id in msg.user_ids.iter() {
values.push(crate::models::CreateIssueAssigneeForm {
issue_id: issue.id,
user_id: *user_id,
});
}
if !msg.user_ids.contains(&msg.reporter_id) {
values.push(crate::models::CreateIssueAssigneeForm {
issue_id: issue.id,
user_id: msg.reporter_id,
});
}
diesel::insert_into(dsl::issue_assignees)
.values(values)
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?;
Ok(issue)
}
}
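
Because UpdateIssue derives Default and wraps every column in Option, a caller can describe a partial update by overriding only the fields it cares about; a minimal, hedged sketch follows (ids and values are illustrative).

```rust
// Hedged sketch, not part of this commit: only the Some(..) fields reach the SET
// clause built by the handler above, because diesel skips None changesets inside
// the tuple passed to .set(..); updated_at is refreshed unconditionally.
fn retitle_and_move(issue_id: i32) -> UpdateIssue {
    UpdateIssue {
        issue_id,
        title: Some("Move uploads to fs actor".to_string()), // illustrative value
        issue_status_id: Some(3),                            // illustrative column id
        ..Default::default() // every other column stays untouched
    }
    // the returned message is then sent to the DbExecutor address like any other
}
```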

View File

@ -1,174 +0,0 @@
use actix::Handler;
use diesel::prelude::*;
use jirs_data::{BindToken, Message, MessageId, MessageType, User, UserId};
use crate::{
db::{
users::{FindUser, LookupUser},
DbExecutor,
},
db_pool,
errors::ServiceError,
q,
};
#[derive(Debug)]
pub struct LoadMessages {
pub user_id: UserId,
}
impl actix::Message for LoadMessages {
type Result = Result<Vec<Message>, ServiceError>;
}
impl Handler<LoadMessages> for DbExecutor {
type Result = Result<Vec<Message>, ServiceError>;
fn handle(&mut self, msg: LoadMessages, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::messages::dsl::*;
let conn = db_pool!(self);
q!(messages.filter(receiver_id.eq(msg.user_id)))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed("load user messages".to_string())
})
}
}
#[derive(Debug)]
pub struct MarkMessageSeen {
pub user_id: UserId,
pub message_id: MessageId,
}
impl actix::Message for MarkMessageSeen {
type Result = Result<MessageId, ServiceError>;
}
impl Handler<MarkMessageSeen> for DbExecutor {
type Result = Result<MessageId, ServiceError>;
fn handle(&mut self, msg: MarkMessageSeen, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::messages::dsl::*;
let conn = db_pool!(self);
let size = q!(diesel::delete(
messages
.find(msg.message_id)
.filter(receiver_id.eq(msg.user_id)),
))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed("load user messages".to_string())
})?;
if size > 0 {
Ok(msg.message_id)
} else {
Err(ServiceError::DatabaseQueryFailed(format!(
"failed to delete message for {:?}",
msg
)))
}
}
}
#[derive(Debug)]
pub enum CreateMessageReceiver {
Reference(UserId),
Lookup { name: String, email: String },
}
#[derive(Debug)]
pub struct CreateMessage {
pub receiver: CreateMessageReceiver,
pub sender_id: UserId,
pub summary: String,
pub description: String,
pub message_type: MessageType,
pub hyper_link: String,
}
impl actix::Message for CreateMessage {
type Result = Result<Message, ServiceError>;
}
impl Handler<CreateMessage> for DbExecutor {
type Result = Result<Message, ServiceError>;
fn handle(&mut self, msg: CreateMessage, ctx: &mut Self::Context) -> Self::Result {
use crate::schema::messages::dsl::*;
let conn = db_pool!(self);
let user: User = match {
match msg.receiver {
CreateMessageReceiver::Lookup { name, email } => {
self.handle(LookupUser { name, email }, ctx)
}
CreateMessageReceiver::Reference(user_id) => self.handle(FindUser { user_id }, ctx),
}
} {
Ok(user) => user,
_ => {
return Err(ServiceError::RecordNotFound(
"No matching user found".to_string(),
));
}
};
let query = diesel::insert_into(messages).values((
receiver_id.eq(user.id),
sender_id.eq(msg.sender_id),
summary.eq(msg.summary),
description.eq(msg.description),
message_type.eq(msg.message_type),
hyper_link.eq(msg.hyper_link),
));
debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&query).to_string()
);
query.get_result(conn).map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed("create message failed".to_string())
})
}
}
#[derive(Debug)]
pub struct LookupMessagesByToken {
pub token: BindToken,
pub user_id: UserId,
}
impl actix::Message for LookupMessagesByToken {
type Result = Result<Vec<Message>, ServiceError>;
}
impl Handler<LookupMessagesByToken> for DbExecutor {
type Result = Result<Vec<Message>, ServiceError>;
fn handle(&mut self, msg: LookupMessagesByToken, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::messages::dsl::*;
let conn = db_pool!(self);
q!(messages.filter(
hyper_link
.eq(format!("#{}", msg.token))
.and(receiver_id.eq(msg.user_id)),
))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed("create message failed".to_string())
})
}
}

View File

@ -1,175 +0,0 @@
use std::fs::*;
use actix::{Actor, SyncContext};
use diesel::pg::PgConnection;
use diesel::r2d2::{self, ConnectionManager};
use serde::{Deserialize, Serialize};
use crate::errors::ServiceError;
pub mod authorize_user;
pub mod comments;
pub mod epics;
pub mod invitations;
pub mod issue_assignees;
pub mod issue_statuses;
pub mod issues;
pub mod messages;
pub mod projects;
pub mod tokens;
pub mod user_projects;
pub mod users;
pub type DbPool = r2d2::Pool<ConnectionManager<PgConnection>>;
pub type DbPooledConn = r2d2::PooledConnection<ConnectionManager<PgConnection>>;
pub struct DbExecutor {
pub pool: DbPool,
pub config: Configuration,
}
impl Actor for DbExecutor {
type Context = SyncContext<Self>;
}
impl Default for DbExecutor {
fn default() -> Self {
Self {
pool: build_pool(),
config: Configuration::read(),
}
}
}
pub fn build_pool() -> DbPool {
dotenv::dotenv().ok();
let config = Configuration::read();
let manager = ConnectionManager::<PgConnection>::new(config.database_url);
r2d2::Pool::builder()
.max_size(config.concurrency as u32)
.build(manager)
.unwrap_or_else(|e| panic!("Failed to create pool. {}", e))
}
pub trait SyncQuery {
type Result;
fn handle(&self, pool: &DbPool) -> Self::Result;
}
#[derive(Serialize, Deserialize)]
pub struct Configuration {
pub concurrency: usize,
pub database_url: String,
}
impl Default for Configuration {
fn default() -> Self {
let database_url = if cfg!(test) {
"postgres://postgres@localhost:5432/jirs_test".to_string()
} else {
std::env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://postgres@localhost:5432/jirs".to_string())
};
Self {
concurrency: 2,
database_url,
}
}
}
impl Configuration {
pub fn read() -> Self {
let contents: String = read_to_string(Self::config_file()).unwrap_or_default();
match toml::from_str(contents.as_str()) {
Ok(config) => config,
_ => {
let config = Configuration::default();
config.write().unwrap_or_else(|e| panic!(e));
config
}
}
}
pub fn write(&self) -> Result<(), String> {
let s = toml::to_string(self).map_err(|e| e.to_string())?;
write(Self::config_file(), s.as_str()).map_err(|e| e.to_string())?;
Ok(())
}
#[cfg(not(test))]
pub fn config_file() -> &'static str {
"db.toml"
}
#[cfg(test)]
pub fn config_file() -> &'static str {
"db.test.toml"
}
}
#[macro_export]
macro_rules! db_pool {
($self: expr) => {
&$self.pool.get().map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?
};
}
#[macro_export]
macro_rules! q {
($q: expr) => {{
let q = $q;
debug!(
"{}",
diesel::debug_query::<diesel::pg::Pg, _>(&q).to_string()
);
q
}};
}
pub struct Guard<'l> {
conn: &'l crate::db::DbPooledConn,
tm: &'l diesel::connection::AnsiTransactionManager,
}
impl<'l> Guard<'l> {
pub fn new(conn: &'l DbPooledConn) -> Result<Self, ServiceError> {
use diesel::{connection::TransactionManager, prelude::*};
let tm = conn.transaction_manager();
tm.begin_transaction(conn).map_err(|e| {
log::error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?;
Ok(Self { conn, tm })
}
pub fn run<R, F: FnOnce(&Guard) -> Result<R, ServiceError>>(
&self,
f: F,
) -> Result<R, ServiceError> {
use diesel::connection::TransactionManager;
let r = f(self);
match r {
Ok(r) => {
self.tm.commit_transaction(self.conn).map_err(|e| {
log::error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?;
Ok(r)
}
Err(e) => {
log::error!("{:?}", e);
self.tm.rollback_transaction(self.conn).map_err(|e| {
log::error!("{:?}", e);
ServiceError::DatabaseConnectionLost
})?;
Err(e)
}
}
}
}
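
Guard wraps diesel's AnsiTransactionManager so that a multi-step write either commits as a whole or rolls back on the first ServiceError; the sketch below is a hedged outline of the pattern already used by CreateProject, Register, and AcceptInvitation, with the closure body left as placeholders.

```rust
// Hedged outline, not part of this commit: the closure runs inside one database
// transaction; Ok(..) commits, Err(..) rolls back and is returned to the caller.
fn two_step_write(conn: &DbPooledConn) -> Result<(), ServiceError> {
    Guard::new(conn)?.run(|_guard| {
        // q!(first insert)...execute(conn)?;     // q! logs the SQL via diesel::debug_query
        // q!(dependent insert)...execute(conn)?; // runs in the same transaction
        Ok(())
    })
}
```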

View File

@ -1,174 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use jirs_data::{NameString, Project, ProjectCategory, ProjectId, TimeTracking, UserId};
use crate::db::DbPooledConn;
use crate::{db::DbExecutor, db_pool, errors::ServiceError, q, schema::projects::all_columns};
#[derive(Serialize, Deserialize)]
pub struct LoadCurrentProject {
pub project_id: ProjectId,
}
impl LoadCurrentProject {
pub fn execute(self, conn: &DbPooledConn) -> Result<Project, ServiceError> {
use crate::schema::projects::dsl::projects;
q!(projects.find(self.project_id))
.first::<Project>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("Project".to_string())
})
}
}
impl Message for LoadCurrentProject {
type Result = Result<Project, ServiceError>;
}
impl Handler<LoadCurrentProject> for DbExecutor {
type Result = Result<Project, ServiceError>;
fn handle(&mut self, msg: LoadCurrentProject, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct CreateProject {
pub name: NameString,
pub url: Option<String>,
pub description: Option<String>,
pub category: Option<ProjectCategory>,
pub time_tracking: Option<TimeTracking>,
}
impl CreateProject {
pub fn execute(self, conn: &DbPooledConn) -> Result<Project, ServiceError> {
use crate::schema::projects::dsl::*;
crate::db::Guard::new(conn)?.run(|_guard| {
let p = q!(diesel::insert_into(projects)
.values((
name.eq(self.name),
self.url.map(|v| url.eq(v)),
self.description.map(|v| description.eq(v)),
self.category.map(|v| category.eq(v)),
self.time_tracking.map(|v| time_tracking.eq(v)),
))
.returning(all_columns))
.get_result::<Project>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed(format!("{}", e))
})?;
crate::db::issue_statuses::CreateIssueStatus {
project_id: p.id,
position: 0,
name: "TODO".to_string(),
}
.execute(conn)?;
Ok(p)
})
}
}
impl Message for CreateProject {
type Result = Result<Project, ServiceError>;
}
impl Handler<CreateProject> for DbExecutor {
type Result = Result<Project, ServiceError>;
fn handle(&mut self, msg: CreateProject, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct UpdateProject {
pub project_id: ProjectId,
pub name: Option<NameString>,
pub url: Option<String>,
pub description: Option<String>,
pub category: Option<ProjectCategory>,
pub time_tracking: Option<TimeTracking>,
}
impl UpdateProject {
pub fn execute(self, conn: &DbPooledConn) -> Result<Project, ServiceError> {
use crate::schema::projects::dsl::*;
q!(diesel::update(projects.find(self.project_id)).set((
self.name.map(|v| name.eq(v)),
self.url.map(|v| url.eq(v)),
self.description.map(|v| description.eq(v)),
self.category.map(|v| category.eq(v)),
self.time_tracking.map(|v| time_tracking.eq(v)),
)))
.execute(conn)
.map_err(|e| ServiceError::DatabaseQueryFailed(format!("{}", e)))?;
LoadCurrentProject {
project_id: self.project_id,
}
.execute(conn)
}
}
impl Message for UpdateProject {
type Result = Result<Project, ServiceError>;
}
impl Handler<UpdateProject> for DbExecutor {
type Result = Result<Project, ServiceError>;
fn handle(&mut self, msg: UpdateProject, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct LoadProjects {
pub user_id: UserId,
}
impl LoadProjects {
pub fn execute(self, conn: &DbPooledConn) -> Result<Vec<Project>, ServiceError> {
use crate::schema::projects::dsl::*;
use crate::schema::user_projects::dsl::{project_id, user_id, user_projects};
q!(projects
.inner_join(user_projects.on(project_id.eq(id)))
.filter(user_id.eq(self.user_id))
.distinct_on(id)
.select(all_columns))
.load::<Project>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound("Project".to_string())
})
}
}
impl Message for LoadProjects {
type Result = Result<Vec<Project>, ServiceError>;
}
impl Handler<LoadProjects> for DbExecutor {
type Result = Result<Vec<Project>, ServiceError>;
fn handle(&mut self, msg: LoadProjects, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}

View File

@ -1,148 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use uuid::Uuid;
use jirs_data::msg::WsError;
use jirs_data::{Token, UserId};
use crate::{
db::{DbExecutor, DbPooledConn},
db_pool,
errors::ServiceError,
q,
};
pub struct FindUserId {
pub user_id: UserId,
}
impl FindUserId {
pub fn execute(self, conn: &DbPooledConn) -> Result<Token, ServiceError> {
use crate::schema::tokens::dsl::*;
q!(tokens.filter(user_id.eq(self.user_id)).order_by(id.desc()))
.first(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::NoBindToken)
})
}
}
impl Message for FindUserId {
type Result = Result<Token, ServiceError>;
}
impl Handler<FindUserId> for DbExecutor {
type Result = Result<Token, ServiceError>;
fn handle(&mut self, msg: FindUserId, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct FindBindToken {
pub token: Uuid,
}
impl FindBindToken {
pub fn execute(self, conn: &DbPooledConn) -> Result<Token, ServiceError> {
use crate::schema::tokens::dsl::{bind_token, tokens};
let token: Token = q!(tokens.filter(bind_token.eq(Some(self.token))))
.first(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::BindTokenNotExists)
})?;
q!(diesel::update(tokens.find(token.id)).set(bind_token.eq(None as Option<Uuid>)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToDisableBindToken)
})?;
Ok(token)
}
}
impl Message for FindBindToken {
type Result = Result<Token, ServiceError>;
}
impl Handler<FindBindToken> for DbExecutor {
type Result = Result<Token, ServiceError>;
fn handle(&mut self, msg: FindBindToken, _: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct FindAccessToken {
pub token: Uuid,
}
impl FindAccessToken {
pub fn execute(self, conn: &DbPooledConn) -> Result<Token, ServiceError> {
use crate::schema::tokens::dsl::{access_token, tokens};
q!(tokens.filter(access_token.eq(self.token)))
.first(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::AccessTokenNotExists)
})
}
}
impl Message for FindAccessToken {
type Result = Result<Token, ServiceError>;
}
impl Handler<FindAccessToken> for DbExecutor {
type Result = Result<Token, ServiceError>;
fn handle(&mut self, msg: FindAccessToken, _: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct CreateBindToken {
pub user_id: UserId,
}
impl CreateBindToken {
pub fn execute(self, conn: &DbPooledConn) -> Result<Token, ServiceError> {
use crate::schema::tokens::dsl::*;
q!(diesel::insert_into(tokens).values((
user_id.eq(self.user_id),
access_token.eq(Uuid::new_v4()),
refresh_token.eq(Uuid::new_v4()),
bind_token.eq(Some(Uuid::new_v4())),
)))
.get_result(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToCreateBindToken)
})
}
}
impl Message for CreateBindToken {
type Result = Result<Token, ServiceError>;
}
impl Handler<CreateBindToken> for DbExecutor {
type Result = Result<Token, ServiceError>;
fn handle(&mut self, msg: CreateBindToken, _: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
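
CreateBindToken and FindBindToken together implement a single-use pairing token: bind_token is populated on creation and cleared on the first successful lookup, so the same UUID cannot be redeemed twice. A hedged round-trip sketch, with the connection handle assumed:

```rust
// Hedged sketch, not part of this commit: redeeming the bind token a second time
// fails with WsError::BindTokenNotExists because FindBindToken nulls the column.
fn pair_cli_client(conn: &DbPooledConn, user_id: jirs_data::UserId) -> Result<(), ServiceError> {
    let issued = CreateBindToken { user_id }.execute(conn)?;
    let bind = issued
        .bind_token
        .expect("freshly issued tokens carry a bind token");
    let claimed = FindBindToken { token: bind }.execute(conn)?; // consumes the bind token
    assert_eq!(issued.access_token, claimed.access_token);
    Ok(())
}
```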

View File

@ -1,221 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use jirs_data::msg::WsError;
use jirs_data::{ProjectId, UserId, UserProject, UserProjectId, UserRole};
use crate::{
db::{DbExecutor, DbPooledConn},
db_pool,
errors::ServiceError,
q,
};
pub struct CurrentUserProject {
pub user_id: UserId,
}
impl Message for CurrentUserProject {
type Result = Result<UserProject, ServiceError>;
}
impl Handler<CurrentUserProject> for DbExecutor {
type Result = Result<UserProject, ServiceError>;
fn handle(&mut self, msg: CurrentUserProject, _: &mut Self::Context) -> Self::Result {
use crate::schema::user_projects::dsl::*;
let conn = db_pool!(self);
q!(user_projects.filter(user_id.eq(msg.user_id).and(is_current.eq(true))))
.first(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound(format!("user project {}", msg.user_id))
})
}
}
pub struct LoadUserProjects {
pub user_id: UserId,
}
impl Message for LoadUserProjects {
type Result = Result<Vec<UserProject>, ServiceError>;
}
impl Handler<LoadUserProjects> for DbExecutor {
type Result = Result<Vec<UserProject>, ServiceError>;
fn handle(&mut self, msg: LoadUserProjects, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::user_projects::dsl::*;
let conn = db_pool!(self);
q!(user_projects.filter(user_id.eq(msg.user_id)))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound(format!("user project {}", msg.user_id))
})
}
}
pub struct ChangeCurrentUserProject {
pub user_id: UserId,
pub id: UserProjectId,
}
impl ChangeCurrentUserProject {
pub fn execute(self, conn: &DbPooledConn) -> Result<UserProject, ServiceError> {
use crate::schema::user_projects::dsl::*;
crate::db::Guard::new(conn)?.run(|_guard| {
let mut user_project: UserProject =
q!(user_projects.filter(id.eq(self.id).and(user_id.eq(self.user_id))))
.first(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound(format!("user project {}", self.user_id))
})?;
q!(diesel::update(user_projects)
.set(is_current.eq(false))
.filter(user_id.eq(self.user_id)))
.execute(conn)
.map(|_| ())
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed(format!(
"setting current flag to false while updating current project {}",
self.user_id
))
})?;
q!(diesel::update(user_projects)
.set(is_current.eq(true))
.filter(id.eq(self.id).and(user_id.eq(self.user_id))))
.execute(conn)
.map(|_| ())
.map_err(|e| {
error!("{:?}", e);
ServiceError::DatabaseQueryFailed(format!(
"set current flag on project while updating current project {}",
self.user_id
))
})?;
user_project.is_current = true;
Ok(user_project)
})
}
}
impl Message for ChangeCurrentUserProject {
type Result = Result<UserProject, ServiceError>;
}
impl Handler<ChangeCurrentUserProject> for DbExecutor {
type Result = Result<UserProject, ServiceError>;
fn handle(&mut self, msg: ChangeCurrentUserProject, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct RemoveInvitedUser {
pub invited_id: UserId,
pub inviter_id: UserId,
pub project_id: ProjectId,
}
impl RemoveInvitedUser {
pub fn execute(self, conn: &DbPooledConn) -> Result<usize, ServiceError> {
use crate::schema::user_projects::dsl::*;
if self.invited_id == self.inviter_id {
return Err(ServiceError::Unauthorized);
}
q!(user_projects.filter(
user_id
.eq(self.inviter_id)
.and(project_id.eq(self.project_id))
.and(role.eq(UserRole::Owner)),
))
.first::<UserProject>(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Unauthorized
})?;
q!(diesel::delete(user_projects).filter(
user_id
.eq(self.invited_id)
.and(project_id.eq(self.project_id)),
))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::RecordNotFound(format!(
"user project user with id {} for project {}",
self.invited_id, self.project_id
))
})
}
}
impl Message for RemoveInvitedUser {
type Result = Result<(), ServiceError>;
}
impl Handler<RemoveInvitedUser> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: RemoveInvitedUser, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)?;
Ok(())
}
}
pub struct CreateUserProject {
pub user_id: UserId,
pub project_id: ProjectId,
pub is_current: bool,
pub is_default: bool,
pub role: UserRole,
}
impl CreateUserProject {
pub fn execute(self, conn: &DbPooledConn) -> Result<usize, ServiceError> {
use crate::schema::user_projects::dsl::*;
q!(diesel::insert_into(user_projects).values((
user_id.eq(self.user_id),
project_id.eq(self.project_id),
is_current.eq(self.is_current),
is_default.eq(self.is_default),
role.eq(self.role),
)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::InvalidUserProject)
})
}
}
impl Message for CreateUserProject {
type Result = Result<(), ServiceError>;
}
impl Handler<CreateUserProject> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: CreateUserProject, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)?;
Ok(())
}
}

View File

@ -1,442 +0,0 @@
use actix::{Handler, Message};
use diesel::prelude::*;
use diesel::result::Error;
use jirs_data::{msg::WsError, EmailString, ProjectId, User, UserId, UserRole, UsernameString};
use crate::db::user_projects::CreateUserProject;
use crate::{
db::{projects::CreateProject, DbExecutor, DbPooledConn},
db_pool,
errors::ServiceError,
q,
schema::users::all_columns,
};
#[derive(Debug)]
pub struct FindUser {
pub user_id: UserId,
}
impl FindUser {
pub fn execute(self, conn: &DbPooledConn) -> Result<User, ServiceError> {
use crate::schema::users::dsl::*;
q!(users.find(self.user_id)).first(conn).map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::UserNotExists(self.user_id))
})
}
}
impl Message for FindUser {
type Result = Result<User, ServiceError>;
}
impl Handler<FindUser> for DbExecutor {
type Result = Result<User, ServiceError>;
fn handle(&mut self, msg: FindUser, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct LookupUser {
pub name: String,
pub email: String,
}
impl LookupUser {
pub fn execute(self, conn: &DbPooledConn) -> Result<User, ServiceError> {
use crate::schema::users::dsl::*;
q!(users
.distinct_on(id)
.filter(email.eq(self.email.as_str()))
.filter(name.eq(self.name.as_str())))
.first(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::NoMatchingPair(self.name, self.email))
})
}
}
impl Message for LookupUser {
type Result = Result<User, ServiceError>;
}
impl Handler<LookupUser> for DbExecutor {
type Result = Result<User, ServiceError>;
fn handle(&mut self, msg: LookupUser, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct LoadProjectUsers {
pub project_id: i32,
}
impl LoadProjectUsers {
pub fn execute(self, conn: &DbPooledConn) -> Result<Vec<User>, ServiceError> {
use crate::schema::user_projects::dsl::{project_id, user_id, user_projects};
use crate::schema::users::dsl::*;
q!(users
.distinct_on(id)
.inner_join(user_projects.on(user_id.eq(id)))
.filter(project_id.eq(self.project_id))
.select(all_columns))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToLoadProjectUsers)
})
}
}
impl Message for LoadProjectUsers {
type Result = Result<Vec<User>, ServiceError>;
}
impl Handler<LoadProjectUsers> for DbExecutor {
type Result = Result<Vec<User>, ServiceError>;
fn handle(&mut self, msg: LoadProjectUsers, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct LoadIssueAssignees {
pub issue_id: i32,
}
impl LoadIssueAssignees {
pub fn execute(self, conn: &DbPooledConn) -> Result<Vec<User>, ServiceError> {
use crate::schema::issue_assignees::dsl::{issue_assignees, issue_id, user_id};
use crate::schema::users::dsl::*;
q!(users
.distinct_on(id)
.inner_join(issue_assignees.on(user_id.eq(id)))
.filter(issue_id.eq(self.issue_id))
.select(users::all_columns()))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToLoadAssignees)
})
}
}
impl Message for LoadIssueAssignees {
type Result = Result<Vec<User>, ServiceError>;
}
impl Handler<LoadIssueAssignees> for DbExecutor {
type Result = Result<Vec<User>, ServiceError>;
fn handle(&mut self, msg: LoadIssueAssignees, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct CreateUser {
pub name: UsernameString,
pub email: EmailString,
}
impl CreateUser {
pub fn execute(self, conn: &DbPooledConn) -> Result<User, ServiceError> {
use crate::schema::users::dsl::*;
q!(diesel::insert_into(users)
.values((name.eq(self.name.as_str()), email.eq(self.email.as_str()))))
.get_result(conn)
.map_err(|e| {
error!("{:?}", e);
let ws = match e {
Error::InvalidCString(_) => WsError::InvalidPair(self.name, self.email),
Error::DatabaseError(diesel::result::DatabaseErrorKind::UniqueViolation, _) => {
WsError::TakenPair(self.name, self.email)
}
Error::DatabaseError(_, _) => WsError::InvalidPair(self.name, self.email),
Error::NotFound => WsError::InvalidPair(self.name, self.email),
Error::QueryBuilderError(_) => WsError::InvalidPair(self.name, self.email),
Error::DeserializationError(_) => WsError::InvalidPair(self.name, self.email),
Error::SerializationError(_) => WsError::InvalidPair(self.name, self.email),
Error::RollbackTransaction => WsError::InvalidPair(self.name, self.email),
Error::AlreadyInTransaction => WsError::InvalidPair(self.name, self.email),
Error::__Nonexhaustive => WsError::InvalidPair(self.name, self.email),
};
ServiceError::Error(ws)
})
}
}
impl Message for CreateUser {
type Result = Result<User, ServiceError>;
}
impl Handler<CreateUser> for DbExecutor {
type Result = Result<User, ServiceError>;
fn handle(&mut self, msg: CreateUser, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct Register {
pub name: UsernameString,
pub email: EmailString,
pub project_id: Option<ProjectId>,
pub role: UserRole,
}
impl Register {
pub fn execute(self, conn: &DbPooledConn) -> Result<(), ServiceError> {
let Register {
name: given_name,
email: given_email,
project_id: given_project_id,
role: given_role,
} = self;
crate::db::Guard::new(conn)?.run(|_guard| {
if count_matching_users(given_name.as_str(), given_email.as_str(), conn) > 0 {
return Err(ServiceError::Error(WsError::InvalidLoginPair));
}
let current_project_id: ProjectId = match given_project_id {
Some(current_project_id) => current_project_id,
_ => {
CreateProject {
name: "initial".to_string(),
url: None,
description: None,
category: None,
time_tracking: None,
}
.execute(conn)?
.id
}
};
let user: User = CreateUser {
name: given_name,
email: given_email,
}
.execute(conn)?;
CreateUserProject {
user_id: user.id,
project_id: current_project_id,
is_current: true,
is_default: true,
role: given_role,
}
.execute(conn)?;
Ok(())
})
}
}
impl Message for Register {
type Result = Result<(), ServiceError>;
}
impl Handler<Register> for DbExecutor {
type Result = Result<(), ServiceError>;
fn handle(&mut self, msg: Register, _ctx: &mut Self::Context) -> Self::Result {
let conn = db_pool!(self);
msg.execute(conn)
}
}
pub struct LoadInvitedUsers {
pub user_id: UserId,
}
impl Message for LoadInvitedUsers {
type Result = Result<Vec<User>, ServiceError>;
}
impl Handler<LoadInvitedUsers> for DbExecutor {
type Result = Result<Vec<User>, ServiceError>;
fn handle(&mut self, msg: LoadInvitedUsers, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::invitations::dsl::{email as i_email, invitations, invited_by_id};
use crate::schema::users::dsl::{email as u_email, users};
let conn = db_pool!(self);
q!(users
.inner_join(invitations.on(i_email.eq(u_email)))
.filter(invited_by_id.eq(msg.user_id))
.select(users::all_columns()))
.load(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToLoadInvitedUsers)
})
}
}
fn count_matching_users(name: &str, email: &str, conn: &DbPooledConn) -> i64 {
use crate::schema::users::dsl;
q!(dsl::users
.filter(dsl::email.eq(email).and(dsl::name.ne(name)))
.or_filter(dsl::email.ne(email).and(dsl::name.eq(name)))
.or_filter(dsl::email.eq(email).and(dsl::name.eq(name)))
.count())
.get_result::<i64>(conn)
.unwrap_or(1)
}
pub struct UpdateAvatarUrl {
pub user_id: UserId,
pub avatar_url: Option<String>,
}
impl Message for UpdateAvatarUrl {
type Result = Result<User, ServiceError>;
}
impl Handler<UpdateAvatarUrl> for DbExecutor {
type Result = Result<User, ServiceError>;
fn handle(&mut self, msg: UpdateAvatarUrl, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::users::dsl::{avatar_url, id, users};
let conn = db_pool!(self);
q!(diesel::update(users)
.set(avatar_url.eq(msg.avatar_url))
.filter(id.eq(msg.user_id)))
.execute(conn)
.map_err(|e| {
error!("{:?}", e);
ServiceError::Error(WsError::FailedToChangeAvatar)
})?;
FindUser {
user_id: msg.user_id,
}
.execute(conn)
}
}
pub struct ProfileUpdate {
pub user_id: UserId,
pub name: String,
pub email: String,
}
impl Message for ProfileUpdate {
type Result = Result<User, ServiceError>;
}
impl Handler<ProfileUpdate> for DbExecutor {
type Result = Result<User, ServiceError>;
fn handle(&mut self, msg: ProfileUpdate, _ctx: &mut Self::Context) -> Self::Result {
use crate::schema::users::dsl::{email, id, name, users};
let conn = db_pool!(self);
q!(diesel::update(users)
.set((email.eq(msg.email), name.eq(msg.name)))
.filter(id.eq(msg.user_id)))
.execute(conn)
.map_err(|e| ServiceError::DatabaseQueryFailed(format!("{}", e)))?;
q!(users.find(msg.user_id))
.first(conn)
.map_err(|e| ServiceError::DatabaseQueryFailed(format!("{}", e)))
}
}
#[cfg(test)]
mod tests {
use diesel::connection::TransactionManager;
use jirs_data::{Project, ProjectCategory};
use crate::db::build_pool;
use super::*;
#[test]
fn check_collision() {
use crate::schema::projects::dsl::projects;
use crate::schema::user_projects::dsl::user_projects;
use crate::schema::users::dsl::users;
let pool = build_pool();
let conn = &pool.get().unwrap();
let tm = conn.transaction_manager();
tm.begin_transaction(conn).unwrap();
diesel::delete(user_projects).execute(conn).unwrap();
diesel::delete(users).execute(conn).unwrap();
diesel::delete(projects).execute(conn).unwrap();
let project: Project = {
use crate::schema::projects::dsl::*;
diesel::insert_into(projects)
.values((
name.eq("baz".to_string()),
url.eq("/uz".to_string()),
description.eq("None".to_string()),
category.eq(ProjectCategory::Software),
))
.get_result::<Project>(conn)
.unwrap()
};
let user: User = {
use crate::schema::users::dsl::*;
diesel::insert_into(users)
.values((
name.eq("Foo".to_string()),
email.eq("foo@example.com".to_string()),
))
.get_result(conn)
.unwrap()
};
{
use crate::schema::user_projects::dsl::*;
diesel::insert_into(user_projects)
.values((
user_id.eq(user.id),
project_id.eq(project.id),
is_current.eq(true),
is_default.eq(true),
))
.execute(conn)
.unwrap();
}
let res1 = count_matching_users("Foo", "bar@example.com", conn);
let res2 = count_matching_users("Bar", "foo@example.com", conn);
let res3 = count_matching_users("Foo", "foo@example.com", conn);
tm.rollback_transaction(conn).unwrap();
assert_eq!(res1, 1);
assert_eq!(res2, 1);
assert_eq!(res3, 1);
}
}

View File

@ -1,110 +0,0 @@
use {
crate::errors::{HighlightError, ServiceError},
actix::{Actor, Handler, SyncContext},
serde::{Deserialize, Serialize},
std::fs::*,
std::sync::Arc,
syntect::{
easy::HighlightLines,
highlighting::{Style, ThemeSet},
parsing::SyntaxSet,
},
};
mod load;
lazy_static::lazy_static! {
pub static ref THEME_SET: Arc<ThemeSet> = Arc::new(load::integrated_themeset());
pub static ref SYNTAX_SET: Arc<SyntaxSet> = Arc::new(load::integrated_syntaxset());
}
fn hi<'l>(code: &'l str, lang: &'l str) -> Result<Vec<(Style, &'l str)>, ServiceError> {
let set = SYNTAX_SET
.as_ref()
.find_syntax_by_name(lang)
.ok_or_else(|| ServiceError::Highlight(HighlightError::UnknownLanguage))?;
let theme: &syntect::highlighting::Theme = THEME_SET
.as_ref()
.themes
.get("GitHub")
.ok_or_else(|| ServiceError::Highlight(HighlightError::UnknownTheme))?;
let mut hi = HighlightLines::new(set, theme);
Ok(hi.highlight(code, SYNTAX_SET.as_ref()))
}
#[derive(Debug, Default)]
pub struct HighlightActor {}
impl Actor for HighlightActor {
type Context = SyncContext<Self>;
}
#[derive(actix::Message)]
#[rtype(result = "Result<Vec<u8>, ServiceError>")]
pub struct HighlightCode {
pub code: String,
pub lang: String,
}
impl Handler<HighlightCode> for HighlightActor {
type Result = Result<Vec<u8>, ServiceError>;
fn handle(&mut self, msg: HighlightCode, _ctx: &mut Self::Context) -> Self::Result {
let res = hi(&msg.code, &msg.lang)?;
bincode::serialize(&res)
.map_err(|_| ServiceError::Highlight(HighlightError::ResultUnserializable))
}
}
#[derive(Serialize, Deserialize)]
pub struct Configuration {
pub port: usize,
pub bind: String,
}
impl Default for Configuration {
fn default() -> Self {
Self {
port: std::env::var("HI_PORT")
.map_err(|_| ())
.and_then(|s| s.parse().map_err(|_| ()))
.unwrap_or_else(|_| 6541),
bind: std::env::var("HI_BIND").unwrap_or_else(|_| "0.0.0.0".to_string()),
}
}
}
impl Configuration {
pub fn addr(&self) -> String {
format!("{}:{}", self.bind, self.port)
}
pub fn read() -> Self {
let contents: String = read_to_string(Self::config_file()).unwrap_or_default();
match toml::from_str(contents.as_str()) {
Ok(config) => config,
_ => {
let config = Configuration::default();
config.write().unwrap_or_else(|e| panic!(e));
config
}
}
}
pub fn write(&self) -> Result<(), String> {
let s = toml::to_string(self).map_err(|e| e.to_string())?;
write(Self::config_file(), s.as_str()).map_err(|e| e.to_string())?;
Ok(())
}
#[cfg(not(test))]
pub fn config_file() -> &'static str {
"highlight.toml"
}
#[cfg(test)]
pub fn config_file() -> &'static str {
"highlight.test.toml"
}
}
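
HighlightCode answers with a bincode buffer rather than a typed value, so the consumer deserializes on its side; a hedged round trip is sketched below, assuming the address handle from main and assuming syntect's Style also implements Deserialize (its Serialize impl is what the handler above relies on).

```rust
// Hedged sketch, not part of this commit: the actor serializes Vec<(Style, &str)>,
// which bincode lets us read back as the owned Vec<(Style, String)>.
use syntect::highlighting::Style;

async fn highlight_snippet(
    hi_addr: actix::Addr<HighlightActor>, // assumed: started via SyncArbiter in main
    code: String,
    lang: String,
) -> Result<Vec<(Style, String)>, String> {
    let bytes = hi_addr
        .send(HighlightCode { code, lang })
        .await
        .map_err(|mailbox_error| mailbox_error.to_string())? // actix mailbox failure
        .map_err(|_service_error| "highlighting failed".to_string())?;
    bincode::deserialize::<Vec<(Style, String)>>(&bytes).map_err(|e| e.to_string())
}
```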

View File

@ -1,99 +0,0 @@
use std::fs::*;
use actix::{Actor, SyncContext};
// use lettre;
use serde::{Deserialize, Serialize};
pub mod invite;
pub mod welcome;
pub type MailTransport = lettre::SmtpTransport;
pub struct MailExecutor {
pub transport: MailTransport,
pub config: Configuration,
}
impl Actor for MailExecutor {
type Context = SyncContext<Self>;
}
impl Default for MailExecutor {
fn default() -> Self {
let config = Configuration::read();
Self {
transport: mail_transport(&config),
config,
}
}
}
fn mail_client(config: &Configuration) -> lettre::SmtpClient {
let mail_user = config.user.as_str();
let mail_pass = config.pass.as_str();
let mail_host = config.host.as_str();
lettre::SmtpClient::new_simple(mail_host)
.expect("Failed to init SMTP client")
.credentials(lettre::smtp::authentication::Credentials::new(
mail_user.to_string(),
mail_pass.to_string(),
))
.connection_reuse(lettre::smtp::ConnectionReuseParameters::ReuseUnlimited)
.smtp_utf8(true)
}
fn mail_transport(config: &Configuration) -> MailTransport {
mail_client(config).transport()
}
#[derive(Serialize, Deserialize)]
pub struct Configuration {
pub concurrency: usize,
pub user: String,
pub pass: String,
pub host: String,
pub from: String,
}
impl Default for Configuration {
fn default() -> Self {
Self {
concurrency: 2,
user: "apikey".to_string(),
pass: "YOUR-TOKEN".to_string(),
host: "smtp.sendgrid.net".to_string(),
from: "contact@jirs.pl".to_string(),
}
}
}
impl Configuration {
pub fn read() -> Self {
let contents: String = read_to_string(Self::config_file()).unwrap_or_default();
match toml::from_str(contents.as_str()) {
Ok(config) => config,
_ => {
let config = Configuration::default();
config.write().unwrap_or_else(|e| panic!(e));
config
}
}
}
pub fn write(&self) -> Result<(), String> {
let s = toml::to_string(self).map_err(|e| e.to_string())?;
write(Self::config_file(), s.as_str()).map_err(|e| e.to_string())?;
Ok(())
}
#[cfg(not(test))]
fn config_file() -> &'static str {
"mail.toml"
}
#[cfg(test)]
fn config_file() -> &'static str {
"mail.test.toml"
}
}

View File

@ -2,81 +2,67 @@
 #![feature(vec_remove_item)]
 #![recursion_limit = "256"]
-#[macro_use]
-extern crate diesel;
-#[macro_use]
-extern crate log;
-use actix::Actor;
-// use actix_cors::Cors;
-#[cfg(feature = "local-storage")]
-use actix_files as fs;
-use actix_web::{App, HttpServer};
-use crate::ws::WsServer;
-// use actix_web::http::Method;
-pub mod db;
+use {
+    actix::Actor,
+    actix_web::{App, HttpServer},
+};
 pub mod errors;
-pub mod hi;
-pub mod mail;
-pub mod middleware;
-pub mod models;
-pub mod schema;
-pub mod utils;
-pub mod web;
-pub mod ws;
+macro_rules! featured {
+    ($app: ident, $feature: expr, $connect: expr) => {
+        #[cfg(feature = $feature)]
+        let $app = $connect;
+    };
+}
 #[actix_rt::main]
 async fn main() -> Result<(), String> {
     dotenv::dotenv().ok();
     pretty_env_logger::init();
-    let web_config = web::Configuration::read();
-    std::fs::create_dir_all(web_config.tmp_dir.as_str()).map_err(|e| e.to_string())?;
-    #[cfg(feature = "local-storage")]
-    if !web_config.filesystem.is_empty() {
-        let filesystem = &web_config.filesystem;
-        std::fs::create_dir_all(filesystem.store_path.as_str()).map_err(|e| e.to_string())?;
-    }
+    let web_config = jirs_config::web::Configuration::read();
     let db_addr = actix::SyncArbiter::start(
-        crate::db::Configuration::read().concurrency,
-        crate::db::DbExecutor::default,
+        jirs_config::database::Configuration::read().concurrency,
+        database_actor::DbExecutor::default,
     );
     let mail_addr = actix::SyncArbiter::start(
-        crate::mail::Configuration::read().concurrency,
-        crate::mail::MailExecutor::default,
+        jirs_config::mail::Configuration::read().concurrency,
+        mail_actor::MailExecutor::default,
+    );
+    let hi_addr = actix::SyncArbiter::start(
+        jirs_config::hi::Configuration::read().concurrency,
+        highlight_actor::HighlightActor::default,
+    );
+    #[cfg(feature = "local-storage")]
+    let fs_addr = actix::SyncArbiter::start(
+        jirs_config::fs::Configuration::read().concurrency,
+        filesystem_actor::FileSystemExecutor::default,
     );
-    let hi_addr = actix::SyncArbiter::start(10, crate::hi::HighlightActor::default);
-    let ws_server = WsServer::default().start();
+    let ws_server = websocket_actor::server::WsServer::start_default();
     HttpServer::new(move || {
-        let app = App::new()
-            .wrap(actix_web::middleware::Logger::default())
-            .data(ws_server.clone())
-            .data(db_addr.clone())
-            .data(mail_addr.clone())
-            .data(hi_addr.clone())
-            .data(crate::db::build_pool())
-            .service(crate::ws::index)
-            .service(actix_web::web::scope("/avatar").service(crate::web::avatar::upload));
-        #[cfg(feature = "local-storage")]
-        let web_config = web::Configuration::read();
-        #[cfg(feature = "local-storage")]
-        let app = if !web_config.filesystem.is_empty() {
-            let filesystem = &web_config.filesystem;
-            app.service(fs::Files::new(
-                filesystem.client_path.as_str(),
-                filesystem.store_path.as_str(),
-            ))
-        } else {
-            app
-        };
+        let app = App::new().wrap(actix_web::middleware::Logger::default());
+        // data step
+        let app = app
+            .data(ws_server.clone())
+            .data(db_addr.clone())
+            .data(mail_addr.clone())
+            .data(hi_addr.clone())
+            .data(database_actor::build_pool());
+        featured! { app, "local-storage", app.data(fs_addr.clone()) }
+        ;
+        // services step
+        let app = app
+            .service(websocket_actor::index)
+            .service(actix_web::web::scope("/avatar").service(web_actor::avatar::upload));
+        featured! { app, "local-storage", app.service(filesystem_actor::service()) }
+        ;
         app
     })
     .workers(web_config.concurrency)
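
For clarity, the featured! macro introduced above only hides a rebinding of app behind a Cargo feature; an invocation such as featured! { app, "local-storage", app.data(fs_addr.clone()) } expands to roughly the following (the lone ; after each invocation is just an empty statement):

#[cfg(feature = "local-storage")]
let app = app.data(fs_addr.clone());

With the local-storage feature disabled, the cfg attribute removes the statement entirely and the earlier app binding is used as-is.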

View File

@ -1 +0,0 @@

View File

@ -1,214 +0,0 @@
#[cfg(feature = "aws-s3")]
use std::fs::File;
#[cfg(feature = "aws-s3")]
use std::io::Read;
use std::io::Write;
use actix::Addr;
use actix_multipart::{Field, Multipart};
use actix_web::http::header::ContentDisposition;
use actix_web::web::Data;
use actix_web::{post, web, Error, HttpResponse};
use futures::executor::block_on;
use futures::{StreamExt, TryStreamExt};
#[cfg(feature = "aws-s3")]
use rusoto_s3::{PutObjectRequest, S3Client, S3};
use jirs_data::{User, UserId, WsMsg};
use crate::db::authorize_user::AuthorizeUser;
use crate::db::user_projects::CurrentUserProject;
use crate::db::users::UpdateAvatarUrl;
use crate::db::DbExecutor;
#[cfg(feature = "aws-s3")]
use crate::web::AmazonS3Storage;
use crate::ws::InnerMsg::BroadcastToChannel;
use crate::ws::WsServer;
#[post("/")]
pub async fn upload(
mut payload: Multipart,
db: Data<Addr<DbExecutor>>,
ws: Data<Addr<WsServer>>,
) -> Result<HttpResponse, Error> {
let mut user_id: Option<UserId> = None;
let mut avatar_url: Option<String> = None;
while let Ok(Some(field)) = payload.try_next().await {
let disposition: ContentDisposition = match field.content_disposition() {
Some(d) => d,
_ => continue,
};
if !disposition.is_form_data() {
return Ok(HttpResponse::BadRequest().finish());
}
match disposition.get_name() {
Some("token") => {
user_id = Some(handle_token(field, db.clone()).await?);
}
Some("avatar") => {
let id = user_id.ok_or_else(|| HttpResponse::Unauthorized().finish())?;
avatar_url = Some(handle_image(id, field, disposition, db.clone()).await?);
}
_ => continue,
};
}
let user_id = match user_id {
Some(id) => id,
_ => return Ok(HttpResponse::Unauthorized().finish()),
};
let project_id = match block_on(db.send(CurrentUserProject { user_id })) {
Ok(Ok(user_project)) => user_project.project_id,
_ => return Ok(HttpResponse::UnprocessableEntity().finish()),
};
match (user_id, avatar_url) {
(user_id, Some(avatar_url)) => {
let user = update_user_avatar(user_id, avatar_url.clone(), db).await?;
ws.send(BroadcastToChannel(
project_id,
WsMsg::AvatarUrlChanged(user.id, avatar_url),
))
.await
.map_err(|_| HttpResponse::UnprocessableEntity().finish())?;
Ok(HttpResponse::NoContent().finish())
}
_ => Ok(HttpResponse::UnprocessableEntity().finish()),
}
}
async fn update_user_avatar(
user_id: UserId,
new_url: String,
db: Data<Addr<DbExecutor>>,
) -> Result<User, Error> {
match db
.send(UpdateAvatarUrl {
user_id,
avatar_url: Some(new_url),
})
.await
{
Ok(Ok(user)) => Ok(user),
Ok(Err(e)) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
Err(e) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
}
}
async fn handle_token(mut field: Field, db: Data<Addr<DbExecutor>>) -> Result<UserId, Error> {
let mut f: Vec<u8> = vec![];
while let Some(chunk) = field.next().await {
let data = chunk.unwrap();
f = web::block(move || f.write_all(&data).map(|_| f)).await?;
}
let access_token = String::from_utf8(f)
.unwrap_or_default()
.parse::<uuid::Uuid>()
.map_err(|_| HttpResponse::Unauthorized().finish())?;
match db.send(AuthorizeUser { access_token }).await {
Ok(Ok(user)) => Ok(user.id),
Ok(Err(e)) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
Err(e) => {
error!("{:?}", e);
Err(HttpResponse::Unauthorized().finish().into())
}
}
}
async fn handle_image(
user_id: UserId,
mut field: Field,
disposition: ContentDisposition,
_db: Data<Addr<DbExecutor>>,
) -> Result<String, Error> {
let web_config = crate::web::Configuration::read();
let mut new_link = None;
let filename = disposition.get_filename().unwrap();
let tmp_file_path = format!("{}/{}-{}", web_config.tmp_dir, user_id, filename);
let mut f = web::block(move || std::fs::File::create(tmp_file_path))
.await
.unwrap();
// Write temp file
while let Some(chunk) = field.next().await {
let data = chunk.unwrap();
f = web::block(move || f.write_all(&data).map(|_| f)).await?;
}
// Write public visible file
#[cfg(feature = "local-storage")]
if !web_config.filesystem.is_empty() {
let filesystem = &web_config.filesystem;
std::fs::copy(
format!("{}/{}-{}", web_config.tmp_dir, user_id, filename),
format!("{}/{}-{}", filesystem.store_path, user_id, filename),
)
.map_err(|_| HttpResponse::InsufficientStorage().finish())?;
new_link = Some(format!(
"{proto}://{bind}{port}{client_path}/{user_id}-{filename}",
proto = if web_config.ssl { "https" } else { "http" },
bind = web_config.bind,
port = match web_config.port.as_str() {
"80" | "443" => "".to_string(),
p => format!(":{}", p),
},
client_path = filesystem.client_path,
user_id = user_id,
filename = filename
));
}
// Upload to AWS S3
#[cfg(feature = "aws-s3")]
if !web_config.s3.is_empty() {
let s3 = &web_config.s3;
s3.set_variables();
let key = format!("{}-{}", user_id, filename);
let mut tmp_file = File::open(format!("{}/{}-{}", web_config.tmp_dir, user_id, filename))
.map_err(|_| HttpResponse::InternalServerError())?;
let mut buffer: Vec<u8> = vec![];
tmp_file
.read_to_end(&mut buffer)
.map_err(|_| HttpResponse::InternalServerError())?;
let client = S3Client::new(s3.region());
let put_object = PutObjectRequest {
bucket: s3.bucket.clone(),
key: key.clone(),
body: Some(buffer.into()),
..Default::default()
};
let _id = client
.put_object(put_object)
.await
.map_err(|_| HttpResponse::InternalServerError())?;
new_link = Some(aws_s3_url(key.as_str(), s3));
}
std::fs::remove_file(format!("{}/{}-{}", web_config.tmp_dir, user_id, filename).as_str())
.unwrap_or_default();
Ok(new_link.unwrap_or_default())
}
#[cfg(feature = "aws-s3")]
fn aws_s3_url(key: &str, config: &AmazonS3Storage) -> String {
format!(
"https://{bucket}.s3.{region}.amazonaws.com/{key}",
bucket = config.bucket,
region = config.region_name,
key = key
)
}
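
This handler is removed from jirs-server itself; the new main.rs above mounts web_actor::avatar::upload under the /avatar scope instead. As a worked example of the aws_s3_url format at the end of the file, with a purely hypothetical bucket and region:

// aws_s3_url("42-avatar.png", &config) where config.bucket == "jirs-uploads"
// and config.region_name == "eu-central-1" produces the URL below.
let url = format!(
    "https://{bucket}.s3.{region}.amazonaws.com/{key}",
    bucket = "jirs-uploads",   // hypothetical bucket name
    region = "eu-central-1",   // hypothetical region_name
    key = "42-avatar.png"      // "{user_id}-{filename}"
);
assert_eq!(url, "https://jirs-uploads.s3.eu-central-1.amazonaws.com/42-avatar.png");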

Some files were not shown because too many files have changed in this diff