From 233f52d7f8a6c1067f2ad9e8b965d30deb457ee0 Mon Sep 17 00:00:00 2001 From: eraden Date: Sun, 18 Feb 2024 06:31:50 +0100 Subject: [PATCH] working on analytics filters --- Cargo.lock | 6 + crates/squadron-api/Cargo.toml | 1 + crates/squadron-api/src/ext/mod.rs | 121 ++++++ crates/squadron-api/src/http/api/analytics.rs | 386 +++++++++++++++++ .../src/http/api/authentication/api_tokens.rs | 221 +++++++++- .../http/api/authentication/reset_password.rs | 2 +- crates/squadron-api/src/http/api/mod.rs | 4 + crates/squadron-api/src/main.rs | 1 + .../squadron-api/src/utils/issues_filter.rs | 407 ++++++++++++++++++ crates/squadron-api/src/utils/mod.rs | 7 + plane_db.sql | 8 +- scripts/rebuild_db.sh | 7 + 12 files changed, 1148 insertions(+), 23 deletions(-) create mode 100644 crates/squadron-api/src/ext/mod.rs create mode 100644 crates/squadron-api/src/http/api/analytics.rs create mode 100644 crates/squadron-api/src/utils/issues_filter.rs create mode 100644 scripts/rebuild_db.sh diff --git a/Cargo.lock b/Cargo.lock index 5d8f4ed..9bc7963 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -316,6 +316,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ + "log", "memchr", ] @@ -2099,6 +2100,9 @@ name = "memchr" version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +dependencies = [ + "log", +] [[package]] name = "mime" @@ -2966,6 +2970,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", + "log", "memchr", "regex-syntax 0.8.2", ] @@ -4142,6 +4147,7 @@ dependencies = [ "oauth2-signin", "password-hash", "rand", + "regex", "reqwest", "rumqttc", "rust-s3", diff --git a/crates/squadron-api/Cargo.toml b/crates/squadron-api/Cargo.toml index cd08c15..8e95f93 100644 --- a/crates/squadron-api/Cargo.toml +++ b/crates/squadron-api/Cargo.toml @@ -31,6 +31,7 @@ oauth2-google = "0.2.0" oauth2-signin = "0.2.0" password-hash = "0.5.0" rand = { version = "0.8.5", features = ["serde"] } +regex = { version = "1.10.3", features = ["logging", "pattern"] } reqwest = { version = "0.11.23", default-features = false, features = ["rustls", "tokio-rustls", "tokio-socks", "multipart"] } rumqttc = { version = "0.23.0", features = ["use-rustls"] } rust-s3 = { version = "0.33.0", features = ["tokio-rustls-tls", "futures-util", "futures-io"] } diff --git a/crates/squadron-api/src/ext/mod.rs b/crates/squadron-api/src/ext/mod.rs new file mode 100644 index 0000000..2f0e40d --- /dev/null +++ b/crates/squadron-api/src/ext/mod.rs @@ -0,0 +1,121 @@ +use sea_orm::sea_query::SeaRc; +use sea_orm::*; + +pub struct JoinBuilder +where + E: EntityTrait, +{ + select: Select, +} + +impl JoinBuilder +where + E: EntityTrait, +{ + pub fn new(s: Select) -> Self { + Self { select: s } + } + + #[must_use] + pub fn left_condition( + mut self, + rel: impl RelationTrait, + col: impl ColumnTrait, + v: &V, + ) -> Self + where + V: Into + Send + Sync + 'static + Clone, + { + self.select = self.select.join( + JoinType::Join, + rel.def().on_condition(move |left, _right| { + use sea_query::*; + + Expr::col((left, col)).eq(v.clone()).into_condition() + }), + ); + self + } + #[must_use] + pub fn right_condition( + mut self, + rel: impl RelationTrait, + col: impl ColumnTrait, + v: &V, + ) -> Self + where + V: Into + 
Send + Sync + 'static + Clone,
+    {
+        self.select = self.select.join(
+            JoinType::Join,
+            rel.def().on_condition(move |_left, right| {
+                use sea_query::*;
+
+                let expr = Expr::col((right, col));
+                expr.eq(v.clone()).into_condition()
+            }),
+        );
+        self
+    }
+    #[must_use]
+    pub fn right_condition_with<Col, F>(mut self, rel: impl RelationTrait, col: Col, f: F) -> Self
+    where
+        Col: ColumnTrait,
+        F: FnOnce(sea_query::Expr) -> sea_query::SimpleExpr + 'static + Send + Sync,
+    {
+        self.select = self.select.join(
+            JoinType::Join,
+            rel.def().on_condition(move |_left, right| {
+                use sea_query::*;
+                f(sea_query::Expr::col((right, col))).into_condition()
+            }),
+        );
+        self
+    }
+
+    #[must_use]
+    pub fn finish(self) -> Select<E> {
+        self.select
+    }
+}
+
+pub trait JoinOnCondition<E>
+where
+    E: EntityTrait,
+{
+    fn with_join(self) -> JoinBuilder<E>;
+}
+
+impl<E: EntityTrait> JoinOnCondition<E> for Select<E> {
+    fn with_join(self) -> JoinBuilder<E> {
+        JoinBuilder::new(self)
+    }
+}
+
+pub trait WithSlug {
+    fn with_slug(self, slug: String) -> Self;
+}
+
+impl WithSlug for Select<entities::api_tokens::Entity> {
+    fn with_slug(self, slug: String) -> Self {
+        self.with_join()
+            .right_condition(
+                entities::api_tokens::Relation::Workspaces1,
+                entities::workspaces::Column::Slug,
+                slug.clone(),
+            )
+            .finish()
+    }
+}
+
+impl WithSlug for Select<entities::issues::Entity> {
+    fn with_slug(self, slug: String) -> Self {
+        self.with_join()
+            .right_condition(
+                entities::issues::Relation::Workspaces1,
+                entities::workspaces::Column::Slug,
+                slug.clone(),
+            )
+            .finish()
+    }
+}
diff --git a/crates/squadron-api/src/http/api/analytics.rs b/crates/squadron-api/src/http/api/analytics.rs
new file mode 100644
index 0000000..6a3f4c0
--- /dev/null
+++ b/crates/squadron-api/src/http/api/analytics.rs
@@ -0,0 +1,386 @@
+use actix_web::web::{Data, Json, Path, ServiceConfig};
+use actix_web::{get, HttpRequest, HttpResponse};
+use entities::prelude::Issues;
+use crate::utils::issues_filter::filter_get::Filter;
+use sea_orm::sea_query::SeaRc;
+use sea_orm::*;
+use serde::Deserialize;
+use squadron_contract::WorkspaceSlug;
+use uuid::Uuid;
+
+use crate::ext::{JoinOnCondition, WithSlug};
+use crate::models::JsonError;
+use crate::utils::issues_filter;
+
+pub fn configure(config: &mut ServiceConfig) {
+    config.service(workspace_analytics);
+}
+
+static VALID_XAXIS_SEGMENT: [&str; 12] = [
+    "state_id",
+    "state__group",
+    "labels__id",
+    "assignees__id",
+    "estimate_point",
+    "issue_cycle__cycle_id",
+    "issue_module__module_id",
+    "priority",
+    "start_date",
+    "target_date",
+    "created_at",
+    "completed_at",
+];
+
+static VALID_YAXIS: [&str; 2] = ["issue_count", "estimate"];
+
+#[derive(Debug, PartialEq)]
+enum YAxis {
+    IssueCount,
+    Estimate,
+}
+
+struct YAxisVisitor;
+
+impl<'de> serde::de::Visitor<'de> for YAxisVisitor {
+    type Value = YAxis;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+        formatter.write_fmt(format_args!("Expect one of {VALID_YAXIS:?}"))
+    }
+
+    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        use YAxis::*;
+        Ok(match v {
+            "issue_count" => IssueCount,
+            "estimate" => Estimate,
+            _ => {
+                return Err(E::custom(format!(
+                    "y-axis segment {v} not in {VALID_YAXIS:?}"
+                )))
+            }
+        })
+    }
+
+    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        self.visit_str(&v)
+    }
+}
+
+impl<'de> serde::de::Deserialize<'de> for YAxis {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::de::Deserializer<'de>,
+    {
+        deserializer.deserialize_str(YAxisVisitor)
+    }
+}
+
+#[derive(Debug, PartialEq)]
+enum XAxis {
+    StateId,
+    StateGroup,
+    LabelsId,
+    AssigneesId,
+    EstimatePoint,
+    IssueCycleCycleId,
+    IssueModuleModuleId,
+    Priority,
+    StartDate,
+    TargetDate,
+    CreatedAt,
+    CompletedAt,
+}
+
+struct XAxisVisitor;
+impl<'de> serde::de::Visitor<'de> for XAxisVisitor {
+    type Value = XAxis;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+        formatter.write_fmt(format_args!("Expect one of {VALID_XAXIS_SEGMENT:?}"))
+    }
+
+    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        use XAxis::*;
+        Ok(match v {
+            "state_id" => StateId,
+            "state__group" => StateGroup,
+            "labels__id" => LabelsId,
+            "assignees__id" => AssigneesId,
+            "estimate_point" => EstimatePoint,
+            "issue_cycle__cycle_id" => IssueCycleCycleId,
+            "issue_module__module_id" => IssueModuleModuleId,
+            "priority" => Priority,
+            "start_date" => StartDate,
+            "target_date" => TargetDate,
+            "created_at" => CreatedAt,
+            "completed_at" => CompletedAt,
+            _ => {
+                return Err(E::custom(format!(
+                    "x-axis segment {v} not in {VALID_XAXIS_SEGMENT:?}"
+                )))
+            }
+        })
+    }
+
+    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        self.visit_str(&v)
+    }
+}
+
+impl<'de> serde::de::Deserialize<'de> for XAxis {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::de::Deserializer<'de>,
+    {
+        deserializer.deserialize_str(XAxisVisitor)
+    }
+}
+
+#[derive(Debug, Deserialize)]
+struct FilterInput {
+    state: Option<String>,
+    state_group: Option<String>,
+    estimate_point: Option<String>,
+    priority: Option<String>,
+    parent: Option<String>,
+    labels: Option<String>,
+    assignees: Option<String>,
+    mentions: Option<String>,
+    created_by: Option<String>,
+    name: Option<String>,
+    created_at: Option<String>,
+    updated_at: Option<String>,
+    completed_at: Option<String>,
+    start_date: Option<String>,
+    target_date: Option<String>,
+    issue_state_type: Option<String>,
+    project: Option<String>,
+    cycle: Option<String>,
+    module: Option<String>,
+    inbox_status: Option<String>,
+    sub_issue: bool,
+    subscriber: Option<String>,
+    start_target_date: bool,
+}
+
+impl issues_filter::FilterParams for FilterInput {
+    fn state(&self) -> Option<&str> {
+        self.state.as_deref()
+    }
+    fn state_group(&self) -> Option<&str> {
+        self.state_group.as_deref()
+    }
+    fn estimate_point(&self) -> Option<&str> {
+        self.estimate_point.as_deref()
+    }
+    fn priority(&self) -> Option<&str> {
+        self.priority.as_deref()
+    }
+    fn parent(&self) -> Option<&str> {
+        self.parent.as_deref()
+    }
+    fn labels(&self) -> Option<&str> {
+        self.labels.as_deref()
+    }
+    fn assignees(&self) -> Option<&str> {
+        self.assignees.as_deref()
+    }
+    fn mentions(&self) -> Option<&str> {
+        self.mentions.as_deref()
+    }
+    fn created_by(&self) -> Option<&str> {
+        self.created_by.as_deref()
+    }
+    fn name(&self) -> Option<&str> {
+        self.name.as_deref()
+    }
+    fn created_at(&self) -> Option<&str> {
+        self.created_at.as_deref()
+    }
+    fn updated_at(&self) -> Option<&str> {
+        self.updated_at.as_deref()
+    }
+    fn completed_at(&self) -> Option<&str> {
+        self.completed_at.as_deref()
+    }
+    fn start_date(&self) -> Option<&str> {
+        self.start_date.as_deref()
+    }
+    fn target_date(&self) -> Option<&str> {
+        self.target_date.as_deref()
+    }
+    fn issue_state_type(&self) -> Option<&str> {
+        self.issue_state_type.as_deref()
+    }
+    fn project(&self) -> Option<&str> {
+        self.project.as_deref()
+    }
+    fn cycle(&self) -> Option<&str> {
+        self.cycle.as_deref()
+    }
+    fn module(&self) -> Option<&str> {
+        self.module.as_deref()
+    }
+    fn inbox_status(&self) -> Option<&str> {
+        self.inbox_status.as_deref()
+    }
+    fn sub_issue(&self) -> bool {
+        self.sub_issue
+    }
+    fn subscriber(&self) -> Option<&str> {
+        self.subscriber.as_deref()
+    }
+    fn start_target_date(&self) ->
bool { + self.start_target_date + } +} + +#[derive(Debug, Deserialize)] +struct AnalyticsInput { + x_axis: XAxis, + y_axis: YAxis, + segment: Option, + #[serde(flatten)] + filter: FilterInput, +} + +#[get("/workspaces/{slug}/analytics")] +async fn workspace_analytics( + req: HttpRequest, + path: Path, + input: Json, + db: Data, +) -> Result { + let input = input.into_inner(); + + if Some(input.x_axis) == input.segment { + return Err(JsonError::new( + "Both segment and x axis cannot be same and segment should be valid", + )); + } + let slug = path.into_inner(); + let mut issues_query = Issues::find().with_slug(slug.clone()); + { + let Filter { + state_in, + state_group_in, + estimate_point_in, + priority_in, + parent_in, + labels_in, + assignees_in, + issue_mention_ids_in, + cycle_ids_in, + module_ids_in, + created_by_in, + project_in, + subscriber_ids_in, + name_contains, + created_at_in, + updated_at_in, + completed_at_in, + start_date_in, + target_date_in, + issue_state_type_in, + inbox_status_in, + parent_is_null, + target_date_isnull, + start_date_isnull, + } = Filter::default().filter_params(&input.filter); + + /* + pub created_at: DateTimeWithTimeZone, + pub updated_at: DateTimeWithTimeZone, + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub name: String, + #[sea_orm(column_type = "JsonBinary")] + pub description: Json, + pub priority: String, + pub start_date: Option, + pub target_date: Option, + pub sequence_id: i32, + pub created_by_id: Option, + pub parent_id: Option, + pub project_id: Uuid, + pub state_id: Option, + pub updated_by_id: Option, + pub workspace_id: Uuid, + #[sea_orm(column_type = "Text")] + pub description_html: String, + #[sea_orm(column_type = "Text", nullable)] + pub description_stripped: Option, + pub completed_at: Option, + #[sea_orm(column_type = "Double")] + pub sort_order: f64, + pub estimate_point: Option, + pub archived_at: Option, + pub is_draft: bool, + pub external_id: Option, + pub external_source: Option, + */ + + if let Some(v) = state_in { + use entities::issues::Column; + issues_query = issues_query.filter(Column::StateId.is_in(v)); + } + if let Some(v) = state_group_in { + use entities::issues::Relation; + use entities::states::Column; + issues_query = issues_query + .with_join() + .right_condition_with(Relation::States, Column::Group, |expr| expr.is_in(v)) + .finish(); + } + if let Some(v) = estimate_point_in {} + if let Some(v) = priority_in {} + if let Some(v) = parent_in {} + if let Some(v) = labels_in {} + if let Some(v) = assignees_in {} + if let Some(v) = issue_mention_ids_in {} + if let Some(v) = cycle_ids_in {} + if let Some(v) = module_ids_in {} + if let Some(v) = created_by_in {} + if let Some(v) = project_in { + use entities::issues::Column; + issues_query = issues_query.filter(Column::ProjectId.is_in(v)); + } + if let Some(v) = subscriber_ids_in {} + if let Some(v) = name_contains { + use entities::issues::Column; + issues_query = issues_query.filter(Column::Name.contains(v)); + } + if let Some(v) = issue_state_type_in {} + if let Some(v) = inbox_status_in {} + // dates + let range = created_at_in; + let range = updated_at_in; + let range = completed_at_in; + let range = start_date_in; + let range = target_date_in; + + if parent_is_null { + issues_query = issues_query.filter(entities::issues::Column::ParentId.is_null()); + } + if target_date_isnull { + issues_query = issues_query.filter(entities::issues::Column::TargetDate.is_null()); + } + if start_date_isnull { + issues_query = 
issues_query.filter(entities::issues::Column::StartDate.is_null()); + } + } + + todo!() +} diff --git a/crates/squadron-api/src/http/api/authentication/api_tokens.rs b/crates/squadron-api/src/http/api/authentication/api_tokens.rs index b9be80a..11033c0 100644 --- a/crates/squadron-api/src/http/api/authentication/api_tokens.rs +++ b/crates/squadron-api/src/http/api/authentication/api_tokens.rs @@ -1,5 +1,5 @@ -use actix_web::web::{Data, Path, ServiceConfig}; -use actix_web::{get, HttpResponse}; +use actix_web::web::{Data, Json, Path, ServiceConfig}; +use actix_web::{delete, get, patch, post, HttpResponse}; use entities::api_tokens::*; use entities::prelude::ApiTokens; use reqwest::StatusCode; @@ -8,12 +8,19 @@ use sea_orm::*; use squadron_contract::{ApiTokenId, WorkspaceSlug}; use tracing::error; +use crate::ext::{JoinOnCondition, WithSlug}; use crate::extractors::{RequireInstanceConfigured, RequireUser}; use crate::models::{Error, JsonError}; +use crate::utils::ApiTokenBuilder; use crate::DatabaseConnection; pub fn configure(_: reqwest::Client, config: &mut ServiceConfig) { - config.service(single_api_token); + config + .service(single_api_token) + .service(user_api_tokens) + .service(delete_api_token) + .service(create_api_token) + .service(update_api_token); } #[get("{workspace_slug}/api-tokens/{id}")] @@ -25,20 +32,8 @@ async fn single_api_token( ) -> Result { let (slug, id) = path.into_inner(); - let slug = slug.as_str().to_string(); match ApiTokens::find() - .join( - JoinType::Join, - Relation::Workspaces1 - .def() - .on_condition(move |_left, right| { - use sea_query::*; - - Expr::col((right, entities::workspaces::Column::Slug)) - .eq(slug.clone()) - .into_condition() - }), - ) + .with_slug(slug.as_str().to_string()) .filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id))) .one(&**db) .await @@ -54,6 +49,199 @@ async fn single_api_token( } } +#[get("{workspace_slug}/api-tokens")] +async fn user_api_tokens( + _: RequireInstanceConfigured, + user: RequireUser, + path: Path, + db: Data, +) -> Result { + let slug = path.into_inner(); + + let slug = slug.as_str().to_string(); + match ApiTokens::find() + .with_slug(slug.as_str().to_string()) + .filter(Column::UserId.eq(user.id)) + .all(&**db) + .await + { + Ok(tokens) => Ok(HttpResponse::Ok().json(&tokens)), + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + } +} + +#[delete("{workspace_slug}/api-tokens/{id}")] +async fn delete_api_token( + _: RequireInstanceConfigured, + user: RequireUser, + path: Path<(WorkspaceSlug, ApiTokenId)>, + db: Data, +) -> Result { + let (slug, id) = path.into_inner(); + + let slug = slug.as_str().to_string(); + let token = match ApiTokens::find() + .with_slug(slug.as_str().to_string()) + .filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id))) + .one(&**db) + .await + { + Ok(Some(token)) => token, + Ok(None) => { + return Err( + JsonError::new("API Token does not exists").with_status(StatusCode::NOT_FOUND) + ) + } + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + }; + + match ApiTokens::delete_by_id(token.id).exec(&**db).await { + Ok(res) => tracing::debug!("Delete {} api tokens", res.rows_affected), + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + }; + + Ok(HttpResponse::NoContent().finish()) +} + +#[derive(Debug, serde::Deserialize)] +struct CreateInput { + label: String, + description: Option, + expired_at: Option>, +} + 
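+// Illustrative request body for `POST {workspace_slug}/api-tokens`, matching the
+// `CreateInput` fields above. The timestamp format assumes chrono's default RFC 3339
+// serde representation for a fixed-offset datetime; the concrete values are made up:
+//
+// {
+//   "label": "ci-token",
+//   "description": "Token used by the CI pipeline",
+//   "expired_at": "2024-12-31T00:00:00+00:00"
+// }
+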
+#[post("{workspace_slug}/api-tokens")] +async fn create_api_token( + _: RequireInstanceConfigured, + user: RequireUser, + path: Path, + input: Json, + db: Data, +) -> Result { + let slug = path.into_inner(); + + let slug = slug.as_str().to_string(); + let workspace = match entities::prelude::Workspaces::find() + .filter(entities::workspaces::Column::Slug.eq(slug)) + .one(&**db) + .await + { + Ok(Some(w)) => w, + Ok(None) => { + return Err( + JsonError::new("Workspace does not exists").with_status(StatusCode::NOT_FOUND) + ) + } + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + }; + let input = input.into_inner(); + let model = ApiTokenBuilder::new( + &input.label, + input.description.as_deref().unwrap_or_default(), + &*user, + Some(&workspace), + ) + .with_expired_at(input.expired_at) + .into_active_model(); + let token = match ApiTokens::insert(model).exec_with_returning(&**db).await { + Ok(token) => token, + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + }; + + Ok(HttpResponse::Created().json(token)) +} + +#[derive(Debug, serde::Deserialize)] +struct UpdateInput { + token: Option, + label: Option, + // user_type: Option, + created_by_id: Option>, + updated_by_id: Option>, + user_id: Option, + workspace_id: Option>, + description: Option, + expired_at: Option>>, + is_active: Option, + last_used: Option>>, +} + +#[patch("{workspace_slug}/api-tokens/{id}")] +async fn update_api_token( + _: RequireInstanceConfigured, + user: RequireUser, + path: Path<(WorkspaceSlug, ApiTokenId)>, + input: Json, + db: Data, +) -> Result { + let (slug, id) = path.into_inner(); + + let slug = slug.as_str().to_string(); + let token = ApiTokens::find() + .with_slug(slug.as_str().to_string()) + .filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id))) + .one(&**db) + .await; + let token = match token { + Ok(Some(token)) => token, + Ok(None) => { + return Err( + JsonError::new("API Token does not exists").with_status(StatusCode::NOT_FOUND) + ) + } + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + }; + + let i = input.into_inner(); + match ApiTokens::update(ActiveModel { + id: Set(token.id), + token: i.token.map(Set).unwrap_or(NotSet), + label: i.label.map(Set).unwrap_or(NotSet), + user_type: Set(if user.is_bot { 1 } else { 0 }), + created_by_id: i.created_by_id.map(Set).unwrap_or(NotSet), + updated_by_id: i.updated_by_id.map(Set).unwrap_or(NotSet), + user_id: i.user_id.map(Set).unwrap_or(NotSet), + workspace_id: i.workspace_id.map(Set).unwrap_or(NotSet), + description: i.description.map(Set).unwrap_or(NotSet), + expired_at: i.expired_at.map(Set).unwrap_or(NotSet), + is_active: i.is_active.map(Set).unwrap_or(NotSet), + last_used: i.last_used.map(Set).unwrap_or(NotSet), + updated_at: Set(chrono::Utc::now().fixed_offset()), + ..Default::default() + }) + .exec(&**db) + .await + { + Ok(model) => Ok(HttpResponse::Ok().json(model)), + Err(DbErr::Exec(e)) => { + error!("Failed to update single api token: {e}"); + return Err(JsonError::new("Invalid payload"))?; + } + Err(e) => { + error!("Failed to load single api token: {e}"); + return Err(Error::DatabaseError)?; + } + } +} + #[cfg(test)] mod single_tests { use actix_jwt_session::{ @@ -262,7 +450,6 @@ mod single_tests { "http://example.com/{}/api-tokens/{}", workspace.slug, api_token.id ); - eprintln!("URI: {uri:?}"); let pair = session .store( diff --git 
a/crates/squadron-api/src/http/api/authentication/reset_password.rs b/crates/squadron-api/src/http/api/authentication/reset_password.rs index 0c6a370..2c4ac81 100644 --- a/crates/squadron-api/src/http/api/authentication/reset_password.rs +++ b/crates/squadron-api/src/http/api/authentication/reset_password.rs @@ -7,8 +7,8 @@ use reqwest::StatusCode; use sea_orm::{DatabaseConnection, DatabaseTransaction, EntityTrait, Set}; use serde::Deserialize; +use super::auth_http_response; use super::password::PassValidity; -use super::{auth_http_response, random_hex}; use crate::extractors::RequireInstanceConfigured; use crate::models::{ Error, JsonError, JsonErrorDetails, PasswordResetSecret, PasswordResetTimeout, diff --git a/crates/squadron-api/src/http/api/mod.rs b/crates/squadron-api/src/http/api/mod.rs index c55f8af..857cab8 100644 --- a/crates/squadron-api/src/http/api/mod.rs +++ b/crates/squadron-api/src/http/api/mod.rs @@ -1,8 +1,11 @@ use actix_web::web::{scope, ServiceConfig}; pub use authentication::*; +mod analytics; mod authentication; mod config; + +//todo mod users; pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) { @@ -10,6 +13,7 @@ pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) { scope("/api") .configure(config::configure) .configure(users::configure) + .configure(analytics::configure) .configure(|c| authentication::configure(http_client, c)), ); } diff --git a/crates/squadron-api/src/main.rs b/crates/squadron-api/src/main.rs index d855c4f..bb9fa84 100644 --- a/crates/squadron-api/src/main.rs +++ b/crates/squadron-api/src/main.rs @@ -10,6 +10,7 @@ pub use squadron_contract::{deadpool_redis, redis, RedisClient}; pub mod config; pub mod events; +pub mod ext; pub mod extractors; pub mod http; pub mod models; diff --git a/crates/squadron-api/src/utils/issues_filter.rs b/crates/squadron-api/src/utils/issues_filter.rs new file mode 100644 index 0000000..ac3d739 --- /dev/null +++ b/crates/squadron-api/src/utils/issues_filter.rs @@ -0,0 +1,407 @@ +use std::cmp::Ordering; +use std::str::FromStr; + +pub trait FilterParams { + fn state(&self) -> Option<&str> { + None + } + fn state_group(&self) -> Option<&str> { + None + } + fn estimate_point(&self) -> Option<&str> { + None + } + fn priority(&self) -> Option<&str> { + None + } + fn parent(&self) -> Option<&str> { + None + } + fn labels(&self) -> Option<&str> { + None + } + fn assignees(&self) -> Option<&str> { + None + } + fn mentions(&self) -> Option<&str> { + None + } + fn created_by(&self) -> Option<&str> { + None + } + fn name(&self) -> Option<&str> { + None + } + fn created_at(&self) -> Option<&str> { + None + } + fn updated_at(&self) -> Option<&str> { + None + } + fn completed_at(&self) -> Option<&str> { + None + } + fn start_date(&self) -> Option<&str> { + None + } + fn target_date(&self) -> Option<&str> { + None + } + fn issue_state_type(&self) -> Option<&str> { + None + } + fn project(&self) -> Option<&str> { + None + } + fn cycle(&self) -> Option<&str> { + None + } + fn module(&self) -> Option<&str> { + None + } + fn inbox_status(&self) -> Option<&str> { + None + } + fn sub_issue(&self) -> bool { + false + } + fn subscriber(&self) -> Option<&str> { + None + } + fn start_target_date(&self) -> bool { + false + } +} + +pub static DATE_PATTERN: &str = "\\d+_(weeks|months)$"; + +fn string_date_filter( + duration: i64, + subsequent: Subsequest, + term: DurationType, + offset: DateOffset, +) -> Option { + use std::cmp::Ordering::{Greater, Less}; + + use chrono::{Duration, Utc}; + + let 
(order, duration) = match (term, subsequent, offset) {
+        // months
+        (DurationType::Months, Subsequest::After, DateOffset::FromNow) => {
+            (Greater, Duration::days(duration * 30))
+        }
+        (DurationType::Months, Subsequest::After, DateOffset::Other) => {
+            (Greater, Duration::days(-duration * 30))
+        }
+        (DurationType::Months, Subsequest::Before, DateOffset::FromNow) => {
+            (Less, Duration::days(duration * 30))
+        }
+        (DurationType::Months, Subsequest::Before, DateOffset::Other) => {
+            (Less, Duration::days(-duration * 30))
+        }
+        // weeks
+        (DurationType::Weeks, Subsequest::After, DateOffset::FromNow) => {
+            (Greater, Duration::days(duration * 7))
+        }
+        (DurationType::Weeks, Subsequest::After, DateOffset::Other) => {
+            (Greater, Duration::days(-duration * 7))
+        }
+        (DurationType::Weeks, Subsequest::Before, DateOffset::FromNow) => {
+            (Less, Duration::days(duration * 7))
+        }
+        (DurationType::Weeks, Subsequest::Before, DateOffset::Other) => {
+            (Less, Duration::days(-duration * 7))
+        }
+    };
+    Some(match order {
+        Ordering::Greater => DateOrder::Gte((Utc::now() + duration).date_naive()),
+        _ => DateOrder::Lte((Utc::now() + duration).date_naive()),
+    })
+}
+
+#[derive(Debug, Copy, Clone)]
+enum DateOffset {
+    FromNow,
+    Other,
+}
+
+impl FromStr for DateOffset {
+    type Err = ();
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(match s {
+            "fromnow" => Self::FromNow,
+            _ => Self::Other,
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone)]
+enum Subsequest {
+    After,
+    Before,
+}
+
+impl FromStr for Subsequest {
+    type Err = ();
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(match s {
+            "after" => Self::After,
+            _ => Self::Before,
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone)]
+enum DurationType {
+    Months,
+    Weeks,
+}
+
+impl FromStr for DurationType {
+    type Err = ();
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(match s {
+            "months" => Self::Months,
+            "weeks" => Self::Weeks,
+            _ => return Err(()),
+        })
+    }
+}
+
+fn parse_date_head(head: &str) -> Option<(i64, DurationType)> {
+    let mut parts = head.split('_');
+    Some((parts.next()?.parse().ok()?, parts.next()?.parse().ok()?))
+}
+
+fn date_filter<'s>(
+    queries: impl Iterator<Item = &'s str> + 's,
+) -> impl Iterator<Item = DateOrder> + 's {
+    queries.filter_map(|query| {
+        if query.split(';').count() <= 2 {
+            return None;
+        }
+        let mut date_query = query.split(";");
+        let head = date_query.next()?;
+        let Some((duration, term)) = parse_date_head(head) else {
+            // TODO: unsupported
+            //
+            // if "after" in date_query:
+            //     filter[f"{date_term}__gte"] = date_query[0]
+            // else:
+            //     filter[f"{date_term}__lte"] = date_query[0]
+            return None;
+        };
+        if query.split(";").count() != 3 {
+            return None;
+        };
+        let subsequent = date_query.next()?.parse().ok()?;
+        let offset = date_query.next()?.parse().ok()?;
+        return string_date_filter(duration, subsequent, term, offset);
+    })
+}
+
+#[derive(Debug)]
+pub enum DateOrder {
+    Gte(chrono::NaiveDate),
+    Lte(chrono::NaiveDate),
+}
+
+pub mod filter_get {
+    use std::str::FromStr;
+
+    use uuid::Uuid;
+
+    use super::*;
+
+    #[derive(Debug, Default)]
+    pub struct DateRange {
+        pub lte: Option<chrono::NaiveDate>,
+        pub gte: Option<chrono::NaiveDate>,
+    }
+
+    impl DateRange {
+        pub fn fill(&mut self, it: impl Iterator<Item = DateOrder>) {
+            for order in it {
+                match order {
+                    DateOrder::Gte(date) => self.gte = Some(date),
+                    DateOrder::Lte(date) => self.lte = Some(date),
+                };
+            }
+        }
+    }
+
+    #[derive(Debug, Default)]
+    pub struct Filter {
+        // state__in
+        pub state_in: Option<Vec<Uuid>>,
+        // state__group__in
+        pub state_group_in: Option<Vec<String>>,
+        // estimate_point__in
+        pub estimate_point_in: Option<Vec<String>>,
+        pub priority_in: Option<Vec<String>>,
+        pub parent_in: Option<Vec<Uuid>>,
+        pub labels_in: Option<Vec<Uuid>>,
+        pub assignees_in: Option<Vec<Uuid>>,
+        // issue_mention__mention__id__in
+        pub issue_mention_ids_in: Option<Vec<Uuid>>,
+        /// issue_cycle__cycle_id__in
+        pub cycle_ids_in: Option<Vec<Uuid>>,
+        /// issue_module__module_id__in
+        pub module_ids_in: Option<Vec<Uuid>>,
+        pub created_by_in: Option<Vec<Uuid>>,
+        pub project_in: Option<Vec<Uuid>>,
+        /// issue_subscribers__subscriber_id__in
+        pub subscriber_ids_in: Option<Vec<Uuid>>,
+        /// name__icontains
+        pub name_contains: Option<String>,
+        // created_at__date
+        pub created_at_in: DateRange,
+        pub updated_at_in: DateRange,
+        pub completed_at_in: DateRange,
+        pub start_date_in: DateRange,
+        pub target_date_in: DateRange,
+        pub issue_state_type_in: Option<&'static [&'static str]>,
+        /// issue_inbox__status__in
+        pub inbox_status_in: Option<Vec<String>>,
+        /// parent__isnull
+        pub parent_is_null: bool,
+        /// target_date__isnull
+        pub target_date_isnull: bool,
+        /// start_date__isnull
+        pub start_date_isnull: bool,
+    }
+
+    trait IntoParam {
+        fn into_param(s: &str) -> Option<Self>
+        where
+            Self: Sized;
+    }
+
+    impl IntoParam for Uuid {
+        fn into_param(s: &str) -> Option<Self> {
+            Uuid::from_str(s).ok()
+        }
+    }
+
+    impl IntoParam for String {
+        fn into_param(s: &str) -> Option<Self> {
+            Some(s.to_owned())
+        }
+    }
+
+    fn parse_param_list<V: IntoParam>(param: &str) -> Option<Vec<V>> {
+        let param = param.trim();
+        if param.is_empty() || param == "null" {
+            return None;
+        }
+        let states = param
+            .split(',')
+            .filter(|s| s != &"null" && s != &"")
+            .filter_map(V::into_param)
+            .collect::<Vec<_>>();
+        if states.is_empty() {
+            return None;
+        }
+        Some(states)
+    }
+
+    impl Filter {
+        pub fn filter_params<'p>(mut self, p: &'p impl FilterParams) -> Self {
+            if let Some(s) = p.state() {
+                self.state_in = parse_param_list(s);
+            }
+            if let Some(s) = p.state_group() {
+                self.state_group_in = parse_param_list(s);
+            }
+            if let Some(s) = p.estimate_point() {
+                self.estimate_point_in = parse_param_list(s);
+            }
+            if let Some(s) = p.priority() {
+                self.priority_in = parse_param_list(s);
+            }
+            if let Some(s) = p.parent() {
+                self.parent_in = parse_param_list(s);
+            }
+            if let Some(s) = p.labels() {
+                self.labels_in = parse_param_list(s);
+            }
+            if let Some(s) = p.assignees() {
+                self.assignees_in = parse_param_list(s);
+            }
+            if let Some(s) = p.mentions() {
+                self.issue_mention_ids_in = parse_param_list(s);
+            }
+            if let Some(s) = p.subscriber() {
+                self.subscriber_ids_in = parse_param_list(s);
+            }
+            if let Some(s) = p.project() {
+                self.project_in = parse_param_list(s);
+            }
+            if let Some(s) = p.created_by() {
+                self.created_by_in = parse_param_list(s);
+            }
+            if let Some(s) = p.inbox_status() {
+                self.inbox_status_in = parse_param_list(s);
+            }
+            if let Some(s) = p.name() {
+                self.name_contains = Some(s.to_string());
+            }
+            if let Some(s) = p.cycle() {
+                self.cycle_ids_in = parse_param_list(s);
+            }
+            if let Some(s) = p.module() {
+                self.module_ids_in = parse_param_list(s);
+            }
+            if let Some(s) = p.created_at() {
+                self.created_at_in.fill(super::date_filter(s.split(',')));
+            }
+            if let Some(s) = p.updated_at() {
+                self.updated_at_in.fill(super::date_filter(s.split(',')));
+            }
+            if let Some(s) = p.completed_at() {
+                self.completed_at_in.fill(super::date_filter(s.split(',')));
+            }
+            if let Some(s) = p.start_date() {
+                self.start_date_in.fill(super::date_filter(s.split(',')));
+            }
+            if let Some(s) = p.target_date() {
+                self.target_date_in.fill(super::date_filter(s.split(',')));
+            }
+            if let Some(s) = p.issue_state_type() {
+                self.issue_state_type_in = Some(match s {
+                    "backlog" => &["backlog"],
+                    "active" => &["unstarted", "started"],
+                    _ => &["backlog", "unstarted",
"started", "completed", "cancelled"], + }); + } + if let false = p.sub_issue() { + self.parent_is_null = true; + } + if let true = p.start_target_date() { + self.start_date_isnull = false; + self.target_date_isnull = false; + } + + self + } + } +} + +pub mod filter_non_get { + #[derive(Debug, Default)] + pub struct Filter { + pub state_in: Option, + } + + pub fn filter_state(state: &str, filter: &mut Filter) { + if state == "null" || state == "" { + return; + } + filter.state_in = Some(state.to_string()); + } + + pub fn filter_state_group() {} +} diff --git a/crates/squadron-api/src/utils/mod.rs b/crates/squadron-api/src/utils/mod.rs index 573eaa4..fd88c32 100644 --- a/crates/squadron-api/src/utils/mod.rs +++ b/crates/squadron-api/src/utils/mod.rs @@ -22,6 +22,8 @@ use uuid::Uuid; use crate::http::{random_hex, AuthError, OAuthError}; use crate::models::Error; +pub mod issues_filter; + #[macro_export] macro_rules! db_t { ($db: expr) => {{ @@ -460,6 +462,11 @@ impl ApiTokenBuilder { } } + pub fn with_expired_at(mut self, exp: Option>) -> Self { + self.expired_at = exp; + self + } + pub fn into_active_model(self) -> entities::api_tokens::ActiveModel { use sea_orm::*; diff --git a/plane_db.sql b/plane_db.sql index 0bd1c47..135ebfc 100644 --- a/plane_db.sql +++ b/plane_db.sql @@ -39,10 +39,6 @@ CREATE TYPE project_member_roles AS ENUM ( 'Guest' ); --- --- Name: users; Type: TABLE; Schema: public; Owner: plane --- - CREATE TABLE users ( password character varying(128) NOT NULL, last_login timestamp with time zone, @@ -240,7 +236,9 @@ CREATE TABLE issues ( archived_at date, is_draft boolean NOT NULL, external_id character varying(255), - external_source character varying(255) + external_source character varying(255), + CONSTRAINT parent_fk FOREIGN KEY (parent_id) REFERENCES issues (id), + CONSTRAINT state_fk FOREIGN KEY (state_id) REFERENCES states (id) ); -- diff --git a/scripts/rebuild_db.sh b/scripts/rebuild_db.sh new file mode 100644 index 0000000..47038d9 --- /dev/null +++ b/scripts/rebuild_db.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env zsh + +DB_NAME=squadron +psql postgres postgres -c "DROP DATABASE $DB_NAME";psql postgres postgres -c "CREATE DATABASE $DB_NAME"; psql $DB_NAME postgres -f ./plane_db.sql + +DB_NAME=squadron_test +psql postgres postgres -c "DROP DATABASE $DB_NAME";psql postgres postgres -c "CREATE DATABASE $DB_NAME"; psql $DB_NAME postgres -f ./plane_db.sql