working on analytics filters

This commit is contained in:
eraden 2024-02-18 06:31:50 +01:00
parent ec2f6b9d52
commit 233f52d7f8
12 changed files with 1148 additions and 23 deletions

6
Cargo.lock generated
View File

@ -316,6 +316,7 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [ dependencies = [
"log",
"memchr", "memchr",
] ]
@ -2099,6 +2100,9 @@ name = "memchr"
version = "2.7.1" version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
dependencies = [
"log",
]
[[package]] [[package]]
name = "mime" name = "mime"
@ -2966,6 +2970,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"log",
"memchr", "memchr",
"regex-syntax 0.8.2", "regex-syntax 0.8.2",
] ]
@ -4142,6 +4147,7 @@ dependencies = [
"oauth2-signin", "oauth2-signin",
"password-hash", "password-hash",
"rand", "rand",
"regex",
"reqwest", "reqwest",
"rumqttc", "rumqttc",
"rust-s3", "rust-s3",

View File

@ -31,6 +31,7 @@ oauth2-google = "0.2.0"
oauth2-signin = "0.2.0" oauth2-signin = "0.2.0"
password-hash = "0.5.0" password-hash = "0.5.0"
rand = { version = "0.8.5", features = ["serde"] } rand = { version = "0.8.5", features = ["serde"] }
regex = { version = "1.10.3", features = ["logging", "pattern"] }
reqwest = { version = "0.11.23", default-features = false, features = ["rustls", "tokio-rustls", "tokio-socks", "multipart"] } reqwest = { version = "0.11.23", default-features = false, features = ["rustls", "tokio-rustls", "tokio-socks", "multipart"] }
rumqttc = { version = "0.23.0", features = ["use-rustls"] } rumqttc = { version = "0.23.0", features = ["use-rustls"] }
rust-s3 = { version = "0.33.0", features = ["tokio-rustls-tls", "futures-util", "futures-io"] } rust-s3 = { version = "0.33.0", features = ["tokio-rustls-tls", "futures-util", "futures-io"] }

View File

@ -0,0 +1,121 @@
use sea_orm::sea_query::SeaRc;
use sea_orm::*;
/// Fluent helper that wraps a SeaORM [`Select`] so extra `JOIN ... ON`
/// conditions can be chained onto it (see the methods on the `impl` below).
pub struct JoinBuilder<E>
where
    E: EntityTrait,
{
    // The query being accumulated; consumed and returned by `finish()`.
    select: Select<E>,
}
impl<E> JoinBuilder<E>
where
    E: EntityTrait,
{
    /// Wraps an existing select so join conditions can be chained onto it.
    pub fn new(s: Select<E>) -> Self {
        Self { select: s }
    }
    /// Adds an inner join over `rel` with an additional `ON` condition that
    /// compares `col` on the LEFT side of the relation to `v`.
    ///
    /// NOTE(review): `v` arrives by reference but is captured by a `move`
    /// closure; `v.clone()` may resolve via `&V: Clone` (cloning the
    /// reference, not the value) under auto-ref rules — confirm this
    /// compiles against `on_condition`'s bounds and clones what is intended.
    #[must_use]
    pub fn left_condition<V>(
        mut self,
        rel: impl RelationTrait,
        col: impl ColumnTrait,
        v: &V,
    ) -> Self
    where
        V: Into<sea_orm::Value> + Send + Sync + 'static + Clone,
    {
        self.select = self.select.join(
            JoinType::Join,
            rel.def().on_condition(move |left, _right| {
                use sea_query::*;
                Expr::col((left, col)).eq(v.clone()).into_condition()
            }),
        );
        self
    }
    /// Same as [`Self::left_condition`], but the column belongs to the RIGHT
    /// side of the relation (the joined table).
    #[must_use]
    pub fn right_condition<V>(
        mut self,
        rel: impl RelationTrait,
        col: impl ColumnTrait,
        v: &V,
    ) -> Self
    where
        V: Into<sea_orm::Value> + Send + Sync + 'static + Clone,
    {
        self.select = self.select.join(
            JoinType::Join,
            rel.def().on_condition(move |_left, right| {
                use sea_query::*;
                let expr = Expr::col((right, col));
                expr.eq(v.clone()).into_condition()
            }),
        );
        self
    }
    /// Adds an inner join over `rel` where the caller builds the `ON`
    /// expression from the right-hand column via `f` (e.g. `IN`, `LIKE`).
    ///
    /// NOTE(review): `f` is `FnOnce` but is invoked inside the condition
    /// closure, which SeaORM may call more than once — confirm the bound.
    #[must_use]
    pub fn right_condition_with<F, Col>(mut self, rel: impl RelationTrait, col: Col, f: F) -> Self
    where
        Col: ColumnTrait,
        F: FnOnce(sea_query::Expr) -> sea_query::SimpleExpr + 'static + Send + Sync,
    {
        self.select = self.select.join(
            JoinType::Join,
            rel.def().on_condition(move |_left, right| {
                use sea_query::*;
                f(sea_query::Expr::col((right, col))).into_condition()
            }),
        );
        self
    }
    /// Returns the accumulated select query.
    #[must_use]
    pub fn finish(self) -> Select<E> {
        self.select
    }
}
/// Entry point for the builder: lets any `Select<E>` start a join chain.
pub trait JoinOnCondition<E>
where
    E: EntityTrait,
{
    /// Moves the select into a [`JoinBuilder`].
    fn with_join(self) -> JoinBuilder<E>;
}
impl<E: EntityTrait> JoinOnCondition<E> for Select<E> {
    fn with_join(self) -> JoinBuilder<E> {
        JoinBuilder::new(self)
    }
}
/// Scopes a query to a single workspace identified by its URL slug.
pub trait WithSlug {
    /// Returns the query restricted to rows whose joined workspace has `slug`.
    fn with_slug(self, slug: String) -> Self;
}
impl WithSlug for Select<entities::api_tokens::Entity> {
    /// Restricts the selection to API tokens whose joined workspace matches
    /// the given slug.
    fn with_slug(self, slug: String) -> Self {
        self.with_join()
            .right_condition(
                entities::api_tokens::Relation::Workspaces1,
                entities::workspaces::Column::Slug,
                // `slug` is its last use here, so no clone is needed
                // (previously `slug.clone()` — clippy::redundant_clone).
                slug,
            )
            .finish()
    }
}
impl WithSlug for Select<entities::issues::Entity> {
    /// Restricts the selection to issues whose joined workspace matches the
    /// given slug.
    fn with_slug(self, slug: String) -> Self {
        self.with_join()
            .right_condition(
                entities::issues::Relation::Workspaces1,
                entities::workspaces::Column::Slug,
                // `slug` is its last use here, so no clone is needed
                // (previously `slug.clone()` — clippy::redundant_clone).
                slug,
            )
            .finish()
    }
}

View File

@ -0,0 +1,386 @@
use actix_web::web::{Data, Json, Path, ServiceConfig};
use actix_web::{get, HttpRequest, HttpResponse};
use entities::prelude::Issues;
use issues_filter::filter_get::Filter;
use sea_orm::sea_query::SeaRc;
use sea_orm::*;
use serde::Deserialize;
use squadron_contract::WorkspaceSlug;
use uuid::Uuid;
use crate::ext::{JoinOnCondition, WithSlug};
use crate::models::JsonError;
use crate::utils::issues_filter;
/// Registers the analytics endpoints on the actix service config.
pub fn configure(config: &mut ServiceConfig) {
    config.service(workspace_analytics);
}
/// Wire names accepted for the `x_axis` / `segment` query parameters;
/// must stay in sync with the variants of `XAxis` below.
static VALID_XAXIS_SEGMENT: [&str; 12] = [
    "state_id",
    "state__group",
    "labels__id",
    "assignees__id",
    "estimate_point",
    "issue_cycle__cycle_id",
    "issue_module__module_id",
    "priority",
    "start_date",
    "target_date",
    "created_at",
    "completed_at",
];
/// Wire names accepted for the `y_axis` parameter; mirrors `YAxis` below.
static VALID_YAXIS: [&str; 2] = ["issue_count", "estimate"];
/// Aggregation metric for the analytics endpoint (the "y axis").
/// Accepted wire values are listed in `VALID_YAXIS`.
#[derive(Debug, PartialEq)]
enum YAxis {
    IssueCount,
    Estimate,
}
/// Serde visitor that parses a [`YAxis`] from its snake_case wire name.
struct YAxisVisitor;
impl<'de> serde::de::Visitor<'de> for YAxisVisitor {
    type Value = YAxis;
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_fmt(format_args!("Expect one of {VALID_YAXIS:?}"))
    }
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        use YAxis::*;
        Ok(match v {
            "issue_count" => IssueCount,
            "estimate" => Estimate,
            _ => {
                // BUGFIX: the message previously said "x-axis segment",
                // copy-pasted from XAxisVisitor; this visitor parses the
                // y axis against VALID_YAXIS.
                return Err(E::custom(format!(
                    "y-axis segment {v} not in {VALID_YAXIS:?}"
                )))
            }
        })
    }
    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // Delegate to the borrowed-str path.
        self.visit_str(&v)
    }
}
impl<'de> serde::de::Deserialize<'de> for YAxis {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        deserializer.deserialize_str(YAxisVisitor)
    }
}
/// Grouping dimension for the analytics endpoint (the "x axis", also reused
/// for the optional secondary `segment`). Wire names are listed in
/// `VALID_XAXIS_SEGMENT`.
#[derive(Debug, PartialEq)]
enum XAxis {
    StateId,
    StateGroup,
    LabelsId,
    AssigneesId,
    EstimatePoint,
    IssueCycleCycleId,
    IssueModuleModuleId,
    Priority,
    StartDate,
    TargetDate,
    CreatedAt,
    CompletedAt,
}
/// Serde visitor that parses an [`XAxis`] from its snake_case wire name.
struct XAxisVisitor;
impl<'de> serde::de::Visitor<'de> for XAxisVisitor {
    type Value = XAxis;
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_fmt(format_args!("Expect one of {VALID_XAXIS_SEGMENT:?}"))
    }
    // Maps each wire name to its variant; must stay in sync with
    // VALID_XAXIS_SEGMENT.
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        use XAxis::*;
        Ok(match v {
            "state_id" => StateId,
            "state__group" => StateGroup,
            "labels__id" => LabelsId,
            "assignees__id" => AssigneesId,
            "estimate_point" => EstimatePoint,
            "issue_cycle__cycle_id" => IssueCycleCycleId,
            "issue_module__module_id" => IssueModuleModuleId,
            "priority" => Priority,
            "start_date" => StartDate,
            "target_date" => TargetDate,
            "created_at" => CreatedAt,
            "completed_at" => CompletedAt,
            _ => {
                return Err(E::custom(format!(
                    "x-axis segment {v} not in {VALID_XAXIS_SEGMENT:?}"
                )))
            }
        })
    }
    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // Delegate to the borrowed-str path.
        self.visit_str(&v)
    }
}
impl<'de> serde::de::Deserialize<'de> for XAxis {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        deserializer.deserialize_str(XAxisVisitor)
    }
}
/// Raw filter parameters accepted by the analytics endpoint.
///
/// The `Option<String>` fields are passed through verbatim to the
/// `issues_filter::FilterParams` accessors; presumably most carry
/// comma-separated id/value lists (see `parse_param_list`) — confirm against
/// the client.
#[derive(Debug, Deserialize)]
struct FilterInput {
    state: Option<String>,
    state_group: Option<String>,
    estimate_point: Option<String>,
    priority: Option<String>,
    parent: Option<String>,
    labels: Option<String>,
    assignees: Option<String>,
    mentions: Option<String>,
    created_by: Option<String>,
    name: Option<String>,
    created_at: Option<String>,
    updated_at: Option<String>,
    completed_at: Option<String>,
    start_date: Option<String>,
    target_date: Option<String>,
    issue_state_type: Option<String>,
    project: Option<String>,
    cycle: Option<String>,
    module: Option<String>,
    inbox_status: Option<String>,
    sub_issue: bool,
    subscriber: Option<String>,
    start_target_date: bool,
}
/// Mechanical pass-through of the raw request fields to the
/// `issues_filter::FilterParams` accessor trait (borrowed views only).
impl issues_filter::FilterParams for FilterInput {
    fn state(&self) -> Option<&str> {
        self.state.as_deref()
    }
    fn state_group(&self) -> Option<&str> {
        self.state_group.as_deref()
    }
    fn estimate_point(&self) -> Option<&str> {
        self.estimate_point.as_deref()
    }
    fn priority(&self) -> Option<&str> {
        self.priority.as_deref()
    }
    fn parent(&self) -> Option<&str> {
        self.parent.as_deref()
    }
    fn labels(&self) -> Option<&str> {
        self.labels.as_deref()
    }
    fn assignees(&self) -> Option<&str> {
        self.assignees.as_deref()
    }
    fn mentions(&self) -> Option<&str> {
        self.mentions.as_deref()
    }
    fn created_by(&self) -> Option<&str> {
        self.created_by.as_deref()
    }
    fn name(&self) -> Option<&str> {
        self.name.as_deref()
    }
    fn created_at(&self) -> Option<&str> {
        self.created_at.as_deref()
    }
    fn updated_at(&self) -> Option<&str> {
        self.updated_at.as_deref()
    }
    fn completed_at(&self) -> Option<&str> {
        self.completed_at.as_deref()
    }
    fn start_date(&self) -> Option<&str> {
        self.start_date.as_deref()
    }
    fn target_date(&self) -> Option<&str> {
        self.target_date.as_deref()
    }
    fn issue_state_type(&self) -> Option<&str> {
        self.issue_state_type.as_deref()
    }
    fn project(&self) -> Option<&str> {
        self.project.as_deref()
    }
    fn cycle(&self) -> Option<&str> {
        self.cycle.as_deref()
    }
    fn module(&self) -> Option<&str> {
        self.module.as_deref()
    }
    fn inbox_status(&self) -> Option<&str> {
        self.inbox_status.as_deref()
    }
    fn sub_issue(&self) -> bool {
        self.sub_issue
    }
    fn subscriber(&self) -> Option<&str> {
        self.subscriber.as_deref()
    }
    fn start_target_date(&self) -> bool {
        self.start_target_date
    }
}
/// Full request body for the analytics endpoint: which dimensions to group
/// by (`x_axis` / optional `segment`), which metric to compute (`y_axis`),
/// plus the flattened issue filter parameters.
#[derive(Debug, Deserialize)]
struct AnalyticsInput {
    x_axis: XAxis,
    y_axis: YAxis,
    segment: Option<XAxis>,
    #[serde(flatten)]
    filter: FilterInput,
}
/// GET /workspaces/{slug}/analytics — issue analytics for one workspace.
///
/// WORK IN PROGRESS: several filter branches below are still stubs and the
/// handler ends in `todo!()`, so calling it currently panics. The `req`
/// parameter is not yet used.
#[get("/workspaces/{slug}/analytics")]
async fn workspace_analytics(
    req: HttpRequest,
    path: Path<WorkspaceSlug>,
    input: Json<AnalyticsInput>,
    db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
    let input = input.into_inner();
    // Segmenting by the same dimension as the x axis would be meaningless.
    if Some(input.x_axis) == input.segment {
        return Err(JsonError::new(
            "Both segment and x axis cannot be same and segment should be valid",
        ));
    }
    let slug = path.into_inner();
    // NOTE(review): `with_slug` takes a `String`; confirm `WorkspaceSlug`
    // converts here as intended.
    let mut issues_query = Issues::find().with_slug(slug.clone());
    {
        // Destructure every parsed filter field so the compiler flags any
        // field that is forgotten below.
        let Filter {
            state_in,
            state_group_in,
            estimate_point_in,
            priority_in,
            parent_in,
            labels_in,
            assignees_in,
            issue_mention_ids_in,
            cycle_ids_in,
            module_ids_in,
            created_by_in,
            project_in,
            subscriber_ids_in,
            name_contains,
            created_at_in,
            updated_at_in,
            completed_at_in,
            start_date_in,
            target_date_in,
            issue_state_type_in,
            inbox_status_in,
            parent_is_null,
            target_date_isnull,
            start_date_isnull,
        } = Filter::default().filter_params(&input.filter);
        // Reference: columns available on the issues entity.
        /*
        pub created_at: DateTimeWithTimeZone,
        pub updated_at: DateTimeWithTimeZone,
        #[sea_orm(primary_key, auto_increment = false)]
        pub id: Uuid,
        pub name: String,
        #[sea_orm(column_type = "JsonBinary")]
        pub description: Json,
        pub priority: String,
        pub start_date: Option<Date>,
        pub target_date: Option<Date>,
        pub sequence_id: i32,
        pub created_by_id: Option<Uuid>,
        pub parent_id: Option<Uuid>,
        pub project_id: Uuid,
        pub state_id: Option<Uuid>,
        pub updated_by_id: Option<Uuid>,
        pub workspace_id: Uuid,
        #[sea_orm(column_type = "Text")]
        pub description_html: String,
        #[sea_orm(column_type = "Text", nullable)]
        pub description_stripped: Option<String>,
        pub completed_at: Option<DateTimeWithTimeZone>,
        #[sea_orm(column_type = "Double")]
        pub sort_order: f64,
        pub estimate_point: Option<i32>,
        pub archived_at: Option<Date>,
        pub is_draft: bool,
        pub external_id: Option<String>,
        pub external_source: Option<String>,
        */
        if let Some(v) = state_in {
            use entities::issues::Column;
            issues_query = issues_query.filter(Column::StateId.is_in(v));
        }
        if let Some(v) = state_group_in {
            use entities::issues::Relation;
            use entities::states::Column;
            // The group lives on the joined `states` table, hence the join
            // with an `IN` condition on the right-hand column.
            issues_query = issues_query
                .with_join()
                .right_condition_with(Relation::States, Column::Group, |expr| expr.is_in(v))
                .finish();
        }
        // TODO: the following filters are parsed but not yet applied.
        if let Some(v) = estimate_point_in {}
        if let Some(v) = priority_in {}
        if let Some(v) = parent_in {}
        if let Some(v) = labels_in {}
        if let Some(v) = assignees_in {}
        if let Some(v) = issue_mention_ids_in {}
        if let Some(v) = cycle_ids_in {}
        if let Some(v) = module_ids_in {}
        if let Some(v) = created_by_in {}
        if let Some(v) = project_in {
            use entities::issues::Column;
            issues_query = issues_query.filter(Column::ProjectId.is_in(v));
        }
        if let Some(v) = subscriber_ids_in {}
        if let Some(v) = name_contains {
            use entities::issues::Column;
            issues_query = issues_query.filter(Column::Name.contains(v));
        }
        if let Some(v) = issue_state_type_in {}
        if let Some(v) = inbox_status_in {}
        // dates
        // TODO: date ranges are parsed but not yet applied to the query.
        let range = created_at_in;
        let range = updated_at_in;
        let range = completed_at_in;
        let range = start_date_in;
        let range = target_date_in;
        if parent_is_null {
            issues_query = issues_query.filter(entities::issues::Column::ParentId.is_null());
        }
        if target_date_isnull {
            issues_query = issues_query.filter(entities::issues::Column::TargetDate.is_null());
        }
        if start_date_isnull {
            issues_query = issues_query.filter(entities::issues::Column::StartDate.is_null());
        }
    }
    // TODO: run the query and aggregate by x_axis / y_axis / segment.
    todo!()
}

View File

@ -1,5 +1,5 @@
use actix_web::web::{Data, Path, ServiceConfig}; use actix_web::web::{Data, Json, Path, ServiceConfig};
use actix_web::{get, HttpResponse}; use actix_web::{delete, get, patch, post, HttpResponse};
use entities::api_tokens::*; use entities::api_tokens::*;
use entities::prelude::ApiTokens; use entities::prelude::ApiTokens;
use reqwest::StatusCode; use reqwest::StatusCode;
@ -8,12 +8,19 @@ use sea_orm::*;
use squadron_contract::{ApiTokenId, WorkspaceSlug}; use squadron_contract::{ApiTokenId, WorkspaceSlug};
use tracing::error; use tracing::error;
use crate::ext::{JoinOnCondition, WithSlug};
use crate::extractors::{RequireInstanceConfigured, RequireUser}; use crate::extractors::{RequireInstanceConfigured, RequireUser};
use crate::models::{Error, JsonError}; use crate::models::{Error, JsonError};
use crate::utils::ApiTokenBuilder;
use crate::DatabaseConnection; use crate::DatabaseConnection;
pub fn configure(_: reqwest::Client, config: &mut ServiceConfig) { pub fn configure(_: reqwest::Client, config: &mut ServiceConfig) {
config.service(single_api_token); config
.service(single_api_token)
.service(user_api_tokens)
.service(delete_api_token)
.service(create_api_token)
.service(update_api_token);
} }
#[get("{workspace_slug}/api-tokens/{id}")] #[get("{workspace_slug}/api-tokens/{id}")]
@ -25,20 +32,8 @@ async fn single_api_token(
) -> Result<HttpResponse, JsonError> { ) -> Result<HttpResponse, JsonError> {
let (slug, id) = path.into_inner(); let (slug, id) = path.into_inner();
let slug = slug.as_str().to_string();
match ApiTokens::find() match ApiTokens::find()
.join( .with_slug(slug.as_str().to_string())
JoinType::Join,
Relation::Workspaces1
.def()
.on_condition(move |_left, right| {
use sea_query::*;
Expr::col((right, entities::workspaces::Column::Slug))
.eq(slug.clone())
.into_condition()
}),
)
.filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id))) .filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id)))
.one(&**db) .one(&**db)
.await .await
@ -54,6 +49,199 @@ async fn single_api_token(
} }
} }
#[get("{workspace_slug}/api-tokens")]
async fn user_api_tokens(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<WorkspaceSlug>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let slug = path.into_inner();
let slug = slug.as_str().to_string();
match ApiTokens::find()
.with_slug(slug.as_str().to_string())
.filter(Column::UserId.eq(user.id))
.all(&**db)
.await
{
Ok(tokens) => Ok(HttpResponse::Ok().json(&tokens)),
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
}
}
#[delete("{workspace_slug}/api-tokens/{id}")]
async fn delete_api_token(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<(WorkspaceSlug, ApiTokenId)>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let (slug, id) = path.into_inner();
let slug = slug.as_str().to_string();
let token = match ApiTokens::find()
.with_slug(slug.as_str().to_string())
.filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id)))
.one(&**db)
.await
{
Ok(Some(token)) => token,
Ok(None) => {
return Err(
JsonError::new("API Token does not exists").with_status(StatusCode::NOT_FOUND)
)
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
match ApiTokens::delete_by_id(token.id).exec(&**db).await {
Ok(res) => tracing::debug!("Delete {} api tokens", res.rows_affected),
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
Ok(HttpResponse::NoContent().finish())
}
/// Request body for creating an API token.
#[derive(Debug, serde::Deserialize)]
struct CreateInput {
    // Human-readable token name.
    label: String,
    description: Option<String>,
    // Optional expiry; `None` creates a token without an expiry date.
    expired_at: Option<chrono::DateTime<chrono::FixedOffset>>,
}
#[post("{workspace_slug}/api-tokens")]
async fn create_api_token(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<WorkspaceSlug>,
input: Json<CreateInput>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let slug = path.into_inner();
let slug = slug.as_str().to_string();
let workspace = match entities::prelude::Workspaces::find()
.filter(entities::workspaces::Column::Slug.eq(slug))
.one(&**db)
.await
{
Ok(Some(w)) => w,
Ok(None) => {
return Err(
JsonError::new("Workspace does not exists").with_status(StatusCode::NOT_FOUND)
)
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
let input = input.into_inner();
let model = ApiTokenBuilder::new(
&input.label,
input.description.as_deref().unwrap_or_default(),
&*user,
Some(&workspace),
)
.with_expired_at(input.expired_at)
.into_active_model();
let token = match ApiTokens::insert(model).exec_with_returning(&**db).await {
Ok(token) => token,
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
Ok(HttpResponse::Created().json(token))
}
/// Request body for patching an API token.
///
/// The outer `Option` means "field was present in the payload"; a doubly
/// nested `Option` (e.g. `workspace_id`) additionally allows explicitly
/// setting the column to NULL.
#[derive(Debug, serde::Deserialize)]
struct UpdateInput {
    token: Option<String>,
    label: Option<String>,
    // user_type: Option<i16>,
    created_by_id: Option<Option<Uuid>>,
    updated_by_id: Option<Option<Uuid>>,
    user_id: Option<Uuid>,
    workspace_id: Option<Option<Uuid>>,
    description: Option<String>,
    expired_at: Option<Option<chrono::DateTime<chrono::FixedOffset>>>,
    is_active: Option<bool>,
    last_used: Option<Option<chrono::DateTime<chrono::FixedOffset>>>,
}
#[patch("{workspace_slug}/api-tokens/{id}")]
async fn update_api_token(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<(WorkspaceSlug, ApiTokenId)>,
input: Json<UpdateInput>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let (slug, id) = path.into_inner();
let slug = slug.as_str().to_string();
let token = ApiTokens::find()
.with_slug(slug.as_str().to_string())
.filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id)))
.one(&**db)
.await;
let token = match token {
Ok(Some(token)) => token,
Ok(None) => {
return Err(
JsonError::new("API Token does not exists").with_status(StatusCode::NOT_FOUND)
)
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
let i = input.into_inner();
match ApiTokens::update(ActiveModel {
id: Set(token.id),
token: i.token.map(Set).unwrap_or(NotSet),
label: i.label.map(Set).unwrap_or(NotSet),
user_type: Set(if user.is_bot { 1 } else { 0 }),
created_by_id: i.created_by_id.map(Set).unwrap_or(NotSet),
updated_by_id: i.updated_by_id.map(Set).unwrap_or(NotSet),
user_id: i.user_id.map(Set).unwrap_or(NotSet),
workspace_id: i.workspace_id.map(Set).unwrap_or(NotSet),
description: i.description.map(Set).unwrap_or(NotSet),
expired_at: i.expired_at.map(Set).unwrap_or(NotSet),
is_active: i.is_active.map(Set).unwrap_or(NotSet),
last_used: i.last_used.map(Set).unwrap_or(NotSet),
updated_at: Set(chrono::Utc::now().fixed_offset()),
..Default::default()
})
.exec(&**db)
.await
{
Ok(model) => Ok(HttpResponse::Ok().json(model)),
Err(DbErr::Exec(e)) => {
error!("Failed to update single api token: {e}");
return Err(JsonError::new("Invalid payload"))?;
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
}
}
#[cfg(test)] #[cfg(test)]
mod single_tests { mod single_tests {
use actix_jwt_session::{ use actix_jwt_session::{
@ -262,7 +450,6 @@ mod single_tests {
"http://example.com/{}/api-tokens/{}", "http://example.com/{}/api-tokens/{}",
workspace.slug, api_token.id workspace.slug, api_token.id
); );
eprintln!("URI: {uri:?}");
let pair = session let pair = session
.store( .store(

View File

@ -7,8 +7,8 @@ use reqwest::StatusCode;
use sea_orm::{DatabaseConnection, DatabaseTransaction, EntityTrait, Set}; use sea_orm::{DatabaseConnection, DatabaseTransaction, EntityTrait, Set};
use serde::Deserialize; use serde::Deserialize;
use super::auth_http_response;
use super::password::PassValidity; use super::password::PassValidity;
use super::{auth_http_response, random_hex};
use crate::extractors::RequireInstanceConfigured; use crate::extractors::RequireInstanceConfigured;
use crate::models::{ use crate::models::{
Error, JsonError, JsonErrorDetails, PasswordResetSecret, PasswordResetTimeout, Error, JsonError, JsonErrorDetails, PasswordResetSecret, PasswordResetTimeout,

View File

@ -1,8 +1,11 @@
use actix_web::web::{scope, ServiceConfig}; use actix_web::web::{scope, ServiceConfig};
pub use authentication::*; pub use authentication::*;
mod analytics;
mod authentication; mod authentication;
mod config; mod config;
//todo
mod users; mod users;
pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) { pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) {
@ -10,6 +13,7 @@ pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) {
scope("/api") scope("/api")
.configure(config::configure) .configure(config::configure)
.configure(users::configure) .configure(users::configure)
.configure(analytics::configure)
.configure(|c| authentication::configure(http_client, c)), .configure(|c| authentication::configure(http_client, c)),
); );
} }

View File

@ -10,6 +10,7 @@ pub use squadron_contract::{deadpool_redis, redis, RedisClient};
pub mod config; pub mod config;
pub mod events; pub mod events;
pub mod ext;
pub mod extractors; pub mod extractors;
pub mod http; pub mod http;
pub mod models; pub mod models;

View File

@ -0,0 +1,407 @@
use std::cmp::Ordering;
use std::str::FromStr;
/// Borrowed accessors over a raw issue-filter request. Every method has a
/// "not provided" default (`None` / `false`), so request types only override
/// the parameters they actually carry. Consumed by `Filter::filter_params`.
pub trait FilterParams {
    fn state(&self) -> Option<&str> {
        None
    }
    fn state_group(&self) -> Option<&str> {
        None
    }
    fn estimate_point(&self) -> Option<&str> {
        None
    }
    fn priority(&self) -> Option<&str> {
        None
    }
    fn parent(&self) -> Option<&str> {
        None
    }
    fn labels(&self) -> Option<&str> {
        None
    }
    fn assignees(&self) -> Option<&str> {
        None
    }
    fn mentions(&self) -> Option<&str> {
        None
    }
    fn created_by(&self) -> Option<&str> {
        None
    }
    fn name(&self) -> Option<&str> {
        None
    }
    fn created_at(&self) -> Option<&str> {
        None
    }
    fn updated_at(&self) -> Option<&str> {
        None
    }
    fn completed_at(&self) -> Option<&str> {
        None
    }
    fn start_date(&self) -> Option<&str> {
        None
    }
    fn target_date(&self) -> Option<&str> {
        None
    }
    fn issue_state_type(&self) -> Option<&str> {
        None
    }
    fn project(&self) -> Option<&str> {
        None
    }
    fn cycle(&self) -> Option<&str> {
        None
    }
    fn module(&self) -> Option<&str> {
        None
    }
    fn inbox_status(&self) -> Option<&str> {
        None
    }
    // false (the default) makes Filter restrict to top-level issues
    // (parent IS NULL); see `Filter::filter_params`.
    fn sub_issue(&self) -> bool {
        false
    }
    fn subscriber(&self) -> Option<&str> {
        None
    }
    fn start_target_date(&self) -> bool {
        false
    }
}
/// Regex for the relative-date head of a date query, e.g. "2_weeks" or
/// "1_months" (the actual parsing is done by `parse_date_head`).
pub static DATE_PATTERN: &str = "\\d+_(weeks|months)$";
/// Turns a parsed relative-date query into a single date bound.
///
/// The pivot date is `now` shifted by `duration` units (months ~ 30 days,
/// weeks = 7 days), into the future for "fromnow" offsets and into the past
/// otherwise; "after" produces a `Gte` bound, "before" a `Lte` bound.
fn string_date_filter(
    duration: i64,
    subsequent: Subsequest,
    term: DurationType,
    offset: DateOffset,
) -> Option<DateOrder> {
    use chrono::{Duration, Utc};
    // Approximate days per unit.
    let unit_days = match term {
        DurationType::Months => 30,
        DurationType::Weeks => 7,
    };
    // "fromnow" shifts forward in time, anything else backwards.
    let signed_days = match offset {
        DateOffset::FromNow => duration * unit_days,
        DateOffset::Other => -(duration * unit_days),
    };
    let pivot = (Utc::now() + Duration::days(signed_days)).date_naive();
    Some(match subsequent {
        Subsequest::After => DateOrder::Gte(pivot),
        Subsequest::Before => DateOrder::Lte(pivot),
    })
}
/// Whether a relative date is anchored in the future ("fromnow") or the past.
#[derive(Debug, Copy, Clone)]
enum DateOffset {
    FromNow,
    Other,
}
impl FromStr for DateOffset {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Anything that is not exactly "fromnow" falls back to Other
        // (past-anchored); parsing never fails.
        Ok(match s {
            "fromnow" => Self::FromNow,
            _ => Self::Other,
        })
    }
}
/// Direction of the comparison relative to the computed pivot date.
///
/// NOTE(review): the name is a typo of "Subsequent"; renaming would touch
/// `string_date_filter` and `date_filter` as well, so it is only flagged here.
#[derive(Debug, Copy, Clone)]
enum Subsequest {
    After,
    Before,
}
impl FromStr for Subsequest {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Anything that is not exactly "after" falls back to Before;
        // parsing never fails.
        Ok(match s {
            "after" => Self::After,
            _ => Self::Before,
        })
    }
}
/// Unit of a relative-date duration ("2_weeks" / "1_months").
#[derive(Debug, Copy, Clone)]
enum DurationType {
    Months,
    Weeks,
}
impl FromStr for DurationType {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Unlike the other enums here, an unknown unit is a hard error.
        Ok(match s {
            "months" => Self::Months,
            "weeks" => Self::Weeks,
            _ => return Err(()),
        })
    }
}
fn parse_date_head(head: &str) -> Option<(i64, DurationType)> {
let mut parts = head.split('_');
Some((parts.next()?.parse().ok()?, parts.next()?.parse().ok()?))
}
/// Parses each raw date query of the form
/// `"<n>_<weeks|months>;<after|before>;<fromnow|...>"` into a [`DateOrder`]
/// bound, silently skipping queries that do not have exactly three
/// `';'`-separated fields or whose fields fail to parse.
fn date_filter<'s>(
    queries: impl Iterator<Item = &'s str> + 's,
) -> impl Iterator<Item = DateOrder> + 's {
    queries.filter_map(|query| {
        let fields: Vec<&str> = query.split(';').collect();
        if fields.len() != 3 {
            return None;
        }
        let (duration, term) = parse_date_head(fields[0])?;
        // TODO: unsupported
        //
        // if "after" in date_query:
        //     filter[f"{date_term}__gte"] = date_query[0]
        // else:
        //     filter[f"{date_term}__lte"] = date_query[0]
        let subsequent = fields[1].parse().ok()?;
        let offset = fields[2].parse().ok()?;
        string_date_filter(duration, subsequent, term, offset)
    })
}
/// A single parsed date bound: a lower (`Gte`) or upper (`Lte`) limit.
#[derive(Debug)]
pub enum DateOrder {
    Gte(chrono::NaiveDate),
    Lte(chrono::NaiveDate),
}
pub mod filter_get {
use std::str::FromStr;
use uuid::Uuid;
use super::*;
/// Inclusive date interval assembled from [`DateOrder`] bounds; `None` on
/// either side means unbounded.
#[derive(Debug, Default)]
pub struct DateRange {
    // Upper bound (<=).
    pub lte: Option<chrono::NaiveDate>,
    // Lower bound (>=).
    pub gte: Option<chrono::NaiveDate>,
}
impl DateRange {
    /// Folds a stream of parsed bounds into this range; when the stream
    /// contains several bounds of the same kind, the last one wins.
    pub fn fill(&mut self, it: impl Iterator<Item = DateOrder>) {
        for bound in it {
            match bound {
                DateOrder::Gte(date) => self.gte = Some(date),
                DateOrder::Lte(date) => self.lte = Some(date),
            }
        }
    }
}
/// Fully-parsed issue filter, built from raw request strings by
/// [`Filter::filter_params`]. Field names mirror the Django-style query
/// keys of the original API (noted per field).
#[derive(Debug, Default)]
pub struct Filter {
    // state__in
    pub state_in: Option<Vec<Uuid>>,
    // state__group__in
    pub state_group_in: Option<Vec<String>>,
    // estimate_point__in
    pub estimate_point_in: Option<Vec<String>>,
    pub priority_in: Option<Vec<String>>,
    pub parent_in: Option<Vec<Uuid>>,
    pub labels_in: Option<Vec<Uuid>>,
    pub assignees_in: Option<Vec<Uuid>>,
    // issue_mention__mention__id__in
    pub issue_mention_ids_in: Option<Vec<Uuid>>,
    /// issue_cycle__cycle_id__in
    pub cycle_ids_in: Option<Vec<Uuid>>,
    /// issue_module__module_id__in
    pub module_ids_in: Option<Vec<Uuid>>,
    pub created_by_in: Option<Vec<Uuid>>,
    pub project_in: Option<Vec<Uuid>>,
    /// issue_subscribers__subscriber_id__in
    pub subscriber_ids_in: Option<Vec<Uuid>>,
    /// name__icontains
    pub name_contains: Option<String>,
    // created_at__date
    pub created_at_in: DateRange,
    pub updated_at_in: DateRange,
    pub completed_at_in: DateRange,
    pub start_date_in: DateRange,
    pub target_date_in: DateRange,
    // Static group lists selected by the `issue_state_type` parameter.
    pub issue_state_type_in: Option<&'static [&'static str]>,
    /// issue_inbox__status__in
    pub inbox_status_in: Option<Vec<String>>,
    /// parent__isnull
    pub parent_is_null: bool,
    /// target_date__isnull
    pub target_date_isnull: bool,
    /// start_date__isnull
    pub start_date_isnull: bool,
}
/// Conversion of one raw comma-separated list segment into a typed value;
/// `None` drops the segment (see `parse_param_list`).
trait IntoParam {
    fn into_param(s: &str) -> Option<Self>
    where
        Self: Sized;
}
impl IntoParam for Uuid {
    // Invalid UUIDs are silently dropped from the list.
    fn into_param(s: &str) -> Option<Self> {
        Uuid::from_str(s).ok()
    }
}
impl IntoParam for String {
    fn into_param(s: &str) -> Option<Self> {
        Some(s.to_owned())
    }
}
/// Parses a comma-separated parameter list into typed values.
///
/// An empty or literal-"null" parameter yields `None`; individual empty or
/// "null" segments (and segments that fail `IntoParam`) are dropped, and a
/// list with no surviving segments also yields `None`.
fn parse_param_list<V: IntoParam>(param: &str) -> Option<Vec<V>> {
    let trimmed = param.trim();
    if trimmed.is_empty() || trimmed == "null" {
        return None;
    }
    let values: Vec<V> = trimmed
        .split(',')
        .filter(|segment| *segment != "null" && *segment != "")
        .filter_map(V::into_param)
        .collect();
    if values.is_empty() {
        None
    } else {
        Some(values)
    }
}
impl Filter {
    /// Folds every recognized request parameter from `p` into this filter.
    ///
    /// List parameters go through `parse_param_list` (empty / "null" entries
    /// dropped); date parameters are parsed into `DateRange` bounds via
    /// `date_filter`. (Cleanup: removed the unused `'p` lifetime and the
    /// `if let false/true` patterns on plain bools.)
    pub fn filter_params(mut self, p: &impl FilterParams) -> Self {
        if let Some(s) = p.state() {
            self.state_in = parse_param_list(s);
        }
        if let Some(s) = p.state_group() {
            self.state_group_in = parse_param_list(s);
        }
        if let Some(s) = p.estimate_point() {
            self.estimate_point_in = parse_param_list(s);
        }
        if let Some(s) = p.priority() {
            self.priority_in = parse_param_list(s);
        }
        if let Some(s) = p.parent() {
            self.parent_in = parse_param_list(s);
        }
        if let Some(s) = p.labels() {
            self.labels_in = parse_param_list(s);
        }
        if let Some(s) = p.assignees() {
            self.assignees_in = parse_param_list(s);
        }
        if let Some(s) = p.mentions() {
            self.issue_mention_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.subscriber() {
            self.subscriber_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.project() {
            self.project_in = parse_param_list(s);
        }
        if let Some(s) = p.created_by() {
            self.created_by_in = parse_param_list(s);
        }
        if let Some(s) = p.inbox_status() {
            self.inbox_status_in = parse_param_list(s);
        }
        if let Some(s) = p.name() {
            self.name_contains = Some(s.to_string());
        }
        if let Some(s) = p.cycle() {
            self.cycle_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.module() {
            self.module_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.created_at() {
            self.created_at_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.updated_at() {
            self.updated_at_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.completed_at() {
            self.completed_at_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.start_date() {
            self.start_date_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.target_date() {
            self.target_date_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.issue_state_type() {
            self.issue_state_type_in = Some(match s {
                "backlog" => &["backlog"],
                "active" => &["unstarted", "started"],
                _ => &["backlog", "unstarted", "started", "completed", "cancelled"],
            });
        }
        // sub_issue == false means "only top-level issues": parent IS NULL.
        if !p.sub_issue() {
            self.parent_is_null = true;
        }
        // Requesting a start/target date window clears any "is null"
        // restriction on those columns (they default to false anyway).
        if p.start_target_date() {
            self.start_date_isnull = false;
            self.target_date_isnull = false;
        }
        self
    }
}
}
pub mod filter_non_get {
    /// Filter accumulator for non-GET (body based) issue filtering.
    /// Only the raw `state` parameter is captured so far.
    #[derive(Debug, Default)]
    pub struct Filter {
        pub state_in: Option<String>,
    }
    /// Records the `state` parameter unless it is empty or the literal
    /// "null" (what the client sends for a cleared filter).
    pub fn filter_state(state: &str, filter: &mut Filter) {
        // Idiom fix: `state.is_empty()` instead of `state == ""`.
        if state == "null" || state.is_empty() {
            return;
        }
        filter.state_in = Some(state.to_string());
    }
    /// Placeholder — state-group filtering for non-GET requests is not
    /// implemented yet.
    pub fn filter_state_group() {}
}

View File

@ -22,6 +22,8 @@ use uuid::Uuid;
use crate::http::{random_hex, AuthError, OAuthError}; use crate::http::{random_hex, AuthError, OAuthError};
use crate::models::Error; use crate::models::Error;
pub mod issues_filter;
#[macro_export] #[macro_export]
macro_rules! db_t { macro_rules! db_t {
($db: expr) => {{ ($db: expr) => {{
@ -460,6 +462,11 @@ impl ApiTokenBuilder {
} }
} }
pub fn with_expired_at(mut self, exp: Option<chrono::DateTime<chrono::FixedOffset>>) -> Self {
self.expired_at = exp;
self
}
pub fn into_active_model(self) -> entities::api_tokens::ActiveModel { pub fn into_active_model(self) -> entities::api_tokens::ActiveModel {
use sea_orm::*; use sea_orm::*;

View File

@ -39,10 +39,6 @@ CREATE TYPE project_member_roles AS ENUM (
'Guest' 'Guest'
); );
--
-- Name: users; Type: TABLE; Schema: public; Owner: plane
--
CREATE TABLE users ( CREATE TABLE users (
password character varying(128) NOT NULL, password character varying(128) NOT NULL,
last_login timestamp with time zone, last_login timestamp with time zone,
@ -240,7 +236,9 @@ CREATE TABLE issues (
archived_at date, archived_at date,
is_draft boolean NOT NULL, is_draft boolean NOT NULL,
external_id character varying(255), external_id character varying(255),
external_source character varying(255) external_source character varying(255),
CONSTRAINT parent_fk FOREIGN KEY (parent_id) REFERENCES issues (id),
CONSTRAINT state_fk FOREIGN KEY (state_id) REFERENCES states (id)
); );
-- --

7
scripts/rebuild_db.sh Normal file
View File

@ -0,0 +1,7 @@
#!/usr/bin/env zsh
DB_NAME=squadron
psql postgres postgres -c "DROP DATABASE $DB_NAME";psql postgres postgres -c "CREATE DATABASE $DB_NAME"; psql $DB_NAME postgres -f ./plane_db.sql
DB_NAME=squadron_test
psql postgres postgres -c "DROP DATABASE $DB_NAME";psql postgres postgres -c "CREATE DATABASE $DB_NAME"; psql $DB_NAME postgres -f ./plane_db.sql