Compare commits

...

2 Commits

Author SHA1 Message Date
233f52d7f8 working on analytics filters 2024-02-18 06:31:50 +01:00
ec2f6b9d52 Test single api token 2024-02-13 07:23:20 +01:00
16 changed files with 1561 additions and 41 deletions

6
Cargo.lock generated
View File

@ -316,6 +316,7 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [ dependencies = [
"log",
"memchr", "memchr",
] ]
@ -2099,6 +2100,9 @@ name = "memchr"
version = "2.7.1" version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
dependencies = [
"log",
]
[[package]] [[package]]
name = "mime" name = "mime"
@ -2966,6 +2970,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"log",
"memchr", "memchr",
"regex-syntax 0.8.2", "regex-syntax 0.8.2",
] ]
@ -4142,6 +4147,7 @@ dependencies = [
"oauth2-signin", "oauth2-signin",
"password-hash", "password-hash",
"rand", "rand",
"regex",
"reqwest", "reqwest",
"rumqttc", "rumqttc",
"rust-s3", "rust-s3",

View File

@ -31,6 +31,7 @@ oauth2-google = "0.2.0"
oauth2-signin = "0.2.0" oauth2-signin = "0.2.0"
password-hash = "0.5.0" password-hash = "0.5.0"
rand = { version = "0.8.5", features = ["serde"] } rand = { version = "0.8.5", features = ["serde"] }
regex = { version = "1.10.3", features = ["logging", "pattern"] }
reqwest = { version = "0.11.23", default-features = false, features = ["rustls", "tokio-rustls", "tokio-socks", "multipart"] } reqwest = { version = "0.11.23", default-features = false, features = ["rustls", "tokio-rustls", "tokio-socks", "multipart"] }
rumqttc = { version = "0.23.0", features = ["use-rustls"] } rumqttc = { version = "0.23.0", features = ["use-rustls"] }
rust-s3 = { version = "0.33.0", features = ["tokio-rustls-tls", "futures-util", "futures-io"] } rust-s3 = { version = "0.33.0", features = ["tokio-rustls-tls", "futures-util", "futures-io"] }

View File

@ -0,0 +1,121 @@
use sea_orm::sea_query::SeaRc;
use sea_orm::*;
/// Thin wrapper around a SeaORM `Select` that accumulates inner joins with
/// extra `ON` conditions, then hands the query back via `finish()`.
pub struct JoinBuilder<E>
where
    E: EntityTrait,
{
    // The select being built up; consumed and returned by `finish()`.
    select: Select<E>,
}
impl<E> JoinBuilder<E>
where
    E: EntityTrait,
{
    /// Wraps an existing `Select` so join conditions can be chained onto it.
    pub fn new(s: Select<E>) -> Self {
        Self { select: s }
    }

    /// Adds an inner join over `rel`, constraining the *left* table's `col`
    /// to equal `v`.
    ///
    /// `v` is taken by value (previously `&V`): the closure passed to
    /// `on_condition` must be `'static`, so a borrowed value cannot be
    /// captured, and the in-file callers already pass owned values
    /// (e.g. `slug.clone()`). The value is cloned on each closure invocation
    /// because `on_condition` may call it more than once.
    #[must_use]
    pub fn left_condition<V>(
        mut self,
        rel: impl RelationTrait,
        col: impl ColumnTrait,
        v: V,
    ) -> Self
    where
        V: Into<sea_orm::Value> + Send + Sync + 'static + Clone,
    {
        self.select = self.select.join(
            JoinType::Join,
            rel.def().on_condition(move |left, _right| {
                use sea_query::*;
                Expr::col((left, col)).eq(v.clone()).into_condition()
            }),
        );
        self
    }

    /// Adds an inner join over `rel`, constraining the *right* table's `col`
    /// to equal `v`. Same ownership rationale as [`JoinBuilder::left_condition`].
    #[must_use]
    pub fn right_condition<V>(
        mut self,
        rel: impl RelationTrait,
        col: impl ColumnTrait,
        v: V,
    ) -> Self
    where
        V: Into<sea_orm::Value> + Send + Sync + 'static + Clone,
    {
        self.select = self.select.join(
            JoinType::Join,
            rel.def().on_condition(move |_left, right| {
                use sea_query::*;
                let expr = Expr::col((right, col));
                expr.eq(v.clone()).into_condition()
            }),
        );
        self
    }

    /// Adds an inner join over `rel` whose `ON` condition is produced by `f`
    /// from an expression over the *right* table's `col` — for conditions
    /// other than simple equality (e.g. `IS IN`).
    #[must_use]
    pub fn right_condition_with<F, Col>(mut self, rel: impl RelationTrait, col: Col, f: F) -> Self
    where
        Col: ColumnTrait,
        F: FnOnce(sea_query::Expr) -> sea_query::SimpleExpr + 'static + Send + Sync,
    {
        self.select = self.select.join(
            JoinType::Join,
            rel.def().on_condition(move |_left, right| {
                use sea_query::*;
                f(sea_query::Expr::col((right, col))).into_condition()
            }),
        );
        self
    }

    /// Returns the accumulated `Select`.
    #[must_use]
    pub fn finish(self) -> Select<E> {
        self.select
    }
}
/// Extension trait that lifts a SeaORM `Select` into a [`JoinBuilder`].
pub trait JoinOnCondition<E>
where
    E: EntityTrait,
{
    /// Starts a join-building chain for this select.
    fn with_join(self) -> JoinBuilder<E>;
}

impl<E: EntityTrait> JoinOnCondition<E> for Select<E> {
    fn with_join(self) -> JoinBuilder<E> {
        JoinBuilder::new(self)
    }
}
/// Scopes a query to the workspace identified by `slug`.
pub trait WithSlug {
    /// Returns the query restricted to rows belonging to the workspace
    /// whose `slug` column equals `slug`.
    fn with_slug(self, slug: String) -> Self;
}
impl WithSlug for Select<entities::api_tokens::Entity> {
    /// Joins `api_tokens` to its workspace and keeps only rows whose
    /// workspace `slug` matches.
    fn with_slug(self, slug: String) -> Self {
        // `slug` is owned and unused afterwards, so move it into the join
        // condition directly — the previous `slug.clone()` was a redundant
        // allocation.
        self.with_join()
            .right_condition(
                entities::api_tokens::Relation::Workspaces1,
                entities::workspaces::Column::Slug,
                slug,
            )
            .finish()
    }
}
impl WithSlug for Select<entities::issues::Entity> {
    /// Joins `issues` to its workspace and keeps only rows whose workspace
    /// `slug` matches.
    fn with_slug(self, slug: String) -> Self {
        // `slug` is owned and unused afterwards, so move it into the join
        // condition directly — the previous `slug.clone()` was a redundant
        // allocation.
        self.with_join()
            .right_condition(
                entities::issues::Relation::Workspaces1,
                entities::workspaces::Column::Slug,
                slug,
            )
            .finish()
    }
}

View File

@ -0,0 +1,386 @@
use actix_web::web::{Data, Json, Path, ServiceConfig};
use actix_web::{get, HttpRequest, HttpResponse};
use entities::prelude::Issues;
use issues_filter::filter_get::Filter;
use sea_orm::sea_query::SeaRc;
use sea_orm::*;
use serde::Deserialize;
use squadron_contract::WorkspaceSlug;
use uuid::Uuid;
use crate::ext::{JoinOnCondition, WithSlug};
use crate::models::JsonError;
use crate::utils::issues_filter;
/// Registers the workspace-analytics routes on the actix service config.
pub fn configure(config: &mut ServiceConfig) {
    config.service(workspace_analytics);
}
/// Accepted wire values for the `x_axis`/`segment` fields; each string maps
/// 1:1 onto an `XAxis` variant and the list is echoed back in
/// deserialization error messages.
static VALID_XAXIS_SEGMENT: [&str; 12] = [
    "state_id",
    "state__group",
    "labels__id",
    "assignees__id",
    "estimate_point",
    "issue_cycle__cycle_id",
    "issue_module__module_id",
    "priority",
    "start_date",
    "target_date",
    "created_at",
    "completed_at",
];

/// Accepted wire values for the `y_axis` field; mirrors the `YAxis` enum.
static VALID_YAXIS: [&str; 2] = ["issue_count", "estimate"];
/// The quantity aggregated on the y axis of a workspace analytics query.
#[derive(Debug, PartialEq)]
enum YAxis {
    // Wire value "issue_count".
    IssueCount,
    // Wire value "estimate".
    Estimate,
}
/// serde visitor mapping the strings in `VALID_YAXIS` onto [`YAxis`].
struct YAxisVisitor;

impl<'de> serde::de::Visitor<'de> for YAxisVisitor {
    type Value = YAxis;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_fmt(format_args!("Expect one of {VALID_YAXIS:?}"))
    }

    /// Accepts exactly the values listed in `VALID_YAXIS`.
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        use YAxis::*;
        Ok(match v {
            "issue_count" => IssueCount,
            "estimate" => Estimate,
            _ => {
                // Fixed: the message previously said "x-axis segment"
                // (copy/paste from XAxisVisitor) even though this visitor
                // validates the y axis.
                return Err(E::custom(format!(
                    "y-axis segment {v} not in {VALID_YAXIS:?}"
                )))
            }
        })
    }

    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        self.visit_str(&v)
    }
}
impl<'de> serde::de::Deserialize<'de> for YAxis {
    /// Deserializes a `YAxis` from a string via [`YAxisVisitor`].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        deserializer.deserialize_str(YAxisVisitor)
    }
}
/// The dimension on the x axis (or the secondary `segment`) of a workspace
/// analytics query. Variants correspond 1:1, in order, to the strings in
/// `VALID_XAXIS_SEGMENT`.
#[derive(Debug, PartialEq)]
enum XAxis {
    StateId,
    StateGroup,
    LabelsId,
    AssigneesId,
    EstimatePoint,
    IssueCycleCycleId,
    IssueModuleModuleId,
    Priority,
    StartDate,
    TargetDate,
    CreatedAt,
    CompletedAt,
}
struct XAxisVisitor;
impl<'de> serde::de::Visitor<'de> for XAxisVisitor {
type Value = XAxis;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_fmt(format_args!("Expect one of {VALID_XAXIS_SEGMENT:?}"))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
use XAxis::*;
Ok(match v {
"state_id" => StateId,
"state__group" => StateGroup,
"labels__id" => LabelsId,
"assignees__id" => AssigneesId,
"estimate_point" => EstimatePoint,
"issue_cycle__cycle_id" => IssueCycleCycleId,
"issue_module__module_id" => IssueModuleModuleId,
"priority" => Priority,
"start_date" => StartDate,
"target_date" => TargetDate,
"created_at" => CreatedAt,
"completed_at" => CompletedAt,
_ => {
return Err(E::custom(format!(
"x-axis segment {v} not in {VALID_XAXIS_SEGMENT:?}"
)))
}
})
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
self.visit_str(&v)
}
}
impl<'de> serde::de::Deserialize<'de> for XAxis {
    /// Deserializes an `XAxis` from a string via [`XAxisVisitor`].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        deserializer.deserialize_str(XAxisVisitor)
    }
}
/// Raw issue-filter fields as received from the client; each optional string
/// is a comma-separated list parsed later by the `issues_filter` machinery.
///
/// NOTE(review): `sub_issue` and `start_target_date` are plain `bool`s
/// without `#[serde(default)]`, so requests omitting them fail to
/// deserialize — confirm whether that is intended.
#[derive(Debug, Deserialize)]
struct FilterInput {
    state: Option<String>,
    state_group: Option<String>,
    estimate_point: Option<String>,
    priority: Option<String>,
    parent: Option<String>,
    labels: Option<String>,
    assignees: Option<String>,
    mentions: Option<String>,
    created_by: Option<String>,
    name: Option<String>,
    created_at: Option<String>,
    updated_at: Option<String>,
    completed_at: Option<String>,
    start_date: Option<String>,
    target_date: Option<String>,
    issue_state_type: Option<String>,
    project: Option<String>,
    cycle: Option<String>,
    module: Option<String>,
    inbox_status: Option<String>,
    sub_issue: bool,
    subscriber: Option<String>,
    start_target_date: bool,
}
/// Mechanical delegation: every `FilterParams` accessor returns the
/// matching `FilterInput` field (`Option<String>` as `Option<&str>`,
/// bools by value). No logic lives here.
impl issues_filter::FilterParams for FilterInput {
    fn state(&self) -> Option<&str> {
        self.state.as_deref()
    }
    fn state_group(&self) -> Option<&str> {
        self.state_group.as_deref()
    }
    fn estimate_point(&self) -> Option<&str> {
        self.estimate_point.as_deref()
    }
    fn priority(&self) -> Option<&str> {
        self.priority.as_deref()
    }
    fn parent(&self) -> Option<&str> {
        self.parent.as_deref()
    }
    fn labels(&self) -> Option<&str> {
        self.labels.as_deref()
    }
    fn assignees(&self) -> Option<&str> {
        self.assignees.as_deref()
    }
    fn mentions(&self) -> Option<&str> {
        self.mentions.as_deref()
    }
    fn created_by(&self) -> Option<&str> {
        self.created_by.as_deref()
    }
    fn name(&self) -> Option<&str> {
        self.name.as_deref()
    }
    fn created_at(&self) -> Option<&str> {
        self.created_at.as_deref()
    }
    fn updated_at(&self) -> Option<&str> {
        self.updated_at.as_deref()
    }
    fn completed_at(&self) -> Option<&str> {
        self.completed_at.as_deref()
    }
    fn start_date(&self) -> Option<&str> {
        self.start_date.as_deref()
    }
    fn target_date(&self) -> Option<&str> {
        self.target_date.as_deref()
    }
    fn issue_state_type(&self) -> Option<&str> {
        self.issue_state_type.as_deref()
    }
    fn project(&self) -> Option<&str> {
        self.project.as_deref()
    }
    fn cycle(&self) -> Option<&str> {
        self.cycle.as_deref()
    }
    fn module(&self) -> Option<&str> {
        self.module.as_deref()
    }
    fn inbox_status(&self) -> Option<&str> {
        self.inbox_status.as_deref()
    }
    fn sub_issue(&self) -> bool {
        self.sub_issue
    }
    fn subscriber(&self) -> Option<&str> {
        self.subscriber.as_deref()
    }
    fn start_target_date(&self) -> bool {
        self.start_target_date
    }
}
/// Full payload of the workspace-analytics endpoint: the two axes, an
/// optional secondary segmentation axis, and the flattened issue filters.
#[derive(Debug, Deserialize)]
struct AnalyticsInput {
    x_axis: XAxis,
    y_axis: YAxis,
    // Must differ from `x_axis`; checked in the handler.
    segment: Option<XAxis>,
    // Filter fields arrive at the same JSON level as the axes.
    #[serde(flatten)]
    filter: FilterInput,
}
/// `GET /workspaces/{slug}/analytics` — work-in-progress analytics endpoint.
///
/// Validates that `segment` differs from `x_axis`, scopes an issue query to
/// the workspace slug, and starts applying the parsed filters. The
/// aggregation itself is not implemented: the function ends in `todo!()`,
/// so every request that passes validation currently panics.
///
/// NOTE(review): this is a GET handler taking a `Json` body; many clients
/// cannot attach a body to GET — confirm whether this should read a query
/// string (or be a POST) instead.
#[get("/workspaces/{slug}/analytics")]
async fn workspace_analytics(
    req: HttpRequest, // currently unused
    path: Path<WorkspaceSlug>,
    input: Json<AnalyticsInput>,
    db: Data<DatabaseConnection>, // unused until the query is executed
) -> Result<HttpResponse, JsonError> {
    let input = input.into_inner();
    // Segmenting by the same dimension as the x axis is meaningless.
    if Some(input.x_axis) == input.segment {
        return Err(JsonError::new(
            "Both segment and x axis cannot be same and segment should be valid",
        ));
    }
    let slug = path.into_inner();
    let mut issues_query = Issues::find().with_slug(slug.clone());
    {
        // Destructure every parsed filter list so unhandled ones are visible
        // at a glance below.
        let Filter {
            state_in,
            state_group_in,
            estimate_point_in,
            priority_in,
            parent_in,
            labels_in,
            assignees_in,
            issue_mention_ids_in,
            cycle_ids_in,
            module_ids_in,
            created_by_in,
            project_in,
            subscriber_ids_in,
            name_contains,
            created_at_in,
            updated_at_in,
            completed_at_in,
            start_date_in,
            target_date_in,
            issue_state_type_in,
            inbox_status_in,
            parent_is_null,
            target_date_isnull,
            start_date_isnull,
        } = Filter::default().filter_params(&input.filter);
        // Reference copy of the `issues` model's columns, kept while the
        // filters below are being implemented.
        /*
           pub created_at: DateTimeWithTimeZone,
           pub updated_at: DateTimeWithTimeZone,
           #[sea_orm(primary_key, auto_increment = false)]
           pub id: Uuid,
           pub name: String,
           #[sea_orm(column_type = "JsonBinary")]
           pub description: Json,
           pub priority: String,
           pub start_date: Option<Date>,
           pub target_date: Option<Date>,
           pub sequence_id: i32,
           pub created_by_id: Option<Uuid>,
           pub parent_id: Option<Uuid>,
           pub project_id: Uuid,
           pub state_id: Option<Uuid>,
           pub updated_by_id: Option<Uuid>,
           pub workspace_id: Uuid,
           #[sea_orm(column_type = "Text")]
           pub description_html: String,
           #[sea_orm(column_type = "Text", nullable)]
           pub description_stripped: Option<String>,
           pub completed_at: Option<DateTimeWithTimeZone>,
           #[sea_orm(column_type = "Double")]
           pub sort_order: f64,
           pub estimate_point: Option<i32>,
           pub archived_at: Option<Date>,
           pub is_draft: bool,
           pub external_id: Option<String>,
           pub external_source: Option<String>,
        */
        if let Some(v) = state_in {
            use entities::issues::Column;
            issues_query = issues_query.filter(Column::StateId.is_in(v));
        }
        if let Some(v) = state_group_in {
            use entities::issues::Relation;
            use entities::states::Column;
            // State group lives on the joined `states` table, so it is an
            // extra ON condition rather than a WHERE clause.
            issues_query = issues_query
                .with_join()
                .right_condition_with(Relation::States, Column::Group, |expr| expr.is_in(v))
                .finish()
        }
        // TODO(review): the empty `if let` arms below are unimplemented
        // filters — the bound `v` is intentionally dropped for now.
        if let Some(v) = estimate_point_in {}
        if let Some(v) = priority_in {}
        if let Some(v) = parent_in {}
        if let Some(v) = labels_in {}
        if let Some(v) = assignees_in {}
        if let Some(v) = issue_mention_ids_in {}
        if let Some(v) = cycle_ids_in {}
        if let Some(v) = module_ids_in {}
        if let Some(v) = created_by_in {}
        if let Some(v) = project_in {
            use entities::issues::Column;
            issues_query = issues_query.filter(Column::ProjectId.is_in(v));
        }
        if let Some(v) = subscriber_ids_in {}
        if let Some(v) = name_contains {
            use entities::issues::Column;
            issues_query = issues_query.filter(Column::Name.contains(v));
        }
        if let Some(v) = issue_state_type_in {}
        if let Some(v) = inbox_status_in {}
        // dates
        // TODO(review): each binding below shadows the previous one and is
        // never used — date-range filtering is not wired up yet.
        let range = created_at_in;
        let range = updated_at_in;
        let range = completed_at_in;
        let range = start_date_in;
        let range = target_date_in;
        if parent_is_null {
            issues_query = issues_query.filter(entities::issues::Column::ParentId.is_null());
        }
        if target_date_isnull {
            issues_query = issues_query.filter(entities::issues::Column::TargetDate.is_null());
        }
        if start_date_isnull {
            issues_query = issues_query.filter(entities::issues::Column::StartDate.is_null());
        }
    }
    // Aggregation/grouping by x_axis/y_axis/segment still to be implemented.
    todo!()
}

View File

@ -1,6 +1,5 @@
use actix_jwt_session::Authenticated; use actix_web::web::{Data, Json, Path, ServiceConfig};
use actix_web::web::{Data, Path, ServiceConfig}; use actix_web::{delete, get, patch, post, HttpResponse};
use actix_web::{get, HttpResponse};
use entities::api_tokens::*; use entities::api_tokens::*;
use entities::prelude::ApiTokens; use entities::prelude::ApiTokens;
use reqwest::StatusCode; use reqwest::StatusCode;
@ -9,46 +8,32 @@ use sea_orm::*;
use squadron_contract::{ApiTokenId, WorkspaceSlug}; use squadron_contract::{ApiTokenId, WorkspaceSlug};
use tracing::error; use tracing::error;
use crate::extractors::RequireInstanceConfigured; use crate::ext::{JoinOnCondition, WithSlug};
use crate::extractors::{RequireInstanceConfigured, RequireUser};
use crate::models::{Error, JsonError}; use crate::models::{Error, JsonError};
use crate::session::AppClaims; use crate::utils::ApiTokenBuilder;
use crate::DatabaseConnection; use crate::DatabaseConnection;
pub fn configure(_: reqwest::Client, config: &mut ServiceConfig) { pub fn configure(_: reqwest::Client, config: &mut ServiceConfig) {
config.service(single_api_token); config
.service(single_api_token)
.service(user_api_tokens)
.service(delete_api_token)
.service(create_api_token)
.service(update_api_token);
} }
#[get("{workspace_slug}/api-tokens/{id}")] #[get("{workspace_slug}/api-tokens/{id}")]
async fn single_api_token( async fn single_api_token(
_: RequireInstanceConfigured, _: RequireInstanceConfigured,
session: Authenticated<AppClaims>, user: RequireUser,
path: Path<(WorkspaceSlug, ApiTokenId)>, path: Path<(WorkspaceSlug, ApiTokenId)>,
db: Data<DatabaseConnection>, db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> { ) -> Result<HttpResponse, JsonError> {
let (slug, id) = path.into_inner(); let (slug, id) = path.into_inner();
let user = entities::prelude::Users::find_by_id(session.account_id())
.one(&**db)
.await
.map_err(|e| {
error!("Failed to load user: {e}");
Error::DatabaseError
})?
.ok_or(Error::UserRequired)?;
let slug = slug.as_str().to_string();
match ApiTokens::find() match ApiTokens::find()
.join( .with_slug(slug.as_str().to_string())
JoinType::Join,
Relation::Workspaces1
.def()
.on_condition(move |_left, right| {
use sea_query::*;
Expr::col((right, entities::workspaces::Column::Slug))
.eq(slug.clone())
.into_condition()
}),
)
.filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id))) .filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id)))
.one(&**db) .one(&**db)
.await .await
@ -64,5 +49,442 @@ async fn single_api_token(
} }
} }
#[get("{workspace_slug}/api-tokens")]
async fn user_api_tokens(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<WorkspaceSlug>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let slug = path.into_inner();
let slug = slug.as_str().to_string();
match ApiTokens::find()
.with_slug(slug.as_str().to_string())
.filter(Column::UserId.eq(user.id))
.all(&**db)
.await
{
Ok(tokens) => Ok(HttpResponse::Ok().json(&tokens)),
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
}
}
#[delete("{workspace_slug}/api-tokens/{id}")]
async fn delete_api_token(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<(WorkspaceSlug, ApiTokenId)>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let (slug, id) = path.into_inner();
let slug = slug.as_str().to_string();
let token = match ApiTokens::find()
.with_slug(slug.as_str().to_string())
.filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id)))
.one(&**db)
.await
{
Ok(Some(token)) => token,
Ok(None) => {
return Err(
JsonError::new("API Token does not exists").with_status(StatusCode::NOT_FOUND)
)
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
match ApiTokens::delete_by_id(token.id).exec(&**db).await {
Ok(res) => tracing::debug!("Delete {} api tokens", res.rows_affected),
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
Ok(HttpResponse::NoContent().finish())
}
#[derive(Debug, serde::Deserialize)]
struct CreateInput {
label: String,
description: Option<String>,
expired_at: Option<chrono::DateTime<chrono::FixedOffset>>,
}
#[post("{workspace_slug}/api-tokens")]
async fn create_api_token(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<WorkspaceSlug>,
input: Json<CreateInput>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let slug = path.into_inner();
let slug = slug.as_str().to_string();
let workspace = match entities::prelude::Workspaces::find()
.filter(entities::workspaces::Column::Slug.eq(slug))
.one(&**db)
.await
{
Ok(Some(w)) => w,
Ok(None) => {
return Err(
JsonError::new("Workspace does not exists").with_status(StatusCode::NOT_FOUND)
)
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
let input = input.into_inner();
let model = ApiTokenBuilder::new(
&input.label,
input.description.as_deref().unwrap_or_default(),
&*user,
Some(&workspace),
)
.with_expired_at(input.expired_at)
.into_active_model();
let token = match ApiTokens::insert(model).exec_with_returning(&**db).await {
Ok(token) => token,
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
Ok(HttpResponse::Created().json(token))
}
#[derive(Debug, serde::Deserialize)]
struct UpdateInput {
token: Option<String>,
label: Option<String>,
// user_type: Option<i16>,
created_by_id: Option<Option<Uuid>>,
updated_by_id: Option<Option<Uuid>>,
user_id: Option<Uuid>,
workspace_id: Option<Option<Uuid>>,
description: Option<String>,
expired_at: Option<Option<chrono::DateTime<chrono::FixedOffset>>>,
is_active: Option<bool>,
last_used: Option<Option<chrono::DateTime<chrono::FixedOffset>>>,
}
#[patch("{workspace_slug}/api-tokens/{id}")]
async fn update_api_token(
_: RequireInstanceConfigured,
user: RequireUser,
path: Path<(WorkspaceSlug, ApiTokenId)>,
input: Json<UpdateInput>,
db: Data<DatabaseConnection>,
) -> Result<HttpResponse, JsonError> {
let (slug, id) = path.into_inner();
let slug = slug.as_str().to_string();
let token = ApiTokens::find()
.with_slug(slug.as_str().to_string())
.filter(Column::UserId.eq(user.id).and(Column::Id.eq(*id)))
.one(&**db)
.await;
let token = match token {
Ok(Some(token)) => token,
Ok(None) => {
return Err(
JsonError::new("API Token does not exists").with_status(StatusCode::NOT_FOUND)
)
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
};
let i = input.into_inner();
match ApiTokens::update(ActiveModel {
id: Set(token.id),
token: i.token.map(Set).unwrap_or(NotSet),
label: i.label.map(Set).unwrap_or(NotSet),
user_type: Set(if user.is_bot { 1 } else { 0 }),
created_by_id: i.created_by_id.map(Set).unwrap_or(NotSet),
updated_by_id: i.updated_by_id.map(Set).unwrap_or(NotSet),
user_id: i.user_id.map(Set).unwrap_or(NotSet),
workspace_id: i.workspace_id.map(Set).unwrap_or(NotSet),
description: i.description.map(Set).unwrap_or(NotSet),
expired_at: i.expired_at.map(Set).unwrap_or(NotSet),
is_active: i.is_active.map(Set).unwrap_or(NotSet),
last_used: i.last_used.map(Set).unwrap_or(NotSet),
updated_at: Set(chrono::Utc::now().fixed_offset()),
..Default::default()
})
.exec(&**db)
.await
{
Ok(model) => Ok(HttpResponse::Ok().json(model)),
Err(DbErr::Exec(e)) => {
error!("Failed to update single api token: {e}");
return Err(JsonError::new("Invalid payload"))?;
}
Err(e) => {
error!("Failed to load single api token: {e}");
return Err(Error::DatabaseError)?;
}
}
}
#[cfg(test)] #[cfg(test)]
mod single_tests {} mod single_tests {
use actix_jwt_session::{
Hashing, JwtTtl, RefreshTtl, SessionMiddlewareFactory, JWT_COOKIE_NAME, JWT_HEADER_NAME,
REFRESH_COOKIE_NAME, REFRESH_HEADER_NAME,
};
use actix_web::body::to_bytes;
use actix_web::web::Data;
use actix_web::{test, App};
use reqwest::{Method, StatusCode};
use sea_orm::Database;
use squadron_contract::deadpool_redis;
use tracing_test::traced_test;
use uuid::Uuid;
use super::super::*;
use super::*;
use crate::session;
use crate::utils::{slugify, ApiTokenBuilder, UserExt};
macro_rules! create_app {
($app: ident, $session_storage: ident, $db: ident) => {
std::env::set_var(
"DATABASE_URL",
"postgres://postgres@0.0.0.0:5432/squadron_test",
);
let redis = deadpool_redis::Config::from_url("redis://0.0.0.0:6379")
.create_pool(Some(deadpool_redis::Runtime::Tokio1))
.expect("Can't connect to redis");
let $db: sea_orm::prelude::DatabaseConnection =
Database::connect("postgres://postgres@0.0.0.0:5432/squadron_test")
.await
.expect("Failed to connect to database");
let ($session_storage, factory) =
SessionMiddlewareFactory::<session::AppClaims>::build_ed_dsa()
.with_redis_pool(redis.clone())
// Check if header "Authorization" exists and contains Bearer with encoded JWT
.with_jwt_header(JWT_HEADER_NAME)
// Check if cookie JWT exists and contains encoded JWT
.with_jwt_cookie(JWT_COOKIE_NAME)
.with_refresh_header(REFRESH_HEADER_NAME)
// Check if cookie JWT exists and contains encoded JWT
.with_refresh_cookie(REFRESH_COOKIE_NAME)
.with_jwt_json(&["access_token"])
.finish();
let $db = Data::new($db.clone());
ensure_instance($db.clone()).await;
let $app = test::init_service(
App::new()
.app_data(Data::new($session_storage.clone()))
.app_data($db.clone())
.app_data(Data::new(redis))
.wrap(actix_web::middleware::NormalizePath::trim())
.wrap(actix_web::middleware::Logger::default())
.wrap(factory)
.service(single_api_token),
)
.await;
};
}
async fn ensure_instance(db: Data<DatabaseConnection>) {
use entities::instances::*;
use entities::prelude::Instances;
use sea_orm::*;
if Instances::find().count(&**db).await.unwrap() > 0 {
return;
}
ActiveModel {
instance_name: Set("Plan Free".into()),
is_telemetry_enabled: Set(true),
is_support_required: Set(true),
is_setup_done: Set(true),
is_signup_screen_visited: Set(true),
is_verified: Set(true),
user_count: Set(0),
instance_id: Set(random_hex(12)),
api_key: Set(random_hex(8)),
version: Set(env!("CARGO_PKG_VERSION").to_string()),
last_checked_at: Set(chrono::Utc::now().fixed_offset()),
..Default::default()
}
.save(&**db)
.await
.unwrap();
}
async fn create_user(
db: Data<DatabaseConnection>,
user_name: &str,
pass: &str,
) -> entities::users::Model {
use entities::users::*;
use sea_orm::*;
if let Ok(Some(user)) = Users::find()
.filter(Column::Email.eq(format!("{user_name}@example.com")))
.one(&**db)
.await
{
return user;
}
let pass = Hashing::encrypt(pass).unwrap();
Users::insert(ActiveModel {
password: Set(pass),
email: Set(Some(format!("{user_name}@example.com"))),
display_name: Set(user_name.to_string()),
username: Set(Uuid::new_v4().to_string()),
first_name: Set("".to_string()),
last_name: Set("".to_string()),
last_location: Set("".to_string()),
created_location: Set("".to_string()),
is_password_autoset: Set(false),
token: Set(Uuid::new_v4().to_string()),
billing_address_country: Set("".to_string()),
user_timezone: Set("UTC".to_string()),
last_login_ip: Set("0.0.0.0".to_string()),
last_login_medium: Set("None".to_string()),
last_logout_ip: Set("0.0.0.0".to_string()),
last_login_uagent: Set("test".to_string()),
is_active: Set(true),
avatar: Set("".to_string()),
..Default::default()
})
.exec_with_returning(&**db)
.await
.unwrap()
}
async fn ensure_workspace(
db: Data<DatabaseConnection>,
name: &str,
user: &entities::users::Model,
) -> entities::workspaces::Model {
use entities::prelude::*;
use entities::workspaces::*;
let slug = slugify(name);
if let Ok(Some(m)) = Workspaces::find()
.filter(Column::Slug.eq(slug))
.one(&**db)
.await
{
return m;
}
Workspaces::insert(user.new_workspace(name))
.exec_with_returning(&**db)
.await
.unwrap()
}
async fn ensure_api_token(
db: Data<DatabaseConnection>,
label: &str,
desc: &str,
user: &entities::users::Model,
workspace: &entities::workspaces::Model,
) -> entities::api_tokens::Model {
use entities::api_tokens::*;
use entities::prelude::*;
if let Ok(Some(m)) = ApiTokens::find()
.filter(
Column::Label.eq(label).and(
Column::Description.eq(desc).and(
Column::WorkspaceId
.eq(workspace.id)
.and(Column::UserId.eq(user.id)),
),
),
)
.one(&**db)
.await
{
return m;
}
ApiTokens::insert(
ApiTokenBuilder::new(label, desc, user, Some(workspace)).into_active_model(),
)
.exec_with_returning(&**db)
.await
.unwrap()
}
#[traced_test]
#[actix_web::test]
async fn valid() {
create_app!(app, session, db);
let user = create_user(db.clone(), "valid_single_api_token", "qweQWE123!@#").await;
let workspace = ensure_workspace(db.clone(), "Foo bar%&^", &user).await;
let api_token = ensure_api_token(
db.clone(),
"valid search single api token",
"",
&user,
&workspace,
)
.await;
let uri = format!(
"http://example.com/{}/api-tokens/{}",
workspace.slug, api_token.id
);
let pair = session
.store(
AppClaims {
expiration_time: (chrono::Utc::now() + chrono::Duration::days(100))
.timestamp_millis(),
issued_at: 0,
subject: "999999999".into(),
audience: session::Audience::Web,
jwt_id: Uuid::new_v4(),
account_id: user.id,
not_before: 0,
},
JwtTtl::new(actix_jwt_session::Duration::days(9999)),
RefreshTtl::new(actix_jwt_session::Duration::days(9999)),
)
.await
.unwrap();
let req = test::TestRequest::default()
.insert_header((JWT_HEADER_NAME, pair.jwt.encode().unwrap()))
.uri(&uri)
.method(Method::GET)
.to_request();
let resp = test::call_service(&app, req).await;
assert_eq!(resp.status(), StatusCode::OK);
let body = resp.into_body();
let json: serde_json::Value =
serde_json::from_slice(&to_bytes(body).await.unwrap()[..]).unwrap();
let expected: serde_json::Value =
serde_json::from_str(&serde_json::to_string(&api_token).unwrap()).unwrap();
assert_eq!(json, expected,);
}
}

View File

@ -156,7 +156,9 @@ mod tests {
.app_data($db.clone()) .app_data($db.clone())
.app_data(Data::new(redis)) .app_data(Data::new(redis))
.app_data(Data::new(EventBusClient::new_succ_mock())) .app_data(Data::new(EventBusClient::new_succ_mock()))
.app_data(Data::new(PasswordResetSecret::new("ahsdhy9asd".to_string()))) .app_data(Data::new(PasswordResetSecret::new(
"ahsdhy9asd".to_string(),
)))
.wrap(actix_web::middleware::NormalizePath::trim()) .wrap(actix_web::middleware::NormalizePath::trim())
.wrap(actix_web::middleware::Logger::default()) .wrap(actix_web::middleware::Logger::default())
.wrap(factory) .wrap(factory)

View File

@ -7,8 +7,8 @@ use reqwest::StatusCode;
use sea_orm::{DatabaseConnection, DatabaseTransaction, EntityTrait, Set}; use sea_orm::{DatabaseConnection, DatabaseTransaction, EntityTrait, Set};
use serde::Deserialize; use serde::Deserialize;
use super::auth_http_response;
use super::password::PassValidity; use super::password::PassValidity;
use super::{auth_http_response, random_hex};
use crate::extractors::RequireInstanceConfigured; use crate::extractors::RequireInstanceConfigured;
use crate::models::{ use crate::models::{
Error, JsonError, JsonErrorDetails, PasswordResetSecret, PasswordResetTimeout, Error, JsonError, JsonErrorDetails, PasswordResetSecret, PasswordResetTimeout,

View File

@ -1,8 +1,11 @@
use actix_web::web::{scope, ServiceConfig}; use actix_web::web::{scope, ServiceConfig};
pub use authentication::*; pub use authentication::*;
mod analytics;
mod authentication; mod authentication;
mod config; mod config;
//todo
mod users; mod users;
pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) { pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) {
@ -10,6 +13,7 @@ pub fn configure(http_client: reqwest::Client, config: &mut ServiceConfig) {
scope("/api") scope("/api")
.configure(config::configure) .configure(config::configure)
.configure(users::configure) .configure(users::configure)
.configure(analytics::configure)
.configure(|c| authentication::configure(http_client, c)), .configure(|c| authentication::configure(http_client, c)),
); );
} }

View File

@ -10,6 +10,7 @@ pub use squadron_contract::{deadpool_redis, redis, RedisClient};
pub mod config; pub mod config;
pub mod events; pub mod events;
pub mod ext;
pub mod extractors; pub mod extractors;
pub mod http; pub mod http;
pub mod models; pub mod models;

View File

@ -0,0 +1,407 @@
use std::cmp::Ordering;
use std::str::FromStr;
/// Read-only access to the raw (string-encoded) issue filter parameters.
///
/// Every accessor has a default returning `None` (or `false` for the two
/// bools), so implementers only override the filters their input type
/// actually carries.
pub trait FilterParams {
    fn state(&self) -> Option<&str> {
        None
    }
    fn state_group(&self) -> Option<&str> {
        None
    }
    fn estimate_point(&self) -> Option<&str> {
        None
    }
    fn priority(&self) -> Option<&str> {
        None
    }
    fn parent(&self) -> Option<&str> {
        None
    }
    fn labels(&self) -> Option<&str> {
        None
    }
    fn assignees(&self) -> Option<&str> {
        None
    }
    fn mentions(&self) -> Option<&str> {
        None
    }
    fn created_by(&self) -> Option<&str> {
        None
    }
    fn name(&self) -> Option<&str> {
        None
    }
    fn created_at(&self) -> Option<&str> {
        None
    }
    fn updated_at(&self) -> Option<&str> {
        None
    }
    fn completed_at(&self) -> Option<&str> {
        None
    }
    fn start_date(&self) -> Option<&str> {
        None
    }
    fn target_date(&self) -> Option<&str> {
        None
    }
    fn issue_state_type(&self) -> Option<&str> {
        None
    }
    fn project(&self) -> Option<&str> {
        None
    }
    fn cycle(&self) -> Option<&str> {
        None
    }
    fn module(&self) -> Option<&str> {
        None
    }
    fn inbox_status(&self) -> Option<&str> {
        None
    }
    fn sub_issue(&self) -> bool {
        false
    }
    fn subscriber(&self) -> Option<&str> {
        None
    }
    fn start_target_date(&self) -> bool {
        false
    }
}
/// Regex source matching relative-date heads such as `2_weeks` or `3_months`.
/// NOTE(review): not referenced within this module chunk — presumably
/// consumed by callers (the `regex` crate was added as a dependency in this
/// change); confirm it is actually used.
pub static DATE_PATTERN: &str = "\\d+_(weeks|months)$";
/// Builds a one-sided date bound ("now ± duration") from the parsed pieces
/// of a relative-date query such as `2_weeks;after;fromnow`.
///
/// The original 8-arm match duplicated the same three decisions in every
/// arm; they are now factored out:
/// * `term` fixes the days per unit — months approximated as 30 days,
///   weeks as exactly 7 (matching the original table);
/// * `offset` fixes the sign — `FromNow` counts forward from now,
///   anything else backward;
/// * `subsequent` fixes the bound — `After` => `Gte`, `Before` => `Lte`.
///
/// Always returns `Some`; the `Option` is kept for the callers' `?`-style
/// chaining.
fn string_date_filter(
    duration: i64,
    subsequent: Subsequest,
    term: DurationType,
    offset: DateOffset,
) -> Option<DateOrder> {
    use chrono::{Duration, Utc};

    let unit_days = match term {
        DurationType::Months => 30,
        DurationType::Weeks => 7,
    };
    let signed_days = match offset {
        DateOffset::FromNow => duration * unit_days,
        DateOffset::Other => -duration * unit_days,
    };
    let pivot = (Utc::now() + Duration::days(signed_days)).date_naive();
    Some(match subsequent {
        Subsequest::After => DateOrder::Gte(pivot),
        Subsequest::Before => DateOrder::Lte(pivot),
    })
}
/// Direction of a relative date span: `FromNow` counts forward from the
/// current time; any other offset string parses to `Other`, which
/// `string_date_filter` treats as counting backward.
#[derive(Debug, Copy, Clone)]
enum DateOffset {
    FromNow,
    Other,
}
impl FromStr for DateOffset {
    type Err = ();

    /// `"fromnow"` selects `FromNow`; every other string falls back to
    /// `Other`, so this parse never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "fromnow" {
            Ok(Self::FromNow)
        } else {
            Ok(Self::Other)
        }
    }
}
/// Whether the filtered date must fall on/after (`Gte`) or on/before
/// (`Lte`) the computed reference date.
/// NOTE(review): the name looks like a typo of "Subsequent"; kept because
/// it is referenced elsewhere.
#[derive(Debug, Copy, Clone)]
enum Subsequest {
    After,
    Before,
}
impl FromStr for Subsequest {
    type Err = ();

    /// `"after"` selects `After`; anything else is treated as `Before`,
    /// so this parse never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let parsed = if s == "after" { Self::After } else { Self::Before };
        Ok(parsed)
    }
}
/// Unit of a relative date span; `string_date_filter` approximates months
/// as 30 days and weeks as 7 days.
#[derive(Debug, Copy, Clone)]
enum DurationType {
    Months,
    Weeks,
}
impl FromStr for DurationType {
    type Err = ();

    /// Recognises exactly `"months"` and `"weeks"`; any other string is
    /// rejected.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "months" => Ok(Self::Months),
            "weeks" => Ok(Self::Weeks),
            _ => Err(()),
        }
    }
}
/// Parses a relative-date head such as `2_weeks` or `6_months` into a
/// `(count, unit)` pair.
///
/// Returns `None` unless the head is exactly `<integer>_<unit>`. Using
/// `split_once` (instead of taking the first two `_`-segments) means
/// trailing junk like `2_weeks_x` is rejected, matching the `$` anchor in
/// `DATE_PATTERN`.
fn parse_date_head(head: &str) -> Option<(i64, DurationType)> {
    let (count, unit) = head.split_once('_')?;
    Some((count.parse().ok()?, unit.parse().ok()?))
}
/// Parses relative-date query expressions of the form
/// `<n>_<weeks|months>;<after|before>;<fromnow|...>` and yields the
/// resulting date bounds. Malformed entries (wrong segment count or an
/// unparsable head) are silently skipped.
///
/// Previously the query was split three separate times (two `count()`
/// passes plus the consuming iterator); this walks a single `split` pass
/// and enforces "exactly three segments" with one extra `next()`.
fn date_filter<'s>(
    queries: impl Iterator<Item = &'s str> + 's,
) -> impl Iterator<Item = DateOrder> + 's {
    queries.filter_map(|query| {
        let mut segments = query.split(';');
        let head = segments.next()?;
        let subsequent = segments.next()?;
        let offset = segments.next()?;
        // More than three segments: reject, as the original count check did.
        if segments.next().is_some() {
            return None;
        }
        let Some((duration, term)) = parse_date_head(head) else {
            // TODO: absolute dates are unsupported for now.
            //
            // if "after" in date_query:
            //     filter[f"{date_term}__gte"] = date_query[0]
            // else:
            //     filter[f"{date_term}__lte"] = date_query[0]
            return None;
        };
        string_date_filter(duration, subsequent.parse().ok()?, term, offset.parse().ok()?)
    })
}
/// A single inclusive date bound produced by `date_filter`: `Gte` is a
/// lower bound, `Lte` an upper bound.
#[derive(Debug)]
pub enum DateOrder {
    Gte(chrono::NaiveDate),
    Lte(chrono::NaiveDate),
}
pub mod filter_get {
use std::str::FromStr;
use uuid::Uuid;
use super::*;
/// An optional inclusive date range; `None` on either side means that side
/// is unbounded.
#[derive(Debug, Default)]
pub struct DateRange {
    pub lte: Option<chrono::NaiveDate>,
    pub gte: Option<chrono::NaiveDate>,
}
impl DateRange {
    /// Applies each parsed bound to the range. A later bound of the same
    /// kind overwrites an earlier one.
    pub fn fill(&mut self, it: impl Iterator<Item = DateOrder>) {
        it.for_each(|order| match order {
            DateOrder::Gte(date) => self.gte = Some(date),
            DateOrder::Lte(date) => self.lte = Some(date),
        });
    }
}
/// Parsed query filters for GET issue listings. Where a field name differs
/// from the upstream Django-style filter key, the key is noted on the field.
#[derive(Debug, Default)]
pub struct Filter {
    /// state__in
    pub state_in: Option<Vec<Uuid>>,
    /// state__group__in
    pub state_group_in: Option<Vec<String>>,
    /// estimate_point__in
    pub estimate_point_in: Option<Vec<String>>,
    pub priority_in: Option<Vec<String>>,
    pub parent_in: Option<Vec<Uuid>>,
    pub labels_in: Option<Vec<Uuid>>,
    pub assignees_in: Option<Vec<Uuid>>,
    /// issue_mention__mention__id__in
    pub issue_mention_ids_in: Option<Vec<Uuid>>,
    /// issue_cycle__cycle_id__in
    pub cycle_ids_in: Option<Vec<Uuid>>,
    /// issue_module__module_id__in
    pub module_ids_in: Option<Vec<Uuid>>,
    pub created_by_in: Option<Vec<Uuid>>,
    pub project_in: Option<Vec<Uuid>>,
    /// issue_subscribers__subscriber_id__in
    pub subscriber_ids_in: Option<Vec<Uuid>>,
    /// name__icontains
    pub name_contains: Option<String>,
    /// created_at__date
    pub created_at_in: DateRange,
    pub updated_at_in: DateRange,
    pub completed_at_in: DateRange,
    pub start_date_in: DateRange,
    pub target_date_in: DateRange,
    /// Allowed state types; `None` means "no restriction".
    pub issue_state_type_in: Option<&'static [&'static str]>,
    /// issue_inbox__status__in
    pub inbox_status_in: Option<Vec<String>>,
    /// parent__isnull
    pub parent_is_null: bool,
    /// target_date__isnull
    pub target_date_isnull: bool,
    /// start_date__isnull
    pub start_date_isnull: bool,
}
/// Conversion of one comma-separated query-string segment into a typed
/// filter value; returning `None` drops the segment (see `parse_param_list`).
trait IntoParam {
    fn into_param(s: &str) -> Option<Self>
    where
        Self: Sized;
}
impl IntoParam for Uuid {
    /// Parses the segment as a UUID; invalid segments yield `None` and are
    /// filtered out by `parse_param_list`.
    fn into_param(s: &str) -> Option<Self> {
        s.parse().ok()
    }
}
impl IntoParam for String {
    /// Accepts any segment verbatim as an owned string.
    fn into_param(s: &str) -> Option<Self> {
        Some(s.to_string())
    }
}
/// Splits a comma-separated query parameter into typed values.
///
/// Returns `None` when the parameter is empty, the literal `"null"`, or no
/// segment survives parsing. Each segment is now trimmed before filtering,
/// so inputs like `"a, b"` no longer produce a `" b"` segment that fails
/// UUID parsing; segments that are empty or `"null"` after trimming are
/// skipped, as before.
fn parse_param_list<V: IntoParam>(param: &str) -> Option<Vec<V>> {
    let param = param.trim();
    if param.is_empty() || param == "null" {
        return None;
    }
    let values: Vec<V> = param
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty() && *s != "null")
        .filter_map(V::into_param)
        .collect();
    if values.is_empty() {
        return None;
    }
    Some(values)
}
impl Filter {
    /// Populates this filter from the request's query parameters.
    ///
    /// Each field is only overwritten when the request actually supplied
    /// the parameter (`Some`); absent parameters keep their `Default`
    /// values. List parameters go through `parse_param_list`, date
    /// parameters through `date_filter`.
    ///
    /// (The explicit `'p` lifetime was removed — elision covers it — and
    /// the `if let true/false` bool matches were replaced with plain
    /// conditions.)
    pub fn filter_params(mut self, p: &impl FilterParams) -> Self {
        if let Some(s) = p.state() {
            self.state_in = parse_param_list(s);
        }
        if let Some(s) = p.state_group() {
            self.state_group_in = parse_param_list(s);
        }
        if let Some(s) = p.estimate_point() {
            self.estimate_point_in = parse_param_list(s);
        }
        if let Some(s) = p.priority() {
            self.priority_in = parse_param_list(s);
        }
        if let Some(s) = p.parent() {
            self.parent_in = parse_param_list(s);
        }
        if let Some(s) = p.labels() {
            self.labels_in = parse_param_list(s);
        }
        if let Some(s) = p.assignees() {
            self.assignees_in = parse_param_list(s);
        }
        if let Some(s) = p.mentions() {
            self.issue_mention_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.subscriber() {
            self.subscriber_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.project() {
            self.project_in = parse_param_list(s);
        }
        if let Some(s) = p.created_by() {
            self.created_by_in = parse_param_list(s);
        }
        if let Some(s) = p.inbox_status() {
            self.inbox_status_in = parse_param_list(s);
        }
        if let Some(s) = p.name() {
            self.name_contains = Some(s.to_string());
        }
        if let Some(s) = p.cycle() {
            self.cycle_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.module() {
            self.module_ids_in = parse_param_list(s);
        }
        if let Some(s) = p.created_at() {
            self.created_at_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.updated_at() {
            self.updated_at_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.completed_at() {
            self.completed_at_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.start_date() {
            self.start_date_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.target_date() {
            self.target_date_in.fill(super::date_filter(s.split(',')));
        }
        if let Some(s) = p.issue_state_type() {
            // "backlog" and "active" select subsets; any other value selects
            // every state type.
            self.issue_state_type_in = Some(match s {
                "backlog" => &["backlog"],
                "active" => &["unstarted", "started"],
                _ => &["backlog", "unstarted", "started", "completed", "cancelled"],
            });
        }
        // sub_issue == false means "top-level issues only": require NULL parent.
        if !p.sub_issue() {
            self.parent_is_null = true;
        }
        // start_target_date requires both dates to be present on the issue.
        if p.start_target_date() {
            self.start_date_isnull = false;
            self.target_date_isnull = false;
        }
        self
    }
}
}
/// Filters for non-GET issue endpoints. Unlike `filter_get`, the raw
/// parameter strings are stored without parsing.
pub mod filter_non_get {
    /// Collected raw filter parameters.
    #[derive(Debug, Default)]
    pub struct Filter {
        pub state_in: Option<String>,
    }
    /// Records the raw `state` parameter unless it is empty or the literal
    /// `"null"`, in which case the existing value is left untouched.
    pub fn filter_state(state: &str, filter: &mut Filter) {
        if state.is_empty() || state == "null" {
            return;
        }
        filter.state_in = Some(state.to_string());
    }
    /// TODO: not implemented yet.
    pub fn filter_state_group() {}
}

View File

@ -19,9 +19,11 @@ use sea_orm::*;
use tracing::error; use tracing::error;
use uuid::Uuid; use uuid::Uuid;
use crate::http::{AuthError, OAuthError}; use crate::http::{random_hex, AuthError, OAuthError};
use crate::models::Error; use crate::models::Error;
pub mod issues_filter;
#[macro_export] #[macro_export]
macro_rules! db_t { macro_rules! db_t {
($db: expr) => {{ ($db: expr) => {{
@ -392,3 +394,122 @@ pub mod pass_reset_token {
) )
} }
} }
/// Convenience helpers on the `users` entity model.
pub trait UserExt {
    /// Builds a new workspace `ActiveModel` owned by this user.
    fn new_workspace(&self, name: &str) -> entities::workspaces::ActiveModel;
}
impl UserExt for entities::users::Model {
    // Delegates to `WorkspaceExt::new_workspace` with this user's id as owner.
    fn new_workspace(&self, name: &str) -> entities::workspaces::ActiveModel {
        entities::workspaces::ActiveModel::new_workspace(self.id, name)
    }
}
/// Constructor helper for workspace `ActiveModel`s.
pub trait WorkspaceExt {
    /// Creates a workspace owned by `owner_id`; the slug is derived from
    /// `name` via `slugify`. All other columns keep their defaults.
    fn new_workspace(owner_id: Uuid, name: &str) -> entities::workspaces::ActiveModel {
        use sea_orm::*;
        entities::workspaces::ActiveModel {
            name: Set(name.to_string()),
            slug: Set(slugify(name)),
            owner_id: Set(owner_id),
            ..Default::default()
        }
    }
}
// Marker impl: the default method above is the entire implementation.
impl WorkspaceExt for entities::workspaces::ActiveModel {}
/// Builder for rows of the `api_tokens` table. `ApiTokenBuilder::new` sets
/// the defaults; `into_active_model` converts to a SeaORM `ActiveModel`.
pub struct ApiTokenBuilder {
    pub created_at: Option<chrono::DateTime<chrono::FixedOffset>>,
    pub updated_at: Option<chrono::DateTime<chrono::FixedOffset>>,
    // Random hex token value (60 chars, see `new`).
    pub token: String,
    pub label: String,
    // 1 for bot users, 0 for humans (see `new`).
    pub user_type: i16,
    pub created_by_id: Option<Uuid>,
    pub updated_by_id: Option<Uuid>,
    pub user_id: Uuid,
    // `None` means the token is not scoped to a workspace.
    pub workspace_id: Option<Uuid>,
    pub description: String,
    // `None` means the token never expires.
    pub expired_at: Option<chrono::DateTime<chrono::FixedOffset>>,
    pub is_active: bool,
    pub last_used: Option<chrono::DateTime<chrono::FixedOffset>>,
}
impl ApiTokenBuilder {
    /// Creates a builder for a fresh, active API token owned by `user`,
    /// optionally scoped to `workspace`. The token value is a random
    /// 60-character hex string; both timestamps start at "now" and no
    /// expiry is set.
    pub fn new(
        label: &str,
        desc: &str,
        user: &entities::users::Model,
        workspace: Option<&entities::workspaces::Model>,
    ) -> Self {
        let now = chrono::Utc::now().fixed_offset();
        Self {
            created_at: Some(now),
            updated_at: Some(now),
            token: random_hex(60),
            label: label.to_string(),
            // 1 marks bot-owned tokens, 0 human-owned ones.
            user_type: i16::from(user.is_bot),
            created_by_id: Some(user.id),
            updated_by_id: Some(user.id),
            user_id: user.id,
            workspace_id: workspace.map(|w| w.id),
            description: desc.to_string(),
            expired_at: None,
            is_active: true,
            last_used: None,
        }
    }

    /// Sets (or clears) the expiry timestamp, returning the builder.
    pub fn with_expired_at(mut self, exp: Option<chrono::DateTime<chrono::FixedOffset>>) -> Self {
        self.expired_at = exp;
        self
    }

    /// Consumes the builder and produces a SeaORM `ActiveModel` ready to
    /// insert. Timestamps stay `NotSet` when absent; all other columns are
    /// `Set` from the builder fields, with the remainder defaulted.
    pub fn into_active_model(self) -> entities::api_tokens::ActiveModel {
        use sea_orm::*;
        entities::api_tokens::ActiveModel {
            label: Set(self.label),
            created_at: self.created_at.map(Set).unwrap_or(NotSet),
            updated_at: self.updated_at.map(Set).unwrap_or(NotSet),
            token: Set(self.token),
            user_type: Set(self.user_type),
            created_by_id: Set(self.created_by_id),
            updated_by_id: Set(self.updated_by_id),
            user_id: Set(self.user_id),
            workspace_id: Set(self.workspace_id),
            description: Set(self.description),
            expired_at: Set(self.expired_at),
            is_active: Set(self.is_active),
            last_used: Set(self.last_used),
            ..Default::default()
        }
    }
}
/// Converts a display name into a URL-friendly slug: each whitespace
/// character becomes `-`, ASCII alphanumerics are lowercased, and every
/// other character is dropped.
pub fn slugify(s: &str) -> String {
    let mut slug = String::with_capacity(s.len());
    for c in s.chars() {
        if c.is_whitespace() {
            slug.push('-');
        } else if c.is_ascii_alphanumeric() {
            slug.push(c.to_ascii_lowercase());
        }
    }
    slug
}

View File

@ -39,10 +39,6 @@ CREATE TYPE project_member_roles AS ENUM (
'Guest' 'Guest'
); );
--
-- Name: users; Type: TABLE; Schema: public; Owner: plane
--
CREATE TABLE users ( CREATE TABLE users (
password character varying(128) NOT NULL, password character varying(128) NOT NULL,
last_login timestamp with time zone, last_login timestamp with time zone,
@ -91,10 +87,6 @@ CREATE TABLE users (
use_case text use_case text
); );
--
-- Name: workspaces; Type: TABLE; Schema: public; Owner: plane
--
CREATE TABLE workspaces ( CREATE TABLE workspaces (
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
@ -244,7 +236,9 @@ CREATE TABLE issues (
archived_at date, archived_at date,
is_draft boolean NOT NULL, is_draft boolean NOT NULL,
external_id character varying(255), external_id character varying(255),
external_source character varying(255) external_source character varying(255),
CONSTRAINT parent_fk FOREIGN KEY (parent_id) REFERENCES issues (id),
CONSTRAINT state_fk FOREIGN KEY (state_id) REFERENCES states (id)
); );
-- --

7
scripts/rebuild_db.sh Normal file
View File

@ -0,0 +1,7 @@
#!/usr/bin/env zsh
# Drops and recreates the dev (squadron) and test (squadron_test) databases,
# then loads the schema from ./plane_db.sql into each.
# NOTE(review): assumes a local postgres superuser named "postgres" — confirm.
DB_NAME=squadron
psql postgres postgres -c "DROP DATABASE $DB_NAME";psql postgres postgres -c "CREATE DATABASE $DB_NAME"; psql $DB_NAME postgres -f ./plane_db.sql
DB_NAME=squadron_test
psql postgres postgres -c "DROP DATABASE $DB_NAME";psql postgres postgres -c "CREATE DATABASE $DB_NAME"; psql $DB_NAME postgres -f ./plane_db.sql

6
web/architecture.svg Normal file
View File

@ -0,0 +1,6 @@
<svg height="600" width="600" xmlns="http://www.w3.org/2000/svg">
<g>
<rect x="20" y="20" width="100" height="60" fill="rgb(94, 235, 244)" />
<text x="20" y="100">API Server</text>
</g>
</svg>

After

Width:  |  Height:  |  Size: 217 B

24
web/index.html Normal file
View File

@ -0,0 +1,24 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
</head>
<body>
<base href="/" />
<main>
<header style="display:flex;position:sticky;top:0;left:0;height:2rem;width:100%;">
<div style="display:flex;">
<picture>
<img src="/logo.svg" style="height:36px;margin-right:1rem;" />
</picture>
<b style="height:2rem;line-height:2rem;text-align:center;display:block;">Squadron</b>
</div>
</header>
<app style="display:block;position:relative;margin-top:2rem;">
<section>
<img src="/architecture.svg" />
</section>
</app>
</main>
</body>
</html>

18
web/logo.svg Normal file
View File

@ -0,0 +1,18 @@
<svg height="560" width="560" xmlns="http://www.w3.org/2000/svg">
<defs>
<g id="jet">
<polygon points="200,60 260,150 140,150" />
<polygon points="200,10 225,190 175,190" />
</g>
</defs>
<circle r="260" cx="280" cy="280" fill="rgb(94, 235, 244)" />
<g transform="translate(0 90) rotate(-45 0 0)" transform-box="fill-box" transform-origin="center" style="fill:#00569d;stroke:none;stroke-width:0">
<g transform="translate(30, 30)"><use href="#jet" /></g>
<g transform="translate(130, -90)"><use href="#jet" /></g>
<g transform="translate(230, 30)"><use href="#jet" /></g>
<g transform="translate(130, 190)"><use href="#jet" /></g>
<g transform="translate(300, 190)"><use href="#jet" /></g>
<g transform="translate(-50, 190)"><use href="#jet" /></g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 785 B