Work on stocks database

Adrian Woźniak 2022-11-09 16:59:12 +01:00
parent 2f0a43c8ef
commit 755363c23f
No known key found for this signature in database
GPG Key ID: 0012845A89C7352B
12 changed files with 909 additions and 8 deletions

Cargo.lock generated
View File

@@ -11,6 +11,7 @@ dependencies = [
 "channels",
 "config",
 "dotenv",
+ "fake",
 "futures 0.3.25",
 "gumdrop",
 "json",
@@ -23,6 +24,7 @@ dependencies = [
 "sqlx",
 "sqlx-core",
 "tarpc",
+ "testx",
 "thiserror",
 "tokio",
 "tracing",
@@ -1253,6 +1255,15 @@ dependencies = [
 "tracing-subscriber",
 ]
+[[package]]
+name = "db-utils"
+version = "0.1.0"
+dependencies = [
+ "model",
+ "sqlx",
+ "sqlx-core",
+]
+
[[package]]
name = "dbg"
version = "1.0.4"
@@ -4092,8 +4103,10 @@ dependencies = [
 "channels",
 "chrono",
 "config",
+ "db-utils",
 "derive_more",
 "dotenv",
+ "fake",
 "futures 0.3.25",
 "model",
 "opentelemetry 0.17.0",
@@ -4104,11 +4117,13 @@ dependencies = [
 "sqlx",
 "sqlx-core",
 "tarpc",
+ "testx",
 "thiserror",
 "tokio",
 "tracing",
 "tracing-opentelemetry",
 "tracing-subscriber",
+ "uuid 1.2.1",
 ]

View File

@@ -5,6 +5,7 @@ members = [
 "crates/channels",
 "crates/config",
 "crates/testx",
+ "crates/db-utils",
 # actors
 "crates/account_manager",
 "crates/cart_manager",

View File

@@ -30,3 +30,7 @@ tokio = { version = "1.21.2", features = ['full'] }
tracing = { version = "0.1.6" }
tracing-opentelemetry = { version = "0.17.4" }
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
+
+[dev-dependencies]
+fake = { version = "2.5.0" }
+testx = { path = "../testx" }

View File

@@ -234,7 +234,7 @@ mod tests {
.unwrap()
}
-#[actix::test]
+#[tokio::test]
async fn create_account() {
testx::db_t_ref!(t);
@@ -266,7 +266,7 @@ mod tests {
assert_eq!(account, expected);
}
-#[actix::test]
+#[tokio::test]
async fn all_accounts() {
testx::db_t_ref!(t);
@@ -280,7 +280,7 @@ mod tests {
assert!(v.len() >= 3);
}
-#[actix::test]
+#[tokio::test]
async fn update_account_without_pass() {
testx::db_t_ref!(t);
@@ -326,7 +326,7 @@ mod tests {
assert_eq!(updated_account, expected);
}
-#[actix::test]
+#[tokio::test]
async fn update_account_with_pass() {
testx::db_t_ref!(t);
@@ -373,7 +373,7 @@ mod tests {
assert_eq!(updated_account, expected);
}
-#[actix::test]
+#[tokio::test]
async fn find() {
testx::db_t_ref!(t);
@@ -390,7 +390,7 @@ mod tests {
assert_eq!(account, res);
}
-#[actix::test]
+#[tokio::test]
async fn find_identity_email() {
testx::db_t_ref!(t);
@@ -408,7 +408,7 @@ mod tests {
assert_eq!(account, res);
}
-#[actix::test]
+#[tokio::test]
async fn find_identity_login() {
testx::db_t_ref!(t);

View File

@@ -215,7 +215,7 @@ mod test {
.unwrap()
}
-#[actix::test]
+#[tokio::test]
async fn full_check() {
testx::db_t_ref!(t);

View File

@@ -0,0 +1,9 @@
[package]
name = "db-utils"
version = "0.1.0"
edition = "2021"

[dependencies]
model = { path = "../model" }
sqlx = { version = "0.6.2", features = ["migrate", "runtime-actix-rustls", "all-types", "postgres"] }
sqlx-core = { version = "0.6.2", features = [] }

View File

@@ -0,0 +1,97 @@
use sqlx::Arguments;
pub struct MultiLoad<'transaction, 'transaction2, 'header, 'condition, T> {
pool: &'transaction mut sqlx::Transaction<'transaction2, sqlx::Postgres>,
header: &'header str,
condition: &'condition str,
sort: Option<String>,
size: Option<usize>,
__phantom: std::marker::PhantomData<T>,
}
impl<'transaction, 'transaction2, 'header, 'condition, T>
MultiLoad<'transaction, 'transaction2, 'header, 'condition, T>
where
T: for<'r> sqlx::FromRow<'r, sqlx::postgres::PgRow> + Send + Unpin,
{
pub fn new(
pool: &'transaction mut sqlx::Transaction<'transaction2, sqlx::Postgres>,
header: &'header str,
condition: &'condition str,
) -> Self {
Self {
pool,
header,
condition,
sort: None,
size: None,
__phantom: Default::default(),
}
}
pub fn with_sorting<S: Into<String>>(mut self, order: S) -> Self {
self.sort = Some(order.into());
self
}
pub fn with_size(mut self, size: usize) -> Self {
self.size = Some(size);
self
}
pub async fn load<'query, Error, ErrorFn, Ids>(
&mut self,
len: usize,
items: Ids,
on_error: ErrorFn,
) -> Result<Vec<T>, Error>
where
Ids: Iterator<Item = model::RecordId>,
ErrorFn: Fn(sqlx::Error) -> Error,
{
let mut res = Vec::new();
let size = self.size.unwrap_or(20).min(200);
for ids in items.fold(
Vec::<Vec<model::RecordId>>::with_capacity(len),
|mut v, id| {
let last_len = v.last().map(|v| v.len());
if last_len == Some(size) || last_len.is_none() {
v.push(Vec::with_capacity(size));
}
v.last_mut().unwrap().push(id);
v
},
) {
let query: String = self.header.into();
let mut query = ids.iter().enumerate().fold(query, |mut q, (idx, _id)| {
if idx != 0 {
q.push_str(" OR");
}
q.push_str(&format!(" {} ${}", self.condition, idx + 1));
q
});
if let Some(s) = self.sort.as_deref() {
query.push_str("\nORDER BY ");
query.push_str(s);
query.push(' ');
}
let q = sqlx::query_as_with(
query.as_str(),
ids.into_iter()
.fold(sqlx::postgres::PgArguments::default(), |mut args, id| {
args.add(id);
args
}),
);
let records: Vec<T> = match q.fetch_all(&mut *self.pool).await {
Ok(rec) => rec,
Err(e) => return Err(on_error(e)),
};
res.extend(records);
}
Ok(res)
}
}
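
For orientation, here is a minimal usage sketch of MultiLoad (a hypothetical call site, not part of this commit; it mirrors the FindProducts and ProductVariantsStock call sites added below and assumes model::Product implements sqlx::FromRow). The header is the SELECT prefix up to WHERE, the condition is repeated once per id and joined with OR, and ids are fetched in batches of at most the configured size per query.

// Hypothetical helper; model::RecordId and model::Product come from the model crate.
async fn load_products_by_id(
    t: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ids: Vec<model::RecordId>,
) -> Result<Vec<model::Product>, sqlx::Error> {
    db_utils::MultiLoad::new(
        t,
        "SELECT id, name, category, deliver_days_flag FROM products WHERE",
        "products.id =",
    )
    .with_size(100)                  // at most 100 ids per round trip
    .with_sorting("products.id ASC") // appended as ORDER BY products.id ASC
    .load(ids.len(), ids.into_iter(), |e| e)
    .await
}

// For two ids the first batch executes roughly:
//   SELECT id, name, category, deliver_days_flag FROM products WHERE
//    products.id = $1 OR products.id = $2
//   ORDER BY products.id ASC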

View File

@@ -11,6 +11,7 @@ path = "./src/main.rs"
channels = { path = "../channels" }
chrono = { version = "0.4", features = ["serde"] }
config = { path = "../config" }
+db-utils = { path = "../db-utils" }
derive_more = { version = "0.99", features = [] }
dotenv = { version = "0.15.0" }
futures = { version = "0.3.25" }
@@ -28,3 +29,8 @@ tokio = { version = "1.21.2", features = ['full'] }
tracing = { version = "0.1.6" }
tracing-opentelemetry = { version = "0.17.4" }
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
+uuid = { version = "1.2.1" }
+
+[dev-dependencies]
+fake = { version = "2.5.0" }
+testx = { path = "../testx" }

View File

@@ -4,6 +4,7 @@ use sqlx_core::postgres::Postgres;
mod photos;
mod product_photos;
+mod product_variants;
mod products;
mod stocks;

View File

@@ -0,0 +1,55 @@
use model::v2::*;
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("")]
CreateProductVariant,
}
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
pub struct CreateProductVariant {
pub product_id: ProductId,
pub name: ProductName,
pub short_description: ProductShortDesc,
pub long_description: ProductLongDesc,
pub price: Price,
}
impl CreateProductVariant {
pub async fn run(
self,
pool: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ProductVariant> {
sqlx::query_as(
r#"
INSERT INTO product_variants (
product_id,
name,
short_description,
long_description,
price
) VALUES ($1, $2, $3, $4, $5)
RETURNING id,
product_id,
name,
short_description,
long_description,
price
"#,
)
.bind(self.product_id)
.bind(self.name)
.bind(self.short_description)
.bind(self.long_description)
.bind(self.price)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{}", e);
dbg!(e);
Error::CreateProductVariant
})
}
}
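
A sketch of how CreateProductVariant could be driven from a seeding or test helper (hypothetical, not part of this commit; the newtype constructors mirror the ones used in the tests elsewhere in this diff):

// Hypothetical helper; assumes the model::v2 newtypes expose the constructors
// used by the tests in this commit (ProductName::new, Price::from_u32, ...).
async fn seed_variant(
    t: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    product_id: ProductId,
) -> Result<ProductVariant> {
    CreateProductVariant {
        product_id,
        name: ProductName::new("demo variant"),
        short_description: ProductShortDesc::new("short copy"),
        long_description: ProductLongDesc::new("long copy"),
        price: Price::from_u32(1_000),
    }
    .run(t)
    .await
}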

View File

@@ -1 +1,377 @@
use model::v2::*;
use model::ShoppingCartId;
#[derive(Debug, Copy, Clone, PartialEq, Eq, serde::Serialize, thiserror::Error)]
pub enum Error {
#[error("Unable to load all products")]
All,
#[error("Unable to create product")]
Create,
#[error("Unable to update product")]
Update(ProductId),
#[error("Unable to delete product")]
Delete(ProductId),
#[error("Unable to find products for shopping cart")]
ShoppingCartProducts(ShoppingCartId),
#[error("Product with id {0} can't be found")]
Single(ProductId),
#[error("Failed to load products for given ids")]
FindProducts,
}
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
pub struct AllProducts {
limit: i32,
offset: i32,
}
impl AllProducts {
pub async fn run<'e, E>(self, pool: E) -> Result<Vec<model::Product>>
where
E: sqlx::Executor<'e, Database = sqlx::Postgres>,
{
sqlx::query_as(
r#"
SELECT id,
name,
category,
deliver_days_flag
FROM products
ORDER BY id
LIMIT $1 OFFSET $2
"#,
)
.bind(self.limit.max(1).min(200))
.bind(self.offset.max(0))
.fetch_all(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::All
})
}
}
#[derive(Debug)]
pub struct FindProduct {
pub product_id: ProductId,
}
impl FindProduct {
pub async fn run<'e, E>(self, pool: E) -> Result<model::Product>
where
E: sqlx::Executor<'e, Database = sqlx::Postgres>,
{
sqlx::query_as(
r#"
SELECT id,
name,
category,
deliver_days_flag
FROM products
WHERE id = $1
"#,
)
.bind(self.product_id)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::Single(self.product_id)
})
}
}
#[derive(Debug)]
pub struct CreateProduct {
pub name: ProductName,
pub category: Option<ProductCategory>,
pub deliver_days_flag: Days,
}
impl CreateProduct {
pub async fn run<'e, E>(self, pool: E) -> Result<model::Product>
where
E: sqlx::Executor<'e, Database = sqlx::Postgres>,
{
sqlx::query_as(
r#"
INSERT INTO products (name, category, deliver_days_flag)
VALUES ($1, $2, $3)
RETURNING id,
name,
category,
deliver_days_flag
"#,
)
.bind(self.name)
.bind(self.category)
.bind(self.deliver_days_flag)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
dbg!(e);
Error::Create
})
}
}
#[derive(Debug)]
pub struct UpdateProduct {
pub id: ProductId,
pub name: ProductName,
pub category: Option<ProductCategory>,
pub deliver_days_flag: Days,
}
impl UpdateProduct {
pub async fn run<'e, E>(self, pool: E) -> Result<Product>
where
E: sqlx::Executor<'e, Database = sqlx::Postgres>,
{
sqlx::query_as(
r#"
UPDATE products
SET name = $2,
category = $3,
deliver_days_flag = $4
WHERE id = $1
RETURNING id,
name,
category,
deliver_days_flag
"#,
)
.bind(self.id)
.bind(self.name)
.bind(self.category)
.bind(self.deliver_days_flag)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
dbg!(e);
Error::Update(self.id)
})
}
}
#[derive(Debug)]
pub struct DeleteProduct {
pub product_id: ProductId,
}
impl DeleteProduct {
pub async fn run<'e, E>(self, pool: E) -> Result<Option<Product>>
where
E: sqlx::Executor<'e, Database = sqlx::Postgres>,
{
sqlx::query_as(
r#"
DELETE FROM products
WHERE id = $1
RETURNING id,
name,
category,
deliver_days_flag
"#,
)
.bind(self.product_id)
.fetch_optional(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::Delete(self.product_id)
})
}
}
#[derive(Debug)]
pub struct ShoppingCartProducts {
pub shopping_cart_id: ShoppingCartId,
pub limit: i32,
pub offset: i32,
}
impl ShoppingCartProducts {
pub async fn shopping_cart_products<'e, E>(self, pool: E) -> Result<Vec<Product>>
where
E: sqlx::Executor<'e, Database = sqlx::Postgres>,
{
sqlx::query_as(
r#"
SELECT products.id,
products.name,
products.category,
products.deliver_days_flag
FROM products
INNER JOIN shopping_cart_items ON shopping_cart_items.product_id = products.id
WHERE shopping_cart_id = $1
ORDER BY products.id
LIMIT $2 OFFSET $3
"#,
)
.bind(self.shopping_cart_id)
.bind(self.limit.max(1).min(200))
.bind(self.offset.max(0))
.fetch_all(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::ShoppingCartProducts(self.shopping_cart_id)
})
}
}
#[derive(Debug)]
pub struct FindProducts {
pub product_ids: Vec<ProductId>,
}
impl FindProducts {
pub async fn run(
self,
pool: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<Product>> {
db_utils::MultiLoad::new(
pool,
r#"
SELECT id,
name,
category,
deliver_days_flag
FROM products
WHERE
"#,
"products.id =",
)
.with_size(200)
.load(
self.product_ids.len(),
self.product_ids.into_iter().map(|id| *id),
|e| {
tracing::error!("{e:?}");
Error::FindProducts
},
)
.await
}
}
#[cfg(test)]
mod tests {
use config::UpdateConfig;
use model::*;
use uuid::Uuid;
pub struct NoOpts;
impl UpdateConfig for NoOpts {}
use super::*;
async fn test_product(
t: &mut sqlx::Transaction<'_, sqlx::Postgres>,
name: Option<String>,
short_description: Option<String>,
long_description: Option<String>,
category: Option<ProductCategory>,
price: Option<u32>,
deliver_days_flag: Option<Days>,
) -> Product {
CreateProduct {
name: ProductName::new(name.unwrap_or_else(|| format!("{}", Uuid::new_v4()))),
category,
deliver_days_flag: deliver_days_flag
.unwrap_or_else(|| Days(vec![Day::Friday, Day::Sunday])),
}
.run(t)
.await
.unwrap()
}
#[tokio::test]
async fn create() {
testx::db_t_ref!(t);
test_product(&mut t, None, None, None, None, None, None).await;
testx::db_rollback!(t);
}
#[tokio::test]
async fn all() {
testx::db_t_ref!(t);
let p1 = test_product(&mut t, None, None, None, None, None, None).await;
let p2 = test_product(&mut t, None, None, None, None, None, None).await;
let p3 = test_product(&mut t, None, None, None, None, None, None).await;
let products = AllProducts {
limit: 200,
offset: 0,
}
.run(&mut t)
.await
.unwrap();
testx::db_rollback!(t);
assert_eq!(products, vec![p1, p2, p3]);
}
#[tokio::test]
async fn find() {
testx::db_t_ref!(t);
let p1 = test_product(&mut t, None, None, None, None, None, None).await;
let p2 = test_product(&mut t, None, None, None, None, None, None).await;
let p3 = test_product(&mut t, None, None, None, None, None, None).await;
let product = FindProduct { product_id: p2.id }
.run(&mut t)
.await
.unwrap();
testx::db_rollback!(t);
assert_ne!(product, p1);
assert_eq!(product, p2);
assert_ne!(product, p3);
}
#[tokio::test]
async fn update() {
testx::db_t_ref!(t);
let original = test_product(&mut t, None, None, None, None, None, None).await;
let updated = UpdateProduct {
id: original.id,
name: ProductName::new("a9s0dja0sjd0jas09dj"),
category: None,
deliver_days_flag: Day::Tuesday | Day::Saturday,
}
.run(&mut t)
.await
.unwrap();
let reloaded = FindProduct {
product_id: original.id,
}
.run(&mut t)
.await
.unwrap();
testx::db_rollback!(t);
assert_ne!(updated, original);
assert_eq!(updated, reloaded);
assert_eq!(
updated,
Product {
id: original.id,
name: ProductName::new("a9s0dja0sjd0jas09dj"),
category: None,
deliver_days_flag: Day::Tuesday | Day::Saturday,
}
);
}
}

View File

@@ -1 +1,338 @@
use model::v2::*;
use crate::db::products::AllProducts;
#[derive(Debug, Copy, Clone, PartialEq, Eq, serde::Serialize, thiserror::Error)]
pub enum Error {
#[error("Unable to load all stocks")]
All,
#[error("Unable to create stock")]
Create,
#[error("Unable to update stock {0:?}")]
Update(StockId),
#[error("Unable to delete stock {0:?}")]
Delete(StockId),
#[error("Unable find stock for product")]
ProductVariantStock,
#[error("Stock {0:?} does not exists")]
NotFound(StockId),
}
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
pub struct AllStocks {
pub limit: i32,
pub offset: i32,
}
impl AllStocks {
pub async fn run(self, pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<Vec<Stock>> {
sqlx::query_as(
r#"
SELECT id, product_variant_id, quantity, quantity_unit
FROM stocks
ORDER BY id ASC
LIMIT $1 OFFSET $2
"#,
)
.bind(self.limit)
.bind(self.offset)
.fetch_all(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::All
})
}
}
#[derive(Debug)]
pub struct FindStock {
pub id: StockId,
}
impl FindStock {
pub async fn run(self, pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<Stock> {
sqlx::query_as(
r#"
SELECT id, product_variant_id, quantity, quantity_unit
FROM stocks
WHERE id = $1
"#,
)
.bind(self.id)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
dbg!(e);
Error::NotFound(self.id)
})
}
}
#[derive(Debug)]
pub struct CreateStock {
pub product_variant_id: ProductVariantId,
pub quantity: Quantity,
pub quantity_unit: QuantityUnit,
}
impl CreateStock {
pub async fn run(self, pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<Stock> {
sqlx::query_as(
r#"
INSERT INTO stocks (product_variant_id, quantity, quantity_unit)
VALUES ($1, $2, $3)
RETURNING id, product_variant_id, quantity, quantity_unit
"#,
)
.bind(self.product_variant_id)
.bind(self.quantity)
.bind(self.quantity_unit)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
dbg!(e);
Error::Create
})
}
}
#[derive(Debug)]
pub struct UpdateStock {
pub id: StockId,
pub product_id: ProductId,
pub quantity: Quantity,
pub quantity_unit: QuantityUnit,
}
impl UpdateStock {
pub async fn run(self, pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<Stock> {
sqlx::query_as(
r#"
UPDATE stocks
SET product_variant_id = $1,
quantity = $2,
quantity_unit = $3
WHERE id = $4
RETURNING id, product_variant_id, quantity, quantity_unit
"#,
)
.bind(self.product_id)
.bind(self.quantity)
.bind(self.quantity_unit)
.bind(self.id)
.fetch_one(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::Update(self.id)
})
}
}
#[derive(Debug)]
pub struct DeleteStock {
pub stock_id: StockId,
}
impl DeleteStock {
async fn run(self, pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<Option<Stock>> {
sqlx::query_as(
r#"
DELETE FROM stocks
WHERE id = $1
RETURNING id, product_variant_id, quantity, quantity_unit
"#,
)
.bind(self.stock_id)
.fetch_optional(pool)
.await
.map_err(|e| {
tracing::error!("{e:?}");
Error::Delete(self.stock_id)
})
}
}
#[derive(Debug)]
pub struct ProductVariantsStock {
pub product_variant_ids: Vec<ProductVariantId>,
}
impl ProductVariantsStock {
async fn run(self, pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<Vec<Stock>> {
db_utils::MultiLoad::new(
pool,
r#"
SELECT id, product_variant_id, quantity, quantity_unit
FROM stocks
WHERE
"#,
" product_variant_id =",
)
.with_size(200)
.load(
self.product_variant_ids.len(),
self.product_variant_ids.into_iter().map(|id| *id),
|_e| Error::ProductVariantStock,
)
.await
}
}
#[cfg(test)]
mod tests {
use config::UpdateConfig;
use fake::faker::lorem::en as lorem;
use fake::Fake;
use model::*;
use uuid::Uuid;
pub struct NoOpts;
impl UpdateConfig for NoOpts {}
use super::*;
use crate::db::products::*;
async fn test_product(pool: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Product {
CreateProduct {
name: ProductName::new(format!("db stocks test product {}", Uuid::new_v4())),
category: None,
deliver_days_flag: Days(vec![Day::Friday, Day::Sunday]),
}
.run(pool)
.await
.unwrap()
}
async fn test_stock(
pool: &mut sqlx::Transaction<'_, sqlx::Postgres>,
product_variant_id: Option<ProductVariantId>,
quantity: Option<Quantity>,
quantity_unit: Option<QuantityUnit>,
) -> Stock {
let product_variant_id = match product_variant_id {
Some(id) => id,
_ => test_product(&mut *pool).await.id,
};
let quantity = quantity.unwrap_or_else(|| Quantity::from_u32(345));
let quantity_unit = quantity_unit.unwrap_or(QuantityUnit::Piece);
CreateStock {
product_variant_id,
quantity_unit,
quantity,
}
.run(&mut *pool)
.await
.unwrap()
}
#[tokio::test]
async fn create_stock() {
testx::db_t_ref!(t);
test_stock(&mut t, None, None, None).await;
testx::db_rollback!(t);
}
#[tokio::test]
async fn products_stock() {
testx::db_t_ref!(t);
let first = test_stock(&mut t, None, None, None).await;
let second = test_stock(&mut t, None, None, None).await;
let stocks: Vec<Stock> = ProductVariantsStock {
product_variant_ids: vec![first.product_id, second.product_id],
}
.run(&mut t)
.await
.unwrap();
testx::db_rollback!(t);
assert_eq!(stocks, vec![first, second]);
}
#[tokio::test]
async fn all_stocks() {
testx::db_t_ref!(t);
let first = test_stock(&mut t, None, None, None).await;
let second = test_stock(&mut t, None, None, None).await;
let stocks: Vec<Stock> = AllStocks {
limit: 200,
offset: 0,
}
.run(&mut t)
.await
.unwrap();
testx::db_rollback!(t);
assert_eq!(stocks, vec![first, second]);
}
#[tokio::test]
async fn delete_stock() {
testx::db_t_ref!(t);
let first = test_stock(&mut t, None, None, None).await;
let second = test_stock(&mut t, None, None, None).await;
let deleted: Option<Stock> = DeleteStock {
stock_id: second.id,
}
.run(&mut t)
.await
.unwrap();
let reloaded = FindStock { id: second.id }.run(&mut t).await;
testx::db_rollback!(t);
assert_eq!(reloaded, Err(super::Error::NotFound(second.id)));
assert_eq!(deleted, Some(second));
assert_ne!(deleted, Some(first));
}
#[tokio::test]
async fn update_stock() {
testx::db_t_ref!(t);
let first = test_stock(&mut t, None, None, None).await;
let second = test_stock(&mut t, None, None, None).await;
let another_product = test_product(&mut t).await;
let updated: Stock = UpdateStock {
id: second.id,
product_id: another_product.id,
quantity: Quantity::from_u32(19191),
quantity_unit: QuantityUnit::Gram,
}
.run(&mut t)
.await
.unwrap();
let reloaded = FindStock { id: second.id }
.run(&mut t)
.await
.unwrap();
testx::db_rollback!(t);
assert_eq!(
updated,
Stock {
id: second.id,
product_id: another_product.id,
quantity: Quantity::from_u32(19191),
quantity_unit: QuantityUnit::Gram,
}
);
assert_ne!(updated, second);
assert_ne!(updated, first);
assert_eq!(reloaded, updated);
}
}