-rw-r--r--   fixtures/tasks.sql                             4
-rw-r--r--   migrations/20240417203222_create_task.up.sql   4
-rw-r--r--   src/api/tasks.rs                              60
-rw-r--r--   src/api/users.rs                               9
4 files changed, 58 insertions(+), 19 deletions(-)
diff --git a/fixtures/tasks.sql b/fixtures/tasks.sql
index 93d8a59..3e948df 100644
--- a/fixtures/tasks.sql
+++ b/fixtures/tasks.sql
@@ -1,4 +1,4 @@
-INSERT INTO task (id, user_id, title, description, remote, location, start_at, end_at) VALUES(
+INSERT INTO task (id, user_id, title, description, remote, location, start, duration) VALUES(
'd5d31b54-0fc4-432c-9212-25175749c7f4',
'4c14f795-86f0-4361-a02f-0edb966fb145',
'Unload Cargo',
@@ -6,5 +6,5 @@ INSERT INTO task (id, user_id, title, description, remote, location, start_at, e
FALSE,
'Astroid Gamma 2b, Madranite Mining Belt, 42d5b4, Orion Beta',
'2042-05-13 12:00:00 -5',
- '2042-05-13 15:00:00 -5'
+ '3 hours'
);
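The fixture now seeds a '3 hours' interval in place of the second timestamp, so the old end time becomes derived data. A quick sanity check against this fixture row (an illustrative query, not part of the change):

```sql
-- Postgres adds an INTERVAL directly to a TIMESTAMPTZ, so the end time the
-- fixture used to store ('2042-05-13 15:00:00 -5') falls out of start + duration.
SELECT id, start, start + duration AS derived_end
FROM task
WHERE id = 'd5d31b54-0fc4-432c-9212-25175749c7f4';
```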
diff --git a/migrations/20240417203222_create_task.up.sql b/migrations/20240417203222_create_task.up.sql
index fd8eb9c..2f4517f 100644
--- a/migrations/20240417203222_create_task.up.sql
+++ b/migrations/20240417203222_create_task.up.sql
@@ -5,8 +5,8 @@ CREATE TABLE task (
description TEXT,
remote BOOLEAN NOT NULL,
location TEXT,
- start_at TIMESTAMP WITH TIME ZONE,
- end_at TIMESTAMP WITH TIME ZONE,
+ start TIMESTAMP WITH TIME ZONE NOT NULL,
+ duration INTERVAL NOT NULL,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);
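Because the change edits the original create_task migration rather than adding a new one, it presumably lands via a database reset. For comparison, a data-preserving variant on a live table would lean on the fact that subtracting two TIMESTAMPTZ values yields an INTERVAL. This is a hypothetical sketch, not something this commit contains, and it assumes existing rows have both timestamps populated:

```sql
-- Hypothetical in-place variant of the same schema change.
ALTER TABLE task ADD COLUMN duration INTERVAL;
UPDATE task SET duration = end_at - start_at;   -- TIMESTAMPTZ - TIMESTAMPTZ = INTERVAL
ALTER TABLE task
    ALTER COLUMN duration SET NOT NULL,
    ALTER COLUMN start_at SET NOT NULL;
ALTER TABLE task RENAME COLUMN start_at TO start;
ALTER TABLE task DROP COLUMN end_at;
```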
diff --git a/src/api/tasks.rs b/src/api/tasks.rs
index b8acd91..cda0ce9 100644
--- a/src/api/tasks.rs
+++ b/src/api/tasks.rs
@@ -1,11 +1,13 @@
+use std::{ops::Range, time::Duration};
+
use axum::{
- extract::{Path, State},
+ extract::{Path, Query, State},
http::StatusCode,
Json,
};
use axum_extra::routing::Resource;
use serde::{Deserialize, Serialize};
-use sqlx::PgPool;
+use sqlx::{prelude::FromRow, PgPool};
use time::OffsetDateTime;
use uuid::Uuid;
@@ -13,7 +15,7 @@ use crate::{auth::AccessClaims, state::AppState};
use super::error::Error;
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Task {
pub id: Uuid,
pub user_id: Option<Uuid>,
@@ -21,30 +23,64 @@ pub struct Task {
pub description: Option<String>,
pub remote: bool,
pub location: Option<String>,
- pub start_at: Option<OffsetDateTime>,
- pub end_at: Option<OffsetDateTime>,
+ pub start: OffsetDateTime,
+ pub duration: Duration,
pub created_at: OffsetDateTime,
pub updated_at: OffsetDateTime,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct TaskQuery {
+ pub user_id: Option<Uuid>,
+ pub remote: Option<bool>,
+ pub title: Option<String>,
+ pub description: Option<String>,
+ pub time_range: Option<Range<OffsetDateTime>>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateTaskSchema {
pub user_id: Uuid,
pub title: String,
pub description: Option<String>,
pub remote: bool,
pub location: Option<String>,
- pub start_at: Option<OffsetDateTime>,
- pub end_at: Option<OffsetDateTime>,
+ pub start: OffsetDateTime,
+ pub duration: Duration,
}
pub fn router() -> Resource<AppState> {
Resource::named("tasks")
+ .index(index)
.create(create)
.show(show)
.destroy(destroy)
}
+pub async fn index(
+ Query(task_query): Query<TaskQuery>,
+ State(pool): State<PgPool>,
+ _: AccessClaims,
+) -> Result<Json<Vec<Task>>, Error> {
+ sqlx::query_as!(
+ Task,
+ "SELECT * FROM task
+ WHERE ($1::UUID IS NULL OR user_id = $1)
+ AND ($2::BOOLEAN IS NULL OR remote = $2)
+ AND ($3::TEXT IS NULL OR title LIKE '%' || $3 || '%')
+ AND ($4::TEXT IS NULL OR description LIKE '%' || $4 || '%')
+ LIMIT 100",
+ task_query.user_id,
+ task_query.remote,
+ task_query.title,
+ task_query.description,
+ )
+ .fetch_all(&pool)
+ .await
+ .map(Json)
+ .map_err(Into::into)
+}
+
pub async fn create(
State(pool): State<PgPool>,
AccessClaims { sub, .. }: AccessClaims,
@@ -54,8 +90,8 @@ pub async fn create(
description,
remote,
location,
- start_at,
- end_at,
+ start,
+ duration,
}): Json<CreateTaskSchema>,
) -> Result<(StatusCode, Json<Task>), Error> {
if sub != user_id {
@@ -63,14 +99,14 @@ pub async fn create(
}
let task = sqlx::query_as!(Task,
- "INSERT INTO task (user_id, title, description, remote, location, start_at, end_at) VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *",
+ "INSERT INTO task (user_id, title, description, remote, location, start, duration) VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *",
user_id,
title,
description,
remote,
location,
- start_at,
- end_at,
+ start,
+ duration,
)
.fetch_one(&pool)
.await?;
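TaskQuery accepts a time_range, but the index handler above does not yet bind it. A rough sketch of how it could be folded into the same query, splitting the Range into two nullable bounds and matching tasks whose [start, start + duration) window overlaps the requested range; the handler name and exact predicate are assumptions, only the columns and existing filters come from the diff (title/description filters omitted for brevity):

```rust
// Hypothetical variant of index() that also applies TaskQuery::time_range.
pub async fn index_with_range(
    Query(task_query): Query<TaskQuery>,
    State(pool): State<PgPool>,
    _: AccessClaims,
) -> Result<Json<Vec<Task>>, Error> {
    // Split the optional Range into two optional bounds so each can be
    // bound as its own nullable parameter.
    let (range_start, range_end) = match task_query.time_range {
        Some(range) => (Some(range.start), Some(range.end)),
        None => (None, None),
    };

    sqlx::query_as!(
        Task,
        "SELECT * FROM task
        WHERE ($1::UUID IS NULL OR user_id = $1)
        AND ($2::BOOLEAN IS NULL OR remote = $2)
        AND ($3::TIMESTAMPTZ IS NULL OR start + duration > $3)
        AND ($4::TIMESTAMPTZ IS NULL OR start < $4)
        LIMIT 100",
        task_query.user_id,
        task_query.remote,
        range_start,
        range_end,
    )
    .fetch_all(&pool)
    .await
    .map(Json)
    .map_err(Into::into)
}
```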
diff --git a/src/api/users.rs b/src/api/users.rs
index bfbc0d6..86007ca 100644
--- a/src/api/users.rs
+++ b/src/api/users.rs
@@ -4,7 +4,7 @@ use axum::{
extract::{Path, State},
http::header::LOCATION,
response::{IntoResponse, IntoResponseParts},
- Json,
+ Json, Router,
};
use axum_extra::routing::Resource;
use serde::{Deserialize, Serialize};
@@ -13,14 +13,17 @@ use time::OffsetDateTime;
use uuid::Uuid;
use crate::{
+ api::tasks::Task,
auth::{credentials::Credential, AccessClaims},
state::AppState,
};
use super::error::Error;
-pub fn router() -> Resource<AppState> {
- Resource::named("users").create(create).show(show)
+pub fn router() -> Router<AppState> {
+ let resource = Resource::named("users").create(create).show(show);
+
+ Router::from(resource)
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
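users::router() now returns a plain Router<AppState> instead of a Resource, which leaves room to attach non-resource routes later and makes it composable with the other sub-routers. A rough sketch of how the top level might assemble them, assuming the module layout the imports above suggest; the function name and merge order are illustrative only:

```rust
use axum::Router;

use crate::{api, state::AppState};

// Hypothetical top-level composition: Resource converts into Router,
// so resource-style and plain routers merge into one app router.
pub fn app_router() -> Router<AppState> {
    Router::new()
        .merge(api::users::router())               // already a Router<AppState>
        .merge(Router::from(api::tasks::router())) // Resource -> Router, as in users.rs
}
```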