generated from Patagia/template-nix
parent 3b04b82998
commit 9b7e1fb226
18 changed files with 1117 additions and 30 deletions
47 controller/.sqlx/query-40dee0d539971f95bb3dc2ba4c49d5910bfdb2a6c9b82ddb296854973369594c.json generated Normal file
@@ -0,0 +1,47 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM users WHERE id > coalesce($1, '00000000-0000-0000-0000-000000000000'::UUID) ORDER BY id LIMIT $2",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "name",
        "type_info": "Varchar"
      },
      {
        "ordinal": 2,
        "name": "time_deleted",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 3,
        "name": "time_created",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 4,
        "name": "time_modified",
        "type_info": "Timestamptz"
      }
    ],
    "parameters": {
      "Left": [
        "Uuid",
        "Int8"
      ]
    },
    "nullable": [
      false,
      false,
      true,
      false,
      false
    ]
  },
  "hash": "40dee0d539971f95bb3dc2ba4c49d5910bfdb2a6c9b82ddb296854973369594c"
}
46 controller/.sqlx/query-843923b9a0257cf80f1dff554e7dc8fdfc05f489328e8376513124dfb42996e3.json generated Normal file
@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM users WHERE id = $1",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "name",
        "type_info": "Varchar"
      },
      {
        "ordinal": 2,
        "name": "time_deleted",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 3,
        "name": "time_created",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 4,
        "name": "time_modified",
        "type_info": "Timestamptz"
      }
    ],
    "parameters": {
      "Left": [
        "Uuid"
      ]
    },
    "nullable": [
      false,
      false,
      true,
      false,
      false
    ]
  },
  "hash": "843923b9a0257cf80f1dff554e7dc8fdfc05f489328e8376513124dfb42996e3"
}
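Note on the two query-*.json files above: they are sqlx's offline query cache, generated with cargo sqlx prepare. Each file records a query's parameter and column types so the sqlx::query! calls in controller/src/user/api.rs can be type-checked at build time without a live database. A minimal sketch of how the cache is consumed, assuming the standard sqlx offline workflow; the helper name is illustrative and not part of this commit:

    // Sketch only: with SQLX_OFFLINE=true the macro below is validated against
    // .sqlx/query-*.json instead of the database named by DATABASE_URL.
    // Regenerate the cache with `cargo sqlx prepare` after editing a query.
    async fn lookup_user(pool: &sqlx::PgPool, id: uuid::Uuid) -> Result<(), sqlx::Error> {
        let rec = sqlx::query!("SELECT * FROM users WHERE id = $1", id)
            .fetch_one(pool)
            .await?;
        println!("{} {}", rec.id, rec.name);
        Ok(())
    }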
controller/Cargo.toml
@@ -15,10 +15,14 @@ schemars.workspace = true
serde.workspace = true
slog-async.workspace = true
slog.workspace = true
sqlx = { version = "0.8.3", default-features = false, features = [
    "macros", "migrate", "postgres", "runtime-tokio", "tls-rustls", "time", "uuid"
] }
tokio.workspace = true
trace-request = { path = "../trace-request" }
tracing-slog.workspace = true
tracing.workspace = true
uuid.workspace = true

[package.metadata.cargo-machete]
ignored = ["http"]
5 controller/build.rs Normal file
@@ -0,0 +1,5 @@
// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}
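The build script makes cargo re-run the build whenever the migrations directory changes, which matters because sqlx::migrate!() embeds those SQL files at compile time. A small sketch of that relationship under the same assumption; the names here are illustrative, while main.rs below simply calls sqlx::migrate!().run(&pg) inline:

    // Illustrative sketch: sqlx::migrate!() expands to a Migrator built from
    // ./migrations at compile time; the rerun-if-changed directive above keeps
    // it in sync when new .sql files are added.
    static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!();

    async fn run_migrations(pool: &sqlx::PgPool) -> Result<(), sqlx::migrate::MigrateError> {
        MIGRATOR.run(pool).await
    }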
7 controller/migrations/20250108132540_users.sql Normal file
@@ -0,0 +1,7 @@
CREATE TABLE IF NOT EXISTS patagia.public.Users(
    id UUID PRIMARY KEY,
    name VARCHAR(63) NOT NULL,
    time_deleted TIMESTAMP WITH TIME ZONE, -- non-NULL if deleted
    time_created TIMESTAMP WITH TIME ZONE NOT NULL,
    time_modified TIMESTAMP WITH TIME ZONE NOT NULL
);
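The table uses a soft-delete convention: time_deleted stays NULL for live users and is set when a user is removed, which is why the .sqlx metadata above marks only that column as nullable. A hypothetical example of a soft delete under this schema, not part of this commit; it uses the unchecked query API so it needs no entry in the offline cache:

    // Hypothetical helper, for illustration only: "deleting" stamps
    // time_deleted rather than removing the row.
    async fn soft_delete_user(pool: &sqlx::PgPool, id: uuid::Uuid) -> Result<(), sqlx::Error> {
        sqlx::query("UPDATE users SET time_deleted = now(), time_modified = now() WHERE id = $1")
            .bind(id)
            .execute(pool)
            .await?;
        Ok(())
    }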
controller/src/api.rs
@@ -4,12 +4,14 @@ use dropshot::ApiDescription;
use std::sync::Arc;

use crate::context::ControllerContext;
use crate::user;
use crate::version;

type ControllerApiDescription = ApiDescription<Arc<ControllerContext>>;

pub fn api() -> Result<ControllerApiDescription> {
    let mut api = ControllerApiDescription::new();
    user::register_api(&mut api)?;
    api.register(version::version)?;
    Ok(api)
}
controller/src/main.rs
@@ -1,8 +1,8 @@
use anyhow::{anyhow, Result};
use clap::Parser;
use dropshot::{ConfigDropshot, ServerBuilder};

use slog::Drain;
use sqlx::postgres::PgPool;
use tracing_slog::TracingSlogDrain;

use std::net::SocketAddr;
@@ -36,6 +36,13 @@ struct Cli {
        env = "LISTEN_ADDRESS"
    )]
    listen_address: String,

    #[arg(
        long = "database-url",
        default_value = "postgresql://localhost/patagia",
        env = "DATABASE_URL"
    )]
    database_url: Option<String>,
}

#[tokio::main]
@@ -57,7 +64,19 @@ async fn main() -> Result<()> {
        slog::Logger::root(async_drain, slog::o!())
    };

    let ctx = ControllerContext::new();
    let database_url = args.database_url.unwrap();

    tracing::info!(
        database_url,
        listen_address = args.listen_address,
        "Starting server"
    );

    let pg = PgPool::connect(&database_url).await?;

    sqlx::migrate!().run(&pg).await?;

    let ctx = ControllerContext::new(pg);
    let api = api::api()?;
    ServerBuilder::new(api, Arc::new(ctx), logger)
        .config(config)
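Note that args.database_url.unwrap() cannot panic in practice: the --database-url flag declares both a default_value and an env fallback, so clap always populates the Option. An equivalent, slightly tighter declaration would drop the Option entirely; this is an illustrative alternative, not what the commit does:

    // Illustrative alternative: with default_value set, the field can be a
    // plain String and the unwrap in main() disappears.
    #[derive(clap::Parser)]
    struct Cli {
        #[arg(
            long = "database-url",
            default_value = "postgresql://localhost/patagia",
            env = "DATABASE_URL"
        )]
        database_url: String,
    }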
controller/src/context.rs
@@ -1,13 +1,11 @@
pub struct ControllerContext {}
use sqlx::postgres::PgPool;

pub struct ControllerContext {
    pub pg_pool: PgPool,
}

impl ControllerContext {
    pub fn new() -> ControllerContext {
        ControllerContext {}
    }
}

impl Default for ControllerContext {
    fn default() -> Self {
        Self::new()
    pub fn new(pg_pool: PgPool) -> ControllerContext {
        ControllerContext { pg_pool }
    }
}
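ControllerContext now simply owns the PgPool built in main.rs. sqlx pools are cheap, reference-counted handles, so handlers can take their own copy per request, which is what user/api.rs does with to_owned(). A one-line sketch of that assumption; the helper is illustrative:

    // Sketch: cloning a PgPool clones a handle to one shared pool, not the
    // underlying connections, so a single pool in the context serves all handlers.
    fn pool_handle(ctx: &ControllerContext) -> sqlx::PgPool {
        ctx.pg_pool.clone()
    }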
controller/src/lib.rs
@@ -1,4 +1,5 @@
pub mod api;
pub mod context;

mod user;
mod version;
110 controller/src/user/api.rs Normal file
@@ -0,0 +1,110 @@
use dropshot::{
    endpoint, EmptyScanParams, HttpError, HttpResponseOk, PaginationParams, Path, Query,
    RequestContext, ResultsPage, WhichPage,
};
use dropshot::{ApiDescription, ApiDescriptionRegisterError};
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use trace_request::trace_request;
use uuid::Uuid;

use std::sync::Arc;

use super::User;
use crate::context::ControllerContext;

#[derive(Deserialize, JsonSchema, Serialize)]
#[serde(rename_all = "camelCase")]
struct UsersPathParams {
    user_id: Uuid,
}

#[derive(Deserialize, JsonSchema, Serialize)]
#[serde(rename_all = "camelCase")]
struct UserPage {
    user_id: Uuid,
}

pub fn register_api(
    api: &mut ApiDescription<Arc<ControllerContext>>,
) -> Result<(), ApiDescriptionRegisterError> {
    api.register(get_user_by_id)?;
    api.register(list_users)
}

/// Fetch user info.
#[endpoint {
    method = GET,
    path = "/users/{userId}",
    tags = [ "user" ],
}]
#[trace_request]
async fn get_user_by_id(
    rqctx: RequestContext<Arc<ControllerContext>>,
    params: Path<UsersPathParams>,
) -> Result<HttpResponseOk<User>, HttpError> {
    let id = params.into_inner().user_id;
    tracing::debug!(id = id.to_string(), "Getting user by id");

    let pg = rqctx.context().pg_pool.to_owned();

    let rec = sqlx::query!(r#"SELECT * FROM users WHERE id = $1"#, id)
        .fetch_one(&pg)
        .await
        .map_err(|e| match e {
            sqlx::Error::RowNotFound => {
                HttpError::for_not_found(None, format!("User not found by id: {:?}", id))
            }
            err => HttpError::for_internal_error(format!("Error: {}", err)),
        })?;

    let user = User {
        id: rec.id,
        name: rec.name,
    };

    Ok(HttpResponseOk(user))
}

/// List users
#[endpoint {
    method = GET,
    path = "/users",
    tags = [ "user" ],
}]
#[trace_request]
async fn list_users(
    rqctx: RequestContext<Arc<ControllerContext>>,
    query: Query<PaginationParams<EmptyScanParams, UserPage>>,
) -> Result<HttpResponseOk<ResultsPage<User>>, HttpError> {
    let pag_params = query.into_inner();
    let limit = rqctx.page_limit(&pag_params)?.get() as i64;
    let pg = rqctx.context().pg_pool.to_owned();

    let last_seen = match &pag_params.page {
        WhichPage::Next(UserPage { user_id: id }) => Some(id),
        _ => None,
    };

    let users = sqlx::query!(
        r#"SELECT * FROM users WHERE id > coalesce($1, '00000000-0000-0000-0000-000000000000'::UUID) ORDER BY id LIMIT $2"#,
        last_seen,
        limit
    )
    .fetch_all(&pg)
    .await
    .map_err(|e| HttpError::for_internal_error(format!("Error: {}", e)))?
    .into_iter()
    .map(|rec| User {
        id: rec.id,
        name: rec.name,
    })
    .collect();

    Ok(HttpResponseOk(ResultsPage::new(
        users,
        &EmptyScanParams {},
        |u: &User, _| UserPage { user_id: u.id },
    )?))
}
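list_users pages by id (keyset pagination): on the first page the bind is NULL, which coalesce() turns into the all-zero UUID so the scan starts from the smallest id; each following page carries the last id seen in the UserPage token and resumes strictly after it. A hypothetical client-side walk of the endpoint, assuming dropshot's usual limit and page_token query parameters and its ResultsPage JSON shape (items plus next_page); the base URL, page size, and the reqwest/serde_json dependencies are illustrative, and none of this is part of the commit:

    // Hypothetical client sketch; URL, page size and dependencies are assumptions.
    async fn walk_users() -> anyhow::Result<()> {
        let base = "http://localhost:8080";
        let mut url = format!("{base}/users?limit=50");
        loop {
            let page: serde_json::Value = reqwest::get(url.as_str()).await?.json().await?;
            if let Some(items) = page["items"].as_array() {
                for user in items {
                    println!("{} {}", user["id"], user["name"]);
                }
            }
            match page["next_page"].as_str() {
                Some(token) => url = format!("{base}/users?limit=50&page_token={token}"),
                None => break,
            }
        }
        Ok(())
    }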
14 controller/src/user/mod.rs Normal file
@@ -0,0 +1,14 @@
use schemars::JsonSchema;
use serde::Serialize;
use uuid::Uuid;

mod api;

pub use self::api::register_api;

/// User
#[derive(Serialize, JsonSchema)]
struct User {
    id: Uuid,
    name: String,
}