From 34e030224281d2025ae7bf3cef725348137d074d Mon Sep 17 00:00:00 2001
From: Brent Schroeter
Date: Sun, 14 Sep 2025 16:19:44 -0400
Subject: [PATCH] refactor with updated naming convention (part 1)

---
 Cargo.lock | 28 +++-
 Cargo.toml | 2 +
 dev-services/docker-compose.yaml | 6 +-
 interim-models/Cargo.toml | 2 +
 interim-models/build.rs | 5 +
 interim-models/migrations/.keep | 0
 .../migrations/20241125232658_users.down.sql | 1 -
 .../migrations/20241125232658_users.up.sql | 6 -
 .../20250108211839_sessions.down.sql | 1 -
 .../migrations/20250108211839_sessions.up.sql | 8 -
 .../migrations/20250522224809_bases.down.sql | 2 -
 .../migrations/20250522224809_bases.up.sql | 19 ---
 .../20250528060837_rel_invitations.down.sql | 1 -
 .../20250528060837_rel_invitations.up.sql | 12 --
 .../migrations/20250528233517_lenses.down.sql | 3 -
 .../migrations/20250528233517_lenses.up.sql | 21 ---
 .../migrations/20250918060948_init.down.sql | 9 ++
 .../migrations/20250918060948_init.up.sql | 94 ++++++++++++
 interim-models/src/client.rs | 1 +
 interim-models/src/field.rs | 57 +++----
 interim-models/src/field_form_prompt.rs | 129 ++++++++++++++++
 interim-models/src/form_transition.rs | 106 +++++++++++++
 interim-models/src/language.rs | 39 +++++
 interim-models/src/lib.rs | 8 +-
 interim-models/src/{lens.rs => portal.rs} | 116 +++++++-------
 interim-models/src/presentation.rs | 23 ++-
 interim-models/src/rel_invitation.rs | 13 +-
 interim-models/src/{base.rs => workspace.rs} | 77 ++++++----
 interim-models/src/workspace_user_perm.rs | 130 ++++++++++++++++
 interim-pgtypes/src/client.rs | 6 +-
 interim-pgtypes/src/pg_attribute.rs | 12 +-
 interim-pgtypes/src/pg_class.rs | 15 +-
 interim-pgtypes/src/pg_database.rs | 4 +-
 interim-pgtypes/src/pg_role.rs | 13 +-
 interim-server/src/app_state.rs | 48 +++---
 interim-server/src/auth.rs | 12 +-
 interim-server/src/base_pooler.rs | 58 ++++---
 interim-server/src/base_user_perms.rs | 56 +++----
 interim-server/src/cli.rs | 12 +-
 interim-server/src/main.rs | 17 +-
 interim-server/src/navbar.rs | 87 +++++------
 interim-server/src/navigator.rs | 44 ++++--
 interim-server/src/router.rs | 14 +-
 interim-server/src/routes/bases.rs | 4 +-
 interim-server/src/routes/lens_index.rs | 4 +-
 interim-server/src/routes/lens_insert.rs | 4 +-
 interim-server/src/routes/lenses.rs | 14 +-
 interim-server/src/routes/mod.rs | 142 +++++++++++++++--
 interim-server/src/routes/relations.rs | 6 +-
 .../src/routes/relations_multi/mod.rs | 7 +
 .../relations_single/add_field_handler.rs | 141 +++++++++++++++++
 .../relations_single/add_portal_handler.rs | 56 +++++++
 .../relations_single/get_data_handler.rs | 145 ++++++++++++++++++
 .../routes/relations_single/insert_handler.rs | 124 +++++++++++++++
 .../src/routes/relations_single/mod.rs | 22 +++
 .../routes/relations_single/portal_handler.rs | 88 +++++++++++
 .../routes/workspaces_multi/list_handlers.rs | 39 +++++
 .../src/routes/workspaces_multi/mod.rs | 12 ++
 .../workspaces_single/add_table_handler.rs | 109 +++++++++++++
 .../src/routes/workspaces_single/mod.rs | 21 +++
 .../routes/workspaces_single/nav_handler.rs | 77 ++++++++++
 interim-server/src/sessions.rs | 14 +-
 interim-server/src/settings.rs | 69 +++++----
 interim-server/src/user.rs | 42 ++---
 interim-server/src/worker.rs | 4 +-
 interim-server/templates/workspace_nav.html | 91 +++++++++++
 .../templates/workspaces_multi/list.html | 16 ++
 .../templates/workspaces_single/nav.html | 8 +
 sass/main.scss | 2 +
 svelte/src/expression-editor.webc.svelte | 4 +-
 svelte/src/field-adder.webc.svelte | 69 +++++---
 svelte/src/field-details.svelte |
6 +- svelte/src/field-header.svelte | 12 +- svelte/src/field.svelte.ts | 4 +- svelte/src/table-viewer.webc.svelte | 2 +- 75 files changed, 2135 insertions(+), 540 deletions(-) create mode 100644 interim-models/build.rs delete mode 100644 interim-models/migrations/.keep delete mode 100644 interim-models/migrations/20241125232658_users.down.sql delete mode 100644 interim-models/migrations/20241125232658_users.up.sql delete mode 100644 interim-models/migrations/20250108211839_sessions.down.sql delete mode 100644 interim-models/migrations/20250108211839_sessions.up.sql delete mode 100644 interim-models/migrations/20250522224809_bases.down.sql delete mode 100644 interim-models/migrations/20250522224809_bases.up.sql delete mode 100644 interim-models/migrations/20250528060837_rel_invitations.down.sql delete mode 100644 interim-models/migrations/20250528060837_rel_invitations.up.sql delete mode 100644 interim-models/migrations/20250528233517_lenses.down.sql delete mode 100644 interim-models/migrations/20250528233517_lenses.up.sql create mode 100644 interim-models/migrations/20250918060948_init.down.sql create mode 100644 interim-models/migrations/20250918060948_init.up.sql create mode 100644 interim-models/src/field_form_prompt.rs create mode 100644 interim-models/src/form_transition.rs create mode 100644 interim-models/src/language.rs rename interim-models/src/{lens.rs => portal.rs} (51%) rename interim-models/src/{base.rs => workspace.rs} (52%) create mode 100644 interim-models/src/workspace_user_perm.rs create mode 100644 interim-server/src/routes/relations_multi/mod.rs create mode 100644 interim-server/src/routes/relations_single/add_field_handler.rs create mode 100644 interim-server/src/routes/relations_single/add_portal_handler.rs create mode 100644 interim-server/src/routes/relations_single/get_data_handler.rs create mode 100644 interim-server/src/routes/relations_single/insert_handler.rs create mode 100644 interim-server/src/routes/relations_single/mod.rs create mode 100644 interim-server/src/routes/relations_single/portal_handler.rs create mode 100644 interim-server/src/routes/workspaces_multi/list_handlers.rs create mode 100644 interim-server/src/routes/workspaces_multi/mod.rs create mode 100644 interim-server/src/routes/workspaces_single/add_table_handler.rs create mode 100644 interim-server/src/routes/workspaces_single/mod.rs create mode 100644 interim-server/src/routes/workspaces_single/nav_handler.rs create mode 100644 interim-server/templates/workspace_nav.html create mode 100644 interim-server/templates/workspaces_multi/list.html create mode 100644 interim-server/templates/workspaces_single/nav.html diff --git a/Cargo.lock b/Cargo.lock index a830a23..6bc9d3c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1001,9 +1001,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -1603,9 +1603,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = 
"3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -1639,12 +1639,14 @@ dependencies = [ "chrono", "derive_builder", "interim-pgtypes", + "redact", "regex", "serde", "serde_json", "sqlx", "strum", "thiserror 2.0.12", + "url", "uuid", ] @@ -2115,9 +2117,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" @@ -2332,6 +2334,16 @@ dependencies = [ "getrandom 0.3.3", ] +[[package]] +name = "redact" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcecefd225c2fb69914585a7a6f8878929feb316a7ecb61c07d79e361d46d8ac" +dependencies = [ + "serde", + "zeroize", +] + [[package]] name = "redox_syscall" version = "0.5.12" @@ -3693,9 +3705,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", diff --git a/Cargo.toml b/Cargo.toml index e7ecf30..0f411e5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ futures = "0.3.31" interim-models = { path = "./interim-models" } interim-pgtypes = { path = "./interim-pgtypes" } rand = "0.8.5" +redact = { version = "0.1.11", features = ["serde", "zeroize"] } regex = "1.11.1" reqwest = { version = "0.12.8", features = ["json"] } serde = { version = "1.0.213", features = ["derive"] } @@ -22,5 +23,6 @@ sqlx = { version = "0.8.6", features = ["runtime-tokio", "tls-rustls-ring-native thiserror = "2.0.12" tokio = { version = "1.42.0", features = ["full"] } tracing = "0.1.40" +url = { version = "2.5.7", features = ["serde"] } uuid = { version = "1.11.0", features = ["serde", "v4", "v7"] } validator = { version = "0.20.0", features = ["derive"] } diff --git a/dev-services/docker-compose.yaml b/dev-services/docker-compose.yaml index 1bc9e11..9a6b180 100644 --- a/dev-services/docker-compose.yaml +++ b/dev-services/docker-compose.yaml @@ -1,8 +1,8 @@ -name: interim +name: phono services: pg: - image: postgres:17 + image: postgres:18rc1 restart: always environment: POSTGRES_USER: postgres @@ -11,7 +11,7 @@ services: - "127.0.0.1:5432:5432" volumes: - "./docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d:ro" - - "./pgdata:/var/lib/postgresql/data" + - "./pgdata:/var/lib/postgresql/18/docker" keycloak: depends_on: [pg] diff --git a/interim-models/Cargo.toml b/interim-models/Cargo.toml index 1101e83..70fd399 100644 --- a/interim-models/Cargo.toml +++ b/interim-models/Cargo.toml @@ -7,10 +7,12 @@ version.workspace = true chrono = { workspace = true } derive_builder = { workspace = true } interim-pgtypes = { path = "../interim-pgtypes" } +redact = { workspace = true } regex = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } sqlx = { workspace = true } strum = { version = "0.27.2", features = ["derive"] } thiserror = { workspace = true } +url = { workspace = true } uuid = { workspace = true } diff --git a/interim-models/build.rs b/interim-models/build.rs new file mode 100644 index 
0000000..d506869 --- /dev/null +++ b/interim-models/build.rs @@ -0,0 +1,5 @@ +// generated by `sqlx migrate build-script` +fn main() { + // trigger recompilation when a new migration is added + println!("cargo:rerun-if-changed=migrations"); +} diff --git a/interim-models/migrations/.keep b/interim-models/migrations/.keep deleted file mode 100644 index e69de29..0000000 diff --git a/interim-models/migrations/20241125232658_users.down.sql b/interim-models/migrations/20241125232658_users.down.sql deleted file mode 100644 index 2482754..0000000 --- a/interim-models/migrations/20241125232658_users.down.sql +++ /dev/null @@ -1 +0,0 @@ -drop table if exists users; diff --git a/interim-models/migrations/20241125232658_users.up.sql b/interim-models/migrations/20241125232658_users.up.sql deleted file mode 100644 index 93ff111..0000000 --- a/interim-models/migrations/20241125232658_users.up.sql +++ /dev/null @@ -1,6 +0,0 @@ -create table if not exists users ( - id uuid not null primary key, - uid text unique not null, - email text not null -); -create index on users (uid); diff --git a/interim-models/migrations/20250108211839_sessions.down.sql b/interim-models/migrations/20250108211839_sessions.down.sql deleted file mode 100644 index 97738a8..0000000 --- a/interim-models/migrations/20250108211839_sessions.down.sql +++ /dev/null @@ -1 +0,0 @@ -drop table if exists browser_sessions; diff --git a/interim-models/migrations/20250108211839_sessions.up.sql b/interim-models/migrations/20250108211839_sessions.up.sql deleted file mode 100644 index fdc96f2..0000000 --- a/interim-models/migrations/20250108211839_sessions.up.sql +++ /dev/null @@ -1,8 +0,0 @@ -create table if not exists browser_sessions ( - id text not null primary key, - serialized text not null, - created_at timestamptz not null default now(), - expiry timestamptz -); -create index on browser_sessions (expiry); -create index on browser_sessions (created_at); diff --git a/interim-models/migrations/20250522224809_bases.down.sql b/interim-models/migrations/20250522224809_bases.down.sql deleted file mode 100644 index c14b6a4..0000000 --- a/interim-models/migrations/20250522224809_bases.down.sql +++ /dev/null @@ -1,2 +0,0 @@ -drop table if exists base_user_perms; -drop table if exists bases; diff --git a/interim-models/migrations/20250522224809_bases.up.sql b/interim-models/migrations/20250522224809_bases.up.sql deleted file mode 100644 index 7833fab..0000000 --- a/interim-models/migrations/20250522224809_bases.up.sql +++ /dev/null @@ -1,19 +0,0 @@ -create table if not exists bases ( - id uuid not null primary key, - name text not null default '', - url text not null, - owner_id uuid not null references users(id) - on delete restrict, - user_role_prefix text not null default '__itmu__' -); -create index on bases (owner_id); - -create table if not exists base_user_perms ( - id uuid not null primary key, - base_id uuid not null references bases(id), - user_id uuid not null references users(id), - perm text not null, - unique (base_id, user_id, perm) -); -create index on base_user_perms (user_id); -create index on base_user_perms (base_id); diff --git a/interim-models/migrations/20250528060837_rel_invitations.down.sql b/interim-models/migrations/20250528060837_rel_invitations.down.sql deleted file mode 100644 index 36a261f..0000000 --- a/interim-models/migrations/20250528060837_rel_invitations.down.sql +++ /dev/null @@ -1 +0,0 @@ -drop table if exists rel_invitations; diff --git a/interim-models/migrations/20250528060837_rel_invitations.up.sql 
b/interim-models/migrations/20250528060837_rel_invitations.up.sql deleted file mode 100644 index 74ce487..0000000 --- a/interim-models/migrations/20250528060837_rel_invitations.up.sql +++ /dev/null @@ -1,12 +0,0 @@ -create table if not exists rel_invitations ( - id uuid not null primary key, - email text not null, - base_id uuid not null references bases(id) on delete cascade, - class_oid oid not null, - created_by uuid not null references users(id) on delete restrict, - privilege text not null, - expires_at timestamptz, - unique (email, base_id, class_oid, privilege) -); -create index on rel_invitations (base_id, class_oid); -create index on rel_invitations (email); diff --git a/interim-models/migrations/20250528233517_lenses.down.sql b/interim-models/migrations/20250528233517_lenses.down.sql deleted file mode 100644 index ab64ac4..0000000 --- a/interim-models/migrations/20250528233517_lenses.down.sql +++ /dev/null @@ -1,3 +0,0 @@ -drop table if exists fields; -drop table if exists lenses; -drop type if exists lens_display_type; diff --git a/interim-models/migrations/20250528233517_lenses.up.sql b/interim-models/migrations/20250528233517_lenses.up.sql deleted file mode 100644 index 18dd9f9..0000000 --- a/interim-models/migrations/20250528233517_lenses.up.sql +++ /dev/null @@ -1,21 +0,0 @@ -create type lens_display_type as enum ('table'); - -create table if not exists lenses ( - id uuid not null primary key, - name text not null, - base_id uuid not null references bases(id) on delete cascade, - class_oid oid not null, - filter jsonb not null default 'null'::jsonb, - order_by jsonb not null default '[]'::jsonb, - display_type lens_display_type not null default 'table' -); -create index on lenses (base_id); - -create table if not exists fields ( - id uuid not null primary key, - lens_id uuid not null references lenses(id) on delete cascade, - name text not null, - label text, - presentation jsonb not null, - width_px int not null default 200 -); diff --git a/interim-models/migrations/20250918060948_init.down.sql b/interim-models/migrations/20250918060948_init.down.sql new file mode 100644 index 0000000..d4e51d6 --- /dev/null +++ b/interim-models/migrations/20250918060948_init.down.sql @@ -0,0 +1,9 @@ +drop table if exists field_form_prompts; +drop table if exists form_transitions; +drop table if exists fields; +drop table if exists portals; +drop table if exists rel_invitations; +drop table if exists workspace_user_perms; +drop table if exists workspaces; +drop table if exists browser_sessions; +drop table if exists users; diff --git a/interim-models/migrations/20250918060948_init.up.sql b/interim-models/migrations/20250918060948_init.up.sql new file mode 100644 index 0000000..04f59af --- /dev/null +++ b/interim-models/migrations/20250918060948_init.up.sql @@ -0,0 +1,94 @@ +-- Users -- + +create table if not exists users ( + id uuid not null primary key default uuidv7(), + uid text unique not null, + email text not null +); +create index on users (uid); + +-- async_session Browser Sessions -- + +create table if not exists browser_sessions ( + id text not null primary key, + serialized text not null, + created_at timestamptz not null default now(), + expiry timestamptz +); +create index on browser_sessions (expiry); +create index on browser_sessions (created_at); + +-- Workspaces -- + +create table if not exists workspaces ( + id uuid not null primary key default uuidv7(), + name text not null default '', + url text not null, + owner_id uuid not null references users(id) on delete restrict 
+); +create index on workspaces (owner_id); + +create table if not exists workspace_user_perms ( + id uuid not null primary key default uuidv7(), + workspace_id uuid not null references workspaces(id) on delete cascade, + user_id uuid not null references users(id) on delete cascade, + perm text not null, + unique (workspace_id, user_id, perm) +); +create index on workspace_user_perms (user_id); +create index on workspace_user_perms (workspace_id); + +-- Relation Invitations -- + +create table if not exists rel_invitations ( + id uuid not null primary key default uuidv7(), + email text not null, + workspace_id uuid not null references workspaces(id) on delete cascade, + class_oid oid not null, + created_by uuid not null references users(id) on delete restrict, + privilege text not null, + expires_at timestamptz, + unique (email, workspace_id, class_oid, privilege) +); +create index on rel_invitations (workspace_id, class_oid); +create index on rel_invitations (email); + +-- Portals -- + +create table if not exists portals ( + id uuid not null primary key default uuidv7(), + name text not null, + workspace_id uuid not null references workspaces(id) on delete cascade, + class_oid oid not null, + table_filter jsonb not null default 'null', + table_order_by jsonb not null default '[]' +); +create index on portals (workspace_id); + +create table if not exists fields ( + id uuid not null primary key default uuidv7(), + portal_id uuid not null references portals(id) on delete cascade, + name text not null, + presentation jsonb not null, + table_label text, + table_width_px int not null default 200 +); + +-- Forms -- + +create table if not exists form_transitions ( + id uuid not null primary key default uuidv7(), + source_id uuid not null references portals(id) on delete cascade, + dest_id uuid not null references portals(id) on delete restrict, + condition jsonb not null default 'null' +); +create index on form_transitions (source_id); + +create table if not exists field_form_prompts ( + id uuid not null primary key default uuidv7(), + field_id uuid not null references fields(id) on delete cascade, + language text not null, + content text not null default '', + unique (field_id, language) +); +create index on field_form_prompts (field_id); diff --git a/interim-models/src/client.rs b/interim-models/src/client.rs index 6830a24..02cbd97 100644 --- a/interim-models/src/client.rs +++ b/interim-models/src/client.rs @@ -1,5 +1,6 @@ use sqlx::{PgConnection, Postgres, pool::PoolConnection}; +#[derive(Debug)] pub struct AppDbClient { pub(crate) conn: PoolConnection, } diff --git a/interim-models/src/field.rs b/interim-models/src/field.rs index c7d8d98..68df728 100644 --- a/interim-models/src/field.rs +++ b/interim-models/src/field.rs @@ -1,3 +1,4 @@ +use chrono::{DateTime, Utc}; use derive_builder::Builder; use interim_pgtypes::pg_attribute::PgAttribute; use serde::{Deserialize, Serialize}; @@ -22,14 +23,14 @@ pub struct Field { /// Name of the database column. pub name: String, - /// Optional human friendly label. - pub label: Option, - /// Refer to documentation for `Presentation`. pub presentation: sqlx::types::Json, + /// Optional human friendly label. + pub table_label: Option, + /// Width of UI table column in pixels. 
- pub width_px: i32, + pub table_width_px: i32, } impl Field { @@ -44,9 +45,9 @@ impl Field { Presentation::default_from_attr(attr).map(|presentation| Self { id: Uuid::now_v7(), name: attr.attname.clone(), - label: None, + table_label: None, presentation: sqlx::types::Json(presentation), - width_px: 200, + table_width_px: 200, }) } @@ -64,21 +65,24 @@ impl Field { "UUID" => { Encodable::Uuid( as Decode>::decode(value_ref).unwrap()) } + "TIMESTAMPTZ" => Encodable::Timestamp( + > as Decode>::decode(value_ref).unwrap(), + ), _ => return Err(ParseError::UnknownType), }) } - pub fn belonging_to_lens(lens_id: Uuid) -> BelongingToLensQuery { - BelongingToLensQuery { lens_id } + pub fn belonging_to_portal(portal_id: Uuid) -> BelongingToPortalQuery { + BelongingToPortalQuery { portal_id } } } #[derive(Clone, Debug)] -pub struct BelongingToLensQuery { - lens_id: Uuid, +pub struct BelongingToPortalQuery { + portal_id: Uuid, } -impl BelongingToLensQuery { +impl BelongingToPortalQuery { pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result, sqlx::Error> { query_as!( Field, @@ -86,13 +90,13 @@ impl BelongingToLensQuery { select id, name, - label, + table_label, presentation as "presentation: sqlx::types::Json", - width_px + table_width_px from fields -where lens_id = $1 +where portal_id = $1 "#, - self.lens_id + self.portal_id ) .fetch_all(&mut *app_db.conn) .await @@ -101,13 +105,13 @@ where lens_id = $1 #[derive(Builder, Clone, Debug)] pub struct InsertableField { - lens_id: Uuid, + portal_id: Uuid, name: String, #[builder(default)] - label: Option, + table_label: Option, presentation: Presentation, #[builder(default = 200)] - width_px: i32, + table_width_px: i32, } impl InsertableField { @@ -116,21 +120,20 @@ impl InsertableField { Field, r#" insert into fields -(id, lens_id, name, label, presentation, width_px) -values ($1, $2, $3, $4, $5, $6) +(portal_id, name, table_label, presentation, table_width_px) +values ($1, $2, $3, $4, $5) returning id, name, - label, + table_label, presentation as "presentation: sqlx::types::Json", - width_px + table_width_px "#, - Uuid::now_v7(), - self.lens_id, + self.portal_id, self.name, - self.label, + self.table_label, sqlx::types::Json::<_>(self.presentation) as sqlx::types::Json, - self.width_px, + self.table_width_px, ) .fetch_one(&mut *app_db.conn) .await @@ -150,8 +153,10 @@ impl InsertableFieldBuilder { /// Error when parsing a sqlx value to JSON #[derive(Debug, Error)] pub enum ParseError { + // TODO: can this be removed? #[error("incompatible json type")] BadJsonType, + #[error("field not found in row")] FieldNotFound, #[error("unknown postgres type")] diff --git a/interim-models/src/field_form_prompt.rs b/interim-models/src/field_form_prompt.rs new file mode 100644 index 0000000..e69b5da --- /dev/null +++ b/interim-models/src/field_form_prompt.rs @@ -0,0 +1,129 @@ +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; +use sqlx::query_as; +use uuid::Uuid; + +use crate::{client::AppDbClient, language::Language}; + +/// A localized prompt to display above or alongside the form input for the +/// given field. +/// +/// There may be zero or one `field_form_prompt` entries for each +/// `(field_id, language)` pair. (This uniqueness should be enforced by the +/// database.) +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct FieldFormPrompt { + /// Primary key (defaults to UUIDv7). + pub id: Uuid, + + /// ID of the field to which this prompt belongs. 
+ pub field_id: Uuid, + + /// [ISO 639-3](https://en.wikipedia.org/wiki/List_of_ISO_639-3_codes) + /// language code. + pub language: Language, + + /// Prompt content for this field, in this language. + pub content: String, +} + +impl FieldFormPrompt { + /// Build an insert statement to create a new prompt. + pub fn insert() -> InsertableBuilder { + InsertableBuilder::default() + } + + /// Build an update statement to alter the content of an existing prompt. + pub fn update() -> UpdateBuilder { + UpdateBuilder::default() + } + + /// Build a single-field query by field ID. + pub fn belonging_to_field(id: Uuid) -> BelongingToFieldQuery { + BelongingToFieldQuery { id } + } +} + +#[derive(Builder, Clone, Debug)] +pub struct Insertable { + field_id: Uuid, + language: Language, + content: String, +} + +impl Insertable { + pub async fn execute(self, app_db: &mut AppDbClient) -> Result { + query_as!( + FieldFormPrompt, + r#" +insert into field_form_prompts (field_id, language, content) values ($1, $2, $3) +returning + id, + field_id, + language as "language: Language", + content +"#, + self.field_id, + self.language.to_string(), + self.content, + ) + .fetch_one(app_db.get_conn()) + .await + } +} + +#[derive(Builder, Clone, Debug, Default)] +pub struct Update { + id: Uuid, + content: String, +} + +impl Update { + pub async fn execute(self, app_db: &mut AppDbClient) -> Result { + query_as!( + FieldFormPrompt, + r#" +update field_form_prompts +set content = $1 +where id = $2 +returning + id, + field_id, + language as "language: Language", + content +"#, + self.content, + self.id, + ) + .fetch_one(app_db.get_conn()) + .await + } +} + +#[derive(Clone, Debug)] +pub struct BelongingToFieldQuery { + id: Uuid, +} + +impl BelongingToFieldQuery { + pub async fn fetch_all( + self, + app_db: &mut AppDbClient, + ) -> Result, sqlx::Error> { + query_as!( + FieldFormPrompt, + r#" +select + id, + field_id, + language as "language: Language", + content +from field_form_prompts +where field_id = $1 +"#, + self.id, + ) + .fetch_all(app_db.get_conn()) + .await + } +} diff --git a/interim-models/src/form_transition.rs b/interim-models/src/form_transition.rs new file mode 100644 index 0000000..da682b6 --- /dev/null +++ b/interim-models/src/form_transition.rs @@ -0,0 +1,106 @@ +use derive_builder::Builder; +use sqlx::{query_as, types::Json}; +use uuid::Uuid; + +use crate::{client::AppDbClient, expression::PgExpressionAny}; + +/// A form transition directionally connects two portals within the same +/// workspace, representing a potential navigation of a user between two forms. +/// If the user submits a form, form transitions with `source_id` corresponding +/// to that portal will be evaluated one by one (in order by ID---that is, by +/// creation time), and the first with a condition evaluating to true will be +/// used to direct the user to the form corresponding to portal `dest_id`. +#[derive(Clone, Debug)] +pub struct FormTransition { + /// Primary key (defaults to UUIDv7). + pub id: Uuid, + + /// When a user is filling out a sequence of forms, this is the ID of the + /// portal for which they have just submitted a form for. + /// + /// **Source portal is expected to belong to the same workspace as the + /// destination portal.** + pub source_id: Uuid, + + /// When a user is filling out a sequence of forms, this is the ID of the + /// portal for which they will be directed to if the condition evaluates to + /// true. 
+ /// + /// **Destination portal is expected to belong to the same workspace as the + /// source portal.** + pub dest_id: Uuid, + + /// Represents a semi-arbitrary Postgres expression which will permit this + /// transition to be followed, only if the expression evaluates to true at + /// the time of the source form's submission. + pub condition: Json>, +} + +impl FormTransition { + /// Build an insert statement to create a new transtition. + pub fn insert() -> InsertableBuilder { + InsertableBuilder::default() + } + + /// Build a single-field query by source portal ID. + pub fn with_source(id: Uuid) -> WithSourceQuery { + WithSourceQuery { id } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct WithSourceQuery { + id: Uuid, +} + +impl WithSourceQuery { + pub async fn fetch_all( + self, + app_db: &mut AppDbClient, + ) -> Result, sqlx::Error> { + query_as!( + FormTransition, + r#" +select + id, + source_id, + dest_id, + condition as "condition: Json>" +from form_transitions +where source_id = $1 +"#, + self.id, + ) + .fetch_all(app_db.get_conn()) + .await + } +} + +#[derive(Builder, Clone, Debug)] +pub struct Insertable { + source_id: Uuid, + dest_id: Uuid, + condition: Option, +} + +impl Insertable { + pub async fn execute(self, app_db: &mut AppDbClient) -> Result { + query_as!( + FormTransition, + r#" +insert into form_transitions (source_id, dest_id, condition) +values ($1, $2, $3) +returning + id, + source_id, + dest_id, + condition as "condition: Json>" +"#, + self.source_id, + self.dest_id, + Json(self.condition) as Json>, + ) + .fetch_one(app_db.get_conn()) + .await + } +} diff --git a/interim-models/src/language.rs b/interim-models/src/language.rs new file mode 100644 index 0000000..d3820fe --- /dev/null +++ b/interim-models/src/language.rs @@ -0,0 +1,39 @@ +use serde::{Deserialize, Serialize}; +use sqlx::Decode; +use strum::{EnumIter, EnumString}; + +/// Languages represented as +/// [ISO 639-3 codes](https://en.wikipedia.org/wiki/List_of_ISO_639-3_codes). +#[derive( + Clone, Debug, Decode, Deserialize, strum::Display, PartialEq, Serialize, EnumIter, EnumString, +)] +#[serde(rename_all = "lowercase")] +#[strum(serialize_all = "lowercase")] +// [`sqlx`] implements Decode and Encode to/from the Postgres `TEXT` type based +// on the [`std::fmt::Display`] and [`std::str::FromStr`] traits, so it should +// use the transformations applied by [`strum`]. +// +pub enum Language { + Deu, + Eng, + Spa, +} + +impl Default for Language { + /// Language defaults to English when necessary, as the product is being + /// developed with a primarily English speaking/reading/writing market in + /// mind. 
+ fn default() -> Self { + Self::Eng + } +} + +impl Language { + pub fn as_locale_str(&self) -> &'static str { + match self { + Self::Deu => "Deutsch", + Self::Eng => "English", + Self::Spa => "EspaƱol", + } + } +} diff --git a/interim-models/src/lib.rs b/interim-models/src/lib.rs index e10c0ca..9395d41 100644 --- a/interim-models/src/lib.rs +++ b/interim-models/src/lib.rs @@ -1,11 +1,15 @@ -pub mod base; pub mod client; pub mod encodable; pub mod expression; pub mod field; -pub mod lens; +pub mod field_form_prompt; +pub mod form_transition; +pub mod language; +pub mod portal; pub mod presentation; pub mod rel_invitation; pub mod user; +pub mod workspace; +pub mod workspace_user_perm; pub static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!(); diff --git a/interim-models/src/lens.rs b/interim-models/src/portal.rs similarity index 51% rename from interim-models/src/lens.rs rename to interim-models/src/portal.rs index 8ccac4b..5f2b6b8 100644 --- a/interim-models/src/lens.rs +++ b/interim-models/src/portal.rs @@ -5,31 +5,46 @@ use uuid::Uuid; use crate::{client::AppDbClient, expression::PgExpressionAny}; +/// A portal is a derivative representation of a Postgres relation. #[derive(Clone, Debug, Serialize)] -pub struct Lens { +pub struct Portal { + /// Primary key (defaults to UUIDv7). pub id: Uuid, + + /// Human friendly name for portal. pub name: String, - pub base_id: Uuid, + + /// Workspace to which this portal belongs. + pub workspace_id: Uuid, + + /// OID of the underlying Postgres relation. Currently, this is expected + /// to be a normal table, not a view, etc. pub class_oid: Oid, - pub display_type: LensDisplayType, - pub filter: Json>, + + /// JSONB-encoded expression to use for filtering rows in the web-based + /// table view. + pub table_filter: Json>, } -impl Lens { - pub fn insertable_builder() -> InsertableLensBuilder { - InsertableLensBuilder::default() +impl Portal { + /// Build an insert statement to create a new portal. + pub fn insert() -> InsertablePortalBuilder { + InsertablePortalBuilder::default() } - pub fn update() -> LensUpdateBuilder { - LensUpdateBuilder::default() + /// Build an update statement to alter an existing portal. + pub fn update() -> PortalUpdateBuilder { + PortalUpdateBuilder::default() } + /// Build a single-field query by portal ID. pub fn with_id(id: Uuid) -> WithIdQuery { WithIdQuery { id } } - pub fn belonging_to_base(base_id: Uuid) -> BelongingToBaseQuery { - BelongingToBaseQuery { base_id } + /// Build a query by workspace ID and relation OID. 
+ pub fn belonging_to_workspace(workspace_id: Uuid) -> BelongingToWorkspaceQuery { + BelongingToWorkspaceQuery { workspace_id } } } @@ -42,18 +57,17 @@ impl WithIdQuery { pub async fn fetch_optional( self, app_db: &mut AppDbClient, - ) -> Result, sqlx::Error> { + ) -> Result, sqlx::Error> { query_as!( - Lens, + Portal, r#" select id, name, - base_id, + workspace_id, class_oid, - display_type as "display_type: LensDisplayType", - filter as "filter: Json>" -from lenses + table_filter as "table_filter: Json>" +from portals where id = $1 "#, self.id @@ -62,18 +76,17 @@ where id = $1 .await } - pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result { + pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result { query_as!( - Lens, + Portal, r#" select id, name, - base_id, + workspace_id, class_oid, - display_type as "display_type: LensDisplayType", - filter as "filter: Json>" -from lenses + table_filter as "table_filter: Json>" +from portals where id = $1 "#, self.id @@ -84,14 +97,14 @@ where id = $1 } #[derive(Clone, Debug)] -pub struct BelongingToBaseQuery { - base_id: Uuid, +pub struct BelongingToWorkspaceQuery { + workspace_id: Uuid, } -impl BelongingToBaseQuery { +impl BelongingToWorkspaceQuery { pub fn belonging_to_rel(self, rel_oid: Oid) -> BelongingToRelQuery { BelongingToRelQuery { - base_id: self.base_id, + workspace_id: self.workspace_id, rel_oid, } } @@ -99,26 +112,25 @@ impl BelongingToBaseQuery { #[derive(Clone, Debug)] pub struct BelongingToRelQuery { - base_id: Uuid, + workspace_id: Uuid, rel_oid: Oid, } impl BelongingToRelQuery { - pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result, sqlx::Error> { + pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result, sqlx::Error> { query_as!( - Lens, + Portal, r#" select id, name, - base_id, + workspace_id, class_oid, - display_type as "display_type: LensDisplayType", - filter as "filter: Json>" -from lenses -where base_id = $1 and class_oid = $2 + table_filter as "table_filter: Json>" +from portals +where workspace_id = $1 and class_oid = $2 "#, - self.base_id, + self.workspace_id, self.rel_oid ) .fetch_all(&mut *app_db.conn) @@ -133,34 +145,30 @@ pub enum LensDisplayType { } #[derive(Builder, Clone, Debug)] -pub struct InsertableLens { +pub struct InsertablePortal { name: String, - base_id: Uuid, + workspace_id: Uuid, class_oid: Oid, - display_type: LensDisplayType, } -impl InsertableLens { - pub async fn insert(self, app_db: &mut AppDbClient) -> Result { +impl InsertablePortal { + pub async fn execute(self, app_db: &mut AppDbClient) -> Result { query_as!( - Lens, + Portal, r#" -insert into lenses -(id, base_id, class_oid, name, display_type) -values ($1, $2, $3, $4, $5) +insert into portals +(workspace_id, class_oid, name) +values ($1, $2, $3) returning id, name, - base_id, + workspace_id, class_oid, - display_type as "display_type: LensDisplayType", - filter as "filter: Json>" + table_filter as "table_filter: Json>" "#, - Uuid::now_v7(), - self.base_id, + self.workspace_id, self.class_oid, self.name, - self.display_type as LensDisplayType ) .fetch_one(&mut *app_db.conn) .await @@ -168,17 +176,17 @@ returning } #[derive(Builder, Clone, Debug)] -pub struct LensUpdate { +pub struct PortalUpdate { id: Uuid, #[builder(setter(strip_option = true))] filter: Option>, } -impl LensUpdate { +impl PortalUpdate { pub async fn execute(self, app_db: &mut AppDbClient) -> Result<(), sqlx::Error> { if let Some(filter) = self.filter { query!( - "update lenses set filter = $1 where id = $2", + "update portals set 
table_filter = $1 where id = $2", Json(filter) as Json>, self.id ) diff --git a/interim-models/src/presentation.rs b/interim-models/src/presentation.rs index a187d4b..62868a3 100644 --- a/interim-models/src/presentation.rs +++ b/interim-models/src/presentation.rs @@ -1,10 +1,11 @@ use interim_pgtypes::pg_attribute::PgAttribute; use serde::{Deserialize, Serialize}; +use strum::{EnumIter, EnumString}; pub const RFC_3339_S: &str = "%Y-%m-%dT%H:%M:%S"; /// Struct defining how a field's is displayed and how it accepts input in UI. -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, EnumIter, EnumString, PartialEq, Serialize, strum::Display)] #[serde(tag = "t", content = "c")] pub enum Presentation { Array { inner: Box }, @@ -53,9 +54,27 @@ impl Presentation { } } -#[derive(Clone, Debug, Deserialize, Serialize)] +impl Default for Presentation { + /// Defaults to [`Self::Text`] as a reasonable fallback. The [`Default`] + /// trait is implemented for convenience, but in the vast majority of cases + /// the presentation value should be well defined and this should not be + /// called directly. + fn default() -> Self { + Self::Text { + input_mode: Default::default(), + } + } +} + +#[derive(Clone, Debug, Deserialize, EnumString, EnumIter, PartialEq, Serialize, strum::Display)] #[serde(tag = "t", content = "c")] pub enum TextInputMode { SingleLine {}, MultiLine {}, } + +impl Default for TextInputMode { + fn default() -> Self { + Self::MultiLine {} + } +} diff --git a/interim-models/src/rel_invitation.rs b/interim-models/src/rel_invitation.rs index d18d089..00b5f4a 100644 --- a/interim-models/src/rel_invitation.rs +++ b/interim-models/src/rel_invitation.rs @@ -10,7 +10,7 @@ use crate::client::AppDbClient; pub struct RelInvitation { pub id: Uuid, pub email: String, - pub base_id: Uuid, + pub workspace_id: Uuid, pub class_oid: Oid, pub created_by: Uuid, pub privilege: String, @@ -53,7 +53,7 @@ where class_oid = $1 #[derive(Builder, Clone, Debug)] pub struct UpsertableRelInvitation { email: String, - base_id: Uuid, + workspace_id: Uuid, class_oid: Oid, created_by: Uuid, privilege: PgPrivilegeType, @@ -67,16 +67,15 @@ impl UpsertableRelInvitation { RelInvitation, " insert into rel_invitations -(id, email, base_id, class_oid, privilege, created_by, expires_at) -values ($1, $2, $3, $4, $5, $6, $7) -on conflict (email, base_id, class_oid, privilege) do update set +(email, workspace_id, class_oid, privilege, created_by, expires_at) +values ($1, $2, $3, $4, $5, $6) +on conflict (email, workspace_id, class_oid, privilege) do update set created_by = excluded.created_by, expires_at = excluded.expires_at returning * ", - Uuid::now_v7(), self.email, - self.base_id, + self.workspace_id, self.class_oid, self.privilege.to_abbrev().to_string(), self.created_by, diff --git a/interim-models/src/base.rs b/interim-models/src/workspace.rs similarity index 52% rename from interim-models/src/base.rs rename to interim-models/src/workspace.rs index ac0a26a..9d36f59 100644 --- a/interim-models/src/base.rs +++ b/interim-models/src/workspace.rs @@ -1,27 +1,39 @@ use derive_builder::Builder; +use redact::Secret; use sqlx::query_as; +use url::Url; use uuid::Uuid; use crate::client::AppDbClient; +/// A workspace is 1:1 with a Postgres "database". #[derive(Clone, Debug)] -pub struct Base { +pub struct Workspace { + /// Primary key (defaults to UUIDv7). pub id: Uuid, + + /// Human friendly name for the workspace. 
pub name: String, - pub url: String, + + /// `postgresql://` URL of the instance and database hosting this workspace. + pub url: Secret, + + /// ID of the user account that created this workspace. pub owner_id: Uuid, - pub user_role_prefix: String, } -impl Base { - pub fn insertable_builder() -> InsertableBaseBuilder { - InsertableBaseBuilder::default() +impl Workspace { + /// Build an insert statement to create a new workspace. + pub fn insert() -> InsertableWorkspaceBuilder { + InsertableWorkspaceBuilder::default() } + /// Build a single-field query by workspace ID. pub fn with_id(id: Uuid) -> WithIdQuery { WithIdQuery { id } } + /// Build a query for workspaces filtered by a user's Phono permissions. pub fn with_permission_in>( perms: I, ) -> WithPermissionInQueryPartial { @@ -49,13 +61,13 @@ pub struct WithPermissionInQuery { } impl WithPermissionInQuery { - pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result, sqlx::Error> { + pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result, sqlx::Error> { query_as!( - Base, + Workspace, " -select bases.* -from bases inner join base_user_perms as p - on p.base_id = bases.id +select workspaces.* +from workspaces inner join workspace_user_perms as p + on p.workspace_id = workspaces.id where p.user_id = $1 and perm = ANY($2) ", self.user_id, @@ -74,37 +86,44 @@ impl WithIdQuery { pub async fn fetch_optional( self, app_db: &mut AppDbClient, - ) -> Result, sqlx::Error> { - query_as!(Base, "select * from bases where id = $1", &self.id) - .fetch_optional(&mut *app_db.conn) - .await + ) -> Result, sqlx::Error> { + query_as!( + Workspace, + "select * from workspaces where id = $1", + &self.id + ) + .fetch_optional(&mut *app_db.conn) + .await } - pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result { - query_as!(Base, "select * from bases where id = $1", &self.id) - .fetch_one(&mut *app_db.conn) - .await + pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result { + query_as!( + Workspace, + "select * from workspaces where id = $1", + &self.id + ) + .fetch_one(&mut *app_db.conn) + .await } } #[derive(Builder)] -pub struct InsertableBase { - url: String, +pub struct InsertableWorkspace { + url: Url, owner_id: Uuid, } -impl InsertableBase { - pub async fn insert(self, app_db: &mut AppDbClient) -> Result { +impl InsertableWorkspace { + pub async fn insert(self, app_db: &mut AppDbClient) -> Result { query_as!( - Base, + Workspace, " -insert into bases -(id, url, owner_id) -values ($1, $2, $3) +insert into workspaces +(url, owner_id) +values ($1, $2) returning * ", - Uuid::now_v7(), - self.url, + self.url.to_string(), self.owner_id ) .fetch_one(&mut *app_db.conn) diff --git a/interim-models/src/workspace_user_perm.rs b/interim-models/src/workspace_user_perm.rs new file mode 100644 index 0000000..e6f4d92 --- /dev/null +++ b/interim-models/src/workspace_user_perm.rs @@ -0,0 +1,130 @@ +use std::str::FromStr; + +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; +use sqlx::{Decode, Encode, Postgres, query_as}; +use strum::EnumString; +use uuid::Uuid; + +use crate::client::AppDbClient; + +/// Assigns an access control permission on a workspace to a user. These are +/// derived from the permission grants of the workspace's backing database. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct WorkspaceUserPerm { + /// Primary key (defaults to UUIDv7). + pub id: Uuid, + + /// Workspace to which the permission belongs. 
+ pub workspace_id: Uuid, + + /// **Synthesized field** generated by joining to the `workspaces` table. + pub workspace_name: String, + + /// User to which the permission belongs. + pub user_id: Uuid, + + /// Permission assigned to the user (currently only "connect"). + pub perm: PermissionValue, +} + +impl WorkspaceUserPerm { + /// Construct a single-field query to fetch workspace permissions assigned + /// to a user. + pub fn belonging_to_user(id: Uuid) -> BelongingToUserQuery { + BelongingToUserQuery { id } + } + + /// Build an insert statement to create a new object. + pub fn insert() -> InsertBuilder { + InsertBuilder::default() + } +} + +#[derive(Clone, Debug)] +pub struct BelongingToUserQuery { + id: Uuid, +} + +impl BelongingToUserQuery { + pub async fn fetch_all( + self, + app_db: &mut AppDbClient, + ) -> Result, sqlx::Error> { + query_as!( + WorkspaceUserPerm, + r#" +select + p.id as id, + p.workspace_id as workspace_id, + p.user_id as user_id, + p.perm as "perm: PermissionValue", + w.name as workspace_name +from workspace_user_perms as p + inner join workspaces as w + on w.id = p.workspace_id +where p.user_id = $1 +"#, + self.id, + ) + .fetch_all(app_db.get_conn()) + .await + } +} + +#[derive(Builder, Clone, Debug)] +pub struct Insert { + workspace_id: Uuid, + user_id: Uuid, + perm: PermissionValue, +} + +impl Insert { + pub async fn execute(self, app_db: &mut AppDbClient) -> Result { + query_as!( + WorkspaceUserPerm, + r#" +with p as ( + insert into workspace_user_perms (workspace_id, user_id, perm) values ($1, $2, $3) + returning + id, + workspace_id, + user_id, + perm +) +select + p.id as id, + p.workspace_id as workspace_id, + p.user_id as user_id, + p.perm as "perm: PermissionValue", + w.name as workspace_name +from workspace_user_perms as p + inner join workspaces as w + on w.id = p.workspace_id +"#, + self.workspace_id, + self.user_id, + self.perm.to_string(), + ) + .fetch_one(app_db.get_conn()) + .await + } +} + +// TODO: The sqlx::Decode derive macro doesn't follow the strum serialization. +// Does sqlx::Encode? +#[derive(Clone, Debug, Deserialize, Encode, EnumString, PartialEq, Serialize, strum::Display)] +#[serde(rename = "snake_case")] +#[strum(serialize_all = "snake_case")] +pub enum PermissionValue { + Connect, +} + +impl Decode<'_, Postgres> for PermissionValue { + fn decode( + value: ::ValueRef<'_>, + ) -> Result { + let value = <&str as Decode>::decode(value)?; + Ok(Self::from_str(value)?) + } +} diff --git a/interim-pgtypes/src/client.rs b/interim-pgtypes/src/client.rs index e38d1f3..062df69 100644 --- a/interim-pgtypes/src/client.rs +++ b/interim-pgtypes/src/client.rs @@ -2,11 +2,13 @@ use sqlx::{PgConnection, Postgres, Row as _, pool::PoolConnection, query}; use crate::escape_identifier; -pub struct BaseClient { +/// Newtype to differentiate between workspace and application database +/// connections. 
+pub struct WorkspaceClient { pub(crate) conn: PoolConnection, } -impl BaseClient { +impl WorkspaceClient { pub fn from_pool_conn(conn: PoolConnection) -> Self { Self { conn } } diff --git a/interim-pgtypes/src/pg_attribute.rs b/interim-pgtypes/src/pg_attribute.rs index abb0745..09681d8 100644 --- a/interim-pgtypes/src/pg_attribute.rs +++ b/interim-pgtypes/src/pg_attribute.rs @@ -1,7 +1,7 @@ use serde::Serialize; use sqlx::{postgres::types::Oid, query_as}; -use crate::client::BaseClient; +use crate::client::WorkspaceClient; #[derive(Clone, Serialize)] pub struct PgAttribute { @@ -59,7 +59,10 @@ pub struct AllForRelQuery { } impl AllForRelQuery { - pub async fn fetch_all(self, client: &mut BaseClient) -> Result, sqlx::Error> { + pub async fn fetch_all( + self, + client: &mut WorkspaceClient, + ) -> Result, sqlx::Error> { query_as!( PgAttribute, r#" @@ -96,7 +99,10 @@ pub struct PkeysForRelQuery { } impl PkeysForRelQuery { - pub async fn fetch_all(self, client: &mut BaseClient) -> Result, sqlx::Error> { + pub async fn fetch_all( + self, + client: &mut WorkspaceClient, + ) -> Result, sqlx::Error> { query_as!( PgAttribute, r#" diff --git a/interim-pgtypes/src/pg_class.rs b/interim-pgtypes/src/pg_class.rs index 2155a23..e60cdd8 100644 --- a/interim-pgtypes/src/pg_class.rs +++ b/interim-pgtypes/src/pg_class.rs @@ -1,6 +1,8 @@ use sqlx::{postgres::types::Oid, query_as}; -use crate::{client::BaseClient, escape_identifier, pg_acl::PgAclItem, pg_namespace::PgNamespace}; +use crate::{ + client::WorkspaceClient, escape_identifier, pg_acl::PgAclItem, pg_namespace::PgNamespace, +}; #[derive(Clone, Debug)] pub struct PgClass { @@ -46,7 +48,7 @@ pub struct PgClass { impl PgClass { pub async fn fetch_namespace( &self, - client: &mut BaseClient, + client: &mut WorkspaceClient, ) -> Result { PgNamespace::fetch_by_oid(self.relnamespace, &mut *client.conn) .await? 
@@ -115,7 +117,7 @@ where } impl WithOidQuery { - pub async fn fetch_one(self, client: &mut BaseClient) -> Result { + pub async fn fetch_one(self, client: &mut WorkspaceClient) -> Result { with_oid_sqlx_query!(self.oid) .fetch_one(&mut *client.conn) .await @@ -123,7 +125,7 @@ impl WithOidQuery { pub async fn fetch_optional( self, - client: &mut BaseClient, + client: &mut WorkspaceClient, ) -> Result, sqlx::Error> { with_oid_sqlx_query!(self.oid) .fetch_optional(&mut *client.conn) @@ -136,7 +138,10 @@ pub struct WithKindInQuery { } impl WithKindInQuery { - pub async fn fetch_all(self, client: &mut BaseClient) -> Result, sqlx::Error> { + pub async fn fetch_all( + self, + client: &mut WorkspaceClient, + ) -> Result, sqlx::Error> { let kinds_i8: Vec<_> = self .kinds .into_iter() diff --git a/interim-pgtypes/src/pg_database.rs b/interim-pgtypes/src/pg_database.rs index aa8f9fc..0423ab9 100644 --- a/interim-pgtypes/src/pg_database.rs +++ b/interim-pgtypes/src/pg_database.rs @@ -1,6 +1,6 @@ use sqlx::{postgres::types::Oid, query_as}; -use crate::{client::BaseClient, pg_acl::PgAclItem}; +use crate::{client::WorkspaceClient, pg_acl::PgAclItem}; #[derive(Clone, Debug)] pub struct PgDatabase { @@ -48,7 +48,7 @@ impl PgDatabase { pub struct CurrentQuery {} impl CurrentQuery { - pub async fn fetch_one(self, client: &mut BaseClient) -> Result { + pub async fn fetch_one(self, client: &mut WorkspaceClient) -> Result { query_as!( PgDatabase, r#" diff --git a/interim-pgtypes/src/pg_role.rs b/interim-pgtypes/src/pg_role.rs index 12dcf82..d12ec13 100644 --- a/interim-pgtypes/src/pg_role.rs +++ b/interim-pgtypes/src/pg_role.rs @@ -3,7 +3,7 @@ use sqlx::{postgres::types::Oid, prelude::FromRow, query_as}; use thiserror::Error; use uuid::Uuid; -use crate::client::BaseClient; +use crate::client::WorkspaceClient; #[derive(Clone, Debug, Eq, Hash, FromRow, PartialEq)] pub struct PgRole { @@ -43,7 +43,10 @@ pub struct WithNameInQuery { } impl WithNameInQuery { - pub async fn fetch_all(&self, client: &mut BaseClient) -> Result, sqlx::Error> { + pub async fn fetch_all( + &self, + client: &mut WorkspaceClient, + ) -> Result, sqlx::Error> { query_as!( PgRole, r#" @@ -120,7 +123,7 @@ pub struct MembersOfOidQuery { impl MembersOfOidQuery { pub async fn fetch_tree( self, - client: &mut BaseClient, + client: &mut WorkspaceClient, ) -> Result, sqlx::Error> { let rows: Vec = query_as( " @@ -162,7 +165,7 @@ pub struct MembersOfRolnameQuery { impl MembersOfRolnameQuery { pub async fn fetch_tree( self, - client: &mut BaseClient, + client: &mut WorkspaceClient, ) -> Result, sqlx::Error> { // This could almost be a macro to DRY with MembersOfOidQuery, except // for the extra ::text:: cast required on the parameter in this query. 
@@ -206,7 +209,7 @@ pub struct GrantedToQuery { impl GrantedToQuery { pub async fn fetch_tree( self, - client: &mut BaseClient, + client: &mut WorkspaceClient, ) -> Result, sqlx::Error> { let rows: Vec = query_as( " diff --git a/interim-server/src/app_state.rs b/interim-server/src/app_state.rs index aac6243..1629e4d 100644 --- a/interim-server/src/app_state.rs +++ b/interim-server/src/app_state.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use anyhow::Result; use axum::{ extract::{FromRef, FromRequestParts}, @@ -9,14 +7,17 @@ use interim_models::client::AppDbClient; use oauth2::basic::BasicClient; use sqlx::postgres::PgPoolOptions; -use crate::{ - app_error::AppError, auth, base_pooler::BasePooler, sessions::PgStore, settings::Settings, -}; +use crate::app_error::AppError; +use crate::auth; +use crate::base_pooler::WorkspacePooler; +use crate::sessions::PgStore; +use crate::settings::Settings; /// Global app configuration +#[derive(Clone, Debug)] pub struct App { pub app_db: sqlx::PgPool, - pub base_pooler: BasePooler, + pub workspace_pooler: WorkspacePooler, pub oauth_client: BasicClient, pub reqwest_client: reqwest::Client, pub session_store: PgStore, @@ -34,12 +35,14 @@ impl App { let session_store = PgStore::new(app_db.clone()); let reqwest_client = reqwest::ClientBuilder::new().https_only(true).build()?; let oauth_client = auth::new_oauth_client(&settings)?; - - let base_pooler = BasePooler::new_with_app_db(app_db.clone()); + let workspace_pooler = WorkspacePooler::builder() + .app_db_pool(app_db.clone()) + .db_role_prefix(settings.db_role_prefix.clone()) + .build()?; Ok(Self { app_db, - base_pooler, + workspace_pooler, oauth_client, reqwest_client, session_store, @@ -48,36 +51,25 @@ impl App { } } -/// Global app configuration, arced for relatively inexpensive clones -pub type AppState = Arc; - /// State extractor for shared reqwest client #[derive(Clone)] pub struct ReqwestClient(pub reqwest::Client); -impl FromRef for ReqwestClient -where - S: Into + Clone, -{ - fn from_ref(state: &S) -> Self { - ReqwestClient(Into::::into(state.clone()).reqwest_client.clone()) +impl FromRef for ReqwestClient { + fn from_ref(state: &App) -> Self { + ReqwestClient(state.reqwest_client.clone()) } } -/// Extractor to automatically obtain a Deadpool Diesel connection +/// Extractor to automatically obtain a sqlx connection for the application +/// database. pub struct AppDbConn(pub AppDbClient); -impl FromRequestParts for AppDbConn -where - S: Into + Clone + Sync, -{ +impl FromRequestParts for AppDbConn { type Rejection = AppError; - async fn from_request_parts(_: &mut Parts, state: &S) -> Result { - let conn = Into::::into(state.clone()) - .app_db - .acquire() - .await?; + async fn from_request_parts(_: &mut Parts, state: &App) -> Result { + let conn = state.app_db.acquire().await?; Ok(Self(AppDbClient::from_pool_conn(conn))) } } diff --git a/interim-server/src/auth.rs b/interim-server/src/auth.rs index 067a8ce..3238ee0 100644 --- a/interim-server/src/auth.rs +++ b/interim-server/src/auth.rs @@ -15,7 +15,7 @@ use serde::{Deserialize, Serialize}; use crate::{ app_error::AppError, - app_state::{AppState, ReqwestClient}, + app_state::{App, ReqwestClient}, sessions::{AppSession, PgStore}, settings::Settings, }; @@ -47,7 +47,7 @@ pub fn new_oauth_client(settings: &Settings) -> Result { } /// Creates a router which can be nested within the higher level app router. 
-pub fn new_router() -> Router { +pub fn new_router() -> Router { Router::new() .route("/login", get(start_login)) .route("/callback", get(callback)) @@ -56,7 +56,7 @@ pub fn new_router() -> Router { /// HTTP get handler for /login async fn start_login( - State(state): State, + State(state): State, State(Settings { auth: auth_settings, root_path, @@ -134,7 +134,7 @@ async fn logout( } let jar = jar.remove(Cookie::from(auth_settings.cookie_name)); tracing::debug!("Removed session cookie from jar."); - Ok((jar, Redirect::to(&format!("{}/", root_path)))) + Ok((jar, Redirect::to(&format!("{root_path}/")))) } #[derive(Debug, Deserialize)] @@ -147,7 +147,7 @@ struct AuthRequestQuery { /// HTTP get handler for /callback async fn callback( Query(query): Query, - State(state): State, + State(state): State, State(Settings { auth: auth_settings, root_path, @@ -205,7 +205,7 @@ async fn callback( } tracing::debug!("successfully authenticated"); Ok(Redirect::to( - &redirect_target.unwrap_or(format!("{}/", root_path)), + &redirect_target.unwrap_or(format!("{root_path}/")), )) } diff --git a/interim-server/src/base_pooler.rs b/interim-server/src/base_pooler.rs index 7160cdf..ee58b84 100644 --- a/interim-server/src/base_pooler.rs +++ b/interim-server/src/base_pooler.rs @@ -2,39 +2,38 @@ use std::{collections::HashMap, sync::Arc, time::Duration}; use anyhow::Result; use axum::extract::FromRef; -use interim_models::{base::Base, client::AppDbClient}; -use interim_pgtypes::client::BaseClient; +use derive_builder::Builder; +use interim_models::{client::AppDbClient, workspace::Workspace}; +use interim_pgtypes::client::WorkspaceClient; use sqlx::{Executor, PgPool, postgres::PgPoolOptions, raw_sql}; use tokio::sync::{OnceCell, RwLock}; use uuid::Uuid; -use crate::app_state::AppState; +use crate::app_state::App; const MAX_CONNECTIONS: u32 = 4; const IDLE_SECONDS: u64 = 3600; -// TODO: The Arc this uses will probably need to be cleaned up for -// performance eventually. - /// A collection of multiple SQLx Pools. -#[derive(Clone)] -pub struct BasePooler { +#[derive(Builder, Clone, Debug)] +pub struct WorkspacePooler { + #[builder(default, setter(skip))] pools: Arc>>>, - app_db: PgPool, + app_db_pool: PgPool, + db_role_prefix: String, } -impl BasePooler { - pub fn new_with_app_db(app_db: PgPool) -> Self { - Self { - app_db, - pools: Arc::new(RwLock::new(HashMap::new())), - } +impl WorkspacePooler { + pub fn builder() -> WorkspacePoolerBuilder { + WorkspacePoolerBuilder::default() } - async fn get_pool_for(&mut self, base_id: Uuid) -> Result { + async fn get_pool_for(&mut self, workspace_id: Uuid) -> Result { let init_cell = || async { - let mut app_db = AppDbClient::from_pool_conn(self.app_db.acquire().await?); - let base = Base::with_id(base_id).fetch_one(&mut app_db).await?; + let mut app_db = AppDbClient::from_pool_conn(self.app_db_pool.acquire().await?); + let workspace = Workspace::with_id(workspace_id) + .fetch_one(&mut app_db) + .await?; Ok(PgPoolOptions::new() .min_connections(0) .max_connections(MAX_CONNECTIONS) @@ -58,13 +57,13 @@ discard sequences; Ok(true) }) }) - .connect(&base.url) + .connect(&workspace.url.expose_secret()) .await?) }; // Attempt to get an existing pool without write-locking the map let pools = self.pools.read().await; - if let Some(cell) = pools.get(&base_id) { + if let Some(cell) = pools.get(&workspace_id) { return Ok(cell .get_or_try_init::(init_cell) .await? 
@@ -72,7 +71,7 @@ discard sequences; } drop(pools); // Release read lock let mut pools = self.pools.write().await; - let entry = pools.entry(base_id).or_insert(OnceCell::new()); + let entry = pools.entry(workspace_id).or_insert(OnceCell::new()); Ok(entry .get_or_try_init::(init_cell) .await? @@ -88,14 +87,12 @@ discard sequences; &mut self, base_id: Uuid, set_role: RoleAssignment, - ) -> Result { - let mut app_db = AppDbClient::from_pool_conn(self.app_db.acquire().await?); + ) -> Result { let pool = self.get_pool_for(base_id).await?; - let mut client = BaseClient::from_pool_conn(pool.acquire().await?); + let mut client = WorkspaceClient::from_pool_conn(pool.acquire().await?); match set_role { RoleAssignment::User(id) => { - let base = Base::with_id(base_id).fetch_one(&mut app_db).await?; - let prefix = base.user_role_prefix; + let prefix = &self.db_role_prefix; let user_id = id.simple(); client.init_role(&format!("{prefix}{user_id}")).await?; } @@ -122,12 +119,9 @@ discard sequences; // TODO: Add a cleanup method to remove entries with no connections } -impl FromRef for BasePooler -where - S: Into + Clone, -{ - fn from_ref(state: &S) -> Self { - Into::::into(state.clone()).base_pooler.clone() +impl FromRef for WorkspacePooler { + fn from_ref(state: &App) -> Self { + state.workspace_pooler.clone() } } diff --git a/interim-server/src/base_user_perms.rs b/interim-server/src/base_user_perms.rs index 094c194..4e4b82e 100644 --- a/interim-server/src/base_user_perms.rs +++ b/interim-server/src/base_user_perms.rs @@ -1,9 +1,12 @@ use std::collections::HashSet; use anyhow::Result; -use interim_models::{base::Base, client::AppDbClient}; +use interim_models::{ + client::AppDbClient, + workspace_user_perm::{self, WorkspaceUserPerm}, +}; use interim_pgtypes::{ - client::BaseClient, + client::WorkspaceClient, pg_acl::PgPrivilegeType, pg_database::PgDatabase, pg_role::{PgRole, RoleTree, user_id_from_rolname}, @@ -11,19 +14,15 @@ use interim_pgtypes::{ use sqlx::query; use uuid::Uuid; -pub struct BaseUserPerm { - pub id: Uuid, - pub base_id: Uuid, - pub user_id: Uuid, - pub perm: String, -} - -pub async fn sync_perms_for_base( - base_id: Uuid, +/// Derive workspace access control permissions from the permission grants of +/// a workspace's backing database. +pub(crate) async fn sync_for_workspace( + workspace_id: Uuid, app_db: &mut AppDbClient, - base_client: &mut BaseClient, + workspace_client: &mut WorkspaceClient, + db_role_prefix: &str, ) -> Result<()> { - let db = PgDatabase::current().fetch_one(base_client).await?; + let db = PgDatabase::current().fetch_one(workspace_client).await?; let explicit_roles = PgRole::with_name_in( db.datacl .unwrap_or_default() @@ -36,12 +35,12 @@ pub async fn sync_perms_for_base( .map(|item| item.grantee) .collect(), ) - .fetch_all(base_client) + .fetch_all(workspace_client) .await?; let mut all_roles: HashSet = HashSet::new(); for explicit_role in explicit_roles { if let Some(role_tree) = RoleTree::members_of_oid(explicit_role.oid) - .fetch_tree(base_client) + .fetch_tree(workspace_client) .await? 
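        // Membership of each explicitly granted role is walked transitively
        // here, so users who only receive CONNECT through a group role are
        // still picked up by the permission sync below.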
{ for implicit_role in role_tree.flatten_inherited() { @@ -49,32 +48,25 @@ pub async fn sync_perms_for_base( } } } - let base = Base::with_id(base_id).fetch_one(app_db).await?; let user_ids: Vec = all_roles .iter() - .filter_map(|role| user_id_from_rolname(&role.rolname, &base.user_role_prefix).ok()) + .filter_map(|role| user_id_from_rolname(&role.rolname, db_role_prefix).ok()) .collect(); query!( - "delete from base_user_perms where base_id = $1 and not (user_id = any($2))", - base_id, + "delete from workspace_user_perms where workspace_id = $1 and not (user_id = any($2))", + workspace_id, user_ids.as_slice(), ) .execute(app_db.get_conn()) .await?; for user_id in user_ids { - query!( - " -insert into base_user_perms - (id, base_id, user_id, perm) -values ($1, $2, $3, 'connect') -on conflict (base_id, user_id, perm) do nothing -", - Uuid::now_v7(), - base.id, - user_id - ) - .execute(app_db.get_conn()) - .await?; + WorkspaceUserPerm::insert() + .workspace_id(workspace_id) + .user_id(user_id) + .perm(workspace_user_perm::PermissionValue::Connect) + .build()? + .execute(app_db) + .await?; } Ok(()) } diff --git a/interim-server/src/cli.rs b/interim-server/src/cli.rs index ae95f20..f029833 100644 --- a/interim-server/src/cli.rs +++ b/interim-server/src/cli.rs @@ -2,10 +2,10 @@ use std::net::SocketAddr; use anyhow::Result; use axum::{ - extract::Request, - http::{header::CONTENT_SECURITY_POLICY, HeaderValue}, - middleware::map_request, ServiceExt, + extract::Request, + http::{HeaderValue, header::CONTENT_SECURITY_POLICY}, + middleware::map_request, }; use chrono::{TimeDelta, Utc}; use clap::{Parser, Subcommand}; @@ -16,7 +16,7 @@ use tower_http::{ }; use crate::{ - app_state::AppState, middleware::lowercase_uri_path, router::new_router, worker::run_worker, + app_state::App, middleware::lowercase_uri_path, routes::new_router, worker::run_worker, }; #[derive(Parser)] @@ -43,7 +43,7 @@ pub enum Commands { // mechanisms like Governor::reset_all() } -pub async fn serve_command(state: AppState) -> Result<()> { +pub async fn serve_command(state: App) -> Result<()> { let router = ServiceBuilder::new() .layer(map_request(lowercase_uri_path)) .layer(TraceLayer::new_for_http()) @@ -73,7 +73,7 @@ pub async fn serve_command(state: AppState) -> Result<()> { .map_err(Into::into) } -pub async fn worker_command(args: &WorkerArgs, state: AppState) -> Result<()> { +pub async fn worker_command(args: &WorkerArgs, state: App) -> Result<()> { if let Some(loop_seconds) = args.auto_loop_seconds { let loop_delta = TimeDelta::seconds(i64::from(loop_seconds)); loop { diff --git a/interim-server/src/main.rs b/interim-server/src/main.rs index 3a0f55e..0742cab 100644 --- a/interim-server/src/main.rs +++ b/interim-server/src/main.rs @@ -1,10 +1,11 @@ +use anyhow::Result; use clap::Parser as _; use dotenvy::dotenv; use interim_models::MIGRATOR; use tracing_subscriber::EnvFilter; use crate::{ - app_state::{App, AppState}, + app_state::App, cli::{Cli, Commands, serve_command, worker_command}, settings::Settings, }; @@ -20,7 +21,6 @@ mod middleware; mod navbar; mod navigator; mod renderable_role_tree; -mod router; mod routes; mod sessions; mod settings; @@ -29,24 +29,25 @@ mod worker; /// Run CLI #[tokio::main] -async fn main() { +async fn main() -> Result<()> { // Attempt to pre-load .env in case it contains a RUST_LOG variable dotenv().ok(); tracing_subscriber::fmt() .with_env_filter(EnvFilter::from_default_env()) .init(); - let settings = Settings::load().unwrap(); + let settings = Settings::load()?; - let state: AppState = 
App::from_settings(settings.clone()).await.unwrap().into(); + let app = App::from_settings(settings.clone()).await?; if settings.run_database_migrations != 0 { - MIGRATOR.run(&state.app_db).await.unwrap(); + MIGRATOR.run(&app.app_db).await?; } let cli = Cli::parse(); match &cli.command { - Commands::Serve => serve_command(state).await.unwrap(), - Commands::Worker(args) => worker_command(args, state).await.unwrap(), + Commands::Serve => serve_command(app).await?, + Commands::Worker(args) => worker_command(args, app).await?, } + Ok(()) } diff --git a/interim-server/src/navbar.rs b/interim-server/src/navbar.rs index e2fdefa..fb656a9 100644 --- a/interim-server/src/navbar.rs +++ b/interim-server/src/navbar.rs @@ -1,47 +1,41 @@ -use std::collections::HashMap; - use anyhow::Result; use askama::Template; use derive_builder::Builder; -use interim_models::{base::Base, client::AppDbClient, lens::Lens}; +use interim_models::{client::AppDbClient, portal::Portal, workspace::Workspace}; use interim_pgtypes::{ - client::BaseClient, + client::WorkspaceClient, pg_class::{PgClass, PgRelKind}, }; use sqlx::postgres::types::Oid; use uuid::Uuid; -#[derive(Builder, Clone, Template)] -#[template(path = "navbar.html")] -pub struct Navbar { - pub base: Base, - pub namespaces: Vec, - #[builder(setter(strip_option))] - pub current: Option, - pub root_path: String, +use crate::navigator::Navigator; + +#[derive(Builder, Clone, Debug, Template)] +#[template(path = "workspace_nav.html")] +pub(crate) struct WorkspaceNav { + workspace: Workspace, + relations: Vec, + #[builder(default, setter(strip_option))] + current: Option, + navigator: Navigator, } -impl Navbar { - pub fn builder() -> NavbarBuilder { - NavbarBuilder::default() +impl WorkspaceNav { + pub fn builder() -> WorkspaceNavBuilder { + WorkspaceNavBuilder::default() } } #[derive(Clone, Debug)] -pub struct NamespaceItem { +pub struct RelationItem { pub name: String, - pub rels: Vec, + pub oid: Oid, + pub portals: Vec, } #[derive(Clone, Debug)] -pub struct RelItem { - pub name: String, - pub class_oid: Oid, - pub lenses: Vec, -} - -#[derive(Clone, Debug)] -pub struct LensItem { +pub struct PortalItem { pub name: String, pub id: Uuid, } @@ -53,56 +47,49 @@ pub enum NavLocation { #[derive(Clone, Debug, PartialEq)] pub enum RelLocation { - Lens(Uuid), - Rbac, + Portal(Uuid), + Sharing, } -impl NavbarBuilder { +impl WorkspaceNavBuilder { /// Helper function to populate relations and lenses automatically. + /// [`WorkspaceNavBuilder::workspace()`] must be called first, or else this + /// method will return an error. pub async fn populate_rels( &mut self, app_db: &mut AppDbClient, - base_client: &mut BaseClient, + workspace_client: &mut WorkspaceClient, ) -> Result<&mut Self> { let rels = PgClass::with_kind_in([PgRelKind::OrdinaryTable]) - .fetch_all(base_client) + .fetch_all(workspace_client) .await?; - let mut namespaces: HashMap> = HashMap::new(); + let mut rel_items = Vec::with_capacity(rels.len()); for rel in rels { if rel.regnamespace.as_str() != "pg_catalog" && rel.regnamespace.as_str() != "information_schema" { - let lenses = Lens::belonging_to_base( - self.base + let portals = Portal::belonging_to_workspace( + self.workspace .as_ref() - .ok_or(NavbarBuilderError::UninitializedField("base"))? + .ok_or(WorkspaceNavBuilderError::UninitializedField("workspace"))? 
.id, ) .belonging_to_rel(rel.oid) .fetch_all(app_db) .await?; - let rel_items = namespaces.entry(rel.regnamespace).or_default(); - rel_items.push(RelItem { + rel_items.push(RelationItem { name: rel.relname, - class_oid: rel.oid, - lenses: lenses + oid: rel.oid, + portals: portals .into_iter() - .map(|lens| LensItem { - name: lens.name, - id: lens.id, + .map(|portal| PortalItem { + name: portal.name, + id: portal.id, }) .collect(), }); } } - Ok(self.namespaces( - namespaces - .into_iter() - .map(|(name, rel_items)| NamespaceItem { - name, - rels: rel_items, - }) - .collect(), - )) + Ok(self.relations(rel_items)) } } diff --git a/interim-server/src/navigator.rs b/interim-server/src/navigator.rs index 3fc66ba..ed8f62c 100644 --- a/interim-server/src/navigator.rs +++ b/interim-server/src/navigator.rs @@ -3,9 +3,10 @@ use axum::{ http::request::Parts, response::{IntoResponse as _, Redirect, Response}, }; -use interim_models::lens::Lens; +use interim_models::portal::Portal; +use uuid::Uuid; -use crate::{app_error::AppError, app_state::AppState}; +use crate::{app_error::AppError, app_state::App}; /// Helper type for semantically generating URI paths, e.g. for redirects. #[derive(Clone, Debug)] @@ -15,33 +16,44 @@ pub struct Navigator { } impl Navigator { - pub fn lens_page(&self, lens: &Lens) -> Self { + pub(crate) fn workspace_page(&self, workspace_id: Uuid) -> Self { + Self { + sub_path: format!("/w/{0}/", workspace_id.simple()), + ..self.clone() + } + } + + pub(crate) fn portal_page(&self, portal: &Portal) -> Self { Self { sub_path: format!( - "/d/{0}/r/{1}/l/{2}/", - lens.base_id.simple(), - lens.class_oid.0, - lens.id.simple() + "/w/{0}/r/{1}/p/{2}/", + portal.workspace_id.simple(), + portal.class_oid.0, + portal.id.simple() ), ..self.clone() } } - pub fn redirect_to(&self) -> Response { - Redirect::to(&format!("{0}{1}", self.root_path, self.sub_path)).into_response() + pub(crate) fn get_root_path(&self) -> String { + self.root_path.to_owned() + } + + pub(crate) fn abs_path(&self) -> String { + format!("{0}{1}", self.root_path, self.sub_path) + } + + pub(crate) fn redirect_to(&self) -> Response { + Redirect::to(&self.abs_path()).into_response() } } -impl FromRequestParts for Navigator -where - S: Into + Clone + Sync, -{ +impl FromRequestParts for Navigator { type Rejection = AppError; - async fn from_request_parts(_: &mut Parts, state: &S) -> Result { - let app_state: AppState = state.clone().into(); + async fn from_request_parts(_: &mut Parts, state: &App) -> Result { Ok(Navigator { - root_path: app_state.settings.root_path.clone(), + root_path: state.settings.root_path.clone(), sub_path: "/".to_owned(), }) } diff --git a/interim-server/src/router.rs b/interim-server/src/router.rs index 4584e2c..7573039 100644 --- a/interim-server/src/router.rs +++ b/interim-server/src/router.rs @@ -14,13 +14,13 @@ use tower_http::{ set_header::SetResponseHeaderLayer, }; -use crate::{app_state::AppState, auth, routes}; +use crate::{app_state::App, auth, routes}; -pub fn new_router(state: AppState) -> Router<()> { - let base_path = state.settings.root_path.clone(); +pub fn new_router(state: App) -> Router<()> { + let root_path = state.settings.root_path.clone(); let app = Router::new() - .route_with_tsr("/databases/", get(routes::bases::list_bases_page)) - .route_with_tsr("/databases/add/", post(routes::bases::add_base_page)) + .route_with_tsr("/workspaces/", get(routes::bases::list_bases_page)) + .route_with_tsr("/workspaces/add/", post(routes::bases::add_base_page)) .route_with_tsr( "/d/{base_id}/config/", 
get(routes::bases::base_config_page_get), @@ -173,10 +173,10 @@ pub fn new_router(state: AppState) -> Router<()> { ), ) .with_state(state); - if base_path.is_empty() { + if root_path.is_empty() { app } else { - Router::new().nest(&base_path, app).fallback_service( + Router::new().nest(&root_path, app).fallback_service( ServeDir::new("static").not_found_service(ServeFile::new("static/_404.html")), ) } diff --git a/interim-server/src/routes/bases.rs b/interim-server/src/routes/bases.rs index bee5f20..13f4568 100644 --- a/interim-server/src/routes/bases.rs +++ b/interim-server/src/routes/bases.rs @@ -14,7 +14,7 @@ use uuid::Uuid; use crate::{ app_error::AppError, app_state::AppDbConn, - base_pooler::{self, BasePooler}, + base_pooler::{self, WorkspacePooler}, base_user_perms::sync_perms_for_base, settings::Settings, user::CurrentUser, @@ -94,7 +94,7 @@ pub struct BaseConfigPageForm { pub async fn base_config_page_post( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, Path(BaseConfigPagePath { base_id }): Path, diff --git a/interim-server/src/routes/lens_index.rs b/interim-server/src/routes/lens_index.rs index 87b7822..ae9c1af 100644 --- a/interim-server/src/routes/lens_index.rs +++ b/interim-server/src/routes/lens_index.rs @@ -10,7 +10,7 @@ use sqlx::postgres::types::Oid; use crate::{ app_error::AppError, app_state::AppDbConn, - base_pooler::{BasePooler, RoleAssignment}, + base_pooler::{RoleAssignment, WorkspacePooler}, navbar::{NavLocation, Navbar, RelLocation}, settings::Settings, user::CurrentUser, @@ -20,7 +20,7 @@ use super::LensPagePath; pub async fn lens_page_get( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, Path(LensPagePath { diff --git a/interim-server/src/routes/lens_insert.rs b/interim-server/src/routes/lens_insert.rs index dc8b4e1..f563337 100644 --- a/interim-server/src/routes/lens_insert.rs +++ b/interim-server/src/routes/lens_insert.rs @@ -12,7 +12,7 @@ use sqlx::{postgres::types::Oid, query}; use crate::{ app_error::AppError, app_state::AppDbConn, - base_pooler::{BasePooler, RoleAssignment}, + base_pooler::{RoleAssignment, WorkspacePooler}, navigator::Navigator, user::CurrentUser, }; @@ -20,7 +20,7 @@ use crate::{ use super::LensPagePath; pub async fn insert_page_post( - State(mut base_pooler): State, + State(mut base_pooler): State, navigator: Navigator, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, diff --git a/interim-server/src/routes/lenses.rs b/interim-server/src/routes/lenses.rs index 66e2681..7a764d6 100644 --- a/interim-server/src/routes/lenses.rs +++ b/interim-server/src/routes/lenses.rs @@ -26,7 +26,7 @@ use uuid::Uuid; use crate::{ app_error::{AppError, bad_request}, app_state::AppDbConn, - base_pooler::{BasePooler, RoleAssignment}, + base_pooler::{RoleAssignment, WorkspacePooler}, field_info::FieldInfo, navigator::Navigator, settings::Settings, @@ -104,7 +104,7 @@ pub struct AddLensPagePostForm { pub async fn add_lens_page_post( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, navigator: Navigator, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, @@ -144,7 +144,7 @@ pub async fn add_lens_page_post( pub async fn get_data_page_get( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, AppDbConn(mut app_db): 
AppDbConn, CurrentUser(current_user): CurrentUser, Path(LensPagePath { @@ -172,7 +172,7 @@ pub async fn get_data_page_get( .await?; let fields: Vec = { - let fields: Vec = Field::belonging_to_lens(lens.id) + let fields: Vec = Field::belonging_to_portal(lens.id) .fetch_all(&mut app_db) .await?; let mut field_info: Vec = Vec::with_capacity(fields.len()); @@ -287,7 +287,7 @@ fn try_presentation_from_form(form: &AddColumnPageForm) -> Result, + State(mut base_pooler): State, navigator: Navigator, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, @@ -385,7 +385,7 @@ pub struct UpdateValuePageForm { } pub async fn update_value_page_post( - State(mut base_pooler): State, + State(mut base_pooler): State, CurrentUser(current_user): CurrentUser, Path(LensPagePath { base_id, class_oid, .. @@ -431,7 +431,7 @@ pub struct ViewerPagePath { pub async fn viewer_page( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, Path(params): Path, diff --git a/interim-server/src/routes/mod.rs b/interim-server/src/routes/mod.rs index de5eb8c..d0058c0 100644 --- a/interim-server/src/routes/mod.rs +++ b/interim-server/src/routes/mod.rs @@ -1,16 +1,132 @@ -use serde::Deserialize; -use uuid::Uuid; +//! Hierarchical HTTP routing. +//! +//! Top level module establishes the overall +//! [`axum::Router`], and submodules organize nested subrouters into manageable +//! chunks. Pragmatically, the submodule tree should be kept fairly flat, lest +//! file paths grow exceedingly long. Deeply nested routers may still be +//! implemented, by use of the `super` keyword. -pub mod bases; -pub mod lens_index; -pub mod lens_insert; -pub mod lens_set_filter; -pub mod lenses; -pub mod relations; +use std::net::SocketAddr; -#[derive(Deserialize)] -pub struct LensPagePath { - base_id: Uuid, - class_oid: u32, - lens_id: Uuid, +use axum::{ + Router, + extract::{ConnectInfo, State, WebSocketUpgrade, ws::WebSocket}, + http::{HeaderValue, header::CACHE_CONTROL}, + response::{Redirect, Response}, + routing::{any, get}, +}; +use tower::ServiceBuilder; +use tower_http::{ + services::{ServeDir, ServeFile}, + set_header::SetResponseHeaderLayer, +}; + +use crate::auth; +use crate::{app_state::App, settings::Settings}; + +mod relations_single; +mod workspaces_multi; +mod workspaces_single; + +/// Create the root [`Router`] for the application, including nesting according +/// to the `root_path` [`crate::settings::Settings`] value, setting cache +/// headers, setting up static file handling, and defining fallback handlers. +pub(crate) fn new_router(app: App) -> Router<()> { + let root_path = app.settings.root_path.clone(); + let router = Router::new() + .route( + "/", + get( + |State(Settings { root_path, .. 
}): State| async move { + Redirect::to(&format!("{root_path}/workspaces/list/")) + }, + ), + ) + .nest("/workspaces", workspaces_multi::new_router()) + .nest("/w/{workspace_id}", workspaces_single::new_router()) + .nest("/auth", auth::new_router()) + .route("/__dev-healthz", any(dev_healthz_handler)) + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + HeaderValue::from_static("no-cache"), + )) + .nest_service( + "/js_dist", + ServiceBuilder::new() + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + // FIXME: restore production value + // HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"), + HeaderValue::from_static("no-cache"), + )) + .service( + ServeDir::new("js_dist").not_found_service( + ServiceBuilder::new() + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + HeaderValue::from_static("no-cache"), + )) + .service(ServeFile::new("static/_404.html")), + ), + ), + ) + .nest_service( + "/css_dist", + ServiceBuilder::new() + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + // FIXME: restore production value + // HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"), + HeaderValue::from_static("no-cache"), + )) + .service( + ServeDir::new("css_dist").not_found_service( + ServiceBuilder::new() + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + HeaderValue::from_static("no-cache"), + )) + .service(ServeFile::new("static/_404.html")), + ), + ), + ) + .fallback_service( + ServiceBuilder::new() + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"), + )) + .service( + ServeDir::new("static").not_found_service( + ServiceBuilder::new() + .layer(SetResponseHeaderLayer::if_not_present( + CACHE_CONTROL, + HeaderValue::from_static("no-cache"), + )) + .service(ServeFile::new("static/_404.html")), + ), + ), + ) + .with_state(app); + if root_path.is_empty() { + router + } else { + Router::new() + .nest(&root_path, router) + .fallback(|| async move { Redirect::to(&root_path) }) + } +} + +/// Development endpoint helping to implement home-grown "hot" reloads. 
+async fn dev_healthz_handler( + ws: WebSocketUpgrade, + ConnectInfo(addr): ConnectInfo, +) -> Response { + tracing::info!("{addr} connected"); + ws.on_upgrade(move |socket| handle_dev_healthz_socket(socket, addr)) +} + +async fn handle_dev_healthz_socket(mut socket: WebSocket, _: SocketAddr) { + // Keep socket open indefinitely until the entire server exits + while let Some(Ok(_)) = socket.recv().await {} } diff --git a/interim-server/src/routes/relations.rs b/interim-server/src/routes/relations.rs index 37382d2..94bf237 100644 --- a/interim-server/src/routes/relations.rs +++ b/interim-server/src/routes/relations.rs @@ -20,7 +20,7 @@ use uuid::Uuid; use crate::{ app_error::{AppError, forbidden}, app_state::AppDbConn, - base_pooler::{self, BasePooler}, + base_pooler::{self, WorkspacePooler}, navbar::{NavLocation, Navbar, RelLocation}, renderable_role_tree::RenderableRoleTree, settings::Settings, @@ -34,7 +34,7 @@ pub struct ListRelationsPagePath { pub async fn list_relations_page( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, Path(ListRelationsPagePath { base_id }): Path, @@ -116,7 +116,7 @@ pub async fn rel_index_page( pub async fn rel_rbac_page( State(settings): State, - State(mut base_pooler): State, + State(mut base_pooler): State, AppDbConn(mut app_db): AppDbConn, CurrentUser(current_user): CurrentUser, Path(RelPagePath { base_id, class_oid }): Path, diff --git a/interim-server/src/routes/relations_multi/mod.rs b/interim-server/src/routes/relations_multi/mod.rs new file mode 100644 index 0000000..377595a --- /dev/null +++ b/interim-server/src/routes/relations_multi/mod.rs @@ -0,0 +1,7 @@ +use axum::Router; + +use crate::app_state::App; + +pub(super) fn new_router() -> Router { + Router::::new().route_with_tsr() +} diff --git a/interim-server/src/routes/relations_single/add_field_handler.rs b/interim-server/src/routes/relations_single/add_field_handler.rs new file mode 100644 index 0000000..52eb1f1 --- /dev/null +++ b/interim-server/src/routes/relations_single/add_field_handler.rs @@ -0,0 +1,141 @@ +use axum::{ + debug_handler, + extract::{Path, State}, + response::Response, +}; +// [`axum_extra`]'s form extractor is preferred: +// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform +use axum_extra::extract::Form; +use interim_models::{ + field::Field, + portal::Portal, + presentation::{Presentation, RFC_3339_S, TextInputMode}, + workspace::Workspace, + workspace_user_perm::{self, WorkspaceUserPerm}, +}; +use interim_pgtypes::{escape_identifier, pg_class::PgClass}; +use serde::Deserialize; +use sqlx::query; +use uuid::Uuid; + +use crate::{ + app_error::{AppError, forbidden}, + app_state::{App, AppDbConn}, + base_pooler::{RoleAssignment, WorkspacePooler}, + navigator::Navigator, + user::CurrentUser, +}; + +#[derive(Debug, Deserialize)] +pub(super) struct PathParams { + portal_id: Uuid, + rel_oid: u32, + workspace_id: Uuid, +} + +#[derive(Debug, Deserialize)] +pub(super) struct FormBody { + name: String, + label: String, + presentation_tag: String, + dropdown_allow_custom: Option, + text_input_mode: Option, + timestamp_format: Option, +} + +/// HTTP POST handler for adding a [`Field`] to a [`Portal`]. If the field name +/// does not match a column in the backing database, a new column is created +/// with a compatible type. 
+/// +/// This handler expects 3 path parameters with the structure described by +/// [`PathParams`]. +#[debug_handler(state = App)] +pub(super) async fn post( + State(mut workspace_pooler): State, + AppDbConn(mut app_db): AppDbConn, + CurrentUser(user): CurrentUser, + navigator: Navigator, + Path(PathParams { + portal_id, + workspace_id, + .. + }): Path, + Form(form): Form, +) -> Result { + // Check workspace authorization. + let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id) + .fetch_all(&mut app_db) + .await?; + if workspace_perms.iter().all(|p| { + p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect + }) { + return Err(forbidden!("access denied to workspace")); + } + // FIXME ensure workspace corresponds to rel/portal, and that user has + // permission to access/alter both as needed. + + let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?; + let workspace = Workspace::with_id(portal.workspace_id) + .fetch_one(&mut app_db) + .await?; + + let mut workspace_client = workspace_pooler + .acquire_for(workspace.id, RoleAssignment::User(user.id)) + .await?; + + let class = PgClass::with_oid(portal.class_oid) + .fetch_one(&mut workspace_client) + .await?; + + let presentation = try_presentation_from_form(&form)?; + + query(&format!( + "alter table {ident} add column if not exists {col} {typ}", + ident = class.get_identifier(), + col = escape_identifier(&form.name), + typ = presentation.attr_data_type_fragment(), + )) + .execute(workspace_client.get_conn()) + .await?; + + Field::insert() + .portal_id(portal.id) + .name(form.name) + .table_label(if form.label.is_empty() { + None + } else { + Some(form.label) + }) + .presentation(presentation) + .build()? + .insert(&mut app_db) + .await?; + + Ok(navigator.portal_page(&portal).redirect_to()) +} + +fn try_presentation_from_form(form: &FormBody) -> Result { + // Parses the presentation tag into the correct enum variant, but without + // meaningful inner value(s). Match arms should all use the + // `MyVariant { .. }` pattern to pay attention to only the tag. + let presentation_default = Presentation::try_from(form.presentation_tag.as_str())?; + Ok(match presentation_default { + Presentation::Array { .. } => todo!(), + Presentation::Dropdown { .. } => Presentation::Dropdown { allow_custom: true }, + Presentation::Text { .. } => Presentation::Text { + input_mode: form + .text_input_mode + .clone() + .map(|value| TextInputMode::try_from(value.as_str())) + .transpose()? + .unwrap_or_default(), + }, + Presentation::Timestamp { .. } => Presentation::Timestamp { + format: form + .timestamp_format + .clone() + .unwrap_or(RFC_3339_S.to_owned()), + }, + Presentation::Uuid { .. 
} => Presentation::Uuid {}, + }) +} diff --git a/interim-server/src/routes/relations_single/add_portal_handler.rs b/interim-server/src/routes/relations_single/add_portal_handler.rs new file mode 100644 index 0000000..443eebc --- /dev/null +++ b/interim-server/src/routes/relations_single/add_portal_handler.rs @@ -0,0 +1,56 @@ +use axum::{extract::Path, response::IntoResponse}; +use interim_models::{ + portal::Portal, + workspace_user_perm::{self, WorkspaceUserPerm}, +}; +use serde::Deserialize; +use sqlx::postgres::types::Oid; +use uuid::Uuid; + +use crate::{ + app_error::{AppError, forbidden}, + app_state::AppDbConn, + navigator::Navigator, + user::CurrentUser, +}; + +#[derive(Debug, Deserialize)] +pub(super) struct PathParams { + workspace_id: Uuid, + rel_oid: u32, +} + +/// HTTP POST handler for creating a [`Portal`] for an existing backing database +/// table. Upon success, it redirects the client to the portal's table viewer. +/// +/// This handler expects 2 path parameters, named `workspace_id`, which should +/// deserialize to a UUID, and `rel_oid`, which should deserialize to a u32. +pub(super) async fn post( + CurrentUser(user): CurrentUser, + navigator: Navigator, + AppDbConn(mut app_db): AppDbConn, + Path(PathParams { + rel_oid, + workspace_id, + }): Path, +) -> Result { + // Check workspace authorization. + let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id) + .fetch_all(&mut app_db) + .await?; + if workspace_perms.iter().all(|p| { + p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect + }) { + return Err(forbidden!("access denied to workspace")); + } + + Portal::insert() + .workspace_id(workspace_id) + .name("Untitled".to_owned()) + .class_oid(Oid(rel_oid)) + .build()? + .execute(&mut app_db) + .await?; + + Ok(navigator.workspace_page(workspace_id).redirect_to()) +} diff --git a/interim-server/src/routes/relations_single/get_data_handler.rs b/interim-server/src/routes/relations_single/get_data_handler.rs new file mode 100644 index 0000000..e1763a6 --- /dev/null +++ b/interim-server/src/routes/relations_single/get_data_handler.rs @@ -0,0 +1,145 @@ +use std::collections::HashMap; + +use axum::{ + Json, + extract::{Path, State}, + response::{IntoResponse as _, Response}, +}; +use interim_models::{encodable::Encodable, field::Field, portal::Portal}; +use interim_pgtypes::{escape_identifier, pg_attribute::PgAttribute, pg_class::PgClass}; +use serde::{Deserialize, Serialize}; +use sqlx::{postgres::PgRow, query}; +use uuid::Uuid; + +use crate::{ + app_error::AppError, + app_state::AppDbConn, + base_pooler::{RoleAssignment, WorkspacePooler}, + field_info::FieldInfo, + user::CurrentUser, +}; + +#[derive(Clone, Debug, Deserialize)] +pub(super) struct PathParams { + portal_id: Uuid, +} + +const FRONTEND_ROW_LIMIT: i64 = 1000; + +/// HTTP GET handler for an API endpoint returning a JSON encoding of portal +/// data to display in a table or similar form. +/// +/// Only queries up to the first [`FRONTEND_ROW_LIMIT`] rows. 
+pub(super) async fn get( + State(mut workspace_pooler): State, + AppDbConn(mut app_db): AppDbConn, + CurrentUser(current_user): CurrentUser, + Path(PathParams { portal_id }): Path, +) -> Result { + // FIXME auth + let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?; + + let mut workspace_client = workspace_pooler + .acquire_for(portal.workspace_id, RoleAssignment::User(current_user.id)) + .await?; + let rel = PgClass::with_oid(portal.class_oid) + .fetch_one(&mut workspace_client) + .await?; + + let attrs = PgAttribute::all_for_rel(portal.class_oid) + .fetch_all(&mut workspace_client) + .await?; + let pkey_attrs = PgAttribute::pkeys_for_rel(portal.class_oid) + .fetch_all(&mut workspace_client) + .await?; + + let fields: Vec = { + let fields: Vec = Field::belonging_to_portal(portal.id) + .fetch_all(&mut app_db) + .await?; + let mut field_info: Vec = Vec::with_capacity(fields.len()); + for field in fields { + if let Some(attr) = attrs.iter().find(|attr| attr.attname == field.name) { + field_info.push(FieldInfo { + field, + has_default: attr.atthasdef, + not_null: attr.attnotnull.unwrap_or_default(), + }); + } + } + field_info + }; + + let mut sql_raw = format!( + "select {0} from {1}.{2}", + pkey_attrs + .iter() + .chain(attrs.iter()) + .map(|attr| escape_identifier(&attr.attname)) + .collect::>() + .join(", "), + escape_identifier(&rel.regnamespace), + escape_identifier(&rel.relname), + ); + let rows: Vec = if let Some(filter_expr) = portal.table_filter.0 { + let filter_fragment = filter_expr.into_query_fragment(); + let filter_params = filter_fragment.to_params(); + sql_raw = format!( + "{sql_raw} where {0} limit ${1}", + filter_fragment.to_sql(1), + filter_params.len() + 1 + ); + let mut q = query(&sql_raw); + for param in filter_params { + q = param.bind_onto(q); + } + q = q.bind(FRONTEND_ROW_LIMIT); + q.fetch_all(workspace_client.get_conn()).await? + } else { + sql_raw = format!("{sql_raw} limit $1"); + query(&sql_raw) + .bind(FRONTEND_ROW_LIMIT) + .fetch_all(workspace_client.get_conn()) + .await? 
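        // Illustration (hypothetical filter): if the portal's table_filter
        // renders to `"age" > $1`, the filtered branch above ends up shaped like
        //     select <columns> from <relation> where "age" > $1 limit $2
        // while this unfiltered branch binds only the row limit, as $1.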
+ }; + + #[derive(Serialize)] + struct DataRow { + pkey: String, + data: Vec, + } + + let mut data_rows: Vec = vec![]; + let mut pkeys: Vec = vec![]; + for row in rows.iter() { + let mut pkey_values: HashMap = HashMap::new(); + for attr in pkey_attrs.clone() { + let field = Field::default_from_attr(&attr) + .ok_or(anyhow::anyhow!("unsupported primary key column type"))?; + pkey_values.insert(field.name.clone(), field.get_value_encodable(row)?); + } + let pkey = serde_json::to_string(&pkey_values)?; + pkeys.push(pkey.clone()); + let mut row_data: Vec = vec![]; + for field in fields.iter() { + row_data.push(field.field.get_value_encodable(row)?); + } + data_rows.push(DataRow { + pkey, + data: row_data, + }); + } + + #[derive(Serialize)] + struct ResponseBody { + rows: Vec, + fields: Vec, + pkeys: Vec, + } + Ok(Json(ResponseBody { + rows: data_rows, + fields, + pkeys, + }) + .into_response()) +} diff --git a/interim-server/src/routes/relations_single/insert_handler.rs b/interim-server/src/routes/relations_single/insert_handler.rs new file mode 100644 index 0000000..777b38a --- /dev/null +++ b/interim-server/src/routes/relations_single/insert_handler.rs @@ -0,0 +1,124 @@ +use std::collections::HashMap; + +use axum::{ + debug_handler, + extract::{Path, State}, + response::Response, +}; +// [`axum_extra`]'s form extractor is required to support repeated keys: +// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform +use axum_extra::extract::Form; +use interim_models::{ + encodable::Encodable, + portal::Portal, + workspace::Workspace, + workspace_user_perm::{self, WorkspaceUserPerm}, +}; +use interim_pgtypes::{escape_identifier, pg_class::PgClass}; +use serde::Deserialize; +use sqlx::{postgres::types::Oid, query}; +use uuid::Uuid; + +use crate::{ + app_error::{AppError, forbidden}, + app_state::{App, AppDbConn}, + base_pooler::{RoleAssignment, WorkspacePooler}, + navigator::Navigator, + user::CurrentUser, +}; + +#[derive(Debug, Deserialize)] +pub(super) struct PathParams { + portal_id: Uuid, + rel_oid: u32, + workspace_id: Uuid, +} + +/// HTTP POST handler for inserting one or more rows into a table. This handler +/// takes a form where the keys are column names, with keys optionally repeated +/// to insert multiple rows at once. If any key is repeated, the others should +/// be repeated the same number of times. Form values are expected to be JSON- +/// serialized representations of the `[Encodable]` type. +#[debug_handler(state = App)] +pub(super) async fn post( + State(mut workspace_pooler): State, + AppDbConn(mut app_db): AppDbConn, + CurrentUser(user): CurrentUser, + navigator: Navigator, + Path(PathParams { + portal_id, + rel_oid, + workspace_id, + }): Path, + Form(form): Form>>, +) -> Result { + // Check workspace authorization. + let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id) + .fetch_all(&mut app_db) + .await?; + if workspace_perms.iter().all(|p| { + p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect + }) { + return Err(forbidden!("access denied to workspace")); + } + // FIXME ensure workspace corresponds to rel/portal, and that user has + // permission to access/alter both as needed. 
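    // For illustration (hypothetical column names): a submission with repeated
    // keys such as
    //     name="Ada"&qty=1&name="Bob"&qty=2
    // deserializes into {"name": ["\"Ada\"", "\"Bob\""], "qty": ["1", "2"]},
    // i.e. n_rows = 2, and the statement built below takes the shape
    //     insert into <relation> ("name", "qty") values ($1, $2), ($3, $4)
    // with `default` substituted for any missing or JSON-null cell.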
+ + let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?; + let workspace = Workspace::with_id(portal.workspace_id) + .fetch_one(&mut app_db) + .await?; + + let mut workspace_client = workspace_pooler + .acquire_for(workspace.id, RoleAssignment::User(user.id)) + .await?; + + let rel = PgClass::with_oid(Oid(rel_oid)) + .fetch_one(&mut workspace_client) + .await?; + + let col_names: Vec = form.keys().cloned().collect(); + let col_list_sql = col_names + .iter() + .map(|value| escape_identifier(value)) + .collect::>() + .join(", "); + + let n_rows = form.values().map(|value| value.len()).max().unwrap_or(0); + if n_rows > 0 { + let mut param_index = 1; + let mut params: Vec = vec![]; + let mut row_list: Vec = vec![]; + for i in 0..n_rows { + let mut param_slots: Vec = vec![]; + for col in col_names.iter() { + let maybe_value: Option = form + .get(col) + .and_then(|col_values| col_values.get(i)) + .map(|value_raw| serde_json::from_str(value_raw)) + .transpose()?; + if let Some(value) = maybe_value.filter(|value| !value.is_none()) { + params.push(value); + param_slots.push(format!("${param_index}")); + param_index += 1; + } else { + param_slots.push("default".to_owned()); + } + } + row_list.push(format!("({0})", param_slots.join(", "))); + } + let row_list_sql = row_list.join(",\n"); + + let query_sql = &format!( + "insert into {ident} ({col_list_sql}) values {row_list_sql}", + ident = rel.get_identifier(), + ); + let mut q = query(query_sql); + for param in params { + q = param.bind_onto(q); + } + q.execute(workspace_client.get_conn()).await?; + } + + Ok(navigator.portal_page(&portal).redirect_to()) +} diff --git a/interim-server/src/routes/relations_single/mod.rs b/interim-server/src/routes/relations_single/mod.rs new file mode 100644 index 0000000..ebf2836 --- /dev/null +++ b/interim-server/src/routes/relations_single/mod.rs @@ -0,0 +1,22 @@ +use axum::{ + Router, + routing::{get, post}, +}; +use axum_extra::routing::RouterExt as _; + +use crate::app_state::App; + +mod add_field_handler; +mod add_portal_handler; +mod get_data_handler; +mod insert_handler; +mod portal_handler; + +pub(super) fn new_router() -> Router { + Router::::new() + .route("/add-portal", post(add_portal_handler::post)) + .route_with_tsr("/p/{portal_id}/", get(portal_handler::get)) + .route_with_tsr("/p/{portal_id}/get-data/", get(get_data_handler::get)) + .route("/p/{portal_id}/add-field", post(add_field_handler::post)) + .route("/p/{portal_id}/insert", post(insert_handler::post)) +} diff --git a/interim-server/src/routes/relations_single/portal_handler.rs b/interim-server/src/routes/relations_single/portal_handler.rs new file mode 100644 index 0000000..5de9541 --- /dev/null +++ b/interim-server/src/routes/relations_single/portal_handler.rs @@ -0,0 +1,88 @@ +use askama::Template; +use axum::{ + extract::{Path, State}, + response::{Html, IntoResponse as _, Response}, +}; +use interim_models::{expression::PgExpressionAny, portal::Portal, workspace::Workspace}; +use interim_pgtypes::pg_attribute::PgAttribute; +use serde::Deserialize; +use sqlx::postgres::types::Oid; +use uuid::Uuid; + +use crate::{ + app_error::AppError, + app_state::AppDbConn, + base_pooler::{RoleAssignment, WorkspacePooler}, + navbar::{NavLocation, RelLocation, WorkspaceNav}, + navigator::Navigator, + settings::Settings, + user::CurrentUser, +}; + +#[derive(Clone, Debug, Deserialize)] +pub(super) struct PathParams { + portal_id: Uuid, + rel_oid: u32, + workspace_id: Uuid, +} + +/// HTTP GET handler for the table viewer page of a [`Portal`]. 
This handler +/// performs some relatively simple queries pertaining to table structure, but +/// the bulk of the query logic resides in the [`super::get_data_handler`] +/// module. +pub(super) async fn get( + State(settings): State, + State(mut workspace_pooler): State, + AppDbConn(mut app_db): AppDbConn, + CurrentUser(current_user): CurrentUser, + navigator: Navigator, + Path(PathParams { + portal_id, + workspace_id, + rel_oid, + }): Path, +) -> Result { + // FIXME auth + + let workspace = Workspace::with_id(workspace_id) + .fetch_one(&mut app_db) + .await?; + let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?; + + let mut workspace_client = workspace_pooler + .acquire_for(portal.workspace_id, RoleAssignment::User(current_user.id)) + .await?; + + let attrs = PgAttribute::all_for_rel(portal.class_oid) + .fetch_all(&mut workspace_client) + .await?; + let attr_names: Vec = attrs.iter().map(|attr| attr.attname.clone()).collect(); + + #[derive(Template)] + #[template(path = "lens.html")] + struct ResponseTemplate { + attr_names: Vec, + filter: Option, + settings: Settings, + navbar: WorkspaceNav, + } + Ok(Html( + ResponseTemplate { + attr_names, + filter: portal.table_filter.0, + navbar: WorkspaceNav::builder() + .navigator(navigator) + .workspace(workspace.clone()) + .populate_rels(&mut app_db, &mut workspace_client) + .await? + .current(NavLocation::Rel( + Oid(rel_oid), + Some(RelLocation::Portal(portal.id)), + )) + .build()?, + settings, + } + .render()?, + ) + .into_response()) +} diff --git a/interim-server/src/routes/workspaces_multi/list_handlers.rs b/interim-server/src/routes/workspaces_multi/list_handlers.rs new file mode 100644 index 0000000..b0cc4e5 --- /dev/null +++ b/interim-server/src/routes/workspaces_multi/list_handlers.rs @@ -0,0 +1,39 @@ +use askama::Template; +use axum::{ + extract::State, + response::{Html, IntoResponse}, +}; +use interim_models::workspace_user_perm::WorkspaceUserPerm; + +use crate::{ + app_error::AppError, app_state::AppDbConn, navigator::Navigator, settings::Settings, + user::CurrentUser, +}; + +pub(super) async fn get( + State(settings): State, + CurrentUser(user): CurrentUser, + navigator: Navigator, + AppDbConn(mut app_db): AppDbConn, +) -> Result { + let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id) + .fetch_all(&mut app_db) + .await?; + + #[derive(Template)] + #[template(path = "workspaces_multi/list.html")] + struct ResponseTemplate { + navigator: Navigator, + settings: Settings, + workspace_perms: Vec, + } + + Ok(Html( + ResponseTemplate { + navigator, + settings, + workspace_perms, + } + .render()?, + )) +} diff --git a/interim-server/src/routes/workspaces_multi/mod.rs b/interim-server/src/routes/workspaces_multi/mod.rs new file mode 100644 index 0000000..11b2579 --- /dev/null +++ b/interim-server/src/routes/workspaces_multi/mod.rs @@ -0,0 +1,12 @@ +use axum::{Router, response::Redirect, routing::get}; +use axum_extra::routing::RouterExt as _; + +use crate::app_state::App; + +mod list_handlers; + +pub(super) fn new_router() -> Router { + Router::::new() + .route("/", get(|| async move { Redirect::to("list/") })) + .route_with_tsr("/list/", get(list_handlers::get)) +} diff --git a/interim-server/src/routes/workspaces_single/add_table_handler.rs b/interim-server/src/routes/workspaces_single/add_table_handler.rs new file mode 100644 index 0000000..81782f7 --- /dev/null +++ b/interim-server/src/routes/workspaces_single/add_table_handler.rs @@ -0,0 +1,109 @@ +use axum::{ + extract::{Path, State}, + 
response::IntoResponse, +}; +use interim_models::workspace_user_perm::{self, WorkspaceUserPerm}; +use interim_pgtypes::escape_identifier; +use serde::Deserialize; +use sqlx::query; +use uuid::Uuid; + +use crate::{ + app_error::{AppError, forbidden}, + app_state::AppDbConn, + base_pooler::{RoleAssignment, WorkspacePooler}, + navigator::Navigator, + settings::Settings, + user::CurrentUser, +}; + +#[derive(Debug, Deserialize)] +pub(super) struct PathParams { + workspace_id: Uuid, +} + +/// HTTP POST handler for creating a managed Postgres table within a workspace +/// database. Upon success, it redirects the client back to the workspace +/// homepage, which is expected to display a list of available tables including +/// the newly created one. +/// +/// This handler expects 1 path parameter named `workspace_id` which should +/// deserialize to a UUID. +pub(super) async fn post( + State(settings): State, + State(mut pooler): State, + CurrentUser(user): CurrentUser, + navigator: Navigator, + AppDbConn(mut app_db): AppDbConn, + Path(PathParams { workspace_id }): Path, +) -> Result { + // Check workspace authorization. + let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id) + .fetch_all(&mut app_db) + .await?; + if workspace_perms.iter().all(|p| { + p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect + }) { + return Err(forbidden!("access denied to workspace")); + } + + let mut workspace_client = pooler + // FIXME: Should this be scoped down to the unprivileged role after + // setting up the table owner? + .acquire_for(workspace_id, RoleAssignment::Root) + .await?; + + let table_owner_rolname = format!("table_owner_{0}", Uuid::new_v4().simple()); + query(&format!( + "create role {0}", + escape_identifier(&table_owner_rolname), + )) + .execute(workspace_client.get_conn()) + .await?; + query(&format!( + "grant {0} to {1} with admin option", + escape_identifier(&table_owner_rolname), + escape_identifier(&format!( + "{0}{1}", + settings.db_role_prefix, + user.id.simple() + )) + )) + .execute(workspace_client.get_conn()) + .await?; + query(&format!( + "grant create, usage on schema {0} to {1}", + escape_identifier(&settings.phono_table_namespace), + escape_identifier(&table_owner_rolname), + )) + .execute(workspace_client.get_conn()) + .await?; + const TABLE_NAME: &str = "untitled"; + query(&format!( + r#" +create table {0}.{1} ( + _id uuid primary key not null default uuidv7(), + _created_by text not null default current_user, + _created_at timestamptz not null default now(), + _form_session uuid, + _form_backlink_portal uuid, + _form_backlink_row uuid, + notes text +) +"#, + escape_identifier(&settings.phono_table_namespace), + escape_identifier(TABLE_NAME), + )) + .execute(workspace_client.get_conn()) + .await?; + query(&format!( + "alter table {0}.{1} owner to {2}", + escape_identifier(&settings.phono_table_namespace), + escape_identifier(TABLE_NAME), + escape_identifier(&table_owner_rolname) + )) + .execute(workspace_client.get_conn()) + .await?; + + Ok(navigator.workspace_page(workspace_id).redirect_to()) +} diff --git a/interim-server/src/routes/workspaces_single/mod.rs b/interim-server/src/routes/workspaces_single/mod.rs new file mode 100644 index 0000000..4261735 --- /dev/null +++ b/interim-server/src/routes/workspaces_single/mod.rs @@ -0,0 +1,21 @@ +use axum::{ + Router, + response::Redirect, + routing::{get, post}, +}; +use axum_extra::routing::RouterExt as _; + +use crate::app_state::App; + +use super::relations_single; + +mod 
add_table_handler; +mod nav_handler; + +pub(super) fn new_router() -> Router { + Router::::new() + .route("/", get(|| async move { Redirect::to("nav/") })) + .route("/add-table", post(add_table_handler::post)) + .route_with_tsr("/nav/", get(nav_handler::get)) + .nest("/r/{rel_oid}", relations_single::new_router()) +} diff --git a/interim-server/src/routes/workspaces_single/nav_handler.rs b/interim-server/src/routes/workspaces_single/nav_handler.rs new file mode 100644 index 0000000..49ac6d4 --- /dev/null +++ b/interim-server/src/routes/workspaces_single/nav_handler.rs @@ -0,0 +1,77 @@ +use askama::Template; +use axum::{ + debug_handler, + extract::{Path, State}, + response::{Html, IntoResponse}, +}; +use interim_models::{ + workspace::Workspace, + workspace_user_perm::{self, WorkspaceUserPerm}, +}; +use serde::Deserialize; +use uuid::Uuid; + +use crate::{ + app_error::{AppError, forbidden}, + app_state::{App, AppDbConn}, + base_pooler::{RoleAssignment, WorkspacePooler}, + navbar::WorkspaceNav, + navigator::Navigator, + settings::Settings, + user::CurrentUser, +}; + +#[derive(Debug, Deserialize)] +pub(super) struct PathParams { + workspace_id: Uuid, +} + +#[debug_handler(state = App)] +pub(super) async fn get( + State(settings): State, + CurrentUser(user): CurrentUser, + AppDbConn(mut app_db): AppDbConn, + Path(PathParams { workspace_id }): Path, + navigator: Navigator, + State(mut pooler): State, +) -> Result { + // Check workspace authorization. + let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id) + .fetch_all(&mut app_db) + .await?; + if workspace_perms.iter().all(|p| { + p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect + }) { + return Err(forbidden!("access denied to workspace")); + } + + let workspace = Workspace::with_id(workspace_id) + .fetch_one(&mut app_db) + .await?; + + let mut workspace_client = pooler + .acquire_for(workspace_id, RoleAssignment::User(user.id)) + .await?; + + #[derive(Template)] + #[template(path = "workspaces_single/nav.html")] + struct ResponseTemplate { + settings: Settings, + workspace: Workspace, + workspace_nav: WorkspaceNav, + } + + Ok(Html( + ResponseTemplate { + workspace_nav: WorkspaceNav::builder() + .navigator(navigator) + .workspace(workspace.clone()) + .populate_rels(&mut app_db, &mut workspace_client) + .await? + .build()?, + settings, + workspace, + } + .render()?, + )) +} diff --git a/interim-server/src/sessions.rs b/interim-server/src/sessions.rs index 70bd4a4..b1c19f2 100644 --- a/interim-server/src/sessions.rs +++ b/interim-server/src/sessions.rs @@ -1,3 +1,5 @@ +//! Browser session management via [`async_session`]. 
+ use anyhow::Result; use async_session::{Session, SessionStore, async_trait}; use axum::{ @@ -10,7 +12,7 @@ use chrono::{DateTime, TimeDelta, Utc}; use sqlx::{PgPool, query, query_as}; use tracing::{Instrument, trace_span}; -use crate::{app_error::AppError, app_state::AppState}; +use crate::{app_error::AppError, app_state::App}; const EXPIRY_DAYS: i64 = 7; @@ -39,8 +41,8 @@ impl std::fmt::Debug for PgStore { } } -impl FromRef for PgStore { - fn from_ref(state: &AppState) -> Self { +impl FromRef for PgStore { + fn from_ref(state: &App) -> Self { state.session_store.clone() } } @@ -110,13 +112,13 @@ on conflict (id) do update set #[derive(Clone)] pub struct AppSession(pub Option); -impl FromRequestParts for AppSession { +impl FromRequestParts for AppSession { type Rejection = AppError; async fn from_request_parts( parts: &mut Parts, - state: &AppState, - ) -> Result>::Rejection> { + state: &App, + ) -> Result>::Rejection> { async move { let jar = parts.extract::().await.unwrap(); let session_cookie = match jar.get(&state.settings.auth.cookie_name) { diff --git a/interim-server/src/settings.rs b/interim-server/src/settings.rs index 2831212..f2e85b4 100644 --- a/interim-server/src/settings.rs +++ b/interim-server/src/settings.rs @@ -1,46 +1,56 @@ +//! Runtime application configuration values. + use anyhow::{Context as _, Result}; use axum::extract::FromRef; use config::{Config, Environment}; use dotenvy::dotenv; use serde::Deserialize; -use crate::app_state::AppState; +use crate::app_state::App; #[derive(Clone, Debug, Deserialize)] -pub struct Settings { +pub(crate) struct Settings { /// Prefix under which to nest all routes. If specified, include leading /// slash but no trailing slash, for example "/app". For default behavior, /// leave as empty string. #[serde(default)] - pub root_path: String, + pub(crate) root_path: String, /// When set to 1, dev features such as the frontend reloader will be /// enabled. #[serde(default)] - pub dev: u8, + pub(crate) dev: u8, /// postgresql:// URL for Interim's application database. - pub database_url: String, + pub(crate) database_url: String, #[serde(default = "default_app_db_max_connections")] - pub app_db_max_connections: u32, + pub(crate) app_db_max_connections: u32, /// When set to 1, embedded SQLx migrations will be run on startup. #[serde(default)] - pub run_database_migrations: u8, + pub(crate) run_database_migrations: u8, /// Address for server to bind to #[serde(default = "default_host")] - pub host: String, + pub(crate) host: String, /// Port for server to bind to #[serde(default = "default_port")] - pub port: u16, + pub(crate) port: u16, - /// Host visible to end users, for example "https://shout.dev" - pub frontend_host: String, + /// Host visible to end users, for example "https://phono.dev" + pub(crate) frontend_host: String, - pub auth: AuthSettings, + pub(crate) auth: AuthSettings, + + /// String to prepend to user IDs in order to construct Postgres role names. + #[serde(default = "default_db_role_prefix")] + pub(crate) db_role_prefix: String, + + /// Postgres schema in which to create managed backing tables. 
+ #[serde(default = "default_phono_table_namespace")] + pub(crate) phono_table_namespace: String, } fn default_app_db_max_connections() -> u32 { @@ -55,17 +65,25 @@ fn default_host() -> String { "127.0.0.1".to_owned() } +fn default_db_role_prefix() -> String { + "__phono__".to_owned() +} + +fn default_phono_table_namespace() -> String { + "phono".to_owned() +} + #[derive(Clone, Debug, Deserialize)] -pub struct AuthSettings { - pub client_id: String, - pub client_secret: String, - pub auth_url: String, - pub token_url: String, - pub userinfo_url: String, - pub logout_url: Option, +pub(crate) struct AuthSettings { + pub(crate) client_id: String, + pub(crate) client_secret: String, + pub(crate) auth_url: String, + pub(crate) token_url: String, + pub(crate) userinfo_url: String, + pub(crate) logout_url: Option, #[serde(default = "default_cookie_name")] - pub cookie_name: String, + pub(crate) cookie_name: String, } fn default_cookie_name() -> String { @@ -73,7 +91,7 @@ fn default_cookie_name() -> String { } impl Settings { - pub fn load() -> Result { + pub(crate) fn load() -> Result { match dotenv() { Err(err) => { if err.not_found() { @@ -99,11 +117,8 @@ impl Settings { } } -impl FromRef for Settings -where - S: Into + Clone, -{ - fn from_ref(state: &S) -> Self { - Into::::into(state.clone()).settings.clone() +impl FromRef for Settings { + fn from_ref(state: &App) -> Self { + state.settings.clone() } } diff --git a/interim-server/src/user.rs b/interim-server/src/user.rs index 060a5ff..877d7f8 100644 --- a/interim-server/src/user.rs +++ b/interim-server/src/user.rs @@ -1,3 +1,5 @@ +//! Provides an Axum extractor to fetch the authenticated user for a request. + use async_session::{Session, SessionStore as _}; use axum::{ RequestPartsExt, @@ -15,28 +17,26 @@ use uuid::Uuid; use crate::{ app_error::AppError, - app_state::AppState, + app_state::App, auth::{AuthInfo, SESSION_KEY_AUTH_INFO, SESSION_KEY_AUTH_REDIRECT}, sessions::AppSession, }; +/// Extractor for the authenticated user associated with an HTTP request. If +/// the request is not authenticated, the extractor will abort request handling +/// and redirect the client to an OAuth2 login page. #[derive(Clone, Debug)] -pub struct CurrentUser(pub User); +pub(crate) struct CurrentUser(pub(crate) User); -impl FromRequestParts for CurrentUser -where - S: Into + Clone + Sync, -{ +impl FromRequestParts for CurrentUser { type Rejection = CurrentUserRejection; - async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { - let app_state: AppState = state.clone().into(); - let mut session = - if let AppSession(Some(value)) = parts.extract_with_state(&app_state).await? { - value - } else { - Session::new() - }; + async fn from_request_parts(parts: &mut Parts, state: &App) -> Result { + let mut session = if let AppSession(Some(value)) = parts.extract_with_state(state).await? { + value + } else { + Session::new() + }; let auth_info = if let Some(value) = session.get::(SESSION_KEY_AUTH_INFO) { value } else { @@ -48,12 +48,12 @@ where SESSION_KEY_AUTH_REDIRECT, uri.path_and_query() .map(|value| value.to_string()) - .unwrap_or(format!("{}/", app_state.settings.root_path)), + .unwrap_or(format!("{}/", state.settings.root_path)), )?; - if let Some(cookie_value) = app_state.session_store.store_session(session).await? { + if let Some(cookie_value) = state.session_store.store_session(session).await? 
{ tracing::debug!("adding session cookie to jar"); jar.add( - Cookie::build((app_state.settings.auth.cookie_name.clone(), cookie_value)) + Cookie::build((state.settings.auth.cookie_name.clone(), cookie_value)) .same_site(SameSite::Lax) .http_only(true) .path("/"), @@ -70,12 +70,12 @@ where }; return Err(Self::Rejection::SetCookiesAndRedirect( jar, - format!("{}/auth/login", app_state.settings.root_path), + format!("{}/auth/login", state.settings.root_path), )); }; let current_user = if let Some(value) = query_as!(User, "select * from users where uid = $1", &auth_info.sub) - .fetch_optional(&app_state.app_db) + .fetch_optional(&state.app_db) .await? { value @@ -92,14 +92,14 @@ returning * &auth_info.sub, &auth_info.email ) - .fetch_optional(&app_state.app_db) + .fetch_optional(&state.app_db) .await? { value } else { tracing::debug!("detected race to insert current user record"); query_as!(User, "select * from users where uid = $1", &auth_info.sub) - .fetch_one(&app_state.app_db) + .fetch_one(&state.app_db) .await? }; Ok(CurrentUser(current_user)) diff --git a/interim-server/src/worker.rs b/interim-server/src/worker.rs index ad2ccff..fb06199 100644 --- a/interim-server/src/worker.rs +++ b/interim-server/src/worker.rs @@ -1,9 +1,9 @@ use anyhow::Result; use tracing::Instrument as _; -use crate::app_state::AppState; +use crate::app_state::App; -pub async fn run_worker(_state: AppState) -> Result<()> { +pub async fn run_worker(_state: App) -> Result<()> { async move { Ok(()) } .instrument(tracing::debug_span!("run_worker()")) .await diff --git a/interim-server/templates/workspace_nav.html b/interim-server/templates/workspace_nav.html new file mode 100644 index 0000000..15ab166 --- /dev/null +++ b/interim-server/templates/workspace_nav.html @@ -0,0 +1,91 @@ + + diff --git a/interim-server/templates/workspaces_multi/list.html b/interim-server/templates/workspaces_multi/list.html new file mode 100644 index 0000000..019b017 --- /dev/null +++ b/interim-server/templates/workspaces_multi/list.html @@ -0,0 +1,16 @@ +{% extends "base.html" %} + +{% block main %} +
+

Workspaces

+ +
+{% endblock %} diff --git a/interim-server/templates/workspaces_single/nav.html b/interim-server/templates/workspaces_single/nav.html new file mode 100644 index 0000000..ef7b798 --- /dev/null +++ b/interim-server/templates/workspaces_single/nav.html @@ -0,0 +1,8 @@ +{% extends "base.html" %} + +{% block main %} +
+

{{ workspace.name }}

+ {{ workspace_nav | safe }} +
+{% endblock %} diff --git a/sass/main.scss b/sass/main.scss index 9364afa..b4cdede 100644 --- a/sass/main.scss +++ b/sass/main.scss @@ -3,6 +3,8 @@ @use 'globals'; @use 'modern-normalize'; @use 'forms'; +@use 'collapsible_menu'; +@use 'navbar'; html { font-family: "Averia Serif Libre", "Open Sans", "Helvetica Neue", Arial, sans-serif; diff --git a/svelte/src/expression-editor.webc.svelte b/svelte/src/expression-editor.webc.svelte index f450942..31b48ec 100644 --- a/svelte/src/expression-editor.webc.svelte +++ b/svelte/src/expression-editor.webc.svelte @@ -32,10 +32,10 @@ (presentation) => ({ field: { id: "", - label: "", + table_label: "", name: "", presentation, - width_px: -1, + table_width_px: -1, }, not_null: true, has_default: false, diff --git a/svelte/src/field-adder.webc.svelte b/svelte/src/field-adder.webc.svelte index afbfba7..485ca72 100644 --- a/svelte/src/field-adder.webc.svelte +++ b/svelte/src/field-adder.webc.svelte @@ -73,8 +73,8 @@ submission. } -
-
+ +
- -
- - +
+ + +
-
-
- - -
+
+ + + +
+ diff --git a/svelte/src/field-details.svelte b/svelte/src/field-details.svelte index db048f8..936d9e3 100644 --- a/svelte/src/field-details.svelte +++ b/svelte/src/field-details.svelte @@ -102,10 +102,10 @@ field. This is typically rendered within a popover component, and within an HTML />