refactor with updated naming convention (part 1)

This commit is contained in:
parent f6118e4d5b
commit 34e0302242

75 changed files with 2135 additions and 540 deletions

Cargo.lock (generated, 28 changed lines)
@@ -1001,9 +1001,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"

 [[package]]
 name = "form_urlencoded"
-version = "1.2.1"
+version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
 dependencies = [
  "percent-encoding",
 ]

@@ -1603,9 +1603,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"

 [[package]]
 name = "idna"
-version = "1.0.3"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
+checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
 dependencies = [
  "idna_adapter",
  "smallvec",

@@ -1639,12 +1639,14 @@ dependencies = [
  "chrono",
  "derive_builder",
  "interim-pgtypes",
+ "redact",
  "regex",
  "serde",
  "serde_json",
  "sqlx",
+ "strum",
  "thiserror 2.0.12",
  "url",
  "uuid",
 ]

@@ -2115,9 +2117,9 @@ dependencies = [

 [[package]]
 name = "percent-encoding"
-version = "2.3.1"
+version = "2.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"

 [[package]]
 name = "pest"

@@ -2332,6 +2334,16 @@ dependencies = [
  "getrandom 0.3.3",
 ]
+
+[[package]]
+name = "redact"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcecefd225c2fb69914585a7a6f8878929feb316a7ecb61c07d79e361d46d8ac"
+dependencies = [
+ "serde",
+ "zeroize",
+]

 [[package]]
 name = "redox_syscall"
 version = "0.5.12"

@@ -3693,9 +3705,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

 [[package]]
 name = "url"
-version = "2.5.4"
+version = "2.5.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
+checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
 dependencies = [
  "form_urlencoded",
  "idna",
@@ -14,6 +14,7 @@ futures = "0.3.31"
 interim-models = { path = "./interim-models" }
 interim-pgtypes = { path = "./interim-pgtypes" }
 rand = "0.8.5"
+redact = { version = "0.1.11", features = ["serde", "zeroize"] }
 regex = "1.11.1"
 reqwest = { version = "0.12.8", features = ["json"] }
 serde = { version = "1.0.213", features = ["derive"] }
@@ -22,5 +23,6 @@ sqlx = { version = "0.8.6", features = ["runtime-tokio", "tls-rustls-ring-native
 thiserror = "2.0.12"
 tokio = { version = "1.42.0", features = ["full"] }
 tracing = "0.1.40"
+url = { version = "2.5.7", features = ["serde"] }
 uuid = { version = "1.11.0", features = ["serde", "v4", "v7"] }
 validator = { version = "0.20.0", features = ["derive"] }
@@ -1,8 +1,8 @@
-name: interim
+name: phono

 services:
   pg:
-    image: postgres:17
+    image: postgres:18rc1
     restart: always
     environment:
       POSTGRES_USER: postgres
@@ -11,7 +11,7 @@ services:
       - "127.0.0.1:5432:5432"
     volumes:
       - "./docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d:ro"
-      - "./pgdata:/var/lib/postgresql/data"
+      - "./pgdata:/var/lib/postgresql/18/docker"

   keycloak:
     depends_on: [pg]
@@ -7,10 +7,12 @@ version.workspace = true
 chrono = { workspace = true }
 derive_builder = { workspace = true }
 interim-pgtypes = { path = "../interim-pgtypes" }
+redact = { workspace = true }
 regex = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 sqlx = { workspace = true }
+strum = { version = "0.27.2", features = ["derive"] }
 thiserror = { workspace = true }
 url = { workspace = true }
 uuid = { workspace = true }
interim-models/build.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
+// generated by `sqlx migrate build-script`
+fn main() {
+    // trigger recompilation when a new migration is added
+    println!("cargo:rerun-if-changed=migrations");
+}
@@ -1 +0,0 @@
-drop table if exists users;
@@ -1,6 +0,0 @@
-create table if not exists users (
-    id uuid not null primary key,
-    uid text unique not null,
-    email text not null
-);
-create index on users (uid);
@@ -1 +0,0 @@
-drop table if exists browser_sessions;
@@ -1,8 +0,0 @@
-create table if not exists browser_sessions (
-    id text not null primary key,
-    serialized text not null,
-    created_at timestamptz not null default now(),
-    expiry timestamptz
-);
-create index on browser_sessions (expiry);
-create index on browser_sessions (created_at);
@@ -1,2 +0,0 @@
-drop table if exists base_user_perms;
-drop table if exists bases;
@@ -1,19 +0,0 @@
-create table if not exists bases (
-    id uuid not null primary key,
-    name text not null default '',
-    url text not null,
-    owner_id uuid not null references users(id)
-        on delete restrict,
-    user_role_prefix text not null default '__itmu__'
-);
-create index on bases (owner_id);
-
-create table if not exists base_user_perms (
-    id uuid not null primary key,
-    base_id uuid not null references bases(id),
-    user_id uuid not null references users(id),
-    perm text not null,
-    unique (base_id, user_id, perm)
-);
-create index on base_user_perms (user_id);
-create index on base_user_perms (base_id);
@@ -1 +0,0 @@
-drop table if exists rel_invitations;
@@ -1,12 +0,0 @@
-create table if not exists rel_invitations (
-    id uuid not null primary key,
-    email text not null,
-    base_id uuid not null references bases(id) on delete cascade,
-    class_oid oid not null,
-    created_by uuid not null references users(id) on delete restrict,
-    privilege text not null,
-    expires_at timestamptz,
-    unique (email, base_id, class_oid, privilege)
-);
-create index on rel_invitations (base_id, class_oid);
-create index on rel_invitations (email);
@@ -1,3 +0,0 @@
-drop table if exists fields;
-drop table if exists lenses;
-drop type if exists lens_display_type;
@@ -1,21 +0,0 @@
-create type lens_display_type as enum ('table');
-
-create table if not exists lenses (
-    id uuid not null primary key,
-    name text not null,
-    base_id uuid not null references bases(id) on delete cascade,
-    class_oid oid not null,
-    filter jsonb not null default 'null'::jsonb,
-    order_by jsonb not null default '[]'::jsonb,
-    display_type lens_display_type not null default 'table'
-);
-create index on lenses (base_id);
-
-create table if not exists fields (
-    id uuid not null primary key,
-    lens_id uuid not null references lenses(id) on delete cascade,
-    name text not null,
-    label text,
-    presentation jsonb not null,
-    width_px int not null default 200
-);
interim-models/migrations/20250918060948_init.down.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
+drop table if exists field_form_prompts;
+drop table if exists form_transitions;
+drop table if exists fields;
+drop table if exists portals;
+drop table if exists rel_invitations;
+drop table if exists workspace_user_perms;
+drop table if exists workspaces;
+drop table if exists browser_sessions;
+drop table if exists users;
interim-models/migrations/20250918060948_init.up.sql (new file, 94 lines)
@@ -0,0 +1,94 @@
+-- Users --
+
+create table if not exists users (
+    id uuid not null primary key default uuidv7(),
+    uid text unique not null,
+    email text not null
+);
+create index on users (uid);
+
+-- async_session Browser Sessions --
+
+create table if not exists browser_sessions (
+    id text not null primary key,
+    serialized text not null,
+    created_at timestamptz not null default now(),
+    expiry timestamptz
+);
+create index on browser_sessions (expiry);
+create index on browser_sessions (created_at);
+
+-- Workspaces --
+
+create table if not exists workspaces (
+    id uuid not null primary key default uuidv7(),
+    name text not null default '',
+    url text not null,
+    owner_id uuid not null references users(id) on delete restrict
+);
+create index on workspaces (owner_id);
+
+create table if not exists workspace_user_perms (
+    id uuid not null primary key default uuidv7(),
+    workspace_id uuid not null references workspaces(id) on delete cascade,
+    user_id uuid not null references users(id) on delete cascade,
+    perm text not null,
+    unique (workspace_id, user_id, perm)
+);
+create index on workspace_user_perms (user_id);
+create index on workspace_user_perms (workspace_id);
+
+-- Relation Invitations --
+
+create table if not exists rel_invitations (
+    id uuid not null primary key default uuidv7(),
+    email text not null,
+    workspace_id uuid not null references workspaces(id) on delete cascade,
+    class_oid oid not null,
+    created_by uuid not null references users(id) on delete restrict,
+    privilege text not null,
+    expires_at timestamptz,
+    unique (email, workspace_id, class_oid, privilege)
+);
+create index on rel_invitations (workspace_id, class_oid);
+create index on rel_invitations (email);
+
+-- Portals --
+
+create table if not exists portals (
+    id uuid not null primary key default uuidv7(),
+    name text not null,
+    workspace_id uuid not null references workspaces(id) on delete cascade,
+    class_oid oid not null,
+    table_filter jsonb not null default 'null',
+    table_order_by jsonb not null default '[]'
+);
+create index on portals (workspace_id);
+
+create table if not exists fields (
+    id uuid not null primary key default uuidv7(),
+    portal_id uuid not null references portals(id) on delete cascade,
+    name text not null,
+    presentation jsonb not null,
+    table_label text,
+    table_width_px int not null default 200
+);
+
+-- Forms --
+
+create table if not exists form_transitions (
+    id uuid not null primary key default uuidv7(),
+    source_id uuid not null references portals(id) on delete cascade,
+    dest_id uuid not null references portals(id) on delete restrict,
+    condition jsonb not null default 'null'
+);
+create index on form_transitions (source_id);
+
+create table if not exists field_form_prompts (
+    id uuid not null primary key default uuidv7(),
+    field_id uuid not null references fields(id) on delete cascade,
+    language text not null,
+    content text not null default '',
+    unique (field_id, language)
+);
+create index on field_form_prompts (field_id);
@@ -1,5 +1,6 @@
 use sqlx::{PgConnection, Postgres, pool::PoolConnection};

+#[derive(Debug)]
 pub struct AppDbClient {
     pub(crate) conn: PoolConnection<Postgres>,
 }
@@ -1,3 +1,4 @@
 use chrono::{DateTime, Utc};
 use derive_builder::Builder;
 use interim_pgtypes::pg_attribute::PgAttribute;
 use serde::{Deserialize, Serialize};
@@ -22,14 +23,14 @@ pub struct Field {
     /// Name of the database column.
     pub name: String,

-    /// Optional human friendly label.
-    pub label: Option<String>,
-
     /// Refer to documentation for `Presentation`.
     pub presentation: sqlx::types::Json<Presentation>,

+    /// Optional human friendly label.
+    pub table_label: Option<String>,
+
     /// Width of UI table column in pixels.
-    pub width_px: i32,
+    pub table_width_px: i32,
 }

 impl Field {
@@ -44,9 +45,9 @@ impl Field {
         Presentation::default_from_attr(attr).map(|presentation| Self {
             id: Uuid::now_v7(),
             name: attr.attname.clone(),
-            label: None,
+            table_label: None,
             presentation: sqlx::types::Json(presentation),
-            width_px: 200,
+            table_width_px: 200,
         })
     }

@@ -64,21 +65,24 @@ impl Field {
             "UUID" => {
                 Encodable::Uuid(<Option<Uuid> as Decode<Postgres>>::decode(value_ref).unwrap())
             }
             "TIMESTAMPTZ" => Encodable::Timestamp(
                 <Option<DateTime<Utc>> as Decode<Postgres>>::decode(value_ref).unwrap(),
             ),
             _ => return Err(ParseError::UnknownType),
         })
     }

-    pub fn belonging_to_lens(lens_id: Uuid) -> BelongingToLensQuery {
-        BelongingToLensQuery { lens_id }
+    pub fn belonging_to_portal(portal_id: Uuid) -> BelongingToPortalQuery {
+        BelongingToPortalQuery { portal_id }
     }
 }

 #[derive(Clone, Debug)]
-pub struct BelongingToLensQuery {
-    lens_id: Uuid,
+pub struct BelongingToPortalQuery {
+    portal_id: Uuid,
 }

-impl BelongingToLensQuery {
+impl BelongingToPortalQuery {
     pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result<Vec<Field>, sqlx::Error> {
         query_as!(
             Field,
@@ -86,13 +90,13 @@ impl BelongingToLensQuery {
 select
     id,
     name,
-    label,
+    table_label,
     presentation as "presentation: sqlx::types::Json<Presentation>",
-    width_px
+    table_width_px
 from fields
-where lens_id = $1
+where portal_id = $1
 "#,
-            self.lens_id
+            self.portal_id
         )
         .fetch_all(&mut *app_db.conn)
         .await
@@ -101,13 +105,13 @@ where lens_id = $1

 #[derive(Builder, Clone, Debug)]
 pub struct InsertableField {
-    lens_id: Uuid,
+    portal_id: Uuid,
     name: String,
     #[builder(default)]
-    label: Option<String>,
+    table_label: Option<String>,
     presentation: Presentation,
     #[builder(default = 200)]
-    width_px: i32,
+    table_width_px: i32,
 }

 impl InsertableField {
@@ -116,21 +120,20 @@ impl InsertableField {
             Field,
             r#"
 insert into fields
-    (id, lens_id, name, label, presentation, width_px)
-values ($1, $2, $3, $4, $5, $6)
+    (portal_id, name, table_label, presentation, table_width_px)
+values ($1, $2, $3, $4, $5)
 returning
     id,
     name,
-    label,
+    table_label,
     presentation as "presentation: sqlx::types::Json<Presentation>",
-    width_px
+    table_width_px
 "#,
-            Uuid::now_v7(),
-            self.lens_id,
+            self.portal_id,
             self.name,
-            self.label,
+            self.table_label,
             sqlx::types::Json::<_>(self.presentation) as sqlx::types::Json<Presentation>,
-            self.width_px,
+            self.table_width_px,
         )
         .fetch_one(&mut *app_db.conn)
         .await
@@ -150,8 +153,10 @@ impl InsertableFieldBuilder {
 /// Error when parsing a sqlx value to JSON
 #[derive(Debug, Error)]
 pub enum ParseError {
     // TODO: can this be removed?
     #[error("incompatible json type")]
     BadJsonType,

     #[error("field not found in row")]
     FieldNotFound,
     #[error("unknown postgres type")]
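A minimal caller sketch for the renamed query path above; how the portal ID is obtained is assumed here and is not part of this commit.

```rust
// Hypothetical wiring: list the fields of one portal using the renamed API.
use interim_models::{client::AppDbClient, field::Field};
use uuid::Uuid;

async fn list_fields(app_db: &mut AppDbClient, portal_id: Uuid) -> Result<(), sqlx::Error> {
    // `belonging_to_lens` became `belonging_to_portal`; the query now filters
    // on `portal_id` and returns `table_label` / `table_width_px`.
    let fields = Field::belonging_to_portal(portal_id).fetch_all(app_db).await?;
    for field in fields {
        println!("{} ({}px)", field.name, field.table_width_px);
    }
    Ok(())
}
```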
interim-models/src/field_form_prompt.rs (new file, 129 lines)
@@ -0,0 +1,129 @@
+use derive_builder::Builder;
+use serde::{Deserialize, Serialize};
+use sqlx::query_as;
+use uuid::Uuid;
+
+use crate::{client::AppDbClient, language::Language};
+
+/// A localized prompt to display above or alongside the form input for the
+/// given field.
+///
+/// There may be zero or one `field_form_prompt` entry for each
+/// `(field_id, language)` pair. (This uniqueness should be enforced by the
+/// database.)
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct FieldFormPrompt {
+    /// Primary key (defaults to UUIDv7).
+    pub id: Uuid,
+
+    /// ID of the field to which this prompt belongs.
+    pub field_id: Uuid,
+
+    /// [ISO 639-3](https://en.wikipedia.org/wiki/List_of_ISO_639-3_codes)
+    /// language code.
+    pub language: Language,
+
+    /// Prompt content for this field, in this language.
+    pub content: String,
+}
+
+impl FieldFormPrompt {
+    /// Build an insert statement to create a new prompt.
+    pub fn insert() -> InsertableBuilder {
+        InsertableBuilder::default()
+    }
+
+    /// Build an update statement to alter the content of an existing prompt.
+    pub fn update() -> UpdateBuilder {
+        UpdateBuilder::default()
+    }
+
+    /// Build a single-field query by field ID.
+    pub fn belonging_to_field(id: Uuid) -> BelongingToFieldQuery {
+        BelongingToFieldQuery { id }
+    }
+}
+
+#[derive(Builder, Clone, Debug)]
+pub struct Insertable {
+    field_id: Uuid,
+    language: Language,
+    content: String,
+}
+
+impl Insertable {
+    pub async fn execute(self, app_db: &mut AppDbClient) -> Result<FieldFormPrompt, sqlx::Error> {
+        query_as!(
+            FieldFormPrompt,
+            r#"
+insert into field_form_prompts (field_id, language, content) values ($1, $2, $3)
+returning
+    id,
+    field_id,
+    language as "language: Language",
+    content
+"#,
+            self.field_id,
+            self.language.to_string(),
+            self.content,
+        )
+        .fetch_one(app_db.get_conn())
+        .await
+    }
+}
+
+#[derive(Builder, Clone, Debug, Default)]
+pub struct Update {
+    id: Uuid,
+    content: String,
+}
+
+impl Update {
+    pub async fn execute(self, app_db: &mut AppDbClient) -> Result<FieldFormPrompt, sqlx::Error> {
+        query_as!(
+            FieldFormPrompt,
+            r#"
+update field_form_prompts
+set content = $1
+where id = $2
+returning
+    id,
+    field_id,
+    language as "language: Language",
+    content
+"#,
+            self.content,
+            self.id,
+        )
+        .fetch_one(app_db.get_conn())
+        .await
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct BelongingToFieldQuery {
+    id: Uuid,
+}
+
+impl BelongingToFieldQuery {
+    pub async fn fetch_all(
+        self,
+        app_db: &mut AppDbClient,
+    ) -> Result<Vec<FieldFormPrompt>, sqlx::Error> {
+        query_as!(
+            FieldFormPrompt,
+            r#"
+select
+    id,
+    field_id,
+    language as "language: Language",
+    content
+from field_form_prompts
+where field_id = $1
+"#,
+            self.id,
+        )
+        .fetch_all(app_db.get_conn())
+        .await
+    }
+}
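A sketch of driving the new builder-style insert, assuming the setter methods generated by derive_builder and an anyhow-style caller; this snippet is illustrative and not part of the commit.

```rust
// Hypothetical caller for FieldFormPrompt::insert().
use interim_models::{client::AppDbClient, field_form_prompt::FieldFormPrompt, language::Language};
use uuid::Uuid;

async fn add_prompt(app_db: &mut AppDbClient, field_id: Uuid) -> anyhow::Result<()> {
    let prompt = FieldFormPrompt::insert()
        .field_id(field_id)
        .language(Language::Eng)
        .content("How should we contact you?".to_string())
        .build()? // derive_builder errors if a required field was never set
        .execute(app_db)
        .await?;
    // The (field_id, language) pair is unique, so a second insert for `Eng`
    // on the same field would be rejected by the database.
    println!("created prompt {}", prompt.id);
    Ok(())
}
```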
interim-models/src/form_transition.rs (new file, 106 lines)
@@ -0,0 +1,106 @@
+use derive_builder::Builder;
+use sqlx::{query_as, types::Json};
+use uuid::Uuid;
+
+use crate::{client::AppDbClient, expression::PgExpressionAny};
+
+/// A form transition directionally connects two portals within the same
+/// workspace, representing a potential navigation of a user between two forms.
+/// When the user submits a form, the form transitions whose `source_id`
+/// corresponds to that portal are evaluated one by one (in order by ID, that
+/// is, by creation time), and the first with a condition evaluating to true is
+/// used to direct the user to the form corresponding to portal `dest_id`.
+#[derive(Clone, Debug)]
+pub struct FormTransition {
+    /// Primary key (defaults to UUIDv7).
+    pub id: Uuid,
+
+    /// When a user is filling out a sequence of forms, this is the ID of the
+    /// portal for which they have just submitted a form.
+    ///
+    /// **The source portal is expected to belong to the same workspace as the
+    /// destination portal.**
+    pub source_id: Uuid,
+
+    /// When a user is filling out a sequence of forms, this is the ID of the
+    /// portal to which they will be directed if the condition evaluates to
+    /// true.
+    ///
+    /// **The destination portal is expected to belong to the same workspace
+    /// as the source portal.**
+    pub dest_id: Uuid,
+
+    /// A semi-arbitrary Postgres expression; the transition may be followed
+    /// only if the expression evaluates to true at the time of the source
+    /// form's submission.
+    pub condition: Json<Option<PgExpressionAny>>,
+}
+
+impl FormTransition {
+    /// Build an insert statement to create a new transition.
+    pub fn insert() -> InsertableBuilder {
+        InsertableBuilder::default()
+    }
+
+    /// Build a single-field query by source portal ID.
+    pub fn with_source(id: Uuid) -> WithSourceQuery {
+        WithSourceQuery { id }
+    }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct WithSourceQuery {
+    id: Uuid,
+}
+
+impl WithSourceQuery {
+    pub async fn fetch_all(
+        self,
+        app_db: &mut AppDbClient,
+    ) -> Result<Vec<FormTransition>, sqlx::Error> {
+        query_as!(
+            FormTransition,
+            r#"
+select
+    id,
+    source_id,
+    dest_id,
+    condition as "condition: Json<Option<PgExpressionAny>>"
+from form_transitions
+where source_id = $1
+"#,
+            self.id,
+        )
+        .fetch_all(app_db.get_conn())
+        .await
+    }
+}
+
+#[derive(Builder, Clone, Debug)]
+pub struct Insertable {
+    source_id: Uuid,
+    dest_id: Uuid,
+    condition: Option<PgExpressionAny>,
+}
+
+impl Insertable {
+    pub async fn execute(self, app_db: &mut AppDbClient) -> Result<FormTransition, sqlx::Error> {
+        query_as!(
+            FormTransition,
+            r#"
+insert into form_transitions (source_id, dest_id, condition)
+values ($1, $2, $3)
+returning
+    id,
+    source_id,
+    dest_id,
+    condition as "condition: Json<Option<PgExpressionAny>>"
+"#,
+            self.source_id,
+            self.dest_id,
+            Json(self.condition) as Json<Option<PgExpressionAny>>,
+        )
+        .fetch_one(app_db.get_conn())
+        .await
+    }
+}
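An illustrative sketch of the selection rule described in the struct's doc comment. The condition handling here is a placeholder assumption (a null condition is treated as "always true"); evaluating `PgExpressionAny` is not included in this commit.

```rust
// Hypothetical routing step after a form submission.
use interim_models::{client::AppDbClient, form_transition::FormTransition};
use uuid::Uuid;

async fn next_portal(
    app_db: &mut AppDbClient,
    submitted_portal_id: Uuid,
) -> Result<Option<Uuid>, sqlx::Error> {
    let mut transitions = FormTransition::with_source(submitted_portal_id)
        .fetch_all(app_db)
        .await?;
    // UUIDv7 primary keys sort by creation time, matching the documented order.
    transitions.sort_by_key(|t| t.id);
    // Placeholder rule: follow the first transition with no condition set.
    Ok(transitions
        .into_iter()
        .find(|t| t.condition.0.is_none())
        .map(|t| t.dest_id))
}
```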
interim-models/src/language.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
+use serde::{Deserialize, Serialize};
+use sqlx::Decode;
+use strum::{EnumIter, EnumString};
+
+/// Languages represented as
+/// [ISO 639-3 codes](https://en.wikipedia.org/wiki/List_of_ISO_639-3_codes).
+#[derive(
+    Clone, Debug, Decode, Deserialize, strum::Display, PartialEq, Serialize, EnumIter, EnumString,
+)]
+#[serde(rename_all = "lowercase")]
+#[strum(serialize_all = "lowercase")]
+// [`sqlx`] implements Decode and Encode to/from the Postgres `TEXT` type based
+// on the [`std::fmt::Display`] and [`std::str::FromStr`] traits, so it should
+// use the transformations applied by [`strum`].
+// <https://docs.rs/sqlx/latest/sqlx/types/struct.Text.html>
+pub enum Language {
+    Deu,
+    Eng,
+    Spa,
+}
+
+impl Default for Language {
+    /// Language defaults to English when necessary, as the product is being
+    /// developed with a primarily English speaking/reading/writing market in
+    /// mind.
+    fn default() -> Self {
+        Self::Eng
+    }
+}
+
+impl Language {
+    pub fn as_locale_str(&self) -> &'static str {
+        match self {
+            Self::Deu => "Deutsch",
+            Self::Eng => "English",
+            Self::Spa => "Español",
+        }
+    }
+}
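A small round-trip check of the lowercase mapping the comment above depends on; it could live as a unit test next to the enum, but the test itself is not part of this commit.

```rust
// Sketch: strum's Display/FromStr both use the lowercase form, which is what
// the `language` TEXT column stores.
use std::str::FromStr;
use interim_models::language::Language;

#[test]
fn language_round_trip() {
    assert_eq!(Language::Deu.to_string(), "deu");
    assert_eq!(Language::from_str("deu").unwrap(), Language::Deu);
    assert_eq!(Language::default(), Language::Eng);
    assert_eq!(Language::Spa.as_locale_str(), "Español");
}
```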
@@ -1,11 +1,15 @@
-pub mod base;
 pub mod client;
 pub mod encodable;
 pub mod expression;
 pub mod field;
-pub mod lens;
+pub mod field_form_prompt;
+pub mod form_transition;
+pub mod language;
+pub mod portal;
 pub mod presentation;
 pub mod rel_invitation;
 pub mod user;
+pub mod workspace;
+pub mod workspace_user_perm;

 pub static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!();
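For context on how build.rs, the consolidated init migration, and `MIGRATOR` fit together: a hedged startup sketch, with the pool setup assumed rather than taken from this commit.

```rust
// The build script re-runs sqlx's migration embedding whenever the migrations
// directory changes, and the embedded MIGRATOR applies
// 20250918060948_init.up.sql (and any later migrations) to the app database.
use sqlx::postgres::PgPoolOptions;

async fn migrate(database_url: &str) -> anyhow::Result<()> {
    let pool = PgPoolOptions::new().connect(database_url).await?;
    interim_models::MIGRATOR.run(&pool).await?;
    Ok(())
}
```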
@ -5,31 +5,46 @@ use uuid::Uuid;
|
|||
|
||||
use crate::{client::AppDbClient, expression::PgExpressionAny};
|
||||
|
||||
/// A portal is a derivative representation of a Postgres relation.
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct Lens {
|
||||
pub struct Portal {
|
||||
/// Primary key (defaults to UUIDv7).
|
||||
pub id: Uuid,
|
||||
|
||||
/// Human friendly name for portal.
|
||||
pub name: String,
|
||||
pub base_id: Uuid,
|
||||
|
||||
/// Workspace to which this portal belongs.
|
||||
pub workspace_id: Uuid,
|
||||
|
||||
/// OID of the underlying Postgres relation. Currently, this is expected
|
||||
/// to be a normal table, not a view, etc.
|
||||
pub class_oid: Oid,
|
||||
pub display_type: LensDisplayType,
|
||||
pub filter: Json<Option<PgExpressionAny>>,
|
||||
|
||||
/// JSONB-encoded expression to use for filtering rows in the web-based
|
||||
/// table view.
|
||||
pub table_filter: Json<Option<PgExpressionAny>>,
|
||||
}
|
||||
|
||||
impl Lens {
|
||||
pub fn insertable_builder() -> InsertableLensBuilder {
|
||||
InsertableLensBuilder::default()
|
||||
impl Portal {
|
||||
/// Build an insert statement to create a new portal.
|
||||
pub fn insert() -> InsertablePortalBuilder {
|
||||
InsertablePortalBuilder::default()
|
||||
}
|
||||
|
||||
pub fn update() -> LensUpdateBuilder {
|
||||
LensUpdateBuilder::default()
|
||||
/// Build an update statement to alter an existing portal.
|
||||
pub fn update() -> PortalUpdateBuilder {
|
||||
PortalUpdateBuilder::default()
|
||||
}
|
||||
|
||||
/// Build a single-field query by portal ID.
|
||||
pub fn with_id(id: Uuid) -> WithIdQuery {
|
||||
WithIdQuery { id }
|
||||
}
|
||||
|
||||
pub fn belonging_to_base(base_id: Uuid) -> BelongingToBaseQuery {
|
||||
BelongingToBaseQuery { base_id }
|
||||
/// Build a query by workspace ID and relation OID.
|
||||
pub fn belonging_to_workspace(workspace_id: Uuid) -> BelongingToWorkspaceQuery {
|
||||
BelongingToWorkspaceQuery { workspace_id }
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -42,18 +57,17 @@ impl WithIdQuery {
|
|||
pub async fn fetch_optional(
|
||||
self,
|
||||
app_db: &mut AppDbClient,
|
||||
) -> Result<Option<Lens>, sqlx::Error> {
|
||||
) -> Result<Option<Portal>, sqlx::Error> {
|
||||
query_as!(
|
||||
Lens,
|
||||
Portal,
|
||||
r#"
|
||||
select
|
||||
id,
|
||||
name,
|
||||
base_id,
|
||||
workspace_id,
|
||||
class_oid,
|
||||
display_type as "display_type: LensDisplayType",
|
||||
filter as "filter: Json<Option<PgExpressionAny>>"
|
||||
from lenses
|
||||
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||
from portals
|
||||
where id = $1
|
||||
"#,
|
||||
self.id
|
||||
|
|
@ -62,18 +76,17 @@ where id = $1
|
|||
.await
|
||||
}
|
||||
|
||||
pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result<Lens, sqlx::Error> {
|
||||
pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result<Portal, sqlx::Error> {
|
||||
query_as!(
|
||||
Lens,
|
||||
Portal,
|
||||
r#"
|
||||
select
|
||||
id,
|
||||
name,
|
||||
base_id,
|
||||
workspace_id,
|
||||
class_oid,
|
||||
display_type as "display_type: LensDisplayType",
|
||||
filter as "filter: Json<Option<PgExpressionAny>>"
|
||||
from lenses
|
||||
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||
from portals
|
||||
where id = $1
|
||||
"#,
|
||||
self.id
|
||||
|
|
@ -84,14 +97,14 @@ where id = $1
|
|||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BelongingToBaseQuery {
|
||||
base_id: Uuid,
|
||||
pub struct BelongingToWorkspaceQuery {
|
||||
workspace_id: Uuid,
|
||||
}
|
||||
|
||||
impl BelongingToBaseQuery {
|
||||
impl BelongingToWorkspaceQuery {
|
||||
pub fn belonging_to_rel(self, rel_oid: Oid) -> BelongingToRelQuery {
|
||||
BelongingToRelQuery {
|
||||
base_id: self.base_id,
|
||||
workspace_id: self.workspace_id,
|
||||
rel_oid,
|
||||
}
|
||||
}
|
||||
|
|
@ -99,26 +112,25 @@ impl BelongingToBaseQuery {
|
|||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BelongingToRelQuery {
|
||||
base_id: Uuid,
|
||||
workspace_id: Uuid,
|
||||
rel_oid: Oid,
|
||||
}
|
||||
|
||||
impl BelongingToRelQuery {
|
||||
pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result<Vec<Lens>, sqlx::Error> {
|
||||
pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result<Vec<Portal>, sqlx::Error> {
|
||||
query_as!(
|
||||
Lens,
|
||||
Portal,
|
||||
r#"
|
||||
select
|
||||
id,
|
||||
name,
|
||||
base_id,
|
||||
workspace_id,
|
||||
class_oid,
|
||||
display_type as "display_type: LensDisplayType",
|
||||
filter as "filter: Json<Option<PgExpressionAny>>"
|
||||
from lenses
|
||||
where base_id = $1 and class_oid = $2
|
||||
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||
from portals
|
||||
where workspace_id = $1 and class_oid = $2
|
||||
"#,
|
||||
self.base_id,
|
||||
self.workspace_id,
|
||||
self.rel_oid
|
||||
)
|
||||
.fetch_all(&mut *app_db.conn)
|
||||
|
|
@ -133,34 +145,30 @@ pub enum LensDisplayType {
|
|||
}
|
||||
|
||||
#[derive(Builder, Clone, Debug)]
|
||||
pub struct InsertableLens {
|
||||
pub struct InsertablePortal {
|
||||
name: String,
|
||||
base_id: Uuid,
|
||||
workspace_id: Uuid,
|
||||
class_oid: Oid,
|
||||
display_type: LensDisplayType,
|
||||
}
|
||||
|
||||
impl InsertableLens {
|
||||
pub async fn insert(self, app_db: &mut AppDbClient) -> Result<Lens, sqlx::Error> {
|
||||
impl InsertablePortal {
|
||||
pub async fn execute(self, app_db: &mut AppDbClient) -> Result<Portal, sqlx::Error> {
|
||||
query_as!(
|
||||
Lens,
|
||||
Portal,
|
||||
r#"
|
||||
insert into lenses
|
||||
(id, base_id, class_oid, name, display_type)
|
||||
values ($1, $2, $3, $4, $5)
|
||||
insert into portals
|
||||
(workspace_id, class_oid, name)
|
||||
values ($1, $2, $3)
|
||||
returning
|
||||
id,
|
||||
name,
|
||||
base_id,
|
||||
workspace_id,
|
||||
class_oid,
|
||||
display_type as "display_type: LensDisplayType",
|
||||
filter as "filter: Json<Option<PgExpressionAny>>"
|
||||
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||
"#,
|
||||
Uuid::now_v7(),
|
||||
self.base_id,
|
||||
self.workspace_id,
|
||||
self.class_oid,
|
||||
self.name,
|
||||
self.display_type as LensDisplayType
|
||||
)
|
||||
.fetch_one(&mut *app_db.conn)
|
||||
.await
|
||||
|
|
@ -168,17 +176,17 @@ returning
|
|||
}
|
||||
|
||||
#[derive(Builder, Clone, Debug)]
|
||||
pub struct LensUpdate {
|
||||
pub struct PortalUpdate {
|
||||
id: Uuid,
|
||||
#[builder(setter(strip_option = true))]
|
||||
filter: Option<Option<PgExpressionAny>>,
|
||||
}
|
||||
|
||||
impl LensUpdate {
|
||||
impl PortalUpdate {
|
||||
pub async fn execute(self, app_db: &mut AppDbClient) -> Result<(), sqlx::Error> {
|
||||
if let Some(filter) = self.filter {
|
||||
query!(
|
||||
"update lenses set filter = $1 where id = $2",
|
||||
"update portals set table_filter = $1 where id = $2",
|
||||
Json(filter) as Json<Option<PgExpressionAny>>,
|
||||
self.id
|
||||
)
|
||||
|
|
@ -1,10 +1,11 @@
|
|||
use interim_pgtypes::pg_attribute::PgAttribute;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumIter, EnumString};
|
||||
|
||||
pub const RFC_3339_S: &str = "%Y-%m-%dT%H:%M:%S";
|
||||
|
||||
/// Struct defining how a field's is displayed and how it accepts input in UI.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
#[derive(Clone, Debug, Deserialize, EnumIter, EnumString, PartialEq, Serialize, strum::Display)]
|
||||
#[serde(tag = "t", content = "c")]
|
||||
pub enum Presentation {
|
||||
Array { inner: Box<Presentation> },
|
||||
|
|
@ -53,9 +54,27 @@ impl Presentation {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
impl Default for Presentation {
|
||||
/// Defaults to [`Self::Text`] as a reasonable fallback. The [`Default`]
|
||||
/// trait is implemented for convenience, but in the vast majority of cases
|
||||
/// the presentation value should be well defined and this should not be
|
||||
/// called directly.
|
||||
fn default() -> Self {
|
||||
Self::Text {
|
||||
input_mode: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, EnumString, EnumIter, PartialEq, Serialize, strum::Display)]
|
||||
#[serde(tag = "t", content = "c")]
|
||||
pub enum TextInputMode {
|
||||
SingleLine {},
|
||||
MultiLine {},
|
||||
}
|
||||
|
||||
impl Default for TextInputMode {
|
||||
fn default() -> Self {
|
||||
Self::MultiLine {}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ use crate::client::AppDbClient;
|
|||
pub struct RelInvitation {
|
||||
pub id: Uuid,
|
||||
pub email: String,
|
||||
pub base_id: Uuid,
|
||||
pub workspace_id: Uuid,
|
||||
pub class_oid: Oid,
|
||||
pub created_by: Uuid,
|
||||
pub privilege: String,
|
||||
|
|
@ -53,7 +53,7 @@ where class_oid = $1
|
|||
#[derive(Builder, Clone, Debug)]
|
||||
pub struct UpsertableRelInvitation {
|
||||
email: String,
|
||||
base_id: Uuid,
|
||||
workspace_id: Uuid,
|
||||
class_oid: Oid,
|
||||
created_by: Uuid,
|
||||
privilege: PgPrivilegeType,
|
||||
|
|
@ -67,16 +67,15 @@ impl UpsertableRelInvitation {
|
|||
RelInvitation,
|
||||
"
|
||||
insert into rel_invitations
|
||||
(id, email, base_id, class_oid, privilege, created_by, expires_at)
|
||||
values ($1, $2, $3, $4, $5, $6, $7)
|
||||
on conflict (email, base_id, class_oid, privilege) do update set
|
||||
(email, workspace_id, class_oid, privilege, created_by, expires_at)
|
||||
values ($1, $2, $3, $4, $5, $6)
|
||||
on conflict (email, workspace_id, class_oid, privilege) do update set
|
||||
created_by = excluded.created_by,
|
||||
expires_at = excluded.expires_at
|
||||
returning *
|
||||
",
|
||||
Uuid::now_v7(),
|
||||
self.email,
|
||||
self.base_id,
|
||||
self.workspace_id,
|
||||
self.class_oid,
|
||||
self.privilege.to_abbrev().to_string(),
|
||||
self.created_by,
|
||||
|
|
|
|||
|
|
@ -1,27 +1,39 @@
|
|||
use derive_builder::Builder;
|
||||
use redact::Secret;
|
||||
use sqlx::query_as;
|
||||
use url::Url;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::client::AppDbClient;
|
||||
|
||||
/// A workspace is 1:1 with a Postgres "database".
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Base {
|
||||
pub struct Workspace {
|
||||
/// Primary key (defaults to UUIDv7).
|
||||
pub id: Uuid,
|
||||
|
||||
/// Human friendly name for the workspace.
|
||||
pub name: String,
|
||||
pub url: String,
|
||||
|
||||
/// `postgresql://` URL of the instance and database hosting this workspace.
|
||||
pub url: Secret<String>,
|
||||
|
||||
/// ID of the user account that created this workspace.
|
||||
pub owner_id: Uuid,
|
||||
pub user_role_prefix: String,
|
||||
}
|
||||
|
||||
impl Base {
|
||||
pub fn insertable_builder() -> InsertableBaseBuilder {
|
||||
InsertableBaseBuilder::default()
|
||||
impl Workspace {
|
||||
/// Build an insert statement to create a new workspace.
|
||||
pub fn insert() -> InsertableWorkspaceBuilder {
|
||||
InsertableWorkspaceBuilder::default()
|
||||
}
|
||||
|
||||
/// Build a single-field query by workspace ID.
|
||||
pub fn with_id(id: Uuid) -> WithIdQuery {
|
||||
WithIdQuery { id }
|
||||
}
|
||||
|
||||
/// Build a query for workspaces filtered by a user's Phono permissions.
|
||||
pub fn with_permission_in<I: IntoIterator<Item = &'static str>>(
|
||||
perms: I,
|
||||
) -> WithPermissionInQueryPartial {
|
||||
|
|
@ -49,13 +61,13 @@ pub struct WithPermissionInQuery {
|
|||
}
|
||||
|
||||
impl WithPermissionInQuery {
|
||||
pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result<Vec<Base>, sqlx::Error> {
|
||||
pub async fn fetch_all(self, app_db: &mut AppDbClient) -> Result<Vec<Workspace>, sqlx::Error> {
|
||||
query_as!(
|
||||
Base,
|
||||
Workspace,
|
||||
"
|
||||
select bases.*
|
||||
from bases inner join base_user_perms as p
|
||||
on p.base_id = bases.id
|
||||
select workspaces.*
|
||||
from workspaces inner join workspace_user_perms as p
|
||||
on p.workspace_id = workspaces.id
|
||||
where p.user_id = $1 and perm = ANY($2)
|
||||
",
|
||||
self.user_id,
|
||||
|
|
@ -74,37 +86,44 @@ impl WithIdQuery {
|
|||
pub async fn fetch_optional(
|
||||
self,
|
||||
app_db: &mut AppDbClient,
|
||||
) -> Result<Option<Base>, sqlx::Error> {
|
||||
query_as!(Base, "select * from bases where id = $1", &self.id)
|
||||
) -> Result<Option<Workspace>, sqlx::Error> {
|
||||
query_as!(
|
||||
Workspace,
|
||||
"select * from workspaces where id = $1",
|
||||
&self.id
|
||||
)
|
||||
.fetch_optional(&mut *app_db.conn)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result<Base, sqlx::Error> {
|
||||
query_as!(Base, "select * from bases where id = $1", &self.id)
|
||||
pub async fn fetch_one(self, app_db: &mut AppDbClient) -> Result<Workspace, sqlx::Error> {
|
||||
query_as!(
|
||||
Workspace,
|
||||
"select * from workspaces where id = $1",
|
||||
&self.id
|
||||
)
|
||||
.fetch_one(&mut *app_db.conn)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Builder)]
|
||||
pub struct InsertableBase {
|
||||
url: String,
|
||||
pub struct InsertableWorkspace {
|
||||
url: Url,
|
||||
owner_id: Uuid,
|
||||
}
|
||||
|
||||
impl InsertableBase {
|
||||
pub async fn insert(self, app_db: &mut AppDbClient) -> Result<Base, sqlx::Error> {
|
||||
impl InsertableWorkspace {
|
||||
pub async fn insert(self, app_db: &mut AppDbClient) -> Result<Workspace, sqlx::Error> {
|
||||
query_as!(
|
||||
Base,
|
||||
Workspace,
|
||||
"
|
||||
insert into bases
|
||||
(id, url, owner_id)
|
||||
values ($1, $2, $3)
|
||||
insert into workspaces
|
||||
(url, owner_id)
|
||||
values ($1, $2)
|
||||
returning *
|
||||
",
|
||||
Uuid::now_v7(),
|
||||
self.url,
|
||||
self.url.to_string(),
|
||||
self.owner_id
|
||||
)
|
||||
.fetch_one(&mut *app_db.conn)
|
||||
130
interim-models/src/workspace_user_perm.rs
Normal file
130
interim-models/src/workspace_user_perm.rs
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use derive_builder::Builder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::{Decode, Encode, Postgres, query_as};
|
||||
use strum::EnumString;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::client::AppDbClient;
|
||||
|
||||
/// Assigns an access control permission on a workspace to a user. These are
|
||||
/// derived from the permission grants of the workspace's backing database.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct WorkspaceUserPerm {
|
||||
/// Primary key (defaults to UUIDv7).
|
||||
pub id: Uuid,
|
||||
|
||||
/// Workspace to which the permission belongs.
|
||||
pub workspace_id: Uuid,
|
||||
|
||||
/// **Synthesized field** generated by joining to the `workspaces` table.
|
||||
pub workspace_name: String,
|
||||
|
||||
/// User to which the permission belongs.
|
||||
pub user_id: Uuid,
|
||||
|
||||
/// Permission assigned to the user (currently only "connect").
|
||||
pub perm: PermissionValue,
|
||||
}
|
||||
|
||||
impl WorkspaceUserPerm {
|
||||
/// Construct a single-field query to fetch workspace permissions assigned
|
||||
/// to a user.
|
||||
pub fn belonging_to_user(id: Uuid) -> BelongingToUserQuery {
|
||||
BelongingToUserQuery { id }
|
||||
}
|
||||
|
||||
/// Build an insert statement to create a new object.
|
||||
pub fn insert() -> InsertBuilder {
|
||||
InsertBuilder::default()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BelongingToUserQuery {
|
||||
id: Uuid,
|
||||
}
|
||||
|
||||
impl BelongingToUserQuery {
|
||||
pub async fn fetch_all(
|
||||
self,
|
||||
app_db: &mut AppDbClient,
|
||||
) -> Result<Vec<WorkspaceUserPerm>, sqlx::Error> {
|
||||
query_as!(
|
||||
WorkspaceUserPerm,
|
||||
r#"
|
||||
select
|
||||
p.id as id,
|
||||
p.workspace_id as workspace_id,
|
||||
p.user_id as user_id,
|
||||
p.perm as "perm: PermissionValue",
|
||||
w.name as workspace_name
|
||||
from workspace_user_perms as p
|
||||
inner join workspaces as w
|
||||
on w.id = p.workspace_id
|
||||
where p.user_id = $1
|
||||
"#,
|
||||
self.id,
|
||||
)
|
||||
.fetch_all(app_db.get_conn())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Builder, Clone, Debug)]
|
||||
pub struct Insert {
|
||||
workspace_id: Uuid,
|
||||
user_id: Uuid,
|
||||
perm: PermissionValue,
|
||||
}
|
||||
|
||||
impl Insert {
|
||||
pub async fn execute(self, app_db: &mut AppDbClient) -> Result<WorkspaceUserPerm, sqlx::Error> {
|
||||
query_as!(
|
||||
WorkspaceUserPerm,
|
||||
r#"
|
||||
with p as (
|
||||
insert into workspace_user_perms (workspace_id, user_id, perm) values ($1, $2, $3)
|
||||
returning
|
||||
id,
|
||||
workspace_id,
|
||||
user_id,
|
||||
perm
|
||||
)
|
||||
select
|
||||
p.id as id,
|
||||
p.workspace_id as workspace_id,
|
||||
p.user_id as user_id,
|
||||
p.perm as "perm: PermissionValue",
|
||||
w.name as workspace_name
|
||||
from workspace_user_perms as p
|
||||
inner join workspaces as w
|
||||
on w.id = p.workspace_id
|
||||
"#,
|
||||
self.workspace_id,
|
||||
self.user_id,
|
||||
self.perm.to_string(),
|
||||
)
|
||||
.fetch_one(app_db.get_conn())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: The sqlx::Decode derive macro doesn't follow the strum serialization.
|
||||
// Does sqlx::Encode?
|
||||
#[derive(Clone, Debug, Deserialize, Encode, EnumString, PartialEq, Serialize, strum::Display)]
|
||||
#[serde(rename = "snake_case")]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum PermissionValue {
|
||||
Connect,
|
||||
}
|
||||
|
||||
impl Decode<'_, Postgres> for PermissionValue {
|
||||
fn decode(
|
||||
value: <Postgres as sqlx::Database>::ValueRef<'_>,
|
||||
) -> Result<Self, sqlx::error::BoxDynError> {
|
||||
let value = <&str as Decode<Postgres>>::decode(value)?;
|
||||
Ok(Self::from_str(value)?)
|
||||
}
|
||||
}
|
||||
|
|
@ -2,11 +2,13 @@ use sqlx::{PgConnection, Postgres, Row as _, pool::PoolConnection, query};
|
|||
|
||||
use crate::escape_identifier;
|
||||
|
||||
pub struct BaseClient {
|
||||
/// Newtype to differentiate between workspace and application database
|
||||
/// connections.
|
||||
pub struct WorkspaceClient {
|
||||
pub(crate) conn: PoolConnection<Postgres>,
|
||||
}
|
||||
|
||||
impl BaseClient {
|
||||
impl WorkspaceClient {
|
||||
pub fn from_pool_conn(conn: PoolConnection<Postgres>) -> Self {
|
||||
Self { conn }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
use serde::Serialize;
|
||||
use sqlx::{postgres::types::Oid, query_as};
|
||||
|
||||
use crate::client::BaseClient;
|
||||
use crate::client::WorkspaceClient;
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
pub struct PgAttribute {
|
||||
|
|
@ -59,7 +59,10 @@ pub struct AllForRelQuery {
|
|||
}
|
||||
|
||||
impl AllForRelQuery {
|
||||
pub async fn fetch_all(self, client: &mut BaseClient) -> Result<Vec<PgAttribute>, sqlx::Error> {
|
||||
pub async fn fetch_all(
|
||||
self,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Vec<PgAttribute>, sqlx::Error> {
|
||||
query_as!(
|
||||
PgAttribute,
|
||||
r#"
|
||||
|
|
@ -96,7 +99,10 @@ pub struct PkeysForRelQuery {
|
|||
}
|
||||
|
||||
impl PkeysForRelQuery {
|
||||
pub async fn fetch_all(self, client: &mut BaseClient) -> Result<Vec<PgAttribute>, sqlx::Error> {
|
||||
pub async fn fetch_all(
|
||||
self,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Vec<PgAttribute>, sqlx::Error> {
|
||||
query_as!(
|
||||
PgAttribute,
|
||||
r#"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
use sqlx::{postgres::types::Oid, query_as};
|
||||
|
||||
use crate::{client::BaseClient, escape_identifier, pg_acl::PgAclItem, pg_namespace::PgNamespace};
|
||||
use crate::{
|
||||
client::WorkspaceClient, escape_identifier, pg_acl::PgAclItem, pg_namespace::PgNamespace,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PgClass {
|
||||
|
|
@ -46,7 +48,7 @@ pub struct PgClass {
|
|||
impl PgClass {
|
||||
pub async fn fetch_namespace(
|
||||
&self,
|
||||
client: &mut BaseClient,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<PgNamespace, sqlx::Error> {
|
||||
PgNamespace::fetch_by_oid(self.relnamespace, &mut *client.conn)
|
||||
.await?
|
||||
|
|
@ -115,7 +117,7 @@ where
|
|||
}
|
||||
|
||||
impl WithOidQuery {
|
||||
pub async fn fetch_one(self, client: &mut BaseClient) -> Result<PgClass, sqlx::Error> {
|
||||
pub async fn fetch_one(self, client: &mut WorkspaceClient) -> Result<PgClass, sqlx::Error> {
|
||||
with_oid_sqlx_query!(self.oid)
|
||||
.fetch_one(&mut *client.conn)
|
||||
.await
|
||||
|
|
@ -123,7 +125,7 @@ impl WithOidQuery {
|
|||
|
||||
pub async fn fetch_optional(
|
||||
self,
|
||||
client: &mut BaseClient,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Option<PgClass>, sqlx::Error> {
|
||||
with_oid_sqlx_query!(self.oid)
|
||||
.fetch_optional(&mut *client.conn)
|
||||
|
|
@ -136,7 +138,10 @@ pub struct WithKindInQuery {
|
|||
}
|
||||
|
||||
impl WithKindInQuery {
|
||||
pub async fn fetch_all(self, client: &mut BaseClient) -> Result<Vec<PgClass>, sqlx::Error> {
|
||||
pub async fn fetch_all(
|
||||
self,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Vec<PgClass>, sqlx::Error> {
|
||||
let kinds_i8: Vec<_> = self
|
||||
.kinds
|
||||
.into_iter()
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
use sqlx::{postgres::types::Oid, query_as};
|
||||
|
||||
use crate::{client::BaseClient, pg_acl::PgAclItem};
|
||||
use crate::{client::WorkspaceClient, pg_acl::PgAclItem};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PgDatabase {
|
||||
|
|
@ -48,7 +48,7 @@ impl PgDatabase {
|
|||
pub struct CurrentQuery {}
|
||||
|
||||
impl CurrentQuery {
|
||||
pub async fn fetch_one(self, client: &mut BaseClient) -> Result<PgDatabase, sqlx::Error> {
|
||||
pub async fn fetch_one(self, client: &mut WorkspaceClient) -> Result<PgDatabase, sqlx::Error> {
|
||||
query_as!(
|
||||
PgDatabase,
|
||||
r#"
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ use sqlx::{postgres::types::Oid, prelude::FromRow, query_as};
|
|||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::client::BaseClient;
|
||||
use crate::client::WorkspaceClient;
|
||||
|
||||
#[derive(Clone, Debug, Eq, Hash, FromRow, PartialEq)]
|
||||
pub struct PgRole {
|
||||
|
|
@ -43,7 +43,10 @@ pub struct WithNameInQuery {
|
|||
}
|
||||
|
||||
impl WithNameInQuery {
|
||||
pub async fn fetch_all(&self, client: &mut BaseClient) -> Result<Vec<PgRole>, sqlx::Error> {
|
||||
pub async fn fetch_all(
|
||||
&self,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Vec<PgRole>, sqlx::Error> {
|
||||
query_as!(
|
||||
PgRole,
|
||||
r#"
|
||||
|
|
@ -120,7 +123,7 @@ pub struct MembersOfOidQuery {
|
|||
impl MembersOfOidQuery {
|
||||
pub async fn fetch_tree(
|
||||
self,
|
||||
client: &mut BaseClient,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Option<RoleTree>, sqlx::Error> {
|
||||
let rows: Vec<RoleTreeRow> = query_as(
|
||||
"
|
||||
|
|
@ -162,7 +165,7 @@ pub struct MembersOfRolnameQuery {
|
|||
impl MembersOfRolnameQuery {
|
||||
pub async fn fetch_tree(
|
||||
self,
|
||||
client: &mut BaseClient,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Option<RoleTree>, sqlx::Error> {
|
||||
// This could almost be a macro to DRY with MembersOfOidQuery, except
|
||||
// for the extra ::text:: cast required on the parameter in this query.
|
||||
|
|
@ -206,7 +209,7 @@ pub struct GrantedToQuery {
|
|||
impl GrantedToQuery {
|
||||
pub async fn fetch_tree(
|
||||
self,
|
||||
client: &mut BaseClient,
|
||||
client: &mut WorkspaceClient,
|
||||
) -> Result<Option<RoleTree>, sqlx::Error> {
|
||||
let rows: Vec<RoleTreeRow> = query_as(
|
||||
"
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::{
|
||||
extract::{FromRef, FromRequestParts},
|
||||
|
|
@ -9,14 +7,17 @@ use interim_models::client::AppDbClient;
|
|||
use oauth2::basic::BasicClient;
|
||||
use sqlx::postgres::PgPoolOptions;
|
||||
|
||||
use crate::{
|
||||
app_error::AppError, auth, base_pooler::BasePooler, sessions::PgStore, settings::Settings,
|
||||
};
|
||||
use crate::app_error::AppError;
|
||||
use crate::auth;
|
||||
use crate::base_pooler::WorkspacePooler;
|
||||
use crate::sessions::PgStore;
|
||||
use crate::settings::Settings;
|
||||
|
||||
/// Global app configuration
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct App {
|
||||
pub app_db: sqlx::PgPool,
|
||||
pub base_pooler: BasePooler,
|
||||
pub workspace_pooler: WorkspacePooler,
|
||||
pub oauth_client: BasicClient,
|
||||
pub reqwest_client: reqwest::Client,
|
||||
pub session_store: PgStore,
|
||||
|
|
@ -34,12 +35,14 @@ impl App {
|
|||
let session_store = PgStore::new(app_db.clone());
|
||||
let reqwest_client = reqwest::ClientBuilder::new().https_only(true).build()?;
|
||||
let oauth_client = auth::new_oauth_client(&settings)?;
|
||||
|
||||
let base_pooler = BasePooler::new_with_app_db(app_db.clone());
|
||||
let workspace_pooler = WorkspacePooler::builder()
|
||||
.app_db_pool(app_db.clone())
|
||||
.db_role_prefix(settings.db_role_prefix.clone())
|
||||
.build()?;
|
||||
|
||||
Ok(Self {
|
||||
app_db,
|
||||
base_pooler,
|
||||
workspace_pooler,
|
||||
oauth_client,
|
||||
reqwest_client,
|
||||
session_store,
|
||||
|
|
@ -48,36 +51,25 @@ impl App {
|
|||
}
|
||||
}
|
||||
|
||||
/// Global app configuration, arced for relatively inexpensive clones
|
||||
pub type AppState = Arc<App>;
|
||||
|
||||
/// State extractor for shared reqwest client
|
||||
#[derive(Clone)]
|
||||
pub struct ReqwestClient(pub reqwest::Client);
|
||||
|
||||
impl<S> FromRef<S> for ReqwestClient
|
||||
where
|
||||
S: Into<AppState> + Clone,
|
||||
{
|
||||
fn from_ref(state: &S) -> Self {
|
||||
ReqwestClient(Into::<AppState>::into(state.clone()).reqwest_client.clone())
|
||||
impl FromRef<App> for ReqwestClient {
|
||||
fn from_ref(state: &App) -> Self {
|
||||
ReqwestClient(state.reqwest_client.clone())
|
||||
}
|
||||
}
|
||||
|
||||
/// Extractor to automatically obtain a Deadpool Diesel connection
|
||||
/// Extractor to automatically obtain a sqlx connection for the application
|
||||
/// database.
|
||||
pub struct AppDbConn(pub AppDbClient);
|
||||
|
||||
impl<S> FromRequestParts<S> for AppDbConn
|
||||
where
|
||||
S: Into<AppState> + Clone + Sync,
|
||||
{
|
||||
impl FromRequestParts<App> for AppDbConn {
|
||||
type Rejection = AppError;
|
||||
|
||||
async fn from_request_parts(_: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let conn = Into::<AppState>::into(state.clone())
|
||||
.app_db
|
||||
.acquire()
|
||||
.await?;
|
||||
async fn from_request_parts(_: &mut Parts, state: &App) -> Result<Self, Self::Rejection> {
|
||||
let conn = state.app_db.acquire().await?;
|
||||
Ok(Self(AppDbClient::from_pool_conn(conn)))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ use serde::{Deserialize, Serialize};
|
|||
|
||||
use crate::{
|
||||
app_error::AppError,
|
||||
app_state::{AppState, ReqwestClient},
|
||||
app_state::{App, ReqwestClient},
|
||||
sessions::{AppSession, PgStore},
|
||||
settings::Settings,
|
||||
};
|
||||
|
|
@ -47,7 +47,7 @@ pub fn new_oauth_client(settings: &Settings) -> Result<BasicClient> {
|
|||
}
|
||||
|
||||
/// Creates a router which can be nested within the higher level app router.
|
||||
pub fn new_router() -> Router<AppState> {
|
||||
pub fn new_router() -> Router<App> {
|
||||
Router::new()
|
||||
.route("/login", get(start_login))
|
||||
.route("/callback", get(callback))
|
||||
|
|
@ -56,7 +56,7 @@ pub fn new_router() -> Router<AppState> {
|
|||
|
||||
/// HTTP get handler for /login
|
||||
async fn start_login(
|
||||
State(state): State<AppState>,
|
||||
State(state): State<App>,
|
||||
State(Settings {
|
||||
auth: auth_settings,
|
||||
root_path,
|
||||
|
|
@ -134,7 +134,7 @@ async fn logout(
|
|||
}
|
||||
let jar = jar.remove(Cookie::from(auth_settings.cookie_name));
|
||||
tracing::debug!("Removed session cookie from jar.");
|
||||
Ok((jar, Redirect::to(&format!("{}/", root_path))))
|
||||
Ok((jar, Redirect::to(&format!("{root_path}/"))))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
|
|
@ -147,7 +147,7 @@ struct AuthRequestQuery {
|
|||
/// HTTP get handler for /callback
|
||||
async fn callback(
|
||||
Query(query): Query<AuthRequestQuery>,
|
||||
State(state): State<AppState>,
|
||||
State(state): State<App>,
|
||||
State(Settings {
|
||||
auth: auth_settings,
|
||||
root_path,
|
||||
|
|
@ -205,7 +205,7 @@ async fn callback(
|
|||
}
|
||||
tracing::debug!("successfully authenticated");
|
||||
Ok(Redirect::to(
|
||||
&redirect_target.unwrap_or(format!("{}/", root_path)),
|
||||
&redirect_target.unwrap_or(format!("{root_path}/")),
|
||||
))
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -2,39 +2,38 @@ use std::{collections::HashMap, sync::Arc, time::Duration};

use anyhow::Result;
use axum::extract::FromRef;
use interim_models::{base::Base, client::AppDbClient};
use interim_pgtypes::client::BaseClient;
use derive_builder::Builder;
use interim_models::{client::AppDbClient, workspace::Workspace};
use interim_pgtypes::client::WorkspaceClient;
use sqlx::{Executor, PgPool, postgres::PgPoolOptions, raw_sql};
use tokio::sync::{OnceCell, RwLock};
use uuid::Uuid;

use crate::app_state::AppState;
use crate::app_state::App;

const MAX_CONNECTIONS: u32 = 4;
const IDLE_SECONDS: u64 = 3600;

// TODO: The Arc<RwLock> this uses will probably need to be cleaned up for
// performance eventually.

/// A collection of multiple SQLx Pools.
#[derive(Clone)]
pub struct BasePooler {
#[derive(Builder, Clone, Debug)]
pub struct WorkspacePooler {
    #[builder(default, setter(skip))]
    pools: Arc<RwLock<HashMap<Uuid, OnceCell<PgPool>>>>,
    app_db: PgPool,
    app_db_pool: PgPool,
    db_role_prefix: String,
}

impl BasePooler {
    pub fn new_with_app_db(app_db: PgPool) -> Self {
        Self {
            app_db,
            pools: Arc::new(RwLock::new(HashMap::new())),
        }
impl WorkspacePooler {
    pub fn builder() -> WorkspacePoolerBuilder {
        WorkspacePoolerBuilder::default()
    }

    async fn get_pool_for(&mut self, base_id: Uuid) -> Result<PgPool> {
    async fn get_pool_for(&mut self, workspace_id: Uuid) -> Result<PgPool> {
        let init_cell = || async {
            let mut app_db = AppDbClient::from_pool_conn(self.app_db.acquire().await?);
            let base = Base::with_id(base_id).fetch_one(&mut app_db).await?;
            let mut app_db = AppDbClient::from_pool_conn(self.app_db_pool.acquire().await?);
            let workspace = Workspace::with_id(workspace_id)
                .fetch_one(&mut app_db)
                .await?;
            Ok(PgPoolOptions::new()
                .min_connections(0)
                .max_connections(MAX_CONNECTIONS)
@@ -58,13 +57,13 @@ discard sequences;
                    Ok(true)
                })
            })
            .connect(&base.url)
            .connect(&workspace.url.expose_secret())
            .await?)
        };

        // Attempt to get an existing pool without write-locking the map
        let pools = self.pools.read().await;
        if let Some(cell) = pools.get(&base_id) {
        if let Some(cell) = pools.get(&workspace_id) {
            return Ok(cell
                .get_or_try_init::<anyhow::Error, _, _>(init_cell)
                .await?
@@ -72,7 +71,7 @@ discard sequences;
        }
        drop(pools); // Release read lock
        let mut pools = self.pools.write().await;
        let entry = pools.entry(base_id).or_insert(OnceCell::new());
        let entry = pools.entry(workspace_id).or_insert(OnceCell::new());
        Ok(entry
            .get_or_try_init::<anyhow::Error, _, _>(init_cell)
            .await?
@@ -88,14 +87,12 @@ discard sequences;
        &mut self,
        base_id: Uuid,
        set_role: RoleAssignment,
    ) -> Result<BaseClient> {
        let mut app_db = AppDbClient::from_pool_conn(self.app_db.acquire().await?);
    ) -> Result<WorkspaceClient> {
        let pool = self.get_pool_for(base_id).await?;
        let mut client = BaseClient::from_pool_conn(pool.acquire().await?);
        let mut client = WorkspaceClient::from_pool_conn(pool.acquire().await?);
        match set_role {
            RoleAssignment::User(id) => {
                let base = Base::with_id(base_id).fetch_one(&mut app_db).await?;
                let prefix = base.user_role_prefix;
                let prefix = &self.db_role_prefix;
                let user_id = id.simple();
                client.init_role(&format!("{prefix}{user_id}")).await?;
            }
@@ -122,12 +119,9 @@ discard sequences;
    // TODO: Add a cleanup method to remove entries with no connections
}

impl<S> FromRef<S> for BasePooler
where
    S: Into<AppState> + Clone,
{
    fn from_ref(state: &S) -> Self {
        Into::<AppState>::into(state.clone()).base_pooler.clone()
impl FromRef<App> for WorkspacePooler {
    fn from_ref(state: &App) -> Self {
        state.workspace_pooler.clone()
    }
}
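The read-then-write locking around tokio's OnceCell above is the heart of WorkspacePooler: take the read lock for the common case, take the write lock only long enough to insert an empty cell, and let get_or_try_init make concurrent callers share a single connection attempt. A minimal, self-contained sketch of the same pattern follows, with the sqlx pool replaced by a plain String so it runs without a database; LazyMap, connect_workspace and every other name in it are illustrative, not part of the codebase.

use std::{collections::HashMap, sync::Arc};

use tokio::sync::{OnceCell, RwLock};
use uuid::Uuid;

#[derive(Clone, Default)]
struct LazyMap {
    cells: Arc<RwLock<HashMap<Uuid, Arc<OnceCell<String>>>>>,
}

impl LazyMap {
    // Stand-in for the expensive, fallible PgPoolOptions::connect call.
    async fn connect_workspace(id: Uuid) -> anyhow::Result<String> {
        Ok(format!("pool-for-{}", id.simple()))
    }

    async fn get_or_connect(&self, id: Uuid) -> anyhow::Result<String> {
        // Fast path: clone the cell handle under a read lock, then drop the lock.
        let existing = { self.cells.read().await.get(&id).cloned() };
        let cell = match existing {
            Some(cell) => cell,
            None => {
                // Slow path: write-lock only to insert an empty cell.
                self.cells
                    .write()
                    .await
                    .entry(id)
                    .or_insert_with(|| Arc::new(OnceCell::new()))
                    .clone()
            }
        };
        // Concurrent callers for the same id share one connect attempt.
        Ok(cell
            .get_or_try_init(|| Self::connect_workspace(id))
            .await?
            .clone())
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let map = LazyMap::default();
    // Uuid::now_v7 needs the uuid crate's v7 feature.
    let id = Uuid::now_v7();
    // Both calls resolve to the same lazily created value.
    assert_eq!(map.get_or_connect(id).await?, map.get_or_connect(id).await?);
    Ok(())
}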
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,9 +1,12 @@
use std::collections::HashSet;

use anyhow::Result;
use interim_models::{base::Base, client::AppDbClient};
use interim_models::{
    client::AppDbClient,
    workspace_user_perm::{self, WorkspaceUserPerm},
};
use interim_pgtypes::{
    client::BaseClient,
    client::WorkspaceClient,
    pg_acl::PgPrivilegeType,
    pg_database::PgDatabase,
    pg_role::{PgRole, RoleTree, user_id_from_rolname},
@@ -11,19 +14,15 @@ use interim_pgtypes::{
use sqlx::query;
use uuid::Uuid;

pub struct BaseUserPerm {
    pub id: Uuid,
    pub base_id: Uuid,
    pub user_id: Uuid,
    pub perm: String,
}

pub async fn sync_perms_for_base(
    base_id: Uuid,
/// Derive workspace access control permissions from the permission grants of
/// a workspace's backing database.
pub(crate) async fn sync_for_workspace(
    workspace_id: Uuid,
    app_db: &mut AppDbClient,
    base_client: &mut BaseClient,
    workspace_client: &mut WorkspaceClient,
    db_role_prefix: &str,
) -> Result<()> {
    let db = PgDatabase::current().fetch_one(base_client).await?;
    let db = PgDatabase::current().fetch_one(workspace_client).await?;
    let explicit_roles = PgRole::with_name_in(
        db.datacl
            .unwrap_or_default()
@@ -36,12 +35,12 @@ pub async fn sync_perms_for_base(
            .map(|item| item.grantee)
            .collect(),
    )
    .fetch_all(base_client)
    .fetch_all(workspace_client)
    .await?;
    let mut all_roles: HashSet<PgRole> = HashSet::new();
    for explicit_role in explicit_roles {
        if let Some(role_tree) = RoleTree::members_of_oid(explicit_role.oid)
            .fetch_tree(base_client)
            .fetch_tree(workspace_client)
            .await?
        {
            for implicit_role in role_tree.flatten_inherited() {
@@ -49,31 +48,24 @@ pub async fn sync_perms_for_base(
            }
        }
    }
    let base = Base::with_id(base_id).fetch_one(app_db).await?;
    let user_ids: Vec<Uuid> = all_roles
        .iter()
        .filter_map(|role| user_id_from_rolname(&role.rolname, &base.user_role_prefix).ok())
        .filter_map(|role| user_id_from_rolname(&role.rolname, db_role_prefix).ok())
        .collect();
    query!(
        "delete from base_user_perms where base_id = $1 and not (user_id = any($2))",
        base_id,
        "delete from workspace_user_perms where workspace_id = $1 and not (user_id = any($2))",
        workspace_id,
        user_ids.as_slice(),
    )
    .execute(app_db.get_conn())
    .await?;
    for user_id in user_ids {
        query!(
            "
insert into base_user_perms
(id, base_id, user_id, perm)
values ($1, $2, $3, 'connect')
on conflict (base_id, user_id, perm) do nothing
            ",
            Uuid::now_v7(),
            base.id,
            user_id
        )
        .execute(app_db.get_conn())
        WorkspaceUserPerm::insert()
            .workspace_id(workspace_id)
            .user_id(user_id)
            .perm(workspace_user_perm::PermissionValue::Connect)
            .build()?
            .execute(app_db)
            .await?;
    }
    Ok(())
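The sync above boils down to set reconciliation: collect the user ids that currently hold a grant in the backing database, delete every workspace_user_perms row whose user is no longer in that set, and idempotently insert a connect perm for each user who is. A small in-memory sketch of that reconciliation step follows, with the two sqlx statements replaced by HashMap operations so it runs standalone; the types and names here are stand-ins.

use std::collections::{HashMap, HashSet};

use uuid::Uuid;

/// Stand-in for a workspace_user_perms row, keyed here by (workspace, user).
#[derive(Debug)]
enum Perm {
    Connect,
}

fn reconcile(
    workspace_id: Uuid,
    granted_user_ids: &HashSet<Uuid>,
    perms: &mut HashMap<(Uuid, Uuid), Perm>,
) {
    // "delete from workspace_user_perms where workspace_id = $1 and not (user_id = any($2))"
    perms.retain(|(ws, user), _| *ws != workspace_id || granted_user_ids.contains(user));
    // "insert ... on conflict ... do nothing" for every user that does hold a grant.
    for user_id in granted_user_ids {
        perms.entry((workspace_id, *user_id)).or_insert(Perm::Connect);
    }
}

fn main() {
    let ws = Uuid::now_v7();
    let (alice, bob) = (Uuid::now_v7(), Uuid::now_v7());
    let mut perms = HashMap::from([((ws, alice), Perm::Connect)]);
    // Bob now has a database grant; Alice's was revoked.
    reconcile(ws, &HashSet::from([bob]), &mut perms);
    assert!(perms.contains_key(&(ws, bob)));
    assert!(!perms.contains_key(&(ws, alice)));
}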
|
||||
|
|
|
|||
|
|
@@ -2,10 +2,10 @@ use std::net::SocketAddr;

use anyhow::Result;
use axum::{
    extract::Request,
    http::{header::CONTENT_SECURITY_POLICY, HeaderValue},
    middleware::map_request,
    ServiceExt,
    extract::Request,
    http::{HeaderValue, header::CONTENT_SECURITY_POLICY},
    middleware::map_request,
};
use chrono::{TimeDelta, Utc};
use clap::{Parser, Subcommand};
@@ -16,7 +16,7 @@ use tower_http::{
};

use crate::{
    app_state::AppState, middleware::lowercase_uri_path, router::new_router, worker::run_worker,
    app_state::App, middleware::lowercase_uri_path, routes::new_router, worker::run_worker,
};

#[derive(Parser)]
@@ -43,7 +43,7 @@ pub enum Commands {
    // mechanisms like Governor::reset_all()
}

pub async fn serve_command(state: AppState) -> Result<()> {
pub async fn serve_command(state: App) -> Result<()> {
    let router = ServiceBuilder::new()
        .layer(map_request(lowercase_uri_path))
        .layer(TraceLayer::new_for_http())
@@ -73,7 +73,7 @@ pub async fn serve_command(state: AppState) -> Result<()> {
        .map_err(Into::into)
}

pub async fn worker_command(args: &WorkerArgs, state: AppState) -> Result<()> {
pub async fn worker_command(args: &WorkerArgs, state: App) -> Result<()> {
    if let Some(loop_seconds) = args.auto_loop_seconds {
        let loop_delta = TimeDelta::seconds(i64::from(loop_seconds));
        loop {
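The auto_loop_seconds branch of worker_command is cut off at the end of this hunk; what it sets up is a fixed-cadence loop that pairs chrono's TimeDelta with a sleep so each pass starts on schedule regardless of how long the previous one took. A runnable sketch of that pacing logic, with the real worker call swapped for a placeholder:

use chrono::{TimeDelta, Utc};
use tokio::time::sleep;

async fn run_worker_once(iteration: u32) {
    // Placeholder for the real worker pass.
    println!("worker iteration {iteration} at {}", Utc::now());
}

#[tokio::main]
async fn main() {
    let loop_delta = TimeDelta::seconds(5);
    for iteration in 0..3 {
        let started_at = Utc::now();
        run_worker_once(iteration).await;
        // Sleep for whatever is left of the interval; skip the sleep entirely
        // if the pass overran it.
        let next_run = started_at + loop_delta;
        if let Ok(remaining) = (next_run - Utc::now()).to_std() {
            sleep(remaining).await;
        }
    }
}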
|
||||
|
|
|
|||
|
|
@@ -1,10 +1,11 @@
use anyhow::Result;
use clap::Parser as _;
use dotenvy::dotenv;
use interim_models::MIGRATOR;
use tracing_subscriber::EnvFilter;

use crate::{
    app_state::{App, AppState},
    app_state::App,
    cli::{Cli, Commands, serve_command, worker_command},
    settings::Settings,
};
@@ -20,7 +21,6 @@ mod middleware;
mod navbar;
mod navigator;
mod renderable_role_tree;
mod router;
mod routes;
mod sessions;
mod settings;
@@ -29,24 +29,25 @@ mod worker;

/// Run CLI
#[tokio::main]
async fn main() {
async fn main() -> Result<()> {
    // Attempt to pre-load .env in case it contains a RUST_LOG variable
    dotenv().ok();
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .init();

    let settings = Settings::load().unwrap();
    let settings = Settings::load()?;

    let state: AppState = App::from_settings(settings.clone()).await.unwrap().into();
    let app = App::from_settings(settings.clone()).await?;

    if settings.run_database_migrations != 0 {
        MIGRATOR.run(&state.app_db).await.unwrap();
        MIGRATOR.run(&app.app_db).await?;
    }

    let cli = Cli::parse();
    match &cli.command {
        Commands::Serve => serve_command(state).await.unwrap(),
        Commands::Worker(args) => worker_command(args, state).await.unwrap(),
        Commands::Serve => serve_command(app).await?,
        Commands::Worker(args) => worker_command(args, app).await?,
    }
    Ok(())
}
|
||||
|
|
|
|||
|
|
@ -1,47 +1,41 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use anyhow::Result;
|
||||
use askama::Template;
|
||||
use derive_builder::Builder;
|
||||
use interim_models::{base::Base, client::AppDbClient, lens::Lens};
|
||||
use interim_models::{client::AppDbClient, portal::Portal, workspace::Workspace};
|
||||
use interim_pgtypes::{
|
||||
client::BaseClient,
|
||||
client::WorkspaceClient,
|
||||
pg_class::{PgClass, PgRelKind},
|
||||
};
|
||||
use sqlx::postgres::types::Oid;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Builder, Clone, Template)]
|
||||
#[template(path = "navbar.html")]
|
||||
pub struct Navbar {
|
||||
pub base: Base,
|
||||
pub namespaces: Vec<NamespaceItem>,
|
||||
#[builder(setter(strip_option))]
|
||||
pub current: Option<NavLocation>,
|
||||
pub root_path: String,
|
||||
use crate::navigator::Navigator;
|
||||
|
||||
#[derive(Builder, Clone, Debug, Template)]
|
||||
#[template(path = "workspace_nav.html")]
|
||||
pub(crate) struct WorkspaceNav {
|
||||
workspace: Workspace,
|
||||
relations: Vec<RelationItem>,
|
||||
#[builder(default, setter(strip_option))]
|
||||
current: Option<NavLocation>,
|
||||
navigator: Navigator,
|
||||
}
|
||||
|
||||
impl Navbar {
|
||||
pub fn builder() -> NavbarBuilder {
|
||||
NavbarBuilder::default()
|
||||
impl WorkspaceNav {
|
||||
pub fn builder() -> WorkspaceNavBuilder {
|
||||
WorkspaceNavBuilder::default()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct NamespaceItem {
|
||||
pub struct RelationItem {
|
||||
pub name: String,
|
||||
pub rels: Vec<RelItem>,
|
||||
pub oid: Oid,
|
||||
pub portals: Vec<PortalItem>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RelItem {
|
||||
pub name: String,
|
||||
pub class_oid: Oid,
|
||||
pub lenses: Vec<LensItem>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct LensItem {
|
||||
pub struct PortalItem {
|
||||
pub name: String,
|
||||
pub id: Uuid,
|
||||
}
|
||||
|
|
@ -53,56 +47,49 @@ pub enum NavLocation {
|
|||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum RelLocation {
|
||||
Lens(Uuid),
|
||||
Rbac,
|
||||
Portal(Uuid),
|
||||
Sharing,
|
||||
}
|
||||
|
||||
impl NavbarBuilder {
|
||||
impl WorkspaceNavBuilder {
|
||||
/// Helper function to populate relations and lenses automatically.
|
||||
/// [`WorkspaceNavBuilder::workspace()`] must be called first, or else this
|
||||
/// method will return an error.
|
||||
pub async fn populate_rels(
|
||||
&mut self,
|
||||
app_db: &mut AppDbClient,
|
||||
base_client: &mut BaseClient,
|
||||
workspace_client: &mut WorkspaceClient,
|
||||
) -> Result<&mut Self> {
|
||||
let rels = PgClass::with_kind_in([PgRelKind::OrdinaryTable])
|
||||
.fetch_all(base_client)
|
||||
.fetch_all(workspace_client)
|
||||
.await?;
|
||||
let mut namespaces: HashMap<String, Vec<RelItem>> = HashMap::new();
|
||||
let mut rel_items = Vec::with_capacity(rels.len());
|
||||
for rel in rels {
|
||||
if rel.regnamespace.as_str() != "pg_catalog"
|
||||
&& rel.regnamespace.as_str() != "information_schema"
|
||||
{
|
||||
let lenses = Lens::belonging_to_base(
|
||||
self.base
|
||||
let portals = Portal::belonging_to_workspace(
|
||||
self.workspace
|
||||
.as_ref()
|
||||
.ok_or(NavbarBuilderError::UninitializedField("base"))?
|
||||
.ok_or(WorkspaceNavBuilderError::UninitializedField("workspace"))?
|
||||
.id,
|
||||
)
|
||||
.belonging_to_rel(rel.oid)
|
||||
.fetch_all(app_db)
|
||||
.await?;
|
||||
let rel_items = namespaces.entry(rel.regnamespace).or_default();
|
||||
rel_items.push(RelItem {
|
||||
rel_items.push(RelationItem {
|
||||
name: rel.relname,
|
||||
class_oid: rel.oid,
|
||||
lenses: lenses
|
||||
oid: rel.oid,
|
||||
portals: portals
|
||||
.into_iter()
|
||||
.map(|lens| LensItem {
|
||||
name: lens.name,
|
||||
id: lens.id,
|
||||
.map(|portal| PortalItem {
|
||||
name: portal.name,
|
||||
id: portal.id,
|
||||
})
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(self.namespaces(
|
||||
namespaces
|
||||
.into_iter()
|
||||
.map(|(name, rel_items)| NamespaceItem {
|
||||
name,
|
||||
rels: rel_items,
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
Ok(self.relations(rel_items))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
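populate_rels above leans on a derive_builder detail: inside the generated builder every field is an Option, so a helper method can read back a previously supplied field and report WorkspaceNavBuilderError::UninitializedField when the caller forgot it. A compact sketch of that builder shape, using a toy Report struct rather than the real WorkspaceNav; all names here are illustrative.

use derive_builder::Builder;

#[derive(Builder, Clone, Debug)]
pub struct Report {
    title: String,
    lines: Vec<String>,
}

impl ReportBuilder {
    /// `title()` must be called first, mirroring how `populate_rels` requires
    /// `workspace()` to have been set.
    pub fn populate_lines(&mut self) -> Result<&mut Self, ReportBuilderError> {
        let title = self
            .title
            .as_ref()
            .ok_or(ReportBuilderError::UninitializedField("title"))?
            .clone();
        // Delegate to the generated setter, as populate_rels does with relations().
        Ok(self.lines(vec![format!("section: {title}")]))
    }
}

fn main() -> Result<(), ReportBuilderError> {
    let report = ReportBuilder::default()
        .title("Quarterly".to_owned())
        .populate_lines()?
        .build()?;
    println!("{report:?}");
    Ok(())
}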
@@ -3,9 +3,10 @@ use axum::{
    http::request::Parts,
    response::{IntoResponse as _, Redirect, Response},
};
use interim_models::lens::Lens;
use interim_models::portal::Portal;
use uuid::Uuid;

use crate::{app_error::AppError, app_state::AppState};
use crate::{app_error::AppError, app_state::App};

/// Helper type for semantically generating URI paths, e.g. for redirects.
#[derive(Clone, Debug)]
@@ -15,33 +16,44 @@ pub struct Navigator {
}

impl Navigator {
    pub fn lens_page(&self, lens: &Lens) -> Self {
    pub(crate) fn workspace_page(&self, workspace_id: Uuid) -> Self {
        Self {
            sub_path: format!("/w/{0}/", workspace_id.simple()),
            ..self.clone()
        }
    }

    pub(crate) fn portal_page(&self, portal: &Portal) -> Self {
        Self {
            sub_path: format!(
                "/d/{0}/r/{1}/l/{2}/",
                lens.base_id.simple(),
                lens.class_oid.0,
                lens.id.simple()
                "/w/{0}/r/{1}/p/{2}/",
                portal.workspace_id.simple(),
                portal.class_oid.0,
                portal.id.simple()
            ),
            ..self.clone()
        }
    }

    pub fn redirect_to(&self) -> Response {
        Redirect::to(&format!("{0}{1}", self.root_path, self.sub_path)).into_response()
    pub(crate) fn get_root_path(&self) -> String {
        self.root_path.to_owned()
    }

    pub(crate) fn abs_path(&self) -> String {
        format!("{0}{1}", self.root_path, self.sub_path)
    }

    pub(crate) fn redirect_to(&self) -> Response {
        Redirect::to(&self.abs_path()).into_response()
    }
}

impl<S> FromRequestParts<S> for Navigator
where
    S: Into<AppState> + Clone + Sync,
{
impl FromRequestParts<App> for Navigator {
    type Rejection = AppError;

    async fn from_request_parts(_: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let app_state: AppState = state.clone().into();
    async fn from_request_parts(_: &mut Parts, state: &App) -> Result<Self, Self::Rejection> {
        Ok(Navigator {
            root_path: app_state.settings.root_path.clone(),
            root_path: state.settings.root_path.clone(),
            sub_path: "/".to_owned(),
        })
    }
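Navigator concatenates root_path and a sub_path so every redirect stays inside the configured mount point. The same idea in miniature, detached from axum so it compiles on its own; the route shape mirrors workspace_page, while the struct and example values are illustrative.

use uuid::Uuid;

#[derive(Clone, Debug)]
struct Navigator {
    root_path: String,
    sub_path: String,
}

impl Navigator {
    fn workspace_page(&self, workspace_id: Uuid) -> Self {
        Self {
            sub_path: format!("/w/{}/", workspace_id.simple()),
            ..self.clone()
        }
    }

    fn abs_path(&self) -> String {
        format!("{}{}", self.root_path, self.sub_path)
    }
}

fn main() {
    let nav = Navigator {
        root_path: "/app".to_owned(), // would come from Settings::root_path
        sub_path: "/".to_owned(),
    };
    let id = Uuid::now_v7();
    assert_eq!(
        nav.workspace_page(id).abs_path(),
        format!("/app/w/{}/", id.simple())
    );
}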
|
||||
|
|
|
|||
|
|
@ -14,13 +14,13 @@ use tower_http::{
|
|||
set_header::SetResponseHeaderLayer,
|
||||
};
|
||||
|
||||
use crate::{app_state::AppState, auth, routes};
|
||||
use crate::{app_state::App, auth, routes};
|
||||
|
||||
pub fn new_router(state: AppState) -> Router<()> {
|
||||
let base_path = state.settings.root_path.clone();
|
||||
pub fn new_router(state: App) -> Router<()> {
|
||||
let root_path = state.settings.root_path.clone();
|
||||
let app = Router::new()
|
||||
.route_with_tsr("/databases/", get(routes::bases::list_bases_page))
|
||||
.route_with_tsr("/databases/add/", post(routes::bases::add_base_page))
|
||||
.route_with_tsr("/workspaces/", get(routes::bases::list_bases_page))
|
||||
.route_with_tsr("/workspaces/add/", post(routes::bases::add_base_page))
|
||||
.route_with_tsr(
|
||||
"/d/{base_id}/config/",
|
||||
get(routes::bases::base_config_page_get),
|
||||
|
|
@ -173,10 +173,10 @@ pub fn new_router(state: AppState) -> Router<()> {
|
|||
),
|
||||
)
|
||||
.with_state(state);
|
||||
if base_path.is_empty() {
|
||||
if root_path.is_empty() {
|
||||
app
|
||||
} else {
|
||||
Router::new().nest(&base_path, app).fallback_service(
|
||||
Router::new().nest(&root_path, app).fallback_service(
|
||||
ServeDir::new("static").not_found_service(ServeFile::new("static/_404.html")),
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ use uuid::Uuid;
|
|||
use crate::{
|
||||
app_error::AppError,
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{self, BasePooler},
|
||||
base_pooler::{self, WorkspacePooler},
|
||||
base_user_perms::sync_perms_for_base,
|
||||
settings::Settings,
|
||||
user::CurrentUser,
|
||||
|
|
@ -94,7 +94,7 @@ pub struct BaseConfigPageForm {
|
|||
|
||||
pub async fn base_config_page_post(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(BaseConfigPagePath { base_id }): Path<BaseConfigPagePath>,
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ use sqlx::postgres::types::Oid;
|
|||
use crate::{
|
||||
app_error::AppError,
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{BasePooler, RoleAssignment},
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navbar::{NavLocation, Navbar, RelLocation},
|
||||
settings::Settings,
|
||||
user::CurrentUser,
|
||||
|
|
@ -20,7 +20,7 @@ use super::LensPagePath;
|
|||
|
||||
pub async fn lens_page_get(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(LensPagePath {
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ use sqlx::{postgres::types::Oid, query};
|
|||
use crate::{
|
||||
app_error::AppError,
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{BasePooler, RoleAssignment},
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navigator::Navigator,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
|
@ -20,7 +20,7 @@ use crate::{
|
|||
use super::LensPagePath;
|
||||
|
||||
pub async fn insert_page_post(
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
navigator: Navigator,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ use uuid::Uuid;
|
|||
use crate::{
|
||||
app_error::{AppError, bad_request},
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{BasePooler, RoleAssignment},
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
field_info::FieldInfo,
|
||||
navigator::Navigator,
|
||||
settings::Settings,
|
||||
|
|
@ -104,7 +104,7 @@ pub struct AddLensPagePostForm {
|
|||
|
||||
pub async fn add_lens_page_post(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
navigator: Navigator,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
|
|
@ -144,7 +144,7 @@ pub async fn add_lens_page_post(
|
|||
|
||||
pub async fn get_data_page_get(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(LensPagePath {
|
||||
|
|
@ -172,7 +172,7 @@ pub async fn get_data_page_get(
|
|||
.await?;
|
||||
|
||||
let fields: Vec<FieldInfo> = {
|
||||
let fields: Vec<Field> = Field::belonging_to_lens(lens.id)
|
||||
let fields: Vec<Field> = Field::belonging_to_portal(lens.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
let mut field_info: Vec<FieldInfo> = Vec::with_capacity(fields.len());
|
||||
|
|
@ -287,7 +287,7 @@ fn try_presentation_from_form(form: &AddColumnPageForm) -> Result<Presentation,
|
|||
}
|
||||
|
||||
pub async fn add_column_page_post(
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
navigator: Navigator,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
|
|
@ -385,7 +385,7 @@ pub struct UpdateValuePageForm {
|
|||
}
|
||||
|
||||
pub async fn update_value_page_post(
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(LensPagePath {
|
||||
base_id, class_oid, ..
|
||||
|
|
@ -431,7 +431,7 @@ pub struct ViewerPagePath {
|
|||
|
||||
pub async fn viewer_page(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(params): Path<ViewerPagePath>,
|
||||
|
|
|
|||
|
|
@@ -1,16 +1,132 @@
use serde::Deserialize;
use uuid::Uuid;
//! Hierarchical HTTP routing.
//!
//! Top level module establishes the overall
//! [`axum::Router`], and submodules organize nested subrouters into manageable
//! chunks. Pragmatically, the submodule tree should be kept fairly flat, lest
//! file paths grow exceedingly long. Deeply nested routers may still be
//! implemented, by use of the `super` keyword.

pub mod bases;
pub mod lens_index;
pub mod lens_insert;
pub mod lens_set_filter;
pub mod lenses;
pub mod relations;
use std::net::SocketAddr;

#[derive(Deserialize)]
pub struct LensPagePath {
    base_id: Uuid,
    class_oid: u32,
    lens_id: Uuid,
use axum::{
    Router,
    extract::{ConnectInfo, State, WebSocketUpgrade, ws::WebSocket},
    http::{HeaderValue, header::CACHE_CONTROL},
    response::{Redirect, Response},
    routing::{any, get},
};
use tower::ServiceBuilder;
use tower_http::{
    services::{ServeDir, ServeFile},
    set_header::SetResponseHeaderLayer,
};

use crate::auth;
use crate::{app_state::App, settings::Settings};

mod relations_single;
mod workspaces_multi;
mod workspaces_single;

/// Create the root [`Router`] for the application, including nesting according
/// to the `root_path` [`crate::settings::Settings`] value, setting cache
/// headers, setting up static file handling, and defining fallback handlers.
pub(crate) fn new_router(app: App) -> Router<()> {
    let root_path = app.settings.root_path.clone();
    let router = Router::new()
        .route(
            "/",
            get(
                |State(Settings { root_path, .. }): State<Settings>| async move {
                    Redirect::to(&format!("{root_path}/workspaces/list/"))
                },
            ),
        )
        .nest("/workspaces", workspaces_multi::new_router())
        .nest("/w/{workspace_id}", workspaces_single::new_router())
        .nest("/auth", auth::new_router())
        .route("/__dev-healthz", any(dev_healthz_handler))
        .layer(SetResponseHeaderLayer::if_not_present(
            CACHE_CONTROL,
            HeaderValue::from_static("no-cache"),
        ))
        .nest_service(
            "/js_dist",
            ServiceBuilder::new()
                .layer(SetResponseHeaderLayer::if_not_present(
                    CACHE_CONTROL,
                    // FIXME: restore production value
                    // HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"),
                    HeaderValue::from_static("no-cache"),
                ))
                .service(
                    ServeDir::new("js_dist").not_found_service(
                        ServiceBuilder::new()
                            .layer(SetResponseHeaderLayer::if_not_present(
                                CACHE_CONTROL,
                                HeaderValue::from_static("no-cache"),
                            ))
                            .service(ServeFile::new("static/_404.html")),
                    ),
                ),
        )
        .nest_service(
            "/css_dist",
            ServiceBuilder::new()
                .layer(SetResponseHeaderLayer::if_not_present(
                    CACHE_CONTROL,
                    // FIXME: restore production value
                    // HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"),
                    HeaderValue::from_static("no-cache"),
                ))
                .service(
                    ServeDir::new("css_dist").not_found_service(
                        ServiceBuilder::new()
                            .layer(SetResponseHeaderLayer::if_not_present(
                                CACHE_CONTROL,
                                HeaderValue::from_static("no-cache"),
                            ))
                            .service(ServeFile::new("static/_404.html")),
                    ),
                ),
        )
        .fallback_service(
            ServiceBuilder::new()
                .layer(SetResponseHeaderLayer::if_not_present(
                    CACHE_CONTROL,
                    HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"),
                ))
                .service(
                    ServeDir::new("static").not_found_service(
                        ServiceBuilder::new()
                            .layer(SetResponseHeaderLayer::if_not_present(
                                CACHE_CONTROL,
                                HeaderValue::from_static("no-cache"),
                            ))
                            .service(ServeFile::new("static/_404.html")),
                    ),
                ),
        )
        .with_state(app);
    if root_path.is_empty() {
        router
    } else {
        Router::new()
            .nest(&root_path, router)
            .fallback(|| async move { Redirect::to(&root_path) })
    }
}

/// Development endpoint helping to implement home-grown "hot" reloads.
async fn dev_healthz_handler(
    ws: WebSocketUpgrade,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Response {
    tracing::info!("{addr} connected");
    ws.on_upgrade(move |socket| handle_dev_healthz_socket(socket, addr))
}

async fn handle_dev_healthz_socket(mut socket: WebSocket, _: SocketAddr) {
    // Keep socket open indefinitely until the entire server exits
    while let Some(Ok(_)) = socket.recv().await {}
}
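The root_path handling at the bottom of new_router is the piece most likely to trip people up: when a prefix is configured, the whole application is nested under it and a fallback redirect catches requests outside it. A trimmed-down sketch of just that wrapping step, written against the same axum Router API used above; the handler body and bind address are placeholders.

use axum::{Router, response::Redirect, routing::get};

fn wrap_with_root_path(app: Router<()>, root_path: String) -> Router<()> {
    if root_path.is_empty() {
        app
    } else {
        // Nest the whole application under the prefix and send stray requests there.
        Router::new()
            .nest(&root_path, app)
            .fallback(|| async move { Redirect::to(&root_path) })
    }
}

#[tokio::main]
async fn main() {
    let app = Router::new().route("/", get(|| async { "hello" }));
    let router = wrap_with_root_path(app, "/app".to_owned());
    let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
    axum::serve(listener, router).await.unwrap();
}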
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ use uuid::Uuid;
|
|||
use crate::{
|
||||
app_error::{AppError, forbidden},
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{self, BasePooler},
|
||||
base_pooler::{self, WorkspacePooler},
|
||||
navbar::{NavLocation, Navbar, RelLocation},
|
||||
renderable_role_tree::RenderableRoleTree,
|
||||
settings::Settings,
|
||||
|
|
@ -34,7 +34,7 @@ pub struct ListRelationsPagePath {
|
|||
|
||||
pub async fn list_relations_page(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(ListRelationsPagePath { base_id }): Path<ListRelationsPagePath>,
|
||||
|
|
@ -116,7 +116,7 @@ pub async fn rel_index_page(
|
|||
|
||||
pub async fn rel_rbac_page(
|
||||
State(settings): State<Settings>,
|
||||
State(mut base_pooler): State<BasePooler>,
|
||||
State(mut base_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(RelPagePath { base_id, class_oid }): Path<RelPagePath>,
|
||||
|
|
|
|||
7 interim-server/src/routes/relations_multi/mod.rs Normal file
@@ -0,0 +1,7 @@
use axum::Router;

use crate::app_state::App;

pub(super) fn new_router() -> Router<App> {
    Router::<App>::new().route_with_tsr()
}
|
||||
141 interim-server/src/routes/relations_single/add_field_handler.rs Normal file
|
|
@ -0,0 +1,141 @@
|
|||
use axum::{
|
||||
debug_handler,
|
||||
extract::{Path, State},
|
||||
response::Response,
|
||||
};
|
||||
// [`axum_extra`]'s form extractor is preferred:
|
||||
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
|
||||
use axum_extra::extract::Form;
|
||||
use interim_models::{
|
||||
field::Field,
|
||||
portal::Portal,
|
||||
presentation::{Presentation, RFC_3339_S, TextInputMode},
|
||||
workspace::Workspace,
|
||||
workspace_user_perm::{self, WorkspaceUserPerm},
|
||||
};
|
||||
use interim_pgtypes::{escape_identifier, pg_class::PgClass};
|
||||
use serde::Deserialize;
|
||||
use sqlx::query;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::{AppError, forbidden},
|
||||
app_state::{App, AppDbConn},
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navigator::Navigator,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
portal_id: Uuid,
|
||||
rel_oid: u32,
|
||||
workspace_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct FormBody {
|
||||
name: String,
|
||||
label: String,
|
||||
presentation_tag: String,
|
||||
dropdown_allow_custom: Option<bool>,
|
||||
text_input_mode: Option<String>,
|
||||
timestamp_format: Option<String>,
|
||||
}
|
||||
|
||||
/// HTTP POST handler for adding a [`Field`] to a [`Portal`]. If the field name
|
||||
/// does not match a column in the backing database, a new column is created
|
||||
/// with a compatible type.
|
||||
///
|
||||
/// This handler expects 3 path parameters with the structure described by
|
||||
/// [`PathParams`].
|
||||
#[debug_handler(state = App)]
|
||||
pub(super) async fn post(
|
||||
State(mut workspace_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(user): CurrentUser,
|
||||
navigator: Navigator,
|
||||
Path(PathParams {
|
||||
portal_id,
|
||||
workspace_id,
|
||||
..
|
||||
}): Path<PathParams>,
|
||||
Form(form): Form<FormBody>,
|
||||
) -> Result<Response, AppError> {
|
||||
// Check workspace authorization.
|
||||
let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
if workspace_perms.iter().all(|p| {
|
||||
p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect
|
||||
}) {
|
||||
return Err(forbidden!("access denied to workspace"));
|
||||
}
|
||||
// FIXME ensure workspace corresponds to rel/portal, and that user has
|
||||
// permission to access/alter both as needed.
|
||||
|
||||
let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?;
|
||||
let workspace = Workspace::with_id(portal.workspace_id)
|
||||
.fetch_one(&mut app_db)
|
||||
.await?;
|
||||
|
||||
let mut workspace_client = workspace_pooler
|
||||
.acquire_for(workspace.id, RoleAssignment::User(user.id))
|
||||
.await?;
|
||||
|
||||
let class = PgClass::with_oid(portal.class_oid)
|
||||
.fetch_one(&mut workspace_client)
|
||||
.await?;
|
||||
|
||||
let presentation = try_presentation_from_form(&form)?;
|
||||
|
||||
query(&format!(
|
||||
"alter table {ident} add column if not exists {col} {typ}",
|
||||
ident = class.get_identifier(),
|
||||
col = escape_identifier(&form.name),
|
||||
typ = presentation.attr_data_type_fragment(),
|
||||
))
|
||||
.execute(workspace_client.get_conn())
|
||||
.await?;
|
||||
|
||||
Field::insert()
|
||||
.portal_id(portal.id)
|
||||
.name(form.name)
|
||||
.table_label(if form.label.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(form.label)
|
||||
})
|
||||
.presentation(presentation)
|
||||
.build()?
|
||||
.insert(&mut app_db)
|
||||
.await?;
|
||||
|
||||
Ok(navigator.portal_page(&portal).redirect_to())
|
||||
}
|
||||
|
||||
fn try_presentation_from_form(form: &FormBody) -> Result<Presentation, AppError> {
|
||||
// Parses the presentation tag into the correct enum variant, but without
|
||||
// meaningful inner value(s). Match arms should all use the
|
||||
// `MyVariant { .. }` pattern to pay attention to only the tag.
|
||||
let presentation_default = Presentation::try_from(form.presentation_tag.as_str())?;
|
||||
Ok(match presentation_default {
|
||||
Presentation::Array { .. } => todo!(),
|
||||
Presentation::Dropdown { .. } => Presentation::Dropdown { allow_custom: true },
|
||||
Presentation::Text { .. } => Presentation::Text {
|
||||
input_mode: form
|
||||
.text_input_mode
|
||||
.clone()
|
||||
.map(|value| TextInputMode::try_from(value.as_str()))
|
||||
.transpose()?
|
||||
.unwrap_or_default(),
|
||||
},
|
||||
Presentation::Timestamp { .. } => Presentation::Timestamp {
|
||||
format: form
|
||||
.timestamp_format
|
||||
.clone()
|
||||
.unwrap_or(RFC_3339_S.to_owned()),
|
||||
},
|
||||
Presentation::Uuid { .. } => Presentation::Uuid {},
|
||||
})
|
||||
}
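try_presentation_from_form parses only the tag into a default-shaped variant, then rebuilds the variant with the real values from the form; matching with `Variant { .. }` keeps that second step indifferent to the placeholder defaults. A self-contained sketch of the two-step parse follows, with a stripped-down Presentation enum standing in for the interim_models one.

#[derive(Debug, PartialEq)]
enum Presentation {
    Dropdown { allow_custom: bool },
    Text { input_mode: String },
}

impl TryFrom<&str> for Presentation {
    type Error = String;

    // Tag-only parse: inner values are placeholders at this point.
    fn try_from(tag: &str) -> Result<Self, Self::Error> {
        match tag {
            "dropdown" => Ok(Presentation::Dropdown { allow_custom: false }),
            "text" => Ok(Presentation::Text { input_mode: String::new() }),
            other => Err(format!("unknown presentation tag: {other}")),
        }
    }
}

struct FormBody {
    presentation_tag: String,
    text_input_mode: Option<String>,
}

fn presentation_from_form(form: &FormBody) -> Result<Presentation, String> {
    // Step 1: resolve the tag. Step 2: fill the variant from the form fields.
    Ok(match Presentation::try_from(form.presentation_tag.as_str())? {
        Presentation::Dropdown { .. } => Presentation::Dropdown { allow_custom: true },
        Presentation::Text { .. } => Presentation::Text {
            input_mode: form.text_input_mode.clone().unwrap_or_default(),
        },
    })
}

fn main() {
    let form = FormBody {
        presentation_tag: "text".to_owned(),
        text_input_mode: Some("numeric".to_owned()),
    };
    assert_eq!(
        presentation_from_form(&form),
        Ok(Presentation::Text { input_mode: "numeric".to_owned() })
    );
}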
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
use axum::{extract::Path, response::IntoResponse};
|
||||
use interim_models::{
|
||||
portal::Portal,
|
||||
workspace_user_perm::{self, WorkspaceUserPerm},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use sqlx::postgres::types::Oid;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::{AppError, forbidden},
|
||||
app_state::AppDbConn,
|
||||
navigator::Navigator,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
workspace_id: Uuid,
|
||||
rel_oid: u32,
|
||||
}
|
||||
|
||||
/// HTTP POST handler for creating a [`Portal`] for an existing backing database
|
||||
/// table. Upon success, it redirects the client to the portal's table viewer.
|
||||
///
|
||||
/// This handler expects 2 path parameters, named `workspace_id`, which should
|
||||
/// deserialize to a UUID, and `rel_oid`, which should deserialize to a u32.
|
||||
pub(super) async fn post(
|
||||
CurrentUser(user): CurrentUser,
|
||||
navigator: Navigator,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
Path(PathParams {
|
||||
rel_oid,
|
||||
workspace_id,
|
||||
}): Path<PathParams>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
// Check workspace authorization.
|
||||
let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
if workspace_perms.iter().all(|p| {
|
||||
p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect
|
||||
}) {
|
||||
return Err(forbidden!("access denied to workspace"));
|
||||
}
|
||||
|
||||
Portal::insert()
|
||||
.workspace_id(workspace_id)
|
||||
.name("Untitled".to_owned())
|
||||
.class_oid(Oid(rel_oid))
|
||||
.build()?
|
||||
.execute(&mut app_db)
|
||||
.await?;
|
||||
|
||||
Ok(navigator.workspace_page(workspace_id).redirect_to())
|
||||
}
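The authorization check repeated across these handlers is a pure predicate over the user's perm rows: reject unless some row matches both the workspace and the Connect perm. Factored out, it looks like the sketch below; the row and enum types are simplified stand-ins for WorkspaceUserPerm.

use uuid::Uuid;

#[derive(Debug, PartialEq)]
enum PermissionValue {
    Connect,
}

struct WorkspaceUserPerm {
    workspace_id: Uuid,
    perm: PermissionValue,
}

/// True when at least one row grants Connect on the given workspace; the
/// handlers phrase the same test negatively with iter().all(..).
fn may_connect(perms: &[WorkspaceUserPerm], workspace_id: Uuid) -> bool {
    perms
        .iter()
        .any(|p| p.workspace_id == workspace_id && p.perm == PermissionValue::Connect)
}

fn main() {
    let ws = Uuid::now_v7();
    let perms = vec![WorkspaceUserPerm { workspace_id: ws, perm: PermissionValue::Connect }];
    assert!(may_connect(&perms, ws));
    assert!(!may_connect(&perms, Uuid::now_v7()));
}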
|
||||
145 interim-server/src/routes/relations_single/get_data_handler.rs Normal file
|
|
@ -0,0 +1,145 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use axum::{
|
||||
Json,
|
||||
extract::{Path, State},
|
||||
response::{IntoResponse as _, Response},
|
||||
};
|
||||
use interim_models::{encodable::Encodable, field::Field, portal::Portal};
|
||||
use interim_pgtypes::{escape_identifier, pg_attribute::PgAttribute, pg_class::PgClass};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::{postgres::PgRow, query};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::AppError,
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
field_info::FieldInfo,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
portal_id: Uuid,
|
||||
}
|
||||
|
||||
const FRONTEND_ROW_LIMIT: i64 = 1000;
|
||||
|
||||
/// HTTP GET handler for an API endpoint returning a JSON encoding of portal
|
||||
/// data to display in a table or similar form.
|
||||
///
|
||||
/// Only queries up to the first [`FRONTEND_ROW_LIMIT`] rows.
|
||||
pub(super) async fn get(
|
||||
State(mut workspace_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
Path(PathParams { portal_id }): Path<PathParams>,
|
||||
) -> Result<Response, AppError> {
|
||||
// FIXME auth
|
||||
let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?;
|
||||
|
||||
let mut workspace_client = workspace_pooler
|
||||
.acquire_for(portal.workspace_id, RoleAssignment::User(current_user.id))
|
||||
.await?;
|
||||
let rel = PgClass::with_oid(portal.class_oid)
|
||||
.fetch_one(&mut workspace_client)
|
||||
.await?;
|
||||
|
||||
let attrs = PgAttribute::all_for_rel(portal.class_oid)
|
||||
.fetch_all(&mut workspace_client)
|
||||
.await?;
|
||||
let pkey_attrs = PgAttribute::pkeys_for_rel(portal.class_oid)
|
||||
.fetch_all(&mut workspace_client)
|
||||
.await?;
|
||||
|
||||
let fields: Vec<FieldInfo> = {
|
||||
let fields: Vec<Field> = Field::belonging_to_portal(portal.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
let mut field_info: Vec<FieldInfo> = Vec::with_capacity(fields.len());
|
||||
for field in fields {
|
||||
if let Some(attr) = attrs.iter().find(|attr| attr.attname == field.name) {
|
||||
field_info.push(FieldInfo {
|
||||
field,
|
||||
has_default: attr.atthasdef,
|
||||
not_null: attr.attnotnull.unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
field_info
|
||||
};
|
||||
|
||||
let mut sql_raw = format!(
|
||||
"select {0} from {1}.{2}",
|
||||
pkey_attrs
|
||||
.iter()
|
||||
.chain(attrs.iter())
|
||||
.map(|attr| escape_identifier(&attr.attname))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", "),
|
||||
escape_identifier(&rel.regnamespace),
|
||||
escape_identifier(&rel.relname),
|
||||
);
|
||||
let rows: Vec<PgRow> = if let Some(filter_expr) = portal.table_filter.0 {
|
||||
let filter_fragment = filter_expr.into_query_fragment();
|
||||
let filter_params = filter_fragment.to_params();
|
||||
sql_raw = format!(
|
||||
"{sql_raw} where {0} limit ${1}",
|
||||
filter_fragment.to_sql(1),
|
||||
filter_params.len() + 1
|
||||
);
|
||||
let mut q = query(&sql_raw);
|
||||
for param in filter_params {
|
||||
q = param.bind_onto(q);
|
||||
}
|
||||
q = q.bind(FRONTEND_ROW_LIMIT);
|
||||
q.fetch_all(workspace_client.get_conn()).await?
|
||||
} else {
|
||||
sql_raw = format!("{sql_raw} limit $1");
|
||||
query(&sql_raw)
|
||||
.bind(FRONTEND_ROW_LIMIT)
|
||||
.fetch_all(workspace_client.get_conn())
|
||||
.await?
|
||||
};
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct DataRow {
|
||||
pkey: String,
|
||||
data: Vec<Encodable>,
|
||||
}
|
||||
|
||||
let mut data_rows: Vec<DataRow> = vec![];
|
||||
let mut pkeys: Vec<String> = vec![];
|
||||
for row in rows.iter() {
|
||||
let mut pkey_values: HashMap<String, Encodable> = HashMap::new();
|
||||
for attr in pkey_attrs.clone() {
|
||||
let field = Field::default_from_attr(&attr)
|
||||
.ok_or(anyhow::anyhow!("unsupported primary key column type"))?;
|
||||
pkey_values.insert(field.name.clone(), field.get_value_encodable(row)?);
|
||||
}
|
||||
let pkey = serde_json::to_string(&pkey_values)?;
|
||||
pkeys.push(pkey.clone());
|
||||
let mut row_data: Vec<Encodable> = vec![];
|
||||
for field in fields.iter() {
|
||||
row_data.push(field.field.get_value_encodable(row)?);
|
||||
}
|
||||
data_rows.push(DataRow {
|
||||
pkey,
|
||||
data: row_data,
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ResponseBody {
|
||||
rows: Vec<DataRow>,
|
||||
fields: Vec<FieldInfo>,
|
||||
pkeys: Vec<String>,
|
||||
}
|
||||
Ok(Json(ResponseBody {
|
||||
rows: data_rows,
|
||||
fields,
|
||||
pkeys,
|
||||
})
|
||||
.into_response())
|
||||
}
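Each row's primary-key columns are folded into a small JSON object and serialized to a string, which then doubles as the handle the frontend can send back when updating that row. A minimal sketch of that encoding step with serde_json, using serde_json::Value where the handler uses Encodable; the names and sample values are illustrative.

use std::collections::HashMap;

use serde::Serialize;
use serde_json::{Value, json};

#[derive(Serialize)]
struct DataRow {
    pkey: String,
    data: Vec<Value>,
}

fn encode_row(pkey_values: HashMap<String, Value>, data: Vec<Value>) -> serde_json::Result<DataRow> {
    // The pkey handle is itself JSON, so composite keys round-trip unambiguously.
    Ok(DataRow {
        pkey: serde_json::to_string(&pkey_values)?,
        data,
    })
}

fn main() -> serde_json::Result<()> {
    let row = encode_row(
        HashMap::from([("_id".to_owned(), json!("row-1"))]),
        vec![json!("hello"), json!(42)],
    )?;
    println!("{}", serde_json::to_string(&row)?);
    Ok(())
}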
|
||||
124 interim-server/src/routes/relations_single/insert_handler.rs Normal file
|
|
@ -0,0 +1,124 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use axum::{
|
||||
debug_handler,
|
||||
extract::{Path, State},
|
||||
response::Response,
|
||||
};
|
||||
// [`axum_extra`]'s form extractor is required to support repeated keys:
|
||||
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
|
||||
use axum_extra::extract::Form;
|
||||
use interim_models::{
|
||||
encodable::Encodable,
|
||||
portal::Portal,
|
||||
workspace::Workspace,
|
||||
workspace_user_perm::{self, WorkspaceUserPerm},
|
||||
};
|
||||
use interim_pgtypes::{escape_identifier, pg_class::PgClass};
|
||||
use serde::Deserialize;
|
||||
use sqlx::{postgres::types::Oid, query};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::{AppError, forbidden},
|
||||
app_state::{App, AppDbConn},
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navigator::Navigator,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
portal_id: Uuid,
|
||||
rel_oid: u32,
|
||||
workspace_id: Uuid,
|
||||
}
|
||||
|
||||
/// HTTP POST handler for inserting one or more rows into a table. This handler
|
||||
/// takes a form where the keys are column names, with keys optionally repeated
|
||||
/// to insert multiple rows at once. If any key is repeated, the others should
|
||||
/// be repeated the same number of times. Form values are expected to be JSON-
|
||||
/// serialized representations of the `[Encodable]` type.
|
||||
#[debug_handler(state = App)]
|
||||
pub(super) async fn post(
|
||||
State(mut workspace_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(user): CurrentUser,
|
||||
navigator: Navigator,
|
||||
Path(PathParams {
|
||||
portal_id,
|
||||
rel_oid,
|
||||
workspace_id,
|
||||
}): Path<PathParams>,
|
||||
Form(form): Form<HashMap<String, Vec<String>>>,
|
||||
) -> Result<Response, AppError> {
|
||||
// Check workspace authorization.
|
||||
let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
if workspace_perms.iter().all(|p| {
|
||||
p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect
|
||||
}) {
|
||||
return Err(forbidden!("access denied to workspace"));
|
||||
}
|
||||
// FIXME ensure workspace corresponds to rel/portal, and that user has
|
||||
// permission to access/alter both as needed.
|
||||
|
||||
let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?;
|
||||
let workspace = Workspace::with_id(portal.workspace_id)
|
||||
.fetch_one(&mut app_db)
|
||||
.await?;
|
||||
|
||||
let mut workspace_client = workspace_pooler
|
||||
.acquire_for(workspace.id, RoleAssignment::User(user.id))
|
||||
.await?;
|
||||
|
||||
let rel = PgClass::with_oid(Oid(rel_oid))
|
||||
.fetch_one(&mut workspace_client)
|
||||
.await?;
|
||||
|
||||
let col_names: Vec<String> = form.keys().cloned().collect();
|
||||
let col_list_sql = col_names
|
||||
.iter()
|
||||
.map(|value| escape_identifier(value))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ");
|
||||
|
||||
let n_rows = form.values().map(|value| value.len()).max().unwrap_or(0);
|
||||
if n_rows > 0 {
|
||||
let mut param_index = 1;
|
||||
let mut params: Vec<Encodable> = vec![];
|
||||
let mut row_list: Vec<String> = vec![];
|
||||
for i in 0..n_rows {
|
||||
let mut param_slots: Vec<String> = vec![];
|
||||
for col in col_names.iter() {
|
||||
let maybe_value: Option<Encodable> = form
|
||||
.get(col)
|
||||
.and_then(|col_values| col_values.get(i))
|
||||
.map(|value_raw| serde_json::from_str(value_raw))
|
||||
.transpose()?;
|
||||
if let Some(value) = maybe_value.filter(|value| !value.is_none()) {
|
||||
params.push(value);
|
||||
param_slots.push(format!("${param_index}"));
|
||||
param_index += 1;
|
||||
} else {
|
||||
param_slots.push("default".to_owned());
|
||||
}
|
||||
}
|
||||
row_list.push(format!("({0})", param_slots.join(", ")));
|
||||
}
|
||||
let row_list_sql = row_list.join(",\n");
|
||||
|
||||
let query_sql = &format!(
|
||||
"insert into {ident} ({col_list_sql}) values {row_list_sql}",
|
||||
ident = rel.get_identifier(),
|
||||
);
|
||||
let mut q = query(query_sql);
|
||||
for param in params {
|
||||
q = param.bind_onto(q);
|
||||
}
|
||||
q.execute(workspace_client.get_conn()).await?;
|
||||
}
|
||||
|
||||
Ok(navigator.portal_page(&portal).redirect_to())
|
||||
}
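Building the insert is mostly string assembly: one $n placeholder per present value, the literal keyword default where a cell was left empty, and a flat parameter list bound in the same order the placeholders were numbered. The sketch below reproduces that assembly over plain strings so the generated fragment and parameters can be inspected without a database; column names are assumed to be escaped upstream, as escape_identifier does in the handler.

use std::collections::HashMap;

/// Returns the VALUES clause plus the parameters to bind, in placeholder order.
fn build_rows(form: &HashMap<String, Vec<Option<String>>>, col_names: &[String]) -> (String, Vec<String>) {
    let n_rows = form.values().map(|v| v.len()).max().unwrap_or(0);
    let mut params = Vec::new();
    let mut rows = Vec::new();
    let mut param_index = 1;
    for i in 0..n_rows {
        let mut slots = Vec::new();
        for col in col_names {
            match form.get(col).and_then(|values| values.get(i)).cloned().flatten() {
                Some(value) => {
                    params.push(value);
                    slots.push(format!("${param_index}"));
                    param_index += 1;
                }
                // Missing cells fall back to the column default.
                None => slots.push("default".to_owned()),
            }
        }
        rows.push(format!("({})", slots.join(", ")));
    }
    (rows.join(",\n"), params)
}

fn main() {
    let form = HashMap::from([
        ("name".to_owned(), vec![Some("a".to_owned()), Some("b".to_owned())]),
        ("notes".to_owned(), vec![None, Some("hi".to_owned())]),
    ]);
    let cols = ["name".to_owned(), "notes".to_owned()];
    let (values_sql, params) = build_rows(&form, &cols);
    println!("insert into t (name, notes) values {values_sql}");
    assert_eq!(params, vec!["a", "b", "hi"]);
}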
|
||||
22 interim-server/src/routes/relations_single/mod.rs Normal file
@@ -0,0 +1,22 @@
use axum::{
    Router,
    routing::{get, post},
};
use axum_extra::routing::RouterExt as _;

use crate::app_state::App;

mod add_field_handler;
mod add_portal_handler;
mod get_data_handler;
mod insert_handler;
mod portal_handler;

pub(super) fn new_router() -> Router<App> {
    Router::<App>::new()
        .route("/add-portal", post(add_portal_handler::post))
        .route_with_tsr("/p/{portal_id}/", get(portal_handler::get))
        .route_with_tsr("/p/{portal_id}/get-data/", get(get_data_handler::get))
        .route("/p/{portal_id}/add-field", post(add_field_handler::post))
        .route("/p/{portal_id}/insert", post(insert_handler::post))
}
|
||||
88 interim-server/src/routes/relations_single/portal_handler.rs Normal file
|
|
@ -0,0 +1,88 @@
|
|||
use askama::Template;
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
response::{Html, IntoResponse as _, Response},
|
||||
};
|
||||
use interim_models::{expression::PgExpressionAny, portal::Portal, workspace::Workspace};
|
||||
use interim_pgtypes::pg_attribute::PgAttribute;
|
||||
use serde::Deserialize;
|
||||
use sqlx::postgres::types::Oid;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::AppError,
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navbar::{NavLocation, RelLocation, WorkspaceNav},
|
||||
navigator::Navigator,
|
||||
settings::Settings,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
portal_id: Uuid,
|
||||
rel_oid: u32,
|
||||
workspace_id: Uuid,
|
||||
}
|
||||
|
||||
/// HTTP GET handler for the table viewer page of a [`Portal`]. This handler
|
||||
/// performs some relatively simple queries pertaining to table structure, but
|
||||
/// the bulk of the query logic resides in the [`super::get_data_handler`]
|
||||
/// module.
|
||||
pub(super) async fn get(
|
||||
State(settings): State<Settings>,
|
||||
State(mut workspace_pooler): State<WorkspacePooler>,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
CurrentUser(current_user): CurrentUser,
|
||||
navigator: Navigator,
|
||||
Path(PathParams {
|
||||
portal_id,
|
||||
workspace_id,
|
||||
rel_oid,
|
||||
}): Path<PathParams>,
|
||||
) -> Result<Response, AppError> {
|
||||
// FIXME auth
|
||||
|
||||
let workspace = Workspace::with_id(workspace_id)
|
||||
.fetch_one(&mut app_db)
|
||||
.await?;
|
||||
let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?;
|
||||
|
||||
let mut workspace_client = workspace_pooler
|
||||
.acquire_for(portal.workspace_id, RoleAssignment::User(current_user.id))
|
||||
.await?;
|
||||
|
||||
let attrs = PgAttribute::all_for_rel(portal.class_oid)
|
||||
.fetch_all(&mut workspace_client)
|
||||
.await?;
|
||||
let attr_names: Vec<String> = attrs.iter().map(|attr| attr.attname.clone()).collect();
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "lens.html")]
|
||||
struct ResponseTemplate {
|
||||
attr_names: Vec<String>,
|
||||
filter: Option<PgExpressionAny>,
|
||||
settings: Settings,
|
||||
navbar: WorkspaceNav,
|
||||
}
|
||||
Ok(Html(
|
||||
ResponseTemplate {
|
||||
attr_names,
|
||||
filter: portal.table_filter.0,
|
||||
navbar: WorkspaceNav::builder()
|
||||
.navigator(navigator)
|
||||
.workspace(workspace.clone())
|
||||
.populate_rels(&mut app_db, &mut workspace_client)
|
||||
.await?
|
||||
.current(NavLocation::Rel(
|
||||
Oid(rel_oid),
|
||||
Some(RelLocation::Portal(portal.id)),
|
||||
))
|
||||
.build()?,
|
||||
settings,
|
||||
}
|
||||
.render()?,
|
||||
)
|
||||
.into_response())
|
||||
}
|
||||
39 interim-server/src/routes/workspaces_multi/list_handlers.rs Normal file
|
|
@ -0,0 +1,39 @@
|
|||
use askama::Template;
|
||||
use axum::{
|
||||
extract::State,
|
||||
response::{Html, IntoResponse},
|
||||
};
|
||||
use interim_models::workspace_user_perm::WorkspaceUserPerm;
|
||||
|
||||
use crate::{
|
||||
app_error::AppError, app_state::AppDbConn, navigator::Navigator, settings::Settings,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
pub(super) async fn get(
|
||||
State(settings): State<Settings>,
|
||||
CurrentUser(user): CurrentUser,
|
||||
navigator: Navigator,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "workspaces_multi/list.html")]
|
||||
struct ResponseTemplate {
|
||||
navigator: Navigator,
|
||||
settings: Settings,
|
||||
workspace_perms: Vec<WorkspaceUserPerm>,
|
||||
}
|
||||
|
||||
Ok(Html(
|
||||
ResponseTemplate {
|
||||
navigator,
|
||||
settings,
|
||||
workspace_perms,
|
||||
}
|
||||
.render()?,
|
||||
))
|
||||
}
|
||||
12 interim-server/src/routes/workspaces_multi/mod.rs Normal file
@@ -0,0 +1,12 @@
use axum::{Router, response::Redirect, routing::get};
use axum_extra::routing::RouterExt as _;

use crate::app_state::App;

mod list_handlers;

pub(super) fn new_router() -> Router<App> {
    Router::<App>::new()
        .route("/", get(|| async move { Redirect::to("list/") }))
        .route_with_tsr("/list/", get(list_handlers::get))
}
|
||||
109 interim-server/src/routes/workspaces_single/add_table_handler.rs Normal file
|
|
@ -0,0 +1,109 @@
|
|||
use axum::{
|
||||
extract::{Path, State},
|
||||
response::IntoResponse,
|
||||
};
|
||||
use interim_models::workspace_user_perm::{self, WorkspaceUserPerm};
|
||||
use interim_pgtypes::escape_identifier;
|
||||
use serde::Deserialize;
|
||||
use sqlx::query;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::{AppError, forbidden},
|
||||
app_state::AppDbConn,
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navigator::Navigator,
|
||||
settings::Settings,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
workspace_id: Uuid,
|
||||
}
|
||||
|
||||
/// HTTP POST handler for creating a managed Postgres table within a workspace
|
||||
/// database. Upon success, it redirects the client back to the workspace
|
||||
/// homepage, which is expected to display a list of available tables including
|
||||
/// the newly created one.
|
||||
///
|
||||
/// This handler expects 1 path parameter named `workspace_id` which should
|
||||
/// deserialize to a UUID.
|
||||
pub(super) async fn post(
|
||||
State(settings): State<Settings>,
|
||||
State(mut pooler): State<WorkspacePooler>,
|
||||
CurrentUser(user): CurrentUser,
|
||||
navigator: Navigator,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
Path(PathParams { workspace_id }): Path<PathParams>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
// Check workspace authorization.
|
||||
let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
if workspace_perms.iter().all(|p| {
|
||||
p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect
|
||||
}) {
|
||||
return Err(forbidden!("access denied to workspace"));
|
||||
}
|
||||
|
||||
let mut workspace_client = pooler
|
||||
// FIXME: Should this be scoped down to the unprivileged role after
|
||||
// setting up the table owner?
|
||||
.acquire_for(workspace_id, RoleAssignment::Root)
|
||||
.await?;
|
||||
|
||||
let table_owner_rolname = format!("table_owner_{0}", Uuid::new_v4().simple());
|
||||
query(&format!(
|
||||
"create role {0}",
|
||||
escape_identifier(&table_owner_rolname),
|
||||
))
|
||||
.execute(workspace_client.get_conn())
|
||||
.await?;
|
||||
query(&format!(
|
||||
"grant {0} to {1} with admin option",
|
||||
escape_identifier(&table_owner_rolname),
|
||||
escape_identifier(&format!(
|
||||
"{0}{1}",
|
||||
settings.db_role_prefix,
|
||||
user.id.simple()
|
||||
))
|
||||
))
|
||||
.execute(workspace_client.get_conn())
|
||||
.await?;
|
||||
query(&format!(
|
||||
"grant create, usage on schema {0} to {1}",
|
||||
escape_identifier(&settings.phono_table_namespace),
|
||||
escape_identifier(&table_owner_rolname),
|
||||
))
|
||||
.execute(workspace_client.get_conn())
|
||||
.await?;
|
||||
const TABLE_NAME: &str = "untitled";
|
||||
query(&format!(
|
||||
r#"
|
||||
create table {0}.{1} (
|
||||
_id uuid primary key not null default uuidv7(),
|
||||
_created_by text not null default current_user,
|
||||
_created_at timestamptz not null default now(),
|
||||
_form_session uuid,
|
||||
_form_backlink_portal uuid,
|
||||
_form_backlink_row uuid,
|
||||
notes text
|
||||
)
|
||||
"#,
|
||||
escape_identifier(&settings.phono_table_namespace),
|
||||
escape_identifier(TABLE_NAME),
|
||||
))
|
||||
.execute(workspace_client.get_conn())
|
||||
.await?;
|
||||
query(&format!(
|
||||
"alter table {0}.{1} owner to {2}",
|
||||
escape_identifier(&settings.phono_table_namespace),
|
||||
escape_identifier(TABLE_NAME),
|
||||
escape_identifier(&table_owner_rolname)
|
||||
))
|
||||
.execute(workspace_client.get_conn())
|
||||
.await?;
|
||||
|
||||
Ok(navigator.workspace_page(workspace_id).redirect_to())
|
||||
}
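add_table_handler chains several grants and DDL statements around the create table itself: create a dedicated owner role, grant it with admin option to the requesting user's database role, give it create and usage on the managed schema, then hand the new table to it. The sketch below just assembles that sequence (minus the create table statement) so the ordering is easy to read; the identifiers are examples, and in the handler each one passes through escape_identifier first.

/// Produce the statement sequence used when provisioning a managed table,
/// in execution order. Inputs are assumed to be pre-escaped identifiers.
fn provisioning_statements(owner_role: &str, user_role: &str, schema: &str, table: &str) -> Vec<String> {
    vec![
        format!("create role {owner_role}"),
        // Admin option lets the user manage membership of the owner role later.
        format!("grant {owner_role} to {user_role} with admin option"),
        format!("grant create, usage on schema {schema} to {owner_role}"),
        // The create table statement runs here in the real handler.
        format!("alter table {schema}.{table} owner to {owner_role}"),
    ]
}

fn main() {
    for sql in provisioning_statements("table_owner_abc123", "app_user_42", "managed", "untitled") {
        println!("{sql};");
    }
}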
|
||||
21 interim-server/src/routes/workspaces_single/mod.rs Normal file
@@ -0,0 +1,21 @@
use axum::{
    Router,
    response::Redirect,
    routing::{get, post},
};
use axum_extra::routing::RouterExt as _;

use crate::app_state::App;

use super::relations_single;

mod add_table_handler;
mod nav_handler;

pub(super) fn new_router() -> Router<App> {
    Router::<App>::new()
        .route("/", get(|| async move { Redirect::to("nav/") }))
        .route("/add-table", post(add_table_handler::post))
        .route_with_tsr("/nav/", get(nav_handler::get))
        .nest("/r/{rel_oid}", relations_single::new_router())
}
|
||||
77 interim-server/src/routes/workspaces_single/nav_handler.rs Normal file
|
|
@ -0,0 +1,77 @@
|
|||
use askama::Template;
|
||||
use axum::{
|
||||
debug_handler,
|
||||
extract::{Path, State},
|
||||
response::{Html, IntoResponse},
|
||||
};
|
||||
use interim_models::{
|
||||
workspace::Workspace,
|
||||
workspace_user_perm::{self, WorkspaceUserPerm},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
app_error::{AppError, forbidden},
|
||||
app_state::{App, AppDbConn},
|
||||
base_pooler::{RoleAssignment, WorkspacePooler},
|
||||
navbar::WorkspaceNav,
|
||||
navigator::Navigator,
|
||||
settings::Settings,
|
||||
user::CurrentUser,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub(super) struct PathParams {
|
||||
workspace_id: Uuid,
|
||||
}
|
||||
|
||||
#[debug_handler(state = App)]
|
||||
pub(super) async fn get(
|
||||
State(settings): State<Settings>,
|
||||
CurrentUser(user): CurrentUser,
|
||||
AppDbConn(mut app_db): AppDbConn,
|
||||
Path(PathParams { workspace_id }): Path<PathParams>,
|
||||
navigator: Navigator,
|
||||
State(mut pooler): State<WorkspacePooler>,
|
||||
) -> Result<impl IntoResponse, AppError> {
|
||||
// Check workspace authorization.
|
||||
let workspace_perms = WorkspaceUserPerm::belonging_to_user(user.id)
|
||||
.fetch_all(&mut app_db)
|
||||
.await?;
|
||||
if workspace_perms.iter().all(|p| {
|
||||
p.workspace_id != workspace_id || p.perm != workspace_user_perm::PermissionValue::Connect
|
||||
}) {
|
||||
return Err(forbidden!("access denied to workspace"));
|
||||
}
|
||||
|
||||
let workspace = Workspace::with_id(workspace_id)
|
||||
.fetch_one(&mut app_db)
|
||||
.await?;
|
||||
|
||||
let mut workspace_client = pooler
|
||||
.acquire_for(workspace_id, RoleAssignment::User(user.id))
|
||||
.await?;
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "workspaces_single/nav.html")]
|
||||
struct ResponseTemplate {
|
||||
settings: Settings,
|
||||
workspace: Workspace,
|
||||
workspace_nav: WorkspaceNav,
|
||||
}
|
||||
|
||||
Ok(Html(
|
||||
ResponseTemplate {
|
||||
workspace_nav: WorkspaceNav::builder()
|
||||
.navigator(navigator)
|
||||
.workspace(workspace.clone())
|
||||
.populate_rels(&mut app_db, &mut workspace_client)
|
||||
.await?
|
||||
.build()?,
|
||||
settings,
|
||||
workspace,
|
||||
}
|
||||
.render()?,
|
||||
))
|
||||
}
|
||||
|
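The authorization check in get() denies access unless at least one permission row grants Connect on the requested workspace. An equivalent formulation without the double negation, shown here purely as a reading aid:

// Equivalent to the `.all(..)` check above: allow only if some permission row
// matches this workspace with the Connect permission.
let allowed = workspace_perms.iter().any(|p| {
    p.workspace_id == workspace_id && p.perm == workspace_user_perm::PermissionValue::Connect
});
if !allowed {
    return Err(forbidden!("access denied to workspace"));
}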
@@ -1,3 +1,5 @@
//! Browser session management via [`async_session`].

use anyhow::Result;
use async_session::{Session, SessionStore, async_trait};
use axum::{

@@ -10,7 +12,7 @@ use chrono::{DateTime, TimeDelta, Utc};
use sqlx::{PgPool, query, query_as};
use tracing::{Instrument, trace_span};

use crate::{app_error::AppError, app_state::AppState};
use crate::{app_error::AppError, app_state::App};

const EXPIRY_DAYS: i64 = 7;

@@ -39,8 +41,8 @@ impl std::fmt::Debug for PgStore {
    }
}

impl FromRef<AppState> for PgStore {
    fn from_ref(state: &AppState) -> Self {
impl FromRef<App> for PgStore {
    fn from_ref(state: &App) -> Self {
        state.session_store.clone()
    }
}

@@ -110,13 +112,13 @@ on conflict (id) do update set
#[derive(Clone)]
pub struct AppSession(pub Option<Session>);

impl FromRequestParts<AppState> for AppSession {
impl FromRequestParts<App> for AppSession {
    type Rejection = AppError;

    async fn from_request_parts(
        parts: &mut Parts,
        state: &AppState,
    ) -> Result<Self, <Self as FromRequestParts<AppState>>::Rejection> {
        state: &App,
    ) -> Result<Self, <Self as FromRequestParts<App>>::Rejection> {
        async move {
            let jar = parts.extract::<CookieJar>().await.unwrap();
            let session_cookie = match jar.get(&state.settings.auth.cookie_name) {
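The EXPIRY_DAYS constant and the chrono imports visible in the hunk header suggest session expiry is computed with TimeDelta arithmetic; a minimal sketch under that assumption (the store's actual expiry handling falls outside the hunks shown here):

// Assumed semantics: a session created at `now` would expire EXPIRY_DAYS later.
fn assumed_expiry(now: DateTime<Utc>) -> DateTime<Utc> {
    now + TimeDelta::days(EXPIRY_DAYS)
}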
@@ -1,46 +1,56 @@
//! Runtime application configuration values.

use anyhow::{Context as _, Result};
use axum::extract::FromRef;
use config::{Config, Environment};
use dotenvy::dotenv;
use serde::Deserialize;

use crate::app_state::AppState;
use crate::app_state::App;

#[derive(Clone, Debug, Deserialize)]
pub struct Settings {
pub(crate) struct Settings {
    /// Prefix under which to nest all routes. If specified, include leading
    /// slash but no trailing slash, for example "/app". For default behavior,
    /// leave as empty string.
    #[serde(default)]
    pub root_path: String,
    pub(crate) root_path: String,

    /// When set to 1, dev features such as the frontend reloader will be
    /// enabled.
    #[serde(default)]
    pub dev: u8,
    pub(crate) dev: u8,

    /// postgresql:// URL for Interim's application database.
    pub database_url: String,
    pub(crate) database_url: String,

    #[serde(default = "default_app_db_max_connections")]
    pub app_db_max_connections: u32,
    pub(crate) app_db_max_connections: u32,

    /// When set to 1, embedded SQLx migrations will be run on startup.
    #[serde(default)]
    pub run_database_migrations: u8,
    pub(crate) run_database_migrations: u8,

    /// Address for server to bind to
    #[serde(default = "default_host")]
    pub host: String,
    pub(crate) host: String,

    /// Port for server to bind to
    #[serde(default = "default_port")]
    pub port: u16,
    pub(crate) port: u16,

    /// Host visible to end users, for example "https://shout.dev"
    pub frontend_host: String,
    /// Host visible to end users, for example "https://phono.dev"
    pub(crate) frontend_host: String,

    pub auth: AuthSettings,
    pub(crate) auth: AuthSettings,

    /// String to prepend to user IDs in order to construct Postgres role names.
    #[serde(default = "default_db_role_prefix")]
    pub(crate) db_role_prefix: String,

    /// Postgres schema in which to create managed backing tables.
    #[serde(default = "default_phono_table_namespace")]
    pub(crate) phono_table_namespace: String,
}

fn default_app_db_max_connections() -> u32 {

@@ -55,17 +65,25 @@ fn default_host() -> String {
    "127.0.0.1".to_owned()
}

fn default_db_role_prefix() -> String {
    "__phono__".to_owned()
}

fn default_phono_table_namespace() -> String {
    "phono".to_owned()
}

#[derive(Clone, Debug, Deserialize)]
pub struct AuthSettings {
    pub client_id: String,
    pub client_secret: String,
    pub auth_url: String,
    pub token_url: String,
    pub userinfo_url: String,
    pub logout_url: Option<String>,
pub(crate) struct AuthSettings {
    pub(crate) client_id: String,
    pub(crate) client_secret: String,
    pub(crate) auth_url: String,
    pub(crate) token_url: String,
    pub(crate) userinfo_url: String,
    pub(crate) logout_url: Option<String>,

    #[serde(default = "default_cookie_name")]
    pub cookie_name: String,
    pub(crate) cookie_name: String,
}

fn default_cookie_name() -> String {

@@ -73,7 +91,7 @@ fn default_cookie_name() -> String {
}

impl Settings {
    pub fn load() -> Result<Self> {
    pub(crate) fn load() -> Result<Self> {
        match dotenv() {
            Err(err) => {
                if err.not_found() {

@@ -99,11 +117,8 @@ impl Settings {
    }
}

impl<S> FromRef<S> for Settings
where
    S: Into<AppState> + Clone,
{
    fn from_ref(state: &S) -> Self {
        Into::<AppState>::into(state.clone()).settings.clone()
impl FromRef<App> for Settings {
    fn from_ref(state: &App) -> Self {
        state.settings.clone()
    }
}
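The simplified FromRef<App> impl at the end of this file is what lets any handler registered on a Router<App> extract this sub-state directly, exactly as nav_handler::get above does with State(settings): State<Settings>. A minimal sketch of such a handler (the handler name is invented for illustration):

use axum::extract::State;

// Sketch: with FromRef<App> implemented for Settings, State<Settings> can be
// taken as an extractor argument on any Router<App> handler.
async fn show_frontend_host(State(settings): State<Settings>) -> String {
    settings.frontend_host.clone()
}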
@@ -1,3 +1,5 @@
//! Provides an Axum extractor to fetch the authenticated user for a request.

use async_session::{Session, SessionStore as _};
use axum::{
    RequestPartsExt,

@@ -15,24 +17,22 @@ use uuid::Uuid;

use crate::{
    app_error::AppError,
    app_state::AppState,
    app_state::App,
    auth::{AuthInfo, SESSION_KEY_AUTH_INFO, SESSION_KEY_AUTH_REDIRECT},
    sessions::AppSession,
};

/// Extractor for the authenticated user associated with an HTTP request. If
/// the request is not authenticated, the extractor will abort request handling
/// and redirect the client to an OAuth2 login page.
#[derive(Clone, Debug)]
pub struct CurrentUser(pub User);
pub(crate) struct CurrentUser(pub(crate) User);

impl<S> FromRequestParts<S> for CurrentUser
where
    S: Into<AppState> + Clone + Sync,
{
impl FromRequestParts<App> for CurrentUser {
    type Rejection = CurrentUserRejection;

    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let app_state: AppState = state.clone().into();
        let mut session =
            if let AppSession(Some(value)) = parts.extract_with_state(&app_state).await? {
    async fn from_request_parts(parts: &mut Parts, state: &App) -> Result<Self, Self::Rejection> {
        let mut session = if let AppSession(Some(value)) = parts.extract_with_state(state).await? {
            value
        } else {
            Session::new()

@@ -48,12 +48,12 @@ where
            SESSION_KEY_AUTH_REDIRECT,
            uri.path_and_query()
                .map(|value| value.to_string())
                .unwrap_or(format!("{}/", app_state.settings.root_path)),
                .unwrap_or(format!("{}/", state.settings.root_path)),
        )?;
        if let Some(cookie_value) = app_state.session_store.store_session(session).await? {
        if let Some(cookie_value) = state.session_store.store_session(session).await? {
            tracing::debug!("adding session cookie to jar");
            jar.add(
                Cookie::build((app_state.settings.auth.cookie_name.clone(), cookie_value))
                Cookie::build((state.settings.auth.cookie_name.clone(), cookie_value))
                    .same_site(SameSite::Lax)
                    .http_only(true)
                    .path("/"),

@@ -70,12 +70,12 @@ where
        };
        return Err(Self::Rejection::SetCookiesAndRedirect(
            jar,
            format!("{}/auth/login", app_state.settings.root_path),
            format!("{}/auth/login", state.settings.root_path),
        ));
        };
        let current_user = if let Some(value) =
            query_as!(User, "select * from users where uid = $1", &auth_info.sub)
                .fetch_optional(&app_state.app_db)
                .fetch_optional(&state.app_db)
                .await?
        {
            value

@@ -92,14 +92,14 @@ returning *
            &auth_info.sub,
            &auth_info.email
        )
        .fetch_optional(&app_state.app_db)
        .fetch_optional(&state.app_db)
        .await?
        {
            value
        } else {
            tracing::debug!("detected race to insert current user record");
            query_as!(User, "select * from users where uid = $1", &auth_info.sub)
                .fetch_one(&app_state.app_db)
                .fetch_one(&state.app_db)
                .await?
        };
        Ok(CurrentUser(current_user))
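Call sites are unaffected by the tighter impl: CurrentUser is still used as a plain extractor argument, for example in nav_handler::get above. A minimal sketch of such a handler (the handler name is invented; user.id is the field the rest of this commit relies on):

// Sketch of a handler using the extractor; unauthenticated requests never
// reach the body because the extractor redirects to the login flow instead.
async fn whoami(CurrentUser(user): CurrentUser) -> String {
    format!("authenticated as user {}", user.id)
}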
@@ -1,9 +1,9 @@
use anyhow::Result;
use tracing::Instrument as _;

use crate::app_state::AppState;
use crate::app_state::App;

pub async fn run_worker(_state: AppState) -> Result<()> {
pub async fn run_worker(_state: App) -> Result<()> {
    async move { Ok(()) }
        .instrument(tracing::debug_span!("run_worker()"))
        .await
91  interim-server/templates/workspace_nav.html  Normal file

@@ -0,0 +1,91 @@
<nav class="navbar">
  <section>
    <div class="navbar__heading">
      <h2>Tables</h2>
      <form
        action="{{ navigator.get_root_path() -}}
                /w/{{ workspace.id.simple() -}}
                /add-table"
        method="post"
      >
        <!-- FIXME: CSRF -->
        <button type="submit">+</button>
      </form>
    </div>
    <menu class="navbar__menu">
      {%- for rel in relations %}
      <li class="navbar__menu-item
        {%- if current == Some(NavLocation::Rel(rel.oid.to_owned(), None)) -%}
          {# preserve space #} navbar__menu-item--active
        {%- endif -%}
      ">
        <collapsible-menu
          class="navbar__collapsible-menu"
          expanded="
            {%- if let Some(NavLocation::Rel(rel_oid, _)) = current -%}
              {%- if rel_oid.to_owned() == rel.oid -%}
                true
              {%- endif -%}
            {%- endif -%}
          "
        >
          <h4 slot="summary" class="navbar__heading navbar__heading--entity">
            {{ rel.name }}
          </h4>
          <menu slot="content" class="navbar__menu">
            <li class="navbar__menu-item">
              <a
                href="{{ navigator.get_root_path() }}/r/{{ rel.oid.0 }}/rbac"
                class="navbar__menu-link"
              >
                Sharing
              </a>
            </li>
            <li class="navbar__menu-item">
              <collapsible-menu class="navbar__collapsible-menu">
                <div slot="summary" class="navbar__heading">
                  <h5>Portals</h5>
                  <form
                    action="{{ navigator.get_root_path() -}}
                            /w/{{ workspace.id.simple() -}}
                            /r/{{ rel.oid.0 -}}
                            /add-portal"
                    method="post"
                  >
                    <!-- FIXME: CSRF -->
                    <button type="submit">+</button>
                  </form>
                </div>
                <menu slot="content" class="navbar__menu">
                  {% for portal in rel.portals %}
                  <li class="navbar__menu-item
                  ">
                    <a
                      href="
                        {{- navigator.get_root_path() -}}
                        /w/{{ workspace.id.simple() -}}
                        /r/{{ rel.oid.0 -}}
                        /p/{{ portal.id.simple() -}}
                      "
                      class="navbar__menu-link navbar__menu-link--entity
                        {%- if current == Some(NavLocation::Rel(rel.oid.to_owned(), Some(RelLocation::Portal(portal.id.to_owned())))) -%}
                          {# preserve space #} navbar__menu-link--current
                        {%- endif -%}
                      "
                    >
                      {{ portal.name }}
                    </a>
                  </li>
                  {% endfor %}
                </menu>
              </collapsible-menu>
            </li>
          </menu>
        </collapsible-menu>
      </li>
      {% endfor -%}
    </menu>
  </section>
  <script type="module" src="{{ navigator.get_root_path() }}/js_dist/collapsible-menu.webc.mjs"></script>
</nav>
16  interim-server/templates/workspaces_multi/list.html  Normal file

@@ -0,0 +1,16 @@
{% extends "base.html" %}

{% block main %}
<main>
  <h1>Workspaces</h1>
  <ul>
    {% for workspace_perm in workspace_perms %}
    <li>
      <a href="{{ navigator.workspace_page(*workspace_perm.workspace_id).abs_path() }}">
        {{ workspace_perm.workspace_name }}
      </a>
    </li>
    {% endfor %}
  </ul>
</main>
{% endblock %}
8  interim-server/templates/workspaces_single/nav.html  Normal file

@@ -0,0 +1,8 @@
{% extends "base.html" %}

{% block main %}
<main>
  <h1>{{ workspace.name }}</h1>
  {{ workspace_nav | safe }}
</main>
{% endblock %}
@@ -3,6 +3,8 @@
@use 'globals';
@use 'modern-normalize';
@use 'forms';
@use 'collapsible_menu';
@use 'navbar';

html {
  font-family: "Averia Serif Libre", "Open Sans", "Helvetica Neue", Arial, sans-serif;
@@ -32,10 +32,10 @@
  (presentation) => ({
    field: {
      id: "",
      label: "",
      table_label: "",
      name: "",
      presentation,
      width_px: -1,
      table_width_px: -1,
    },
    not_null: true,
    has_default: false,
@@ -73,8 +73,8 @@ submission.
  }
</script>

<div class="field-adder__container">
  <form method="post" action="add-column">
<form method="post" action="add-field">
  <div class="field-adder__container">
    <div
      class="field-adder__header-lookalike"
      style:display={expanded ? "block" : "none"}

@@ -91,7 +91,6 @@ submission.
        search_input_class="field-adder__label-input"
      />
    </div>
  </form>

    <div class="field-adder__summary-buttons">
      <button

@@ -112,9 +111,9 @@ submission.
        {@html expanded ? icon_x_mark : icon_plus}
      </button>
    </div>
  </div>
</div>

<div bind:this={popover_element} class="field-adder__popover" popover="auto">
  <div bind:this={popover_element} class="field-adder__popover" popover="auto">
    <!--
    The "advanced" details for creating a new column or customizing the behavior
    of a field backed by an existing column overlap with the controls exposed when

@@ -125,4 +124,6 @@ submission.
      bind:label_value
      on_name_input={handle_name_input}
    />
  </div>
  <button class="button--primary" type="submit">Create</button>
</div>
</form>
@@ -102,10 +102,10 @@ field. This is typically rendered within a popover component, and within an HTML
  />
</label>
<label class="form-section">
  <div class="form-section__label">Data Type</div>
  <div class="form-section__label">Present As</div>
  <select
    class="form-section__input"
    name="field-type"
    name="presentation_tag"
    onchange={handle_presentation_tag_change}
    value={presentation?.t}
  >

@@ -121,7 +121,7 @@ field. This is typically rendered within a popover component, and within an HTML
  <div class="form-section__label">Input Mode</div>
  <select
    class="form-section__input"
    name="input-mode"
    name="text_input_mode"
    onchange={handle_text_input_mode_change}
    value={presentation.c.input_mode.t}
  >
@@ -12,15 +12,15 @@

  let { field = $bindable(), index }: Props = $props();

  const original_label_value = field.field.label;
  const original_label_value = field.field.table_label;

  let type_indicator_element = $state<HTMLButtonElement | undefined>();
  let popover_element = $state<HTMLDivElement | undefined>();
  let name_value = $state(field.field.name);
  let label_value = $state(field.field.label ?? "");
  let label_value = $state(field.field.table_label ?? "");

  $effect(() => {
    field.field.label = label_value === "" ? undefined : label_value;
    field.field.table_label = label_value === "" ? undefined : label_value;
  });

  $effect(() => {

@@ -37,7 +37,7 @@
  function handle_popover_toggle(ev: ToggleEvent) {
    if (ev.newState === "closed") {
      type_indicator_element?.focus();
      field.field.label = original_label_value;
      field.field.table_label = original_label_value;
      label_value = original_label_value ?? "";
    }
  }

@@ -47,10 +47,10 @@
  aria-colindex={index}
  class="field-header__container"
  role="columnheader"
  style:width={`${field.field.width_px}px`}
  style:width={`${field.field.table_width_px}px`}
>
  <div class="field-header__label">
    {field.field.label ?? field.field.name}
    {field.field.table_label ?? field.field.name}
  </div>
  <div class="field-header__menu-container">
    <button
@@ -6,9 +6,9 @@ import { presentation_schema } from "./presentation.svelte.ts";
export const field_schema = z.object({
  id: z.string(),
  name: z.string(),
  label: z.string().nullish().transform((x) => x ?? undefined),
  table_label: z.string().nullish().transform((x) => x ?? undefined),
  presentation: presentation_schema,
  width_px: z.number(),
  table_width_px: z.number(),
});

export type Field = z.infer<typeof field_schema>;
@@ -472,7 +472,7 @@
  onmousedown={(ev) => on_cell_click(ev, cell_coords)}
  ondblclick={() => handle_table_cell_dblclick(cell_coords)}
  role="gridcell"
  style:width={`${field.field.width_px}px`}
  style:width={`${field.field.table_width_px}px`}
  tabindex="-1"
>
  <div