refactor with updated naming convention

Brent Schroeter 2025-09-23 13:08:51 -07:00
parent 34e0302242
commit c9b755521e
40 changed files with 146 additions and 145 deletions

View file

@@ -5,13 +5,13 @@ use uuid::Uuid;
 
 #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
 #[serde(tag = "t", content = "c")]
-pub enum Encodable {
+pub enum Datum {
     Text(Option<String>),
     Timestamp(Option<DateTime<Utc>>),
     Uuid(Option<Uuid>),
 }
 
-impl Encodable {
+impl Datum {
    // TODO: Can something similar be achieved with a generic return type?
    /// Bind this as a parameter to a sqlx query.
    pub fn bind_onto<'a>(
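With the `#[serde(tag = "t", content = "c")]` attribute kept above, a `Datum` serializes to the `{t, c}` JSON shape that the frontend schemas later in this commit expect. A minimal sketch with illustrative values, assuming the enum as declared above:

// Illustrative only: how the adjacently tagged enum looks on the wire.
fn main() {
    let text = Datum::Text(Some("hello".to_owned()));
    let missing = Datum::Uuid(None);

    // Prints: {"t":"Text","c":"hello"}
    println!("{}", serde_json::to_string(&text).unwrap());
    // Prints: {"t":"Uuid","c":null}
    println!("{}", serde_json::to_string(&missing).unwrap());
}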

View file

@@ -3,7 +3,7 @@ use std::fmt::Display;
 use interim_pgtypes::escape_identifier;
 use serde::{Deserialize, Serialize};
 
-use crate::encodable::Encodable;
+use crate::datum::Datum;
 
 #[derive(Clone, Debug, PartialEq)]
 pub struct QueryFragment {
@@ -12,7 +12,7 @@ pub struct QueryFragment {
     /// the lines of `["select * from foo where id = ", " and status = ", ""]`.
     /// `plain_sql` should always have exactly one more element than `params`.
     plain_sql: Vec<String>,
-    params: Vec<Encodable>,
+    params: Vec<Datum>,
 }
 
 impl QueryFragment {
@@ -34,7 +34,7 @@ impl QueryFragment {
             .join("")
     }
 
-    pub fn to_params(&self) -> Vec<Encodable> {
+    pub fn to_params(&self) -> Vec<Datum> {
         self.params.clone()
     }
@@ -45,7 +45,7 @@ impl QueryFragment {
         }
     }
 
-    pub fn from_param(param: Encodable) -> Self {
+    pub fn from_param(param: Datum) -> Self {
         Self {
             plain_sql: vec!["".to_owned(), "".to_owned()],
             params: vec![param],
@@ -99,7 +99,7 @@ impl QueryFragment {
 pub enum PgExpressionAny {
     Comparison(PgComparisonExpression),
     Identifier(PgIdentifierExpression),
-    Literal(Encodable),
+    Literal(Datum),
     ToJson(PgToJsonExpression),
 }
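As a reading aid for the invariant documented above (`plain_sql` always has exactly one more element than `params`), here is a small hypothetical sketch of how such a fragment can be flattened into SQL with numbered placeholders; it is not the crate's actual implementation:

// Interleave literal SQL pieces with $1, $2, ... placeholders.
fn render(plain_sql: &[&str], param_count: usize) -> String {
    assert_eq!(plain_sql.len(), param_count + 1);
    let mut sql = String::new();
    for (i, piece) in plain_sql.iter().enumerate() {
        sql.push_str(piece);
        if i < param_count {
            sql.push_str(&format!("${}", i + 1));
        }
    }
    sql
}

fn main() {
    let sql = render(&["select * from foo where id = ", " and status = ", ""], 2);
    assert_eq!(sql, "select * from foo where id = $1 and status = $2");
}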

View file

@@ -7,7 +7,7 @@ use thiserror::Error;
 use uuid::Uuid;
 
 use crate::client::AppDbClient;
-use crate::encodable::Encodable;
+use crate::datum::Datum;
 use crate::presentation::Presentation;
 
 /// A materialization of a database column, fit for consumption by an end user.
@@ -51,7 +51,7 @@ impl Field {
         })
     }
 
-    pub fn get_value_encodable(&self, row: &PgRow) -> Result<Encodable, ParseError> {
+    pub fn get_datum(&self, row: &PgRow) -> Result<Datum, ParseError> {
         let value_ref = row
             .try_get_raw(self.name.as_str())
             .or(Err(ParseError::FieldNotFound))?;
@@ -60,12 +60,10 @@ impl Field {
         dbg!(&ty);
         Ok(match ty {
             "TEXT" | "VARCHAR" => {
-                Encodable::Text(<Option<String> as Decode<Postgres>>::decode(value_ref).unwrap())
+                Datum::Text(<Option<String> as Decode<Postgres>>::decode(value_ref).unwrap())
             }
-            "UUID" => {
-                Encodable::Uuid(<Option<Uuid> as Decode<Postgres>>::decode(value_ref).unwrap())
-            }
-            "TIMESTAMPTZ" => Encodable::Timestamp(
+            "UUID" => Datum::Uuid(<Option<Uuid> as Decode<Postgres>>::decode(value_ref).unwrap()),
+            "TIMESTAMPTZ" => Datum::Timestamp(
                 <Option<DateTime<Utc>> as Decode<Postgres>>::decode(value_ref).unwrap(),
             ),
             _ => return Err(ParseError::UnknownType),

View file

@@ -1,24 +1,34 @@
+use std::str::FromStr;
+
 use serde::{Deserialize, Serialize};
-use sqlx::Decode;
+use sqlx::{Decode, Postgres};
 use strum::{EnumIter, EnumString};
 
 /// Languages represented as
 /// [ISO 639-3 codes](https://en.wikipedia.org/wiki/List_of_ISO_639-3_codes).
-#[derive(
-    Clone, Debug, Decode, Deserialize, strum::Display, PartialEq, Serialize, EnumIter, EnumString,
-)]
+#[derive(Clone, Debug, Deserialize, strum::Display, PartialEq, Serialize, EnumIter, EnumString)]
 #[serde(rename_all = "lowercase")]
 #[strum(serialize_all = "lowercase")]
-// [`sqlx`] implements Decode and Encode to/from the Postgres `TEXT` type based
-// on the [`std::fmt::Display`] and [`std::str::FromStr`] traits, so it should
-// use the transformations applied by [`strum`].
-// <https://docs.rs/sqlx/latest/sqlx/types/struct.Text.html>
+// NOTE: The [`sqlx::Encode`] and [`sqlx::Decode`] derive macros do not seem to
+// use the [`strum`] serializations. The corresponding traits should be
+// implemented explicitly (if used).
 pub enum Language {
     Deu,
     Eng,
     Spa,
 }
 
+impl Language {
+    /// Returns language name to be presented in UI.
+    pub fn as_locale_str(&self) -> &'static str {
+        match self {
+            Self::Deu => "Deutsch",
+            Self::Eng => "English",
+            Self::Spa => "Español",
+        }
+    }
+}
+
 impl Default for Language {
     /// Language defaults to English when necessary, as the product is being
     /// developed with a primarily English speaking/reading/writing market in
@@ -28,12 +38,11 @@ impl Default for Language {
     }
 }
 
-impl Language {
-    pub fn as_locale_str(&self) -> &'static str {
-        match self {
-            Self::Deu => "Deutsch",
-            Self::Eng => "English",
-            Self::Spa => "Español",
-        }
+impl Decode<'_, Postgres> for Language {
+    fn decode(
+        value: <Postgres as sqlx::Database>::ValueRef<'_>,
+    ) -> Result<Self, sqlx::error::BoxDynError> {
+        let value = <&str as Decode<Postgres>>::decode(value)?;
+        Ok(Self::from_str(value)?)
     }
 }
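The manual `Decode` impl above leans on the strum derives; for orientation, the round-trip that `#[strum(serialize_all = "lowercase")]` provides (and that the impl relies on when a TEXT column is read back) looks like this, assuming the enum as declared above:

use std::str::FromStr;

fn main() {
    // Display/to_string emits the lowercase strum form...
    assert_eq!(Language::Eng.to_string(), "eng");
    // ...and EnumString's FromStr accepts the same form, which is what
    // Decode::decode feeds it after reading the column as &str.
    assert_eq!(Language::from_str("deu").unwrap(), Language::Deu);
}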

View file

@@ -1,5 +1,5 @@
 pub mod client;
-pub mod encodable;
+pub mod datum;
 pub mod expression;
 pub mod field;
 pub mod field_form_prompt;

View file

@@ -2,7 +2,7 @@ use std::str::FromStr;
 
 use derive_builder::Builder;
 use serde::{Deserialize, Serialize};
-use sqlx::{Decode, Encode, Postgres, query_as};
+use sqlx::{Decode, Postgres, query_as};
 use strum::EnumString;
 use uuid::Uuid;
@@ -113,7 +113,7 @@ from workspace_user_perms as p
 
 // TODO: The sqlx::Decode derive macro doesn't follow the strum serialization.
 // Does sqlx::Encode?
-#[derive(Clone, Debug, Deserialize, Encode, EnumString, PartialEq, Serialize, strum::Display)]
+#[derive(Clone, Debug, Deserialize, EnumString, PartialEq, Serialize, strum::Display)]
 #[serde(rename = "snake_case")]
 #[strum(serialize_all = "snake_case")]
 pub enum PermissionValue {

View file

@@ -7,11 +7,10 @@ use interim_models::client::AppDbClient;
 use oauth2::basic::BasicClient;
 use sqlx::postgres::PgPoolOptions;
 
-use crate::app_error::AppError;
-use crate::auth;
-use crate::base_pooler::WorkspacePooler;
-use crate::sessions::PgStore;
-use crate::settings::Settings;
+use crate::{
+    auth, errors::AppError, sessions::PgStore, settings::Settings,
+    workspace_pooler::WorkspacePooler,
+};
 
 /// Global app configuration
 #[derive(Clone, Debug)]

View file

@@ -14,8 +14,8 @@ use oauth2::{
 use serde::{Deserialize, Serialize};
 
 use crate::{
-    app_error::AppError,
-    app_state::{App, ReqwestClient},
+    app::{App, ReqwestClient},
+    errors::AppError,
     sessions::{AppSession, PgStore},
     settings::Settings,
 };

View file

@@ -15,9 +15,7 @@ use tower_http::{
     compression::CompressionLayer, set_header::response::SetResponseHeaderLayer, trace::TraceLayer,
 };
 
-use crate::{
-    app_state::App, middleware::lowercase_uri_path, routes::new_router, worker::run_worker,
-};
+use crate::{app::App, middleware::lowercase_uri_path, routes::new_router, worker::run_worker};
 
 #[derive(Parser)]
 #[command(version, about, long_about = None)]

View file

@@ -5,20 +5,17 @@ use interim_models::MIGRATOR;
 use tracing_subscriber::EnvFilter;
 
 use crate::{
-    app_state::App,
+    app::App,
     cli::{Cli, Commands, serve_command, worker_command},
     settings::Settings,
 };
 
-mod app_error;
-mod app_state;
+mod app;
 mod auth;
-mod base_pooler;
-mod base_user_perms;
 mod cli;
+mod errors;
 mod field_info;
 mod middleware;
-mod navbar;
 mod navigator;
 mod renderable_role_tree;
 mod routes;
@@ -26,6 +23,9 @@ mod sessions;
 mod settings;
 mod user;
 mod worker;
+mod workspace_nav;
+mod workspace_pooler;
+mod workspace_user_perms;
 
 /// Run CLI
 #[tokio::main]

View file

@@ -6,7 +6,7 @@ use axum::{
 use interim_models::portal::Portal;
 use uuid::Uuid;
 
-use crate::{app_error::AppError, app_state::App};
+use crate::{app::App, errors::AppError};
 
 /// Helper type for semantically generating URI paths, e.g. for redirects.
 #[derive(Clone, Debug)]

View file

@@ -22,7 +22,7 @@ use tower_http::{
 };
 
 use crate::auth;
-use crate::{app_state::App, settings::Settings};
+use crate::{app::App, settings::Settings};
 
 mod relations_single;
 mod workspaces_multi;

View file

@@ -19,11 +19,11 @@ use sqlx::query;
 use uuid::Uuid;
 
 use crate::{
-    app_error::{AppError, forbidden},
-    app_state::{App, AppDbConn},
-    base_pooler::{RoleAssignment, WorkspacePooler},
+    app::{App, AppDbConn},
+    errors::{AppError, forbidden},
     navigator::Navigator,
     user::CurrentUser,
+    workspace_pooler::{RoleAssignment, WorkspacePooler},
 };
 
 #[derive(Debug, Deserialize)]

View file

@@ -8,8 +8,8 @@ use sqlx::postgres::types::Oid;
 use uuid::Uuid;
 
 use crate::{
-    app_error::{AppError, forbidden},
-    app_state::AppDbConn,
+    app::AppDbConn,
+    errors::{AppError, forbidden},
     navigator::Navigator,
     user::CurrentUser,
 };

View file

@@ -5,18 +5,18 @@ use axum::{
     extract::{Path, State},
     response::{IntoResponse as _, Response},
 };
-use interim_models::{encodable::Encodable, field::Field, portal::Portal};
+use interim_models::{datum::Datum, field::Field, portal::Portal};
 use interim_pgtypes::{escape_identifier, pg_attribute::PgAttribute, pg_class::PgClass};
 use serde::{Deserialize, Serialize};
 use sqlx::{postgres::PgRow, query};
 use uuid::Uuid;
 
 use crate::{
-    app_error::AppError,
-    app_state::AppDbConn,
-    base_pooler::{RoleAssignment, WorkspacePooler},
+    app::AppDbConn,
+    errors::AppError,
     field_info::FieldInfo,
     user::CurrentUser,
+    workspace_pooler::{RoleAssignment, WorkspacePooler},
 };
 
 #[derive(Clone, Debug, Deserialize)]
@@ -106,23 +106,23 @@ pub(super) async fn get(
     #[derive(Serialize)]
     struct DataRow {
         pkey: String,
-        data: Vec<Encodable>,
+        data: Vec<Datum>,
     }
 
     let mut data_rows: Vec<DataRow> = vec![];
     let mut pkeys: Vec<String> = vec![];
     for row in rows.iter() {
-        let mut pkey_values: HashMap<String, Encodable> = HashMap::new();
+        let mut pkey_values: HashMap<String, Datum> = HashMap::new();
         for attr in pkey_attrs.clone() {
             let field = Field::default_from_attr(&attr)
                 .ok_or(anyhow::anyhow!("unsupported primary key column type"))?;
-            pkey_values.insert(field.name.clone(), field.get_value_encodable(row)?);
+            pkey_values.insert(field.name.clone(), field.get_datum(row)?);
         }
         let pkey = serde_json::to_string(&pkey_values)?;
         pkeys.push(pkey.clone());
-        let mut row_data: Vec<Encodable> = vec![];
+        let mut row_data: Vec<Datum> = vec![];
         for field in fields.iter() {
-            row_data.push(field.field.get_value_encodable(row)?);
+            row_data.push(field.field.get_datum(row)?);
         }
         data_rows.push(DataRow {
             pkey,
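The `pkey` built here is itself a JSON string (the serialized map of primary-key columns to `Datum`s), nested inside the row object that the frontend's zod schema later in this commit parses. An illustrative, not verbatim, row, assuming a single uuid primary-key column named `id`:

{
  "pkey": "{\"id\":{\"t\":\"Uuid\",\"c\":\"00000000-0000-0000-0000-000000000000\"}}",
  "data": [
    { "t": "Text", "c": "hello" },
    { "t": "Timestamp", "c": null }
  ]
}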

View file

@@ -9,7 +9,7 @@ use axum::{
 // https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
 use axum_extra::extract::Form;
 use interim_models::{
-    encodable::Encodable,
+    datum::Datum,
     portal::Portal,
     workspace::Workspace,
     workspace_user_perm::{self, WorkspaceUserPerm},
@@ -20,11 +20,11 @@ use sqlx::{postgres::types::Oid, query};
 use uuid::Uuid;
 
 use crate::{
-    app_error::{AppError, forbidden},
-    app_state::{App, AppDbConn},
-    base_pooler::{RoleAssignment, WorkspacePooler},
+    app::{App, AppDbConn},
+    errors::{AppError, forbidden},
     navigator::Navigator,
     user::CurrentUser,
+    workspace_pooler::{RoleAssignment, WorkspacePooler},
 };
 
 #[derive(Debug, Deserialize)]
@@ -38,7 +38,7 @@ pub(super) struct PathParams {
 /// takes a form where the keys are column names, with keys optionally repeated
 /// to insert multiple rows at once. If any key is repeated, the others should
 /// be repeated the same number of times. Form values are expected to be JSON-
-/// serialized representations of the `[Encodable]` type.
+/// serialized representations of the `[Datum]` type.
 #[debug_handler(state = App)]
 pub(super) async fn post(
     State(mut workspace_pooler): State<WorkspacePooler>,
@@ -87,12 +87,12 @@ pub(super) async fn post(
     let n_rows = form.values().map(|value| value.len()).max().unwrap_or(0);
     if n_rows > 0 {
         let mut param_index = 1;
-        let mut params: Vec<Encodable> = vec![];
+        let mut params: Vec<Datum> = vec![];
         let mut row_list: Vec<String> = vec![];
         for i in 0..n_rows {
             let mut param_slots: Vec<String> = vec![];
             for col in col_names.iter() {
-                let maybe_value: Option<Encodable> = form
+                let maybe_value: Option<Datum> = form
                     .get(col)
                     .and_then(|col_values| col_values.get(i))
                     .map(|value_raw| serde_json::from_str(value_raw))
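To make the form contract documented above concrete: a hypothetical two-row insert into columns `name` and `owner_id` (column names are illustrative) repeats each key once per row, with every value a JSON-serialized `Datum`:

fn main() {
    // Each key repeats once per row; values are JSON-serialized Datums.
    let form_pairs: Vec<(&str, &str)> = vec![
        ("name", r#"{"t":"Text","c":"first"}"#),
        ("owner_id", r#"{"t":"Uuid","c":"00000000-0000-0000-0000-000000000000"}"#),
        ("name", r#"{"t":"Text","c":"second"}"#),
        ("owner_id", r#"{"t":"Uuid","c":null}"#),
    ];
    // URL-encoding these pairs yields the request body that the handler's
    // Form extractor receives and deserializes per column, per row index.
    assert_eq!(form_pairs.len(), 4);
}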

View file

@@ -4,7 +4,7 @@ use axum::{
 };
 use axum_extra::routing::RouterExt as _;
 
-use crate::app_state::App;
+use crate::app::App;
 
 mod add_field_handler;
 mod add_portal_handler;

View file

@@ -10,13 +10,13 @@ use sqlx::postgres::types::Oid;
 use uuid::Uuid;
 
 use crate::{
-    app_error::AppError,
-    app_state::AppDbConn,
-    base_pooler::{RoleAssignment, WorkspacePooler},
-    navbar::{NavLocation, RelLocation, WorkspaceNav},
+    app::AppDbConn,
+    errors::AppError,
     navigator::Navigator,
     settings::Settings,
     user::CurrentUser,
+    workspace_nav::{NavLocation, RelLocation, WorkspaceNav},
+    workspace_pooler::{RoleAssignment, WorkspacePooler},
 };
 
 #[derive(Clone, Debug, Deserialize)]
@@ -59,7 +59,7 @@ pub(super) async fn get(
     let attr_names: Vec<String> = attrs.iter().map(|attr| attr.attname.clone()).collect();
 
     #[derive(Template)]
-    #[template(path = "lens.html")]
+    #[template(path = "portal_table.html")]
     struct ResponseTemplate {
         attr_names: Vec<String>,
         filter: Option<PgExpressionAny>,

View file

@@ -6,8 +6,7 @@ use axum::{
 use interim_models::workspace_user_perm::WorkspaceUserPerm;
 
 use crate::{
-    app_error::AppError, app_state::AppDbConn, navigator::Navigator, settings::Settings,
-    user::CurrentUser,
+    app::AppDbConn, errors::AppError, navigator::Navigator, settings::Settings, user::CurrentUser,
 };
 
 pub(super) async fn get(

View file

@@ -1,7 +1,7 @@
 use axum::{Router, response::Redirect, routing::get};
 use axum_extra::routing::RouterExt as _;
 
-use crate::app_state::App;
+use crate::app::App;
 
 mod list_handlers;

View file

@@ -9,12 +9,12 @@ use sqlx::query;
 use uuid::Uuid;
 
 use crate::{
-    app_error::{AppError, forbidden},
-    app_state::AppDbConn,
-    base_pooler::{RoleAssignment, WorkspacePooler},
+    app::AppDbConn,
+    errors::{AppError, forbidden},
     navigator::Navigator,
     settings::Settings,
     user::CurrentUser,
+    workspace_pooler::{RoleAssignment, WorkspacePooler},
 };
 
 #[derive(Debug, Deserialize)]

View file

@@ -5,7 +5,7 @@ use axum::{
 };
 use axum_extra::routing::RouterExt as _;
 
-use crate::app_state::App;
+use crate::app::App;
 
 use super::relations_single;

View file

@@ -12,13 +12,13 @@ use serde::Deserialize;
 use uuid::Uuid;
 
 use crate::{
-    app_error::{AppError, forbidden},
-    app_state::{App, AppDbConn},
-    base_pooler::{RoleAssignment, WorkspacePooler},
-    navbar::WorkspaceNav,
+    app::{App, AppDbConn},
+    errors::{AppError, forbidden},
     navigator::Navigator,
     settings::Settings,
     user::CurrentUser,
+    workspace_nav::WorkspaceNav,
+    workspace_pooler::{RoleAssignment, WorkspacePooler},
 };
 
 #[derive(Debug, Deserialize)]

View file

@@ -12,7 +12,7 @@ use chrono::{DateTime, TimeDelta, Utc};
 use sqlx::{PgPool, query, query_as};
 use tracing::{Instrument, trace_span};
 
-use crate::{app_error::AppError, app_state::App};
+use crate::{app::App, errors::AppError};
 
 const EXPIRY_DAYS: i64 = 7;

View file

@@ -6,7 +6,7 @@ use config::{Config, Environment};
 use dotenvy::dotenv;
 use serde::Deserialize;
 
-use crate::app_state::App;
+use crate::app::App;
 
 #[derive(Clone, Debug, Deserialize)]
 pub(crate) struct Settings {

View file

@@ -16,9 +16,9 @@ use sqlx::query_as;
 use uuid::Uuid;
 
 use crate::{
-    app_error::AppError,
-    app_state::App,
+    app::App,
     auth::{AuthInfo, SESSION_KEY_AUTH_INFO, SESSION_KEY_AUTH_REDIRECT},
+    errors::AppError,
     sessions::AppSession,
 };

View file

@@ -1,7 +1,7 @@
 use anyhow::Result;
 use tracing::Instrument as _;
 
-use crate::app_state::App;
+use crate::app::App;
 
 pub async fn run_worker(_state: App) -> Result<()> {
     async move { Ok(()) }

View file

@@ -9,7 +9,7 @@ use sqlx::{Executor, PgPool, postgres::PgPoolOptions, raw_sql};
 use tokio::sync::{OnceCell, RwLock};
 use uuid::Uuid;
 
-use crate::app_state::App;
+use crate::app::App;
 
 const MAX_CONNECTIONS: u32 = 4;
 const IDLE_SECONDS: u64 = 3600;

View file

@@ -30,12 +30,12 @@
   }
 </script>
 
-<div class="encodable-editor__container">
+<div class="datum-editor__container">
   {#if assignable_fields.length > 0}
-    <div class="encodable-editor__type-selector">
+    <div class="datum-editor__type-selector">
       <button
         bind:this={type_selector_menu_button_element}
-        class="encodable-editor__type-selector-menu-button"
+        class="datum-editor__type-selector-menu-button"
         onclick={handle_type_selector_menu_button_click}
         type="button"
       >
@@ -43,7 +43,7 @@
       </button>
       <div
         bind:this={type_selector_popover_element}
-        class="encodable-editor__type-selector-popover"
+        class="datum-editor__type-selector-popover"
         popover="auto"
       >
         {#each assignable_fields as assignable_field_info}
@@ -58,7 +58,7 @@
       </div>
     </div>
   {/if}
-  <div class="encodable-editor__content">
+  <div class="datum-editor__content">
     {#if field_info.field.presentation.t === "Text" || field_info.field.presentation.t === "Uuid"}
       <input bind:value={editor_state.text_value} type="text" />
     {:else if field_info.field.presentation.t === "Timestamp"}

View file

@@ -2,9 +2,7 @@ import { z } from "zod";
 
 type Assert<_T extends true> = void;
 
-// -------- Encodable -------- //
-
-export const all_encodable_tags = [
+export const all_datum_tags = [
   "Text",
   "Timestamp",
   "Uuid",
@@ -12,28 +10,28 @@ export const all_encodable_tags = [
 
 // Type checking to ensure that all valid enum tags are included.
 type _ = Assert<
-  Encodable["t"] extends (typeof all_encodable_tags)[number] ? true : false
+  Datum["t"] extends (typeof all_datum_tags)[number] ? true : false
 >;
 
-const encodable_text_schema = z.object({
+const datum_text_schema = z.object({
   t: z.literal("Text"),
   c: z.string().nullish().transform((x) => x ?? undefined),
 });
 
-const encodable_timestamp_schema = z.object({
+const datum_timestamp_schema = z.object({
   t: z.literal("Timestamp"),
   c: z.coerce.date().nullish().transform((x) => x ?? undefined),
 });
 
-const encodable_uuid_schema = z.object({
+const datum_uuid_schema = z.object({
   t: z.literal("Uuid"),
   c: z.string().nullish().transform((x) => x ?? undefined),
 });
 
-export const encodable_schema = z.union([
-  encodable_text_schema,
-  encodable_timestamp_schema,
-  encodable_uuid_schema,
+export const datum_schema = z.union([
+  datum_text_schema,
+  datum_timestamp_schema,
+  datum_uuid_schema,
 ]);
 
-export type Encodable = z.infer<typeof encodable_schema>;
+export type Datum = z.infer<typeof datum_schema>;

View file

@@ -1,6 +1,6 @@
 import * as uuid from "uuid";
-import { type Encodable } from "./encodable.svelte.ts";
+import { type Datum } from "./datum.svelte.ts";
 import { type Presentation } from "./presentation.svelte.ts";
 
 type Assert<_T extends true> = void;
@@ -22,7 +22,7 @@ export const DEFAULT_EDITOR_STATE: EditorState = {
   is_null: false,
 };
 
-export function editor_state_from_encodable(value: Encodable): EditorState {
+export function editor_state_from_datum(value: Datum): EditorState {
   if (value.t === "Text") {
     return {
       ...DEFAULT_EDITOR_STATE,
@@ -53,10 +53,10 @@ export function editor_state_from_encodable(value: Encodable): EditorState {
   throw new Error("this should be unreachable");
 }
 
-export function encodable_from_editor_state(
+export function datum_from_editor_state(
   value: EditorState,
   presentation: Presentation,
-): Encodable | undefined {
+): Datum | undefined {
   if (presentation.t === "Text") {
     return { t: "Text", c: value.text_value };
   }

View file

@@ -10,15 +10,15 @@
 />
 
 <script lang="ts">
-  import EncodableEditor from "./encodable-editor.svelte";
+  import DatumEditor from "./datum-editor.svelte";
   import ExpressionSelector from "./expression-selector.svelte";
   import { type PgExpressionAny } from "./expression.svelte";
   import ExpressionEditor from "./expression-editor.webc.svelte";
   import {
     DEFAULT_EDITOR_STATE,
-    editor_state_from_encodable,
+    editor_state_from_datum,
     type EditorState,
-    encodable_from_editor_state,
+    datum_from_editor_state,
   } from "./editor-state.svelte";
   import { type FieldInfo } from "./field.svelte";
   import { type Presentation } from "./presentation.svelte";
@@ -51,19 +51,19 @@
   let editor_state = $state<EditorState>(
     value?.t === "Literal"
-      ? editor_state_from_encodable(value.c)
+      ? editor_state_from_datum(value.c)
       : DEFAULT_EDITOR_STATE,
   );
 
   let editor_field_info = $state<FieldInfo>(ASSIGNABLE_FIELDS[0]);
 
   $effect(() => {
     if (value?.t === "Literal" && editor_field_info) {
-      const encodable_value = encodable_from_editor_state(
+      const datum_value = datum_from_editor_state(
         editor_state,
         editor_field_info.field.presentation,
       );
-      if (encodable_value) {
-        value.c = encodable_value;
+      if (datum_value) {
+        value.c = datum_value;
       }
     }
   });
@@ -101,7 +101,7 @@
       {/each}
     </select>
   {:else if value.t === "Literal"}
-    <EncodableEditor
+    <DatumEditor
       bind:editor_state
       bind:field_info={editor_field_info}
       assignable_fields={ASSIGNABLE_FIELDS}

View file

@@ -5,7 +5,7 @@ import cube_icon from "../assets/heroicons/20/solid/cube.svg?raw";
 import cube_transparent_icon from "../assets/heroicons/20/solid/cube-transparent.svg?raw";
 import hashtag_icon from "../assets/heroicons/20/solid/hashtag.svg?raw";
 import variable_icon from "../assets/heroicons/20/solid/variable.svg?raw";
-import { encodable_schema } from "./encodable.svelte.ts";
+import { datum_schema } from "./datum.svelte.ts";
 
 export const all_expression_types = [
   "Comparison",
@@ -89,7 +89,7 @@ const pg_expression_any_identifier_schema = z.object({
 
 const pg_expression_any_literal_schema = z.object({
   t: z.literal("Literal"),
-  c: encodable_schema,
+  c: datum_schema,
 });
 
 const pg_to_json_expression_schema = z.object({

View file

@@ -1,6 +1,6 @@
 import { z } from "zod";
-import { type Encodable } from "./encodable.svelte.ts";
+import { type Datum } from "./datum.svelte.ts";
 import { presentation_schema } from "./presentation.svelte.ts";
 
 export const field_schema = z.object({
@@ -28,7 +28,7 @@ export type Coords = [number, number];
 
 export type Row = {
   key: string | number;
-  data: Encodable[];
+  data: Datum[];
 };
 
 export function coords_eq(a: Coords, b: Coords): boolean {

View file

@@ -1,6 +1,6 @@
 import { z } from "zod";
-import { type Encodable } from "./encodable.svelte.ts";
+import { type Datum } from "./datum.svelte.ts";
 
 type Assert<_T extends true> = void;
@@ -73,7 +73,7 @@ export const presentation_schema = z.union([
 
 export type Presentation = z.infer<typeof presentation_schema>;
 
-export function get_empty_encodable_for(presentation: Presentation): Encodable {
+export function get_empty_datum_for(presentation: Presentation): Datum {
   if (presentation.t === "Timestamp") {
     return { t: "Timestamp", c: undefined };
   }

View file

@@ -15,11 +15,11 @@
   import icon_cube_transparent from "../assets/heroicons/20/solid/cube-transparent.svg?raw";
   import icon_exclamation_circle from "../assets/heroicons/20/solid/exclamation-circle.svg?raw";
   import icon_sparkles from "../assets/heroicons/20/solid/sparkles.svg?raw";
-  import { type Encodable, encodable_schema } from "./encodable.svelte";
-  import EncodableEditor from "./encodable-editor.svelte";
+  import { type Datum, datum_schema } from "./datum.svelte";
+  import DatumEditor from "./datum-editor.svelte";
   import {
     DEFAULT_EDITOR_STATE,
-    encodable_from_editor_state,
+    datum_from_editor_state,
     type EditorState,
   } from "./editor-state.svelte";
   import {
@@ -30,7 +30,7 @@
     field_info_schema,
   } from "./field.svelte";
   import FieldHeader from "./field-header.svelte";
-  import { get_empty_encodable_for } from "./presentation.svelte";
+  import { get_empty_datum_for } from "./presentation.svelte";
 
   type Props = {
     columns?: string[];
@@ -43,8 +43,8 @@
     // This will be identical to coords_initial, unless the change altered a
     // primary key.
     coords_updated: Coords;
-    value_initial: Encodable;
-    value_updated: Encodable;
+    value_initial: Datum;
+    value_updated: Datum;
   };
 
   type LazyData = {
@@ -55,10 +55,10 @@
   type Selection = {
     region: "main" | "inserter";
     coords: Coords;
-    original_value: Encodable;
+    original_value: Datum;
   };
 
-  type ParsedPkey = Record<string, Encodable>;
+  type ParsedPkey = Record<string, Datum>;
 
   let selections = $state<Selection[]>([]);
   let editing = $state(false);
@@ -92,7 +92,7 @@
   function set_selections(arr: Omit<Selection, "original_value">[]) {
     selections = arr.map((sel) => {
-      let cell_data: Encodable | undefined;
+      let cell_data: Datum | undefined;
       if (sel.region === "main") {
         cell_data = lazy_data?.rows[sel.coords[0]].data[sel.coords[1]];
       } else if (sel.region === "inserter") {
@@ -107,7 +107,7 @@
     });
     if (arr.length === 1) {
       const [sel] = arr;
-      let cell_data: Encodable | undefined;
+      let cell_data: Datum | undefined;
       if (sel.region === "main") {
         cell_data = lazy_data?.rows[sel.coords[0]].data[sel.coords[1]];
       } else if (sel.region === "inserter") {
@@ -218,7 +218,7 @@
   function try_sync_edit_to_cells() {
     if (lazy_data && editing && selections.length === 1) {
       const [sel] = selections;
-      const parsed = encodable_from_editor_state(
+      const parsed = datum_from_editor_state(
         editor_state,
         lazy_data.fields[sel.coords[1]].field.presentation,
       );
@@ -246,7 +246,7 @@
     if (lazy_data && editing && editor_state && selections.length === 1) {
       const [sel] = selections;
      const field = lazy_data.fields[sel.coords[1]];
-      const parsed = encodable_from_editor_state(
+      const parsed = datum_from_editor_state(
         editor_state,
         field.field.presentation,
       );
@@ -337,7 +337,7 @@
       {
         key: inserter_rows.length,
         data: lazy_data.fields.map(({ field: { presentation } }) =>
-          get_empty_encodable_for(presentation),
+          get_empty_datum_for(presentation),
         ),
       },
     ];
@@ -416,7 +416,7 @@
       rows: z.array(
         z.object({
           pkey: z.string(),
-          data: z.array(encodable_schema),
+          data: z.array(datum_schema),
         }),
       ),
      fields: z.array(field_info_schema),
@@ -431,7 +431,7 @@
       {
         key: 0,
         data: body.fields.map(({ field: { presentation } }) =>
-          get_empty_encodable_for(presentation),
+          get_empty_datum_for(presentation),
         ),
       },
     ];
@@ -591,7 +591,7 @@
 </div>
 <div class="lens-editor">
   {#if selections.length === 1 && editor_state}
-    <EncodableEditor
+    <DatumEditor
       bind:editor_state
       field_info={lazy_data.fields[selections[0].coords[1]]}
     />