initial commit

This commit is contained in:
Brent Schroeter 2025-05-02 23:48:54 -07:00
commit 4afe1df9b9
48 changed files with 6164 additions and 0 deletions

8
.dockerignore Normal file
View file

@ -0,0 +1,8 @@
.git/
.env
example.env
build
dev-services
bacon.toml
target
.DS_Store

3
.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
target
.env
.DS_Store

3755
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

37
Cargo.toml Normal file
View file

@ -0,0 +1,37 @@
[package]
name = "interim"
version = "0.0.1"
edition = "2021"

[workspace]
members = ["catalogs"]

[dependencies]
anyhow = "1.0.91"
askama = { version = "0.12.1", features = ["urlencode"] }
async-session = "3.0.0"
axum = { version = "0.8.1", features = ["macros"] }
axum-extra = { version = "0.10.0", features = ["cookie", "form", "typed-header"] }
catalogs = { path = "./catalogs" }
chrono = { version = "0.4.39", features = ["serde"] }
clap = { version = "4.5.31", features = ["derive"] }
config = "0.14.1"
deadpool-diesel = { version = "0.6.1", features = ["postgres", "serde"] }
derive_builder = "0.20.2"
diesel = { version = "2.2.10", features = ["postgres", "chrono", "uuid", "serde_json"] }
diesel_migrations = { version = "2.2.0", features = ["postgres"] }
dotenvy = "0.15.7"
futures = "0.3.31"
oauth2 = "4.4.2"
percent-encoding = "2.3.1"
rand = "0.8.5"
reqwest = { version = "0.12.8", features = ["json"] }
serde = { version = "1.0.213", features = ["derive"] }
serde_json = "1.0.132"
tokio = { version = "1.42.0", features = ["full"] }
tower = "0.5.2"
tower-http = { version = "0.6.2", features = ["compression-gzip", "fs", "normalize-path", "set-header", "trace"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.19", features = ["chrono", "env-filter"] }
uuid = { version = "1.11.0", features = ["serde", "v4", "v7"] }
validator = { version = "0.20.0", features = ["derive"] }

22
Dockerfile Normal file
View file

@ -0,0 +1,22 @@
FROM lukemathwalker/cargo-chef:latest-rust-1.85.0 AS chef
WORKDIR /app

FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json

FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
# Build dependencies - this is the caching Docker layer!
RUN cargo chef cook --release --recipe-path recipe.json
# Build application
COPY . .
RUN cargo build --release --bin interim

# We do not need the Rust toolchain to run the binary!
FROM debian:bookworm-slim AS runtime
# libpq is needed at runtime by diesel's postgres backend (the smaller libpq5
# package would suffice for runtime linking — TODO confirm before switching).
# --no-install-recommends and removing the apt lists in the same layer keep
# the final image small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends libpq-dev \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY --from=builder /app/target/release/interim /usr/local/bin
COPY ./static ./static
ENTRYPOINT ["/usr/local/bin/interim"]

114
bacon.toml Normal file
View file

@ -0,0 +1,114 @@
# This is a configuration file for the bacon tool
#
# Complete help on configuration: https://dystroy.org/bacon/config/
#
# You may check the current default at
# https://github.com/Canop/bacon/blob/main/defaults/default-bacon.toml
default_job = "check"
env.CARGO_TERM_COLOR = "always"
[jobs.check]
command = ["cargo", "check"]
need_stdout = false
[jobs.check-all]
command = ["cargo", "check", "--all-targets"]
need_stdout = false
# Run clippy on the default target
[jobs.clippy]
command = ["cargo", "clippy"]
need_stdout = false
# Run clippy on all targets
# To disable some lints, you may change the job this way:
# [jobs.clippy-all]
# command = [
# "cargo", "clippy",
# "--all-targets",
# "--",
# "-A", "clippy::bool_to_int_with_if",
# "-A", "clippy::collapsible_if",
# "-A", "clippy::derive_partial_eq_without_eq",
# ]
# need_stdout = false
[jobs.clippy-all]
command = ["cargo", "clippy", "--all-targets"]
need_stdout = false
# This job lets you run
# - all tests: bacon test
# - a specific test: bacon test -- config::test_default_files
# - the tests of a package: bacon test -- -- -p config
[jobs.test]
command = ["cargo", "test"]
need_stdout = true
[jobs.nextest]
command = [
"cargo", "nextest", "run",
"--hide-progress-bar", "--failure-output", "final"
]
need_stdout = true
analyzer = "nextest"
[jobs.doc]
command = ["cargo", "doc", "--no-deps"]
need_stdout = false
# If the doc compiles, then it opens in your browser and bacon switches
# to the previous job
[jobs.doc-open]
command = ["cargo", "doc", "--no-deps", "--open"]
need_stdout = false
on_success = "back" # so that we don't open the browser at each change
# You can run your application and have the result displayed in bacon,
# if it makes sense for this crate.
[jobs.run-worker]
command = [
"cargo", "run", "worker",
# put launch parameters for your program behind a `--` separator
]
need_stdout = true
allow_warnings = true
background = true
default_watch = false
# Run your long-running application (eg server) and have the result displayed in bacon.
# For programs that never stop (eg a server), `background` is set to false
# to have the cargo run output immediately displayed instead of waiting for
# program's end.
# 'on_change_strategy' is set to `kill_then_restart` to have your program restart
# on every change (an alternative would be to use the 'F5' key manually in bacon).
# If you often use this job, it makes sense to override the 'r' key by adding
# a binding `r = job:run-long` at the end of this file .
[jobs.run-server]
command = [
"cargo", "run", "serve"
# put launch parameters for your program behind a `--` separator
]
need_stdout = true
allow_warnings = true
background = false
on_change_strategy = "kill_then_restart"
kill = ["kill", "-s", "INT"]
watch = ["src", "templates"]
# This parameterized job runs the example of your choice, as soon
# as the code compiles.
# Call it as
# bacon ex -- my-example
[jobs.ex]
command = ["cargo", "run", "--example"]
need_stdout = true
allow_warnings = true
# You may define here keybindings that would be specific to
# a project, for example a shortcut to launch a specific job.
# Shortcuts to internal functions (scrolling, toggling, etc.)
# should go in your personal global prefs.toml file instead.
[keybindings]
r = "job:run-server"
w = "job:run-worker"

199
catalogs/Cargo.lock generated Normal file
View file

@ -0,0 +1,199 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "bitflags"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "darling"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
dependencies = [
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn",
]
[[package]]
name = "darling_macro"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
dependencies = [
"darling_core",
"quote",
"syn",
]
[[package]]
name = "diesel"
version = "2.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff3e1edb1f37b4953dd5176916347289ed43d7119cc2e6c7c3f7849ff44ea506"
dependencies = [
"bitflags",
"byteorder",
"diesel_derives",
"itoa",
"pq-sys",
]
[[package]]
name = "diesel_derives"
version = "2.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68d4216021b3ea446fd2047f5c8f8fe6e98af34508a254a01e4d6bc1e844f84d"
dependencies = [
"diesel_table_macro_syntax",
"dsl_auto_type",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "diesel_table_macro_syntax"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "209c735641a413bc68c4923a9d6ad4bcb3ca306b794edaa7eb0b3228a99ffb25"
dependencies = [
"syn",
]
[[package]]
name = "dsl_auto_type"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "139ae9aca7527f85f26dd76483eb38533fd84bd571065da1739656ef71c5ff5b"
dependencies = [
"darling",
"either",
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "either"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "info_schema"
version = "0.1.0"
dependencies = [
"diesel",
]
[[package]]
name = "itoa"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "libc"
version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "pq-sys"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41c852911b98f5981956037b2ca976660612e548986c30af075e753107bc3400"
dependencies = [
"libc",
"vcpkg",
]
[[package]]
name = "proc-macro2"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

8
catalogs/Cargo.toml Normal file
View file

@ -0,0 +1,8 @@
[package]
name = "catalogs"
version = "0.1.0"
edition = "2024"

[dependencies]
chrono = "0.4.41"
diesel = { version = "2.2.10", features = ["64-column-tables", "chrono", "postgres"], default-features = false }

4
catalogs/Makefile Normal file
View file

@ -0,0 +1,4 @@
.PHONY: run-postgres
run-postgres:
docker run --rm -it -e POSTGRES_PASSWORD=guest -p 127.0.0.1:5432:5432 postgres:17

11
catalogs/diesel.toml Normal file
View file

@ -0,0 +1,11 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
schema = "information_schema"
filter = { except_tables = ["sql_features", "sql_implementation_info", "sql_parts", "sql_sizing"] }
[migrations_directory]
dir = "migrations"

View file

@ -0,0 +1,195 @@
use diesel::{allow_tables_to_appear_in_same_query, joinable, table};
table! {
    pg_class (oid) {
        /// Row identifier
        oid -> Oid,
        /// Name of the table, index, view, etc.
        relname -> Text,
        /// The OID of the namespace that contains this relation
        relnamespace -> Oid,
        /// The OID of the data type that corresponds to this table's row type, if any; zero for indexes, sequences, and toast tables, which have no pg_type entry
        reltype -> Oid,
        /// For typed tables, the OID of the underlying composite type; zero for all other relations
        reloftype -> Oid,
        /// Owner of the relation
        relowner -> Oid,
        /// The access method used to access this table or index. Not meaningful if the relation is a sequence or has no on-disk file, except for partitioned tables, where, if set, it takes precedence over default_table_access_method when determining the access method to use for partitions created when one is not specified in the creation command.
        relam -> Oid,
        /// Name of the on-disk file of this relation; zero means this is a “mapped” relation whose disk file name is determined by low-level state
        relfilenode -> Oid,
        /// The tablespace in which this relation is stored. If zero, the database's default tablespace is implied. Not meaningful if the relation has no on-disk file, except for partitioned tables, where this is the tablespace in which partitions will be created when one is not specified in the creation command.
        reltablespace -> Oid,
        /// Size of the on-disk representation of this table in pages (of size BLCKSZ). This is only an estimate used by the planner. It is updated by VACUUM, ANALYZE, and a few DDL commands such as CREATE INDEX.
        relpages -> Integer,
        /// Number of live rows in the table. This is only an estimate used by the planner. It is updated by VACUUM, ANALYZE, and a few DDL commands such as CREATE INDEX. If the table has never yet been vacuumed or analyzed, reltuples contains -1 indicating that the row count is unknown.
        reltuples -> Float,
        /// Number of pages that are marked all-visible in the table's visibility map. This is only an estimate used by the planner. It is updated by VACUUM, ANALYZE, and a few DDL commands such as CREATE INDEX.
        relallvisible -> Integer,
        /// OID of the TOAST table associated with this table, zero if none. The TOAST table stores large attributes “out of line” in a secondary table.
        reltoastrelid -> Oid,
        /// True if this is a table and it has (or recently had) any indexes
        relhasindex -> Bool,
        /// True if this table is shared across all databases in the cluster. Only certain system catalogs (such as pg_database) are shared.
        relisshared -> Bool,
        /// p = permanent table/sequence, u = unlogged table/sequence, t = temporary table/sequence
        relpersistence -> CChar,
        /// r = ordinary table, i = index, S = sequence, t = TOAST table, v = view, m = materialized view, c = composite type, f = foreign table, p = partitioned table, I = partitioned index
        relkind -> CChar,
        /// Number of user columns in the relation (system columns not counted). There must be this many corresponding entries in pg_attribute. See also pg_attribute.attnum.
        relnatts -> SmallInt,
        /// Number of CHECK constraints on the table; see pg_constraint catalog
        relchecks -> SmallInt,
        /// True if table has (or once had) rules; see pg_rewrite catalog
        relhasrules -> Bool,
        /// True if table has (or once had) triggers; see pg_trigger catalog
        relhastriggers -> Bool,
        /// True if table or index has (or once had) any inheritance children or partitions
        relhassubclass -> Bool,
        /// True if table has row-level security enabled; see pg_policy catalog
        relrowsecurity -> Bool,
        /// True if row-level security (when enabled) will also apply to table owner; see pg_policy catalog
        relforcerowsecurity -> Bool,
        /// True if relation is populated (this is true for all relations other than some materialized views)
        relispopulated -> Bool,
        /// Columns used to form “replica identity” for rows: d = default (primary key, if any), n = nothing, f = all columns, i = index with indisreplident set (same as nothing if the index used has been dropped)
        relreplident -> CChar,
        /// True if table or index is a partition
        relispartition -> Bool,
        /// For new relations being written during a DDL operation that requires a table rewrite, this contains the OID of the original relation; otherwise zero. That state is only visible internally; this field should never contain anything other than zero for a user-visible relation.
        relrewrite -> Oid,
        // NOTE(review): a doc comment for `relfrozenxid` ("All transaction IDs
        // before this one have been replaced ...") appeared here without the
        // column itself being mapped, so it would have attached to `reloptions`
        // below. Removed to avoid misleading docs — reintroduce it together
        // with the column if `relfrozenxid` is ever mapped.
        /// Access-method-specific options, as “keyword=value” strings
        reloptions -> Array<Text>,
    }
}
table! {
pg_roles (oid) {
/// Role name
rolname -> Text,
/// Role has superuser privileges
rolsuper -> Bool,
/// Role automatically inherits privileges of roles it is a member of
rolinherit -> Bool,
/// Role can create more roles
rolcreaterole -> Bool,
/// Role can create databases
rolcreatedb -> Bool,
/// Role can log in. That is, this role can be given as the initial session authorization identifier
rolcanlogin -> Bool,
/// Role is a replication role. A replication role can initiate replication connections and create and drop replication slots.
rolreplication -> Bool,
/// For roles that can log in, this sets maximum number of concurrent connections this role can make. -1 means no limit.
rolconnlimit -> Integer,
/// Not the password (always reads as ********)
rolpassword -> Text,
/// Password expiry time (only used for password authentication); null if no expiration
rolvaliduntil -> Nullable<Timestamptz>,
/// Role bypasses every row-level security policy, see Section 5.9 for more information.
rolbypassrls -> Bool,
/// Role-specific defaults for run-time configuration variables
rolconfig -> Nullable<Array<Text>>,
/// ID of role
oid -> Oid,
}
}
table! {
    pg_namespace (oid) {
        /// Row identifier
        oid -> Oid,
        /// Name of the namespace
        nspname -> Text,
        /// Owner of the namespace
        nspowner -> Oid,
        /// Access privileges; see Section 5.8 for details
        nspacl -> Array<Text>,
    }
}
table! {
pg_attribute (attrelid, attname) {
/// The table this column belongs to
attrelid -> Oid,
/// The column name
attname -> Text,
/// The data type of this column (zero for a dropped column)
atttypid -> Oid,
/// A copy of pg_type.typlen of this column's type
attlen -> SmallInt,
/// The number of the column. Ordinary columns are numbered from 1 up. System columns, such as ctid, have (arbitrary) negative numbers.
attnum -> SmallInt,
/// Always -1 in storage, but when loaded into a row descriptor in memory this might be updated to cache the offset of the attribute within the row
attcacheoff -> Integer,
/// atttypmod records type-specific data supplied at table creation time (for example, the maximum length of a varchar column). It is passed to type-specific input functions and length coercion functions. The value will generally be -1 for types that do not need atttypmod.
atttypmod -> Integer,
/// Number of dimensions, if the column is an array type; otherwise 0. (Presently, the number of dimensions of an array is not enforced, so any nonzero value effectively means “it's an array”.)
attndims -> SmallInt,
/// A copy of pg_type.typbyval of this column's type
attbyval -> Bool,
/// A copy of pg_type.typalign of this column's type
attalign -> CChar,
/// Normally a copy of pg_type.typstorage of this column's type. For TOAST-able data types, this can be altered after column creation to control storage policy.
attstorage -> CChar,
/// The current compression method of the column. Typically this is '\0' to specify use of the current default setting (see default_toast_compression). Otherwise, 'p' selects pglz compression, while 'l' selects LZ4 compression. However, this field is ignored whenever attstorage does not allow compression.
attcompression -> CChar,
/// This represents a not-null constraint.
attnotnull -> Bool,
/// This column has a default expression or generation expression, in which case there will be a corresponding entry in the pg_attrdef catalog that actually defines the expression. (Check attgenerated to determine whether this is a default or a generation expression.)
atthasdef -> Bool,
/// This column has a value which is used where the column is entirely missing from the row, as happens when a column is added with a non-volatile DEFAULT value after the row is created. The actual value used is stored in the attmissingval column.
atthasmissing -> Bool,
/// If a zero byte (''), then not an identity column. Otherwise, a = generated always, d = generated by default.
attidentity -> CChar,
/// If a zero byte (''), then not a generated column. Otherwise, s = stored. (Other values might be added in the future.)
attgenerated -> CChar,
/// This column has been dropped and is no longer valid. A dropped column is still physically present in the table, but is ignored by the parser and so cannot be accessed via SQL.
attisdropped -> Bool,
/// This column is defined locally in the relation. Note that a column can be locally defined and inherited simultaneously.
attislocal -> Bool,
/// The number of direct ancestors this column has. A column with a nonzero number of ancestors cannot be dropped nor renamed.
attinhcount -> SmallInt,
/// The defined collation of the column, or zero if the column is not of a collatable data type
attcollation -> Oid,
/// attstattarget controls the level of detail of statistics accumulated for this column by ANALYZE. A zero value indicates that no statistics should be collected. A null value says to use the system default statistics target. The exact meaning of positive values is data type-dependent. For scalar data types, attstattarget is both the target number of “most common values” to collect, and the target number of histogram bins to create.
attstattarget -> SmallInt,
/// Column-level access privileges, if any have been granted specifically on this column
attacl -> Array<Text>,
/// Attribute-level options, as “keyword=value” strings
attoptions -> Array<Text>,
/// Attribute-level foreign data wrapper options, as “keyword=value” strings
attfdwoptions -> Array<Text>,
}
}
table! {
information_schema.table_privileges (table_catalog, table_schema, table_name, grantor, grantee) {
/// Name of the role that granted the privilege
grantor -> Text,
/// Name of the role that the privilege was granted to
grantee -> Text,
/// Name of the database that contains the table (always the current database)
table_catalog -> Text,
/// Name of the schema that contains the table
table_schema -> Text,
/// Name of the table
table_name -> Text,
/// Type of the privilege: SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, or TRIGGER
privilege_type -> Text,
/// YES if the privilege is grantable, NO if not
is_grantable -> Text,
/// In the SQL standard, WITH HIERARCHY OPTION is a separate (sub-)privilege allowing certain operations on table inheritance hierarchies. In PostgreSQL, this is included in the SELECT privilege, so this column shows YES if the privilege is SELECT, else NO.
with_hierarchy -> Text,
}
}
allow_tables_to_appear_in_same_query!(
pg_attribute,
pg_class,
pg_namespace,
pg_roles,
table_privileges
);
joinable!(pg_class -> pg_roles (relowner));
joinable!(pg_attribute -> pg_class (attrelid));

6
catalogs/src/lib.rs Normal file
View file

@ -0,0 +1,6 @@
mod catalogs_schema;
pub mod pg_attribute;
pub mod pg_class;
pub mod pg_namespace;
pub mod pg_roles;
pub mod table_privileges;

View file

@ -0,0 +1,32 @@
use diesel::{
dsl::{AsSelect, auto_type},
pg::Pg,
prelude::*,
};
use crate::catalogs_schema::pg_attribute;
pub use crate::catalogs_schema::pg_attribute::{dsl, table};
#[derive(Clone, Debug, Queryable, Selectable)]
#[diesel(table_name = pg_attribute)]
#[diesel(primary_key(attrelid, attname))]
pub struct PgAttribute {
pub attrelid: u32,
pub attname: String,
pub atttypid: u32,
pub attnum: i16,
pub attndims: i16,
pub attnotnull: bool,
pub atthasdef: bool,
pub attisdropped: bool,
pub attacl: Vec<String>,
}
impl PgAttribute {
#[auto_type(no_type_alias)]
pub fn all() -> _ {
let select: AsSelect<Self, Pg> = Self::as_select();
table.select(select)
}
}

25
catalogs/src/pg_class.rs Normal file
View file

@ -0,0 +1,25 @@
use diesel::{
dsl::{AsSelect, auto_type},
pg::Pg,
prelude::*,
};
use crate::catalogs_schema::pg_class;
pub use crate::catalogs_schema::pg_class::{dsl, table};
#[derive(Clone, Debug, Queryable, Selectable)]
#[diesel(table_name = pg_class)]
#[diesel(primary_key(oid))]
pub struct PgClass {
pub oid: u32,
pub relname: String,
}
impl PgClass {
#[auto_type(no_type_alias)]
pub fn all() -> _ {
let select: AsSelect<Self, Pg> = Self::as_select();
table.select(select)
}
}

View file

@ -0,0 +1,23 @@
use diesel::{
dsl::{AsSelect, auto_type},
pg::Pg,
prelude::*,
};
use crate::catalogs_schema::pg_namespace;
pub use crate::catalogs_schema::pg_namespace::{dsl, table};
#[derive(Clone, Debug, Queryable, Selectable)]
#[diesel(table_name = pg_namespace)]
pub struct PgNamespace {
pub oid: u32,
}
impl PgNamespace {
#[auto_type(no_type_alias)]
pub fn all() -> _ {
let select: AsSelect<Self, Pg> = Self::as_select();
table.select(select)
}
}

50
catalogs/src/pg_roles.rs Normal file
View file

@ -0,0 +1,50 @@
use chrono::{DateTime, Utc};
use diesel::{
    dsl::{AsSelect, auto_type},
    pg::Pg,
    prelude::*,
};

use crate::catalogs_schema::pg_roles;
pub use crate::catalogs_schema::pg_roles::{dsl, table};

/// Row type for the `pg_roles` system view.
///
/// Fields are `pub` for consistency with the sibling catalog modules
/// (`PgClass`, `PgAttribute`, `PgNamespace`); without this, the struct is
/// public but its data is unreadable outside the crate.
#[derive(Clone, Debug, Queryable, Selectable)]
#[diesel(table_name = pg_roles)]
#[diesel(primary_key(oid))]
pub struct PgRole {
    /// Role name
    pub rolname: String,
    /// Role has superuser privileges
    pub rolsuper: bool,
    /// Role automatically inherits privileges of roles it is a member of
    pub rolinherit: bool,
    /// Role can create more roles
    pub rolcreaterole: bool,
    /// Role can create databases
    pub rolcreatedb: bool,
    /// Role can log in. That is, this role can be given as the initial session authorization identifier
    pub rolcanlogin: bool,
    /// Role is a replication role. A replication role can initiate replication connections and create and drop replication slots.
    pub rolreplication: bool,
    /// For roles that can log in, this sets maximum number of concurrent connections this role can make. -1 means no limit.
    pub rolconnlimit: i32,
    /// Not the password (always reads as ********)
    pub rolpassword: String,
    /// Password expiry time (only used for password authentication); null if no expiration
    pub rolvaliduntil: Option<DateTime<Utc>>,
    /// Role bypasses every row-level security policy, see Section 5.9 for more information.
    pub rolbypassrls: bool,
    /// Role-specific defaults for run-time configuration variables
    pub rolconfig: Option<Vec<String>>,
    /// ID of role
    pub oid: u32,
}

impl PgRole {
    /// Query selecting every mapped column of `pg_roles`.
    #[auto_type(no_type_alias)]
    pub fn all() -> _ {
        let select: AsSelect<Self, Pg> = Self::as_select();
        table.select(select)
    }
}

View file

@ -0,0 +1,38 @@
use diesel::{
    dsl::{AsSelect, auto_type},
    pg::Pg,
    prelude::*,
};

use crate::catalogs_schema::table_privileges;
pub use crate::catalogs_schema::table_privileges::{dsl, table};

/// Row type for the `information_schema.table_privileges` view.
///
/// Fields are `pub` for consistency with the sibling catalog modules
/// (`PgClass`, `PgAttribute`, `PgNamespace`); without this, the struct is
/// public but its data is unreadable outside the crate.
#[derive(Clone, Debug, Queryable, Selectable)]
#[diesel(table_name = table_privileges)]
pub struct TablePrivilege {
    /// Name of the role that granted the privilege
    pub grantor: String,
    /// Name of the role that the privilege was granted to
    pub grantee: String,
    /// Name of the database that contains the table (always the current database)
    pub table_catalog: String,
    /// Name of the schema that contains the table
    pub table_schema: String,
    /// Name of the table
    pub table_name: String,
    /// Type of the privilege: SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, or TRIGGER
    pub privilege_type: String,
    /// YES if the privilege is grantable, NO if not
    pub is_grantable: String,
    /// In the SQL standard, WITH HIERARCHY OPTION is a separate (sub-)privilege allowing certain operations on table inheritance hierarchies. In PostgreSQL, this is included in the SELECT privilege, so this column shows YES if the privilege is SELECT, else NO.
    pub with_hierarchy: String,
}

impl TablePrivilege {
    /// Query selecting every mapped column of `table_privileges`.
    #[auto_type(no_type_alias)]
    pub fn all() -> _ {
        let select: AsSelect<Self, Pg> = Self::as_select();
        table.select(select)
    }
}

9
diesel.toml Normal file
View file

@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "migrations"

0
migrations/.keep Normal file
View file

View file

@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View file

@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1 @@
DROP TABLE IF EXISTS users;

View file

@ -0,0 +1,6 @@
CREATE TABLE IF NOT EXISTS users (
    id UUID NOT NULL PRIMARY KEY,
    uid TEXT UNIQUE NOT NULL,
    email TEXT NOT NULL
);
-- No separate index on uid: the UNIQUE constraint above already creates a
-- btree index on that column, so an additional CREATE INDEX would only
-- duplicate it and slow down writes.

View file

@ -0,0 +1 @@
DROP TABLE IF EXISTS browser_sessions;

View file

@ -0,0 +1,8 @@
CREATE TABLE browser_sessions (
id TEXT NOT NULL PRIMARY KEY,
serialized TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
expiry TIMESTAMPTZ
);
CREATE INDEX ON browser_sessions (expiry);
CREATE INDEX ON browser_sessions (created_at);

46
src/abstract_.rs Normal file
View file

@ -0,0 +1,46 @@
use catalogs::{
pg_class::{self, PgClass},
pg_namespace,
table_privileges::{self, TablePrivilege},
};
use diesel::{
dsl::{auto_type, AsSelect},
pg::Pg,
prelude::*,
};
/// Quote `identifier` for safe interpolation into Postgres SQL.
///
/// Provided the input is valid UTF-8 (no broken multi-byte sequences),
/// quoting a Postgres identifier only requires doubling embedded double
/// quotes (`"` -> `""`); backslashes stay untouched. See PQescapeInternal()
/// in libpq (fe-exec.c) and Diesel's PgQueryBuilder::push_identifier().
/// A single space is emitted on each side of the quoted name so it can
/// never fuse with an adjacent token in the assembled statement.
pub fn escape_identifier(identifier: &str) -> String {
    let doubled = identifier.replace('"', "\"\"");
    let mut quoted = String::with_capacity(doubled.len() + 4);
    quoted.push_str(" \"");
    quoted.push_str(&doubled);
    quoted.push_str("\" ");
    quoted
}
// Still waiting for Postgres to gain class consciousness
#[derive(Clone, Queryable, Selectable)]
pub struct PgClassPrivilege {
#[diesel(embed)]
pub class: PgClass,
#[diesel(embed)]
pub privilege: TablePrivilege,
}
/// Query builder: ordinary tables (`relkind = 'r'`) on which any of the
/// given `grantees` holds a privilege, one row per (table, privilege)
/// pair, selected as `PgClassPrivilege`.
#[auto_type(no_type_alias)]
pub fn class_privileges_for_grantees(grantees: Vec<String>) -> _ {
let select: AsSelect<PgClassPrivilege, Pg> = PgClassPrivilege::as_select();
pg_class::table
// Resolve the table's schema: pg_class.relnamespace -> pg_namespace.oid.
.inner_join(pg_namespace::table.on(pg_namespace::dsl::oid.eq(pg_class::dsl::relnamespace)))
.inner_join(
// information_schema identifies tables by (schema name, table name),
// so join on both names.
table_privileges::table.on(table_privileges::dsl::table_schema
.eq(pg_namespace::dsl::nspname)
.and(table_privileges::dsl::table_name.eq(pg_class::dsl::relname))),
)
// 'r' = ordinary table in pg_class.relkind.
.filter(pg_class::dsl::relkind.eq(b'r'))
.filter(table_privileges::dsl::grantee.eq_any(grantees))
.select(select)
}

81
src/app_error.rs Normal file
View file

@ -0,0 +1,81 @@
use std::fmt::{self, Display};
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use validator::ValidationErrors;
/// Custom error type that maps to appropriate HTTP responses.
///
/// The String payloads are rendered to the client by the IntoResponse impl
/// below, so keep them free of secrets and internal detail.
#[derive(Debug)]
pub enum AppError {
// 500; the wrapped error is logged, never shown to the client.
InternalServerError(anyhow::Error),
// 403
Forbidden(String),
// 404
NotFound(String),
// 400
BadRequest(String),
// 429
TooManyRequests(String),
}
impl AppError {
pub fn from_validation_errors(errs: ValidationErrors) -> Self {
// TODO: customize validation errors formatting
Self::BadRequest(serde_json::to_string(&errs).unwrap_or("validation error".to_string()))
}
}
impl IntoResponse for AppError {
    /// Log each error at a severity fitting its variant, then render the
    /// matching HTTP status code with a client-safe message body.
    fn into_response(self) -> Response {
        let (status, body) = match self {
            Self::InternalServerError(err) => {
                // Never leak internals to the client; log details instead.
                tracing::error!("Application error: {:?}", err);
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "Something went wrong".to_owned(),
                )
            }
            Self::Forbidden(client_message) => {
                tracing::info!("Forbidden: {}", client_message);
                (StatusCode::FORBIDDEN, client_message)
            }
            Self::NotFound(client_message) => {
                tracing::info!("Not found: {}", client_message);
                (StatusCode::NOT_FOUND, client_message)
            }
            Self::TooManyRequests(client_message) => {
                // Debug level so that if this is from a runaway loop, it
                // won't overwhelm server logs
                tracing::debug!("Too many requests: {}", client_message);
                (StatusCode::TOO_MANY_REQUESTS, client_message)
            }
            Self::BadRequest(client_message) => {
                tracing::info!("Bad user input: {}", client_message);
                (StatusCode::BAD_REQUEST, client_message)
            }
        };
        (status, body).into_response()
    }
}
// Easily convert semi-arbitrary errors to InternalServerError, so handlers
// can use `?` on anything anyhow can wrap.
impl<E> From<E> for AppError
where
    E: Into<anyhow::Error>,
{
    fn from(err: E) -> Self {
        Self::InternalServerError(err.into())
    }
}
impl Display for AppError {
    /// Renders as "<Variant>Error: <message>"; the internal variant defers
    /// to the wrapped error's own Display.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (label, message) = match self {
            AppError::InternalServerError(inner) => return inner.fmt(f),
            AppError::Forbidden(m) => ("ForbiddenError", m),
            AppError::NotFound(m) => ("NotFoundError", m),
            AppError::BadRequest(m) => ("BadRequestError", m),
            AppError::TooManyRequests(m) => ("TooManyRequestsError", m),
        };
        write!(f, "{}: {}", label, message)
    }
}

90
src/app_state.rs Normal file
View file

@ -0,0 +1,90 @@
use std::sync::Arc;
use anyhow::Result;
use axum::{
extract::{FromRef, FromRequestParts},
http::request::Parts,
};
use deadpool_diesel::postgres::{Connection, Pool};
use oauth2::basic::BasicClient;
use crate::{
abstract_::escape_identifier, app_error::AppError, auth, nav::NavbarBuilder, sessions::PgStore,
settings::Settings,
};
/// Global app configuration
///
/// Long-lived shared resources, built once by `from_settings()` and shared
/// with every request via `AppState` (an `Arc<App>`).
pub struct App {
// Deadpool-managed Postgres connection pool.
pub db_pool: Pool,
// Pre-seeded navbar builder, cloned per request.
pub navbar_template: NavbarBuilder,
// OAuth2 client configured from `settings.auth`.
pub oauth_client: BasicClient,
// Shared HTTPS-only HTTP client.
pub reqwest_client: reqwest::Client,
// Postgres-backed session store (browser_sessions table).
pub session_store: PgStore,
pub settings: Settings,
}
impl App {
/// Initialize global application functions based on config values
///
/// Builds the database pool (with a recycle query resetting the role),
/// the session store, the HTTPS-only reqwest client, and the OAuth
/// client. Returns an error if any of those cannot be constructed.
pub async fn from_settings(settings: Settings) -> Result<Self> {
let database_url = settings.database_url.clone();
let manager = deadpool_diesel::postgres::Manager::from_config(
database_url,
deadpool_diesel::Runtime::Tokio1,
deadpool_diesel::ManagerConfig {
// Reset role after each interaction is recycled so that user
// sessions remain isolated by deadpool interaction
recycling_method: deadpool_diesel::RecyclingMethod::CustomQuery(
std::borrow::Cow::Owned(format!(
"SET ROLE {}",
escape_identifier(&settings.pg_root_role)
)),
),
},
);
let db_pool = deadpool_diesel::postgres::Pool::builder(manager).build()?;
let session_store = PgStore::new(db_pool.clone());
// https_only guards against accidentally calling plaintext endpoints.
let reqwest_client = reqwest::ClientBuilder::new().https_only(true).build()?;
let oauth_client = auth::new_oauth_client(&settings)?;
Ok(Self {
db_pool,
navbar_template: NavbarBuilder::default().with_base_path(&settings.base_path),
oauth_client,
reqwest_client,
session_store,
settings,
})
}
}
/// Global app configuration, arced for relatively inexpensive clones.
/// This is the state type used by the routers and extractors below.
pub type AppState = Arc<App>;
/// State extractor for shared reqwest client
#[derive(Clone)]
pub struct ReqwestClient(pub reqwest::Client);
impl<S> FromRef<S> for ReqwestClient
where
    S: Into<AppState> + Clone,
{
    /// Pull the shared HTTP client out of any state convertible to AppState.
    fn from_ref(state: &S) -> Self {
        let app: AppState = state.clone().into();
        ReqwestClient(app.reqwest_client.clone())
    }
}
/// Extractor to automatically obtain a Deadpool database connection
pub struct DbConn(pub Connection);
impl<S> FromRequestParts<S> for DbConn
where
    S: Into<AppState> + Clone + Sync,
{
    type Rejection = AppError;
    /// Check a connection out of the pool; pool errors surface as 500s via
    /// AppError's blanket From impl.
    async fn from_request_parts(_: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let app: AppState = state.clone().into();
        let conn = app.db_pool.get().await?;
        Ok(Self(conn))
    }
}

217
src/auth.rs Normal file
View file

@ -0,0 +1,217 @@
use anyhow::{Context, Result};
use async_session::{Session, SessionStore};
use axum::{
extract::{Query, State},
response::{IntoResponse, Redirect},
routing::get,
Router,
};
use axum_extra::extract::cookie::{Cookie, CookieJar, SameSite};
use oauth2::{
basic::BasicClient, reqwest::async_http_client, AuthUrl, AuthorizationCode, ClientId,
ClientSecret, CsrfToken, RedirectUrl, RefreshToken, TokenResponse, TokenUrl,
};
use serde::{Deserialize, Serialize};
use crate::{
app_error::AppError,
app_state::{AppState, ReqwestClient},
sessions::{AppSession, PgStore},
settings::Settings,
};
// Session key for the OAuth state parameter awaiting verification.
const SESSION_KEY_AUTH_CSRF_TOKEN: &str = "oauth_csrf_token";
// Session key for the provider refresh token (used at logout).
const SESSION_KEY_AUTH_REFRESH_TOKEN: &str = "oauth_refresh_token";
// Session key for the authenticated user's AuthInfo.
pub const SESSION_KEY_AUTH_INFO: &str = "auth";
// Session key for the path to return to after a successful login.
pub const SESSION_KEY_AUTH_REDIRECT: &str = "post_auth_redirect";
/// Creates a new OAuth2 client to be stored in global application state.
///
/// Endpoint URLs come from `settings.auth`; the redirect URI is derived
/// from the public frontend host plus the app base path.
pub fn new_oauth_client(settings: &Settings) -> Result<BasicClient> {
    let client_id = ClientId::new(settings.auth.client_id.clone());
    let client_secret = ClientSecret::new(settings.auth.client_secret.clone());
    let auth_url = AuthUrl::new(settings.auth.auth_url.clone())
        .context("failed to create new authorization server URL")?;
    let token_url = TokenUrl::new(settings.auth.token_url.clone())
        .context("failed to create new token endpoint URL")?;
    let redirect_url = RedirectUrl::new(format!(
        "{}{}/auth/callback",
        settings.frontend_host, settings.base_path
    ))
    .context("failed to create new redirection URL")?;
    let client = BasicClient::new(client_id, Some(client_secret), auth_url, Some(token_url))
        .set_redirect_uri(redirect_url);
    Ok(client)
}
/// Creates a router which can be nested within the higher level app router.
///
/// Routes: GET /login (start the flow), GET /callback (OAuth redirect
/// target), GET /logout.
pub fn new_router() -> Router<AppState> {
Router::new()
.route("/login", get(start_login))
.route("/callback", get(callback))
.route("/logout", get(logout))
}
/// HTTP get handler for /login
///
/// Starts the OAuth authorization-code flow: ensures a session exists,
/// stores a fresh CSRF token on it, sets the session cookie when the
/// session is new, and redirects the browser to the provider's authorize
/// URL. Already-authenticated visitors are sent straight to the app root.
async fn start_login(
State(state): State<AppState>,
State(Settings {
auth: auth_settings,
base_path,
..
}): State<Settings>,
State(session_store): State<PgStore>,
AppSession(maybe_session): AppSession,
jar: CookieJar,
) -> Result<impl IntoResponse, AppError> {
let mut session = if let Some(value) = maybe_session {
value
} else {
Session::new()
};
if session.get::<AuthInfo>(SESSION_KEY_AUTH_INFO).is_some() {
tracing::debug!("already logged in, redirecting...");
return Ok(Redirect::to(&format!("{}/", base_path)).into_response());
}
// A refresh token without AuthInfo would mean inconsistent session state.
assert!(session.get_raw(SESSION_KEY_AUTH_REFRESH_TOKEN).is_none());
let csrf_token = CsrfToken::new_random();
session.insert(SESSION_KEY_AUTH_CSRF_TOKEN, &csrf_token)?;
let (auth_url, _csrf_token) = state.oauth_client.authorize_url(|| csrf_token).url();
// store_session() yields Some(cookie_value) only for sessions it has not
// stored before; only then does the cookie need to be (re)issued.
let jar = if let Some(cookie_value) = session_store.store_session(session).await? {
tracing::debug!("adding session cookie to jar");
jar.add(
Cookie::build((auth_settings.cookie_name.clone(), cookie_value))
// Lax so the cookie survives the provider's cross-site
// redirect back to /callback.
.same_site(SameSite::Lax)
.http_only(true)
.path("/"),
)
} else {
tracing::debug!("inferred that session cookie already in jar");
jar
};
Ok((jar, Redirect::to(auth_url.as_ref())).into_response())
}
/// HTTP get handler for /logout
///
/// Revokes the provider session when a logout URL and refresh token are
/// available, destroys the server-side session, removes the session
/// cookie, and redirects to the app root.
async fn logout(
State(Settings {
base_path,
auth: auth_settings,
..
}): State<Settings>,
State(ReqwestClient(reqwest_client)): State<ReqwestClient>,
State(session_store): State<PgStore>,
AppSession(session): AppSession,
jar: CookieJar,
) -> Result<impl IntoResponse, AppError> {
if let Some(session) = session {
tracing::debug!("Session {} loaded.", session.id());
if let Some(logout_url) = auth_settings.logout_url {
tracing::debug!("attempting to send logout request to oauth provider");
let refresh_token: Option<RefreshToken> = session.get(SESSION_KEY_AUTH_REFRESH_TOKEN);
if let Some(refresh_token) = refresh_token {
tracing::debug!("Sending logout request to OAuth provider.");
// Revocation request body shape expected by the provider.
#[derive(Serialize)]
struct LogoutRequestBody {
refresh_token: String,
}
// NOTE(review): a provider error here aborts logout (via `?`)
// before the local session is destroyed — confirm intended.
reqwest_client
.post(logout_url)
.json(&LogoutRequestBody {
refresh_token: refresh_token.secret().to_owned(),
})
.send()
.await?
.error_for_status()?;
tracing::debug!("Sent logout request to OAuth provider successfully.");
}
}
session_store.destroy_session(session).await?;
}
let jar = jar.remove(Cookie::from(auth_settings.cookie_name));
tracing::debug!("Removed session cookie from jar.");
Ok((jar, Redirect::to(&format!("{}/", base_path))))
}
/// Query parameters appended by the OAuth provider when it redirects the
/// browser back to /callback.
#[derive(Debug, Deserialize)]
struct AuthRequestQuery {
// Authorization code to be exchanged for tokens.
code: String,
/// CSRF token
state: String,
}
/// HTTP get handler for /callback
///
/// Completes the OAuth flow: checks the `state` query param against the
/// CSRF token saved by /login, exchanges the authorization code for
/// tokens, fetches userinfo, stores AuthInfo plus the refresh token on the
/// session, and redirects to the originally requested page (or app root).
async fn callback(
Query(query): Query<AuthRequestQuery>,
State(state): State<AppState>,
State(Settings {
auth: auth_settings,
base_path,
..
}): State<Settings>,
State(ReqwestClient(reqwest_client)): State<ReqwestClient>,
AppSession(session): AppSession,
) -> Result<impl IntoResponse, AppError> {
// The session must already exist: /login stored the CSRF token in it.
let mut session = session.ok_or_else(|| {
tracing::debug!("unable to load session");
AppError::Forbidden(
"our apologies: authentication session expired or lost, please try again".to_owned(),
)
})?;
let session_csrf_token: String = session.get(SESSION_KEY_AUTH_CSRF_TOKEN).ok_or_else(|| {
tracing::debug!("oauth csrf token not found on session");
AppError::Forbidden(
"our apologies: authentication session expired or lost, please try again".to_owned(),
)
})?;
if session_csrf_token != query.state {
tracing::debug!("oauth csrf tokens did not match");
return Err(AppError::Forbidden(
"OAuth CSRF tokens do not match.".to_string(),
));
}
tracing::debug!("exchanging authorization code");
let response = state
.oauth_client
.exchange_code(AuthorizationCode::new(query.code.clone()))
.request_async(async_http_client)
.await?;
tracing::debug!("fetching user info");
let auth_info: AuthInfo = reqwest_client
.get(auth_settings.userinfo_url.as_str())
.bearer_auth(response.access_token().secret())
.send()
.await?
.json()
.await?;
tracing::debug!("updating session");
let redirect_target: Option<String> = session.get(SESSION_KEY_AUTH_REDIRECT);
// Remove this since we don't need or want it sticking around, for both UX
// and security hygiene reasons
session.remove(SESSION_KEY_AUTH_REDIRECT);
session.insert(SESSION_KEY_AUTH_INFO, &auth_info)?;
session.insert(SESSION_KEY_AUTH_REFRESH_TOKEN, response.refresh_token())?;
// store_session() returns Some only for sessions it has never stored;
// this one was loaded from the store, so Some here indicates a bug.
if state.session_store.store_session(session).await?.is_some() {
return Err(anyhow::anyhow!(
"expected cookie value returned by store_session() to be None for existing session"
)
.into());
}
tracing::debug!("successfully authenticated");
Ok(Redirect::to(
&redirect_target.unwrap_or(format!("{}/", base_path)),
))
}
/// Data stored in the visitor's session upon successful authentication.
/// Deserialized directly from the provider's userinfo response; field
/// names match the claims used (`sub`, `email`).
#[derive(Debug, Deserialize, Serialize)]
pub struct AuthInfo {
pub sub: String,
pub email: String,
}

94
src/cli.rs Normal file
View file

@ -0,0 +1,94 @@
use anyhow::{Context as _, Result};
use axum::{
    extract::Request,
    http::{header::CONTENT_SECURITY_POLICY, HeaderValue},
    middleware::map_request,
    ServiceExt,
};
use chrono::{TimeDelta, Utc};
use clap::{Parser, Subcommand};
use tokio::time::sleep;
use tower::ServiceBuilder;
use tower_http::{
    compression::CompressionLayer, normalize_path::NormalizePathLayer,
    set_header::response::SetResponseHeaderLayer, trace::TraceLayer,
};
use crate::{
    app_state::AppState, middleware::lowercase_uri_path, router::new_router, worker::run_worker,
};
/// Top-level command-line interface: `interim <subcommand>`.
#[derive(Parser)]
#[command(version, about, long_about = None)]
pub struct Cli {
#[command(subcommand)]
pub command: Commands,
}
/// Arguments for the `worker` subcommand.
#[derive(Parser)]
pub struct WorkerArgs {
/// Loop every n seconds instead of exiting after execution
#[arg(long)]
auto_loop_seconds: Option<u32>,
}
/// Available subcommands (see serve_command / worker_command below).
#[derive(Subcommand)]
pub enum Commands {
/// Run web server
Serve,
/// Run background worker
Worker(WorkerArgs),
// TODO: add a low-frequency worker task exclusively for self-healing
// mechanisms like Governor::reset_all()
}
/// Bind a TCP listener and serve the application router until it exits.
///
/// Middleware stack, outermost first: URI-path lowercasing, request
/// tracing, gzip compression, a default Content-Security-Policy header,
/// and trailing-slash normalization, wrapped around the app router.
pub async fn serve_command(state: AppState) -> Result<()> {
    let router = ServiceBuilder::new()
        .layer(map_request(lowercase_uri_path))
        .layer(TraceLayer::new_for_http())
        .layer(CompressionLayer::new())
        .layer(SetResponseHeaderLayer::if_not_present(
            CONTENT_SECURITY_POLICY,
            HeaderValue::from_static("frame-ancestors 'none'"),
        ))
        .layer(NormalizePathLayer::trim_trailing_slash())
        .service(new_router(state.clone()));
    // Propagate bind failures (port already in use, bad host, ...) as an
    // error through the Result we already return, rather than panicking.
    let listener =
        tokio::net::TcpListener::bind((state.settings.host.clone(), state.settings.port))
            .await
            .context("failed to bind server address")?;
    tracing::info!(
        "App running at http://{}:{}{}",
        state.settings.host,
        state.settings.port,
        state.settings.base_path
    );
    axum::serve(listener, ServiceExt::<Request>::into_make_service(router))
        .await
        .map_err(Into::into)
}
/// Run the background worker once, or repeatedly on a fixed cadence.
///
/// With `--auto-loop-seconds n`, each iteration is scheduled n seconds
/// after the previous iteration *started*; when a run overruns its slot,
/// the next begins immediately. Worker errors are logged, never fatal.
pub async fn worker_command(args: &WorkerArgs, state: AppState) -> Result<()> {
    let Some(loop_seconds) = args.auto_loop_seconds else {
        // No loop interval requested: single execution, then exit.
        return run_worker(state).await;
    };
    let loop_delta = TimeDelta::seconds(i64::from(loop_seconds));
    loop {
        let t_next_loop = Utc::now() + loop_delta;
        if let Err(err) = run_worker(state.clone()).await {
            tracing::error!("{}", err)
        }
        // to_std() fails when the remaining delta is negative, i.e. the run
        // took longer than the interval — skip sleeping in that case.
        if let Ok(duration) = (t_next_loop - Utc::now()).to_std() {
            sleep(duration).await;
        }
    }
}

55
src/main.rs Normal file
View file

@ -0,0 +1,55 @@
use clap::Parser as _;
use diesel_migrations::MigrationHarness;
use dotenvy::dotenv;
use tracing_subscriber::EnvFilter;
use crate::{
app_state::{App, AppState},
cli::{serve_command, worker_command, Cli, Commands},
migrations::MIGRATIONS,
settings::Settings,
};
mod abstract_;
mod app_error;
mod app_state;
mod auth;
mod cli;
mod middleware;
mod migrations;
mod nav;
mod router;
mod schema;
mod sessions;
mod settings;
mod users;
mod worker;
/// Run CLI
///
/// Startup order matters: .env first so RUST_LOG can come from it, then
/// tracing, then settings, then shared app state. Database migrations run
/// before dispatching only when RUN_DATABASE_MIGRATIONS is set to 1.
#[tokio::main]
async fn main() {
// Attempt to pre-load .env in case it contains a RUST_LOG variable
dotenv().ok();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.init();
let settings = Settings::load().unwrap();
let state: AppState = App::from_settings(settings.clone()).await.unwrap().into();
if settings.run_database_migrations == Some(1) {
// Run migrations on server startup
let conn = state.db_pool.get().await.unwrap();
// First unwrap: deadpool interact (join) error; second: migration error.
conn.interact(|conn| conn.run_pending_migrations(MIGRATIONS).and(Ok(())))
.await
.unwrap()
.unwrap();
}
let cli = Cli::parse();
match &cli.command {
Commands::Serve => serve_command(state).await.unwrap(),
Commands::Worker(args) => worker_command(args, state).await.unwrap(),
}
}

17
src/middleware.rs Normal file
View file

@ -0,0 +1,17 @@
use axum::http::Request;
/// Pass to axum::middleware::map_request() to transform the entire URI path
/// (but not search query) to lowercase.
pub async fn lowercase_uri_path<B>(mut request: Request<B>) -> Request<B> {
    let uri = request.uri().clone();
    // Lowercase only the path; re-append the query untouched, if any.
    let mut path_and_query = uri.path().to_lowercase();
    if let Some(query) = uri.query() {
        path_and_query.push('?');
        path_and_query.push_str(query);
    }
    let rebuilt = axum::http::uri::Builder::from(uri)
        .path_and_query(path_and_query)
        .build()
        .expect("lowercasing URI path should not break it");
    *request.uri_mut() = rebuilt;
    request
}

3
src/migrations.rs Normal file
View file

@ -0,0 +1,3 @@
use diesel_migrations::{embed_migrations, EmbeddedMigrations};
// All SQL under migrations/ compiled into the binary so main() can run
// pending migrations at startup without shipping the migration files.
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/");

236
src/nav.rs Normal file
View file

@ -0,0 +1,236 @@
use std::collections::HashMap;
use axum::extract::FromRef;
use crate::app_state::AppState;
// Stable identifiers for the navbar items registered in
// NavbarBuilder::default(); pass to with_active_item() to highlight one.
pub const NAVBAR_ITEM_TEAMS: &str = "teams";
pub const NAVBAR_ITEM_PROJECTS: &str = "projects";
pub const NAVBAR_ITEM_CHANNELS: &str = "channels";
pub const NAVBAR_ITEM_TEAM_MEMBERS: &str = "team-members";
/// An ordered breadcrumb trail rooted at a base URI path.
#[derive(Clone, Debug)]
pub struct BreadcrumbTrail {
// Path prefix (optionally extended with an i18n slug) hrefs build on.
base_path: String,
breadcrumbs: Vec<Breadcrumb>,
}
impl BreadcrumbTrail {
/// Initialize with a non-empty base path.
pub fn from_base_path(base_path: &str) -> Self {
Self {
base_path: base_path.to_owned(),
breadcrumbs: Vec::new(),
}
}
/// Append an i18n path segment to the base path.
pub fn with_i18n_slug(mut self, language_code: &str) -> Self {
self.base_path.push('/');
self.base_path.push_str(language_code);
self
}
/// Add a breadcrumb by name and slug. If other breadcrumbs have already
/// been added, href will be generated by appending it to the previous href
/// as "<previous>/<slug>". Otherwise, it will be appended to the base path
/// with i18n slug (if any).
pub fn push_slug(mut self, label: &str, slug: &str) -> Self {
let href = if let Some(prev_breadcrumb) = self.iter().last() {
format!(
"{}/{}",
prev_breadcrumb.href,
percent_encoding::percent_encode(
slug.as_bytes(),
percent_encoding::NON_ALPHANUMERIC
)
)
} else {
// NOTE(review): unlike the branch above, the slug is appended to
// the base path here without percent-encoding — confirm whether
// this asymmetry is intentional.
format!("{}/{}", self.base_path, slug)
};
self.breadcrumbs.push(Breadcrumb {
label: label.to_owned(),
href,
});
self
}
// Iterate breadcrumbs in display (insertion) order.
pub fn iter(&self) -> std::slice::Iter<'_, Breadcrumb> {
self.breadcrumbs.iter()
}
/// Get an absolute URI path, starting from the child of the last
/// breadcrumb. For example, if the last breadcrumb has an href of
/// "/en/teams/team123" and the relative path is "../team456", the result
/// will be "/en/teams/team456". If no breadcrumbs exist, the base path
/// with i18n slug (if any) will be used.
pub fn join(&self, rel_path: &str) -> String {
let base = if let Some(breadcrumb) = self.iter().last() {
&breadcrumb.href
} else {
&self.base_path
};
// Resolve "." and ".." segments against the base, segment by segment.
// NOTE(review): ".." pops unconditionally, so enough ".." segments can
// climb above the base path — callers are trusted not to do that.
let mut path_buf: Vec<&str> = base.split('/').collect();
for rel_segment in rel_path.split('/') {
if rel_segment == "." {
continue;
} else if rel_segment == ".." {
path_buf.pop();
} else {
path_buf.push(rel_segment);
}
}
path_buf.join("/")
}
}
impl IntoIterator for BreadcrumbTrail {
type Item = Breadcrumb;
type IntoIter = std::vec::IntoIter<Breadcrumb>;
// Consume the trail, yielding breadcrumbs in display order.
fn into_iter(self) -> Self::IntoIter {
self.breadcrumbs.into_iter()
}
}
/// One link in a breadcrumb trail.
#[derive(Clone, Debug)]
pub struct Breadcrumb {
pub href: String,
pub label: String,
}
/// Builder for the site navbar. Items are declared with path templates and
/// only rendered once every "{param}" placeholder has a registered value.
#[derive(Clone, Debug)]
pub struct NavbarBuilder {
base_path: String,
items: Vec<NavbarItem>,
active_item: Option<String>,
// Values substituted into "{param}" placeholders in item subpaths.
params: HashMap<String, String>,
}
impl NavbarBuilder {
/// Empty builder: no base path, items, params, or active item.
pub fn new() -> Self {
Self {
base_path: "".to_owned(),
items: Vec::new(),
active_item: None,
params: HashMap::new(),
}
}
/// Prefix every built item href with this path.
pub fn with_base_path(mut self, base_path: &str) -> Self {
self.base_path = base_path.to_owned();
self
}
/// Add a navbar item. Subpath is a path relative to the base path, and it
/// may contain placeholders for path params, such as "/{lang}/teams".
/// The navbar item will only be displayed if all corresponding path params
/// are registered using .with_param().
pub fn push_item(mut self, id: &str, label: &str, subpath: &str) -> Self {
self.items.push(NavbarItem {
id: id.to_owned(),
// The raw subpath (with placeholders) is stored in href until
// build() performs substitution.
href: subpath.to_owned(),
label: label.to_owned(),
});
self
}
/// Registers a path param with the navbar builder.
pub fn with_param(mut self, k: &str, v: &str) -> Self {
self.params.insert(k.to_owned(), v.to_owned());
self
}
/// If a visible navbar item matches the provided ID, it will render as
/// active. Calling this method overrides any previously specified value.
pub fn with_active_item(mut self, item_id: &str) -> Self {
self.active_item = Some(item_id.to_owned());
self
}
/// Substitute params into every item's path template. Items with any
/// unresolved "{param}" placeholder are dropped from the built navbar.
pub fn build(self) -> Navbar {
let mut built_items: Vec<NavbarItem> = Vec::with_capacity(self.items.len());
for item in self.items {
let path_segments = item.href.split('/');
// Each segment maps to Some(text) if literal or resolvable,
// None if it is a placeholder with no registered param.
let substituted_segments: Vec<Option<&str>> = path_segments
.map(|segment| {
if segment.starts_with("{") && segment.ends_with("}") {
let param_k = segment[1..segment.len() - 1].trim();
self.params.get(param_k).map(|v| v.as_str())
} else {
Some(segment)
}
})
.collect();
if substituted_segments.iter().all(|segment| segment.is_some()) {
built_items.push(NavbarItem {
id: item.id,
// Rejoining with '/' preserves the leading slash, since a
// leading-slash subpath splits into a leading "" segment.
href: format!(
"{}{}",
self.base_path,
substituted_segments
.into_iter()
.map(|segment| {
segment.expect(
"should already have checked that all path segments are Some",
)
})
.collect::<Vec<_>>()
.join("/")
),
label: item.label,
});
}
}
Navbar {
active_item: self.active_item,
items: built_items,
}
}
}
impl Default for NavbarBuilder {
/// The app's standard navbar. The team-scoped items only become visible
/// once a "team_id" param is registered via `with_param()`.
fn default() -> Self {
Self::new()
.push_item(NAVBAR_ITEM_TEAMS, "Teams", "/en/teams")
.push_item(
NAVBAR_ITEM_PROJECTS,
"Projects",
"/en/teams/{team_id}/projects",
)
.push_item(
NAVBAR_ITEM_CHANNELS,
"Channels",
"/en/teams/{team_id}/channels",
)
.push_item(
NAVBAR_ITEM_TEAM_MEMBERS,
"Team Members",
"/en/teams/{team_id}/members",
)
}
}
impl<S> FromRef<S> for NavbarBuilder
where
    S: Into<AppState> + Clone,
{
    /// Clone the pre-seeded navbar template out of the app state.
    fn from_ref(state: &S) -> Self {
        let app: AppState = state.clone().into();
        app.navbar_template.clone()
    }
}
/// A fully-built navbar, ready for template rendering.
#[derive(Clone, Debug)]
pub struct Navbar {
pub items: Vec<NavbarItem>,
// When Some, the item with this id should render as active.
pub active_item: Option<String>,
}
/// A single navbar link.
#[derive(Clone, Debug)]
pub struct NavbarItem {
pub href: String,
pub id: String,
pub label: String,
}

136
src/router.rs Normal file
View file

@ -0,0 +1,136 @@
use anyhow::{Context as _, Result};
use askama::Template;
use axum::{
extract::State,
http::{header::CACHE_CONTROL, HeaderValue},
response::{Html, IntoResponse as _, Response},
routing::get,
Router,
};
use catalogs::{
pg_class::{self, PgClass},
pg_namespace,
pg_roles::{self, PgRole},
table_privileges::{self, TablePrivilege},
};
use diesel::{prelude::*, sql_query};
use tower::ServiceBuilder;
use tower_http::{
services::{ServeDir, ServeFile},
set_header::SetResponseHeaderLayer,
};
use crate::{
abstract_::{class_privileges_for_grantees, escape_identifier},
app_error::AppError,
app_state::{AppState, DbConn},
auth,
settings::Settings,
users::CurrentUser,
};
/// Assemble the top-level application router.
///
/// Dynamic routes get Cache-Control: no-cache; static assets under
/// static/ get a long max-age with stale-while-revalidate, except the 404
/// page which stays no-cache. When a base path is configured, the app is
/// nested under it and requests outside it fall back to static serving.
pub fn new_router(state: AppState) -> Router<()> {
let base_path = state.settings.base_path.clone();
let app = Router::new()
.route("/", get(landing_page))
.nest("/auth", auth::new_router())
.layer(SetResponseHeaderLayer::if_not_present(
CACHE_CONTROL,
HeaderValue::from_static("no-cache"),
))
.fallback_service(
ServiceBuilder::new()
.layer(SetResponseHeaderLayer::if_not_present(
CACHE_CONTROL,
HeaderValue::from_static("max-age=21600, stale-while-revalidate=86400"),
))
.service(
ServeDir::new("static").not_found_service(
ServiceBuilder::new()
.layer(SetResponseHeaderLayer::if_not_present(
CACHE_CONTROL,
HeaderValue::from_static("no-cache"),
))
.service(ServeFile::new("static/_404.html")),
),
),
)
.with_state(state);
if base_path.is_empty() {
app
} else {
Router::new().nest(&base_path, app).fallback_service(
ServeDir::new("static").not_found_service(ServeFile::new("static/_404.html")),
)
}
}
/// Landing page: lazily provisions a per-user Postgres role, assumes it,
/// and renders the tables that role has been granted privileges on.
async fn landing_page(
    State(Settings {
        base_path,
        pg_user_role_prefix,
        ..
    }): State<Settings>,
    DbConn(db_conn): DbConn,
    CurrentUser(current_user): CurrentUser,
) -> Result<Response, AppError> {
    // Derive the per-user role name once instead of re-formatting the same
    // "{prefix}{uuid}" string at every use site below.
    let user_role = format!("{}{}", pg_user_role_prefix, current_user.id.simple());
    let grantees = vec![user_role.clone()];
    let visible_tables = db_conn
        .interact(move |conn| -> Result<Vec<PgClass>> {
            // Create the role on the user's first visit.
            let role = PgRole::all()
                .filter(pg_roles::dsl::rolname.eq(user_role.clone()))
                .first(conn)
                .optional()
                .context("error reading role")?;
            if role.is_none() {
                sql_query(format!("CREATE ROLE {}", escape_identifier(&user_role)))
                    .execute(conn)
                    .context("error creating role")?;
            }
            // Drop to the user's role so the privilege query runs with
            // exactly that user's permissions.
            sql_query(format!("SET ROLE {}", escape_identifier(&user_role)))
                .execute(conn)
                .context("error setting role to user")?;
            let privileges = class_privileges_for_grantees(grantees)
                .load(conn)
                .context("error reading classes")?;
            Ok(privileges.into_iter().map(|value| value.class).collect())
        })
        .await
        // unwrap: deadpool interact (join) error; the inner Result is `?`ed.
        .unwrap()?;
    #[derive(Template)]
    #[template(path = "tmp.html")]
    struct ResponseTemplate {
        base_path: String,
        relations: Vec<PgClass>,
    }
    Ok(Html(
        ResponseTemplate {
            base_path,
            relations: visible_tables,
        }
        .render()?,
    )
    .into_response())
}

23
src/schema.rs Normal file
View file

@ -0,0 +1,23 @@
// @generated automatically by Diesel CLI.
// Do not edit by hand; regenerate with `diesel print-schema`. These macro
// invocations mirror the SQL migrations above.
diesel::table! {
browser_sessions (id) {
id -> Text,
serialized -> Text,
created_at -> Timestamptz,
expiry -> Nullable<Timestamptz>,
}
}
diesel::table! {
users (id) {
id -> Uuid,
uid -> Text,
email -> Text,
}
}
diesel::allow_tables_to_appear_in_same_query!(
browser_sessions,
users,
);

173
src/sessions.rs Normal file
View file

@ -0,0 +1,173 @@
use anyhow::Result;
use async_session::{async_trait, Session, SessionStore};
use axum::{
extract::{FromRef, FromRequestParts},
http::request::Parts,
RequestPartsExt as _,
};
use axum_extra::extract::CookieJar;
use chrono::{DateTime, TimeDelta, Utc};
use diesel::{pg::Pg, prelude::*, upsert::excluded};
use tracing::{trace_span, Instrument};
use crate::{app_error::AppError, app_state::AppState, schema::browser_sessions};
// Sliding session lifetime: sessions idle this long become expired.
const EXPIRY_DAYS: i64 = 7;
/// A row of the browser_sessions table (created_at is not selected).
#[derive(Clone, Debug, Identifiable, Queryable, Selectable)]
#[diesel(table_name = browser_sessions)]
#[diesel(check_for_backend(Pg))]
pub struct BrowserSession {
// Session id derived from the signed cookie value.
pub id: String,
// JSON-serialized async_session::Session.
pub serialized: String,
// When the session stops being loadable; None means none recorded.
pub expiry: Option<DateTime<Utc>>,
}
/// Postgres-backed implementation of async_session::SessionStore, storing
/// sessions in the browser_sessions table.
#[derive(Clone)]
pub struct PgStore {
    pool: deadpool_diesel::postgres::Pool,
}
impl PgStore {
    /// Wrap an existing connection pool; performs no I/O itself.
    // Idiom: return `Self` rather than repeating the type name.
    pub fn new(pool: deadpool_diesel::postgres::Pool) -> Self {
        Self { pool }
    }
}
impl std::fmt::Debug for PgStore {
    /// The wrapped pool has no useful debug form, so print the type name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("PgStore")
    }
}
impl FromRef<AppState> for PgStore {
    /// Hand out a clone of the app-wide session store.
    fn from_ref(state: &AppState) -> Self {
        let store = &state.session_store;
        store.clone()
    }
}
#[async_trait]
impl SessionStore for PgStore {
/// Load a session by its cookie value, sweeping expired rows first.
/// Returns Ok(None) when no matching (unexpired) session exists.
async fn load_session(&self, cookie_value: String) -> Result<Option<Session>> {
let session_id = Session::id_from_cookie_value(&cookie_value)?;
let conn = self.pool.get().await?;
let row = conn
.interact(move |conn| {
// Drop all sessions without recent activity
diesel::delete(
browser_sessions::table.filter(browser_sessions::expiry.lt(diesel::dsl::now)),
)
.execute(conn)?;
browser_sessions::table
.filter(browser_sessions::id.eq(session_id))
.select(BrowserSession::as_select())
.first(conn)
.optional()
})
.await
// unwrap: deadpool interact (join) error; query errors go through `?`.
.unwrap()?;
Ok(match row {
Some(session) => Some(serde_json::from_str::<Session>(
session.serialized.as_str(),
)?),
None => None,
})
}
/// Upsert the session row. Per the SessionStore contract, returns
/// Some(cookie_value) only the first time a session is stored.
async fn store_session(&self, session: Session) -> Result<Option<String>> {
let serialized_data = serde_json::to_string(&session)?;
let session_id = session.id().to_string();
let expiry = session.expiry().copied();
let conn = self.pool.get().await?;
conn.interact(move |conn| {
diesel::insert_into(browser_sessions::table)
.values((
browser_sessions::id.eq(session_id),
browser_sessions::serialized.eq(serialized_data),
browser_sessions::expiry.eq(expiry),
))
// On re-store, refresh payload and expiry; created_at is kept.
.on_conflict(browser_sessions::id)
.do_update()
.set((
browser_sessions::serialized.eq(excluded(browser_sessions::serialized)),
browser_sessions::expiry.eq(excluded(browser_sessions::expiry)),
))
.execute(conn)
})
.await
.unwrap()?;
Ok(session.into_cookie_value())
}
/// Delete the session's row; logging-only confirmation afterwards.
async fn destroy_session(&self, session: Session) -> Result<()> {
let session_id = session.id().to_owned();
let conn = self.pool.get().await?;
conn.interact(move |conn| {
diesel::delete(
browser_sessions::table.filter(browser_sessions::id.eq(session.id().to_string())),
)
.execute(conn)
})
.await
.unwrap()?;
tracing::debug!("destroyed session {}", session_id);
Ok(())
}
/// Delete every stored session (global logout).
async fn clear_store(&self) -> Result<()> {
let conn = self.pool.get().await?;
conn.interact(move |conn| diesel::delete(browser_sessions::table).execute(conn))
.await
.unwrap()?;
Ok(())
}
}
/// Extractor wrapping the visitor's session, if their cookie matched one.
#[derive(Clone)]
pub struct AppSession(pub Option<Session>);
impl FromRequestParts<AppState> for AppSession {
type Rejection = AppError;
/// Load the session named by the request's cookie (if any) and extend
/// its sliding expiry by EXPIRY_DAYS on every hit.
async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, <Self as FromRequestParts<AppState>>::Rejection> {
async move {
// CookieJar extraction is infallible in practice, hence unwrap.
let jar = parts.extract::<CookieJar>().await.unwrap();
let session_cookie = match jar.get(&state.settings.auth.cookie_name) {
Some(cookie) => cookie,
None => {
tracing::debug!("no session cookie present");
return Ok(AppSession(None));
}
};
tracing::debug!("session cookie loaded");
let maybe_session = state
.session_store
.load_session(session_cookie.value().to_string())
.await?;
if let Some(mut session) = maybe_session {
tracing::debug!("session {} loaded", session.id());
// Sliding expiry: push the deadline out on every request.
session.expire_in(TimeDelta::days(EXPIRY_DAYS).to_std()?);
// store_session() returns Some only for never-stored sessions;
// this one was just loaded, so Some would indicate a store bug.
if state
.session_store
.store_session(session.clone())
.await?
.is_some()
{
return Err(anyhow::anyhow!(
"expected cookie value returned by store_session() to be None for existing session"
)
.into());
}
Ok(AppSession(Some(session)))
} else {
tracing::debug!("no matching session found in database");
Ok(AppSession(None))
}
// The Span.enter() guard pattern doesn't play nicely async
}.instrument(trace_span!("AppSession::from_request_parts()")).await
}
}

107
src/settings.rs Normal file
View file

@ -0,0 +1,107 @@
use anyhow::{Context as _, Result};
use axum::extract::FromRef;
use config::{Config, Environment};
use dotenvy::dotenv;
use serde::Deserialize;
use crate::app_state::AppState;
/// Application configuration, deserialized from environment variables by
/// `Settings::load()` (nested keys use a "__" separator, e.g.
/// AUTH__CLIENT_ID).
#[derive(Clone, Debug, Deserialize)]
pub struct Settings {
/// Prefix under which to nest all routes. If specified, include leading
/// slash but no trailing slash, for example "/app". For default behavior,
/// leave as empty string.
#[serde(default)]
pub base_path: String,
/// postgresql:// URL.
pub database_url: String,
/// Super-user role the server will use to create new user roles and manage
/// database resources.
pub pg_root_role: String,
/// Prefix prepended to per-user Postgres role names.
#[serde(default = "default_pg_user_role_prefix")]
pub pg_user_role_prefix: String,
/// When set to 1, embedded Diesel migrations will be run on startup.
pub run_database_migrations: Option<u8>,
/// Address for server to bind to
#[serde(default = "default_host")]
pub host: String,
/// Port for server to bind to
#[serde(default = "default_port")]
pub port: u16,
/// Host visible to end users, for example "https://shout.dev"
pub frontend_host: String,
pub auth: AuthSettings,
}
/// Default server port when PORT is unset.
fn default_port() -> u16 {
    8080
}
/// Default bind address when HOST is unset (loopback only).
fn default_host() -> String {
    String::from("127.0.0.1")
}
/// Default prefix for per-user Postgres role names.
fn default_pg_user_role_prefix() -> String {
    String::from("__interim_user__")
}
/// OAuth/OIDC configuration (environment keys prefixed with AUTH__).
#[derive(Clone, Debug, Deserialize)]
pub struct AuthSettings {
pub client_id: String,
pub client_secret: String,
// Provider's authorization endpoint.
pub auth_url: String,
// Provider's token endpoint.
pub token_url: String,
// Provider's userinfo endpoint, queried after the code exchange.
pub userinfo_url: String,
// Optional provider endpoint for revoking the session at logout.
pub logout_url: Option<String>,
/// Name of the browser session cookie.
#[serde(default = "default_cookie_name")]
pub cookie_name: String,
}
/// Default session cookie name when AUTH__COOKIE_NAME is unset.
fn default_cookie_name() -> String {
    String::from("INTERIM_SESSION")
}
impl Settings {
/// Load settings from the environment, first merging in a .env file if
/// one exists (a missing .env is fine; any other dotenv error is fatal).
/// Nested keys use "__" as separator, e.g. AUTH__CLIENT_ID.
pub fn load() -> Result<Self> {
match dotenv() {
Err(err) => {
if err.not_found() {
tracing::info!("no .env file found");
} else {
return Err(err).context("dotenvy error");
}
}
Ok(pathbuf) => {
tracing::info!(
"using env file {}",
pathbuf
.to_str()
.ok_or(anyhow::anyhow!("pathbuf is not valid unicode"))?
);
}
}
let s = Config::builder()
.add_source(Environment::default().separator("__"))
.build()
.context("config error")?;
s.try_deserialize().context("deserialize error")
}
}
impl<S> FromRef<S> for Settings
where
    S: Into<AppState> + Clone,
{
    /// Clone the settings out of any state convertible to AppState.
    fn from_ref(state: &S) -> Self {
        let app: AppState = state.clone().into();
        app.settings.clone()
    }
}

175
src/users.rs Normal file
View file

@ -0,0 +1,175 @@
use anyhow::Context;
use async_session::{Session, SessionStore as _};
use axum::{
extract::{FromRequestParts, OriginalUri},
http::{request::Parts, Method},
response::{IntoResponse, Redirect, Response},
RequestPartsExt,
};
use axum_extra::extract::{
cookie::{Cookie, SameSite},
CookieJar,
};
use diesel::{
associations::Identifiable,
deserialize::Queryable,
dsl::{auto_type, insert_into, AsSelect, Eq, Select},
pg::Pg,
prelude::*,
Selectable,
};
use uuid::Uuid;
use crate::{
app_error::AppError,
app_state::AppState,
auth::{AuthInfo, SESSION_KEY_AUTH_INFO, SESSION_KEY_AUTH_REDIRECT},
schema::users,
sessions::AppSession,
};
#[allow(unused_imports)]
pub use crate::schema::users::{dsl, table};
/// A registered user, backed by the `users` table.
#[derive(Clone, Debug, Identifiable, Insertable, Queryable, Selectable)]
#[diesel(table_name = users)]
#[diesel(check_for_backend(Pg))]
pub struct User {
    /// Primary key; assigned as a UUIDv7 when the row is first inserted.
    pub id: Uuid,
    /// Stable identifier from the auth provider (populated from
    /// `AuthInfo::sub` in the `CurrentUser` extractor).
    pub uid: String,
    /// Email address from the auth provider's claims.
    pub email: String,
}
impl User {
    /// Query selecting all `User` columns from the `users` table.
    pub fn all() -> Select<users::table, AsSelect<User, Pg>> {
        users::table.select(User::as_select())
    }
    /// Filter expression matching the row whose `uid` equals `uid_value`.
    /// Return type is inferred by diesel's `auto_type` macro.
    #[auto_type(no_type_alias)]
    pub fn with_uid(uid_value: &str) -> _ {
        users::uid.eq(uid_value)
    }
}
/// Axum extractor wrapper carrying the authenticated user for a request.
#[derive(Clone, Debug)]
pub struct CurrentUser(pub User);
impl<S> FromRequestParts<S> for CurrentUser
where
    S: Into<AppState> + Clone + Sync,
{
    type Rejection = CurrentUserRejection;
    /// Resolves the current user from the request's session.
    ///
    /// If the session carries `AuthInfo`, the matching `users` row is
    /// loaded (or created on first sight of this `sub`). Otherwise the
    /// request is rejected with a redirect to the login route, after
    /// stashing a post-login redirect target in the session for GET
    /// requests.
    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let app_state: AppState = state.clone().into();
        // Reuse the existing session if the cookie resolved to one;
        // otherwise start a fresh session.
        let mut session =
            if let AppSession(Some(value)) = parts.extract_with_state(&app_state).await? {
                value
            } else {
                Session::new()
            };
        let auth_info = if let Some(value) = session.get::<AuthInfo>(SESSION_KEY_AUTH_INFO) {
            value
        } else {
            // Not authenticated: prepare a redirect to the login route.
            let jar: CookieJar = parts.extract().await?;
            let method: Method = parts.extract().await?;
            let jar = if method == Method::GET {
                // Remember the original URI so the auth flow can send the
                // user back here after login; fall back to the app root.
                let OriginalUri(uri) = parts.extract().await?;
                session.insert(
                    SESSION_KEY_AUTH_REDIRECT,
                    uri.path_and_query()
                        .map(|value| value.to_string())
                        .unwrap_or(format!("{}/", app_state.settings.base_path)),
                )?;
                // Persist the session; `store_session` returns a cookie
                // value only when a new cookie needs to be set.
                if let Some(cookie_value) = app_state.session_store.store_session(session).await? {
                    tracing::debug!("adding session cookie to jar");
                    jar.add(
                        Cookie::build((app_state.settings.auth.cookie_name.clone(), cookie_value))
                            .same_site(SameSite::Lax)
                            .http_only(true)
                            .path("/"),
                    )
                } else {
                    tracing::debug!("inferred that session cookie already in jar");
                    jar
                }
            } else {
                // If request method is not GET then do not attempt to infer the
                // redirect target, as there may be no GET handler defined for
                // it.
                jar
            };
            return Err(Self::Rejection::SetCookiesAndRedirect(
                jar,
                format!("{}/auth/login", app_state.settings.base_path),
            ));
        };
        let db_conn = app_state.db_pool.get().await?;
        let current_user = db_conn
            .interact(move |conn| {
                // Fast path: user already exists for this `sub`.
                let maybe_current_user = User::all()
                    .filter(User::with_uid(&auth_info.sub))
                    .first(conn)
                    .optional()
                    .context("failed to load maybe_current_user")?;
                if let Some(current_user) = maybe_current_user {
                    return Ok(current_user);
                }
                // First sight of this `sub`: insert a new row. The
                // `on_conflict(...).do_nothing()` + `returning` combination
                // yields `NotFound` when a concurrent request inserted the
                // same uid first, in which case we re-read the winner's row.
                let new_user = User {
                    id: Uuid::now_v7(),
                    uid: auth_info.sub.clone(),
                    email: auth_info.email,
                };
                match insert_into(users::table)
                    .values(new_user)
                    .on_conflict(users::uid)
                    .do_nothing()
                    .returning(User::as_returning())
                    .get_result(conn)
                {
                    QueryResult::Err(diesel::result::Error::NotFound) => {
                        tracing::debug!("detected race to insert current user record");
                        User::all()
                            .filter(User::with_uid(&auth_info.sub))
                            .first(conn)
                            .context(
                                "failed to load record after detecting race to insert current user",
                            )
                    }
                    QueryResult::Err(err) => {
                        Err(err).context("failed to insert current user record")
                    }
                    QueryResult::Ok(result) => Ok(result),
                }
            })
            .await
            // NOTE(review): panics if the deadpool interact task itself
            // fails (e.g. panicked); the inner query error is propagated
            // via `?`.
            .unwrap()?;
        Ok(CurrentUser(current_user))
    }
}
/// Rejection type for the `CurrentUser` extractor.
pub enum CurrentUserRejection {
    /// Internal failure; rendered through `AppError`'s response.
    AppError(AppError),
    /// Not authenticated: set these cookies and redirect to the given URL.
    SetCookiesAndRedirect(CookieJar, String),
}
// Easily convert semi-arbitrary errors to InternalServerError
impl<E> From<E> for CurrentUserRejection
where
E: Into<AppError>,
{
fn from(err: E) -> Self {
Self::AppError(err.into())
}
}
impl IntoResponse for CurrentUserRejection {
fn into_response(self) -> Response {
match self {
Self::AppError(err) => err.into_response(),
Self::SetCookiesAndRedirect(jar, redirect_to) => {
(jar, Redirect::to(&redirect_to)).into_response()
}
}
}
}

10
src/worker.rs Normal file
View file

@ -0,0 +1,10 @@
use anyhow::Result;
use tracing::Instrument as _;
use crate::app_state::AppState;
/// Background worker entry point. Currently a no-op placeholder that
/// resolves immediately to `Ok(())`, traced under a `run_worker()` span.
pub async fn run_worker(_state: AppState) -> Result<()> {
    let span = tracing::debug_span!("run_worker()");
    async move { Ok(()) }.instrument(span).await
}

9
static/_404.html Normal file
View file

@ -0,0 +1,9 @@
<!doctype html>
<html>
<head>
<title>Not found</title>
</head>
<body>
Page not found.
</body>
</html>

BIN
static/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

41
static/logo.svg Normal file
View file

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="1024.0px" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="1024.0px" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<image height="32" id="Image" width="32" xlink:href="data:image/png;base64,
iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAIRlWElmTU0AKgAAAAgABQESAAMAAAABAAEAAAEaAAUAAAABAAAASgEbAAUAAAABAAAAUgEoAAMAAAABAAIAAIdpAAQAAAABAAAAWgAAAAAAAABIAAAAAQAAAEgAAAABAAOgAQADAAAAAQABAACgAgAEAAAAAQAAACCgAwAEAAAAAQAAACAAAAAAX7wP8AAAAAlwSFlzAAALEwAACxMBAJqcGAAAABxpRE9UAAAAAgAAAAAAAAAQAAAAKAAAABAAAAAQAAAA/SNwdToAAADJSURBVFgJzJJRDsMgDEO5/2/vsGvSpuqTQgiGThMaUmVip3aytdRaC085PnXHQ57hHb4jNMv42QCX0X2yEMVdL5WiGla1J/+7v281RPXZAEqXmhQXPsq4fayn/tOGyRBx+78Y4NVSb5rZLm6NB/qohm+wKRZ+bkKyISJHr8yQYhgIQ4/+/W0DWKg/fgh/p8dz3b0jwtZej4bUhr6POzp1iik5GCIzhDOMXmiRb+qmGATTMzKEN6TXkOO57t4RYghliGaIJxx1hicAAAD///tXxmIAAAClSURBVM2SUQ6DMAxDe//f3WHXZPKkJ5mEpu1HgUjIrWMnFtDa53vwHJOFPqLb1aOi7nT3C4YRusfP7hNPuSadE2FvJPaqgfSE+OC4X+Il2QnRGwgv9HnwzqVzIjrLpaPcAyd0vqePmpaIhQDV8u0BRsu3BphZfkuA0ecjaKkrm+F/YKBwxoe+1JbNEGCLdsvQleCPB9DnfCrEf/crAijEnUHYJ/wBtf7/1NmEySMAAAAASUVORK5CYII="/>
</defs>
<g id="Layer-1">
<path d="M0 128C0 57.3076 57.3076 0 128 0L896 0C966.692 0 1024 57.3076 1024 128L1024 896C1024 966.692 966.692 1024 896 1024L128 1024C57.3076 1024 0 966.692 0 896L0 128Z" fill="#0093a6" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="Layer-3">
<path d="M96 512L384 512L384 576L96 576L96 512Z" fill="#ffffff" fill-rule="nonzero" opacity="1" stroke="none"/>
<path d="M416 256L416 832L704 544L416 256Z" fill="none" opacity="1" stroke="#ffffff" stroke-linecap="butt" stroke-linejoin="bevel" stroke-width="64"/>
<path d="M704 256L768 256L768 832L704 832L704 256Z" fill="#ffffff" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="Layer-4">
<path d="M352 160L480 160L480 256L352 256L352 160Z" fill="#0093a6" fill-rule="nonzero" opacity="1" stroke="none"/>
<path d="M352 832L480 832L480 928L352 928L352 832Z" fill="#0093a6" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="Layer-5">
<g opacity="1">
<path d="M520.055 256L608 85.3258" fill="none" opacity="1" stroke="#ffffff" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="32"/>
<path d="M607.131 156.875L608 85.3258L550.239 127.56" fill="none" opacity="1" stroke="#ffffff" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="32"/>
</g>
<g opacity="1">
<path d="M584.055 320L672 149.326" fill="none" opacity="1" stroke="#ffffff" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="32"/>
<path d="M671.131 220.875L672 149.326L614.239 191.56" fill="none" opacity="1" stroke="#ffffff" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="32"/>
</g>
</g>
<g id="Layer-2" visibility="hidden">
<g transform="matrix(32 0 0 32 0 0)">
<clipPath id="cp">
<path d="M0 0L32 0L32 32L0 32Z"/>
</clipPath>
<g clip-path="url(#cp)">
<use opacity="0.490461" xlink:href="#Image"/>
</g>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 3.2 KiB

13
static/main.css Normal file
View file

@ -0,0 +1,13 @@
:root {
--bs-font-sans-serif: Geist, "Noto Sans", Roboto, "Segoe UI", system-ui, -apple-system, "Helvetica Neue", "Liberation Sans", Arial, sans-serif, "Apple Color Emoji";
}
[data-bs-theme="dark"] {
--bs-body-bg: rgb(27, 28, 30);
--bs-tertiary-bg-rgb: 36, 38, 40;
}
@font-face {
font-family: Geist;
src: url("./geist/geist_variable.ttf");
}

12
templates/base.html Normal file
View file

@ -0,0 +1,12 @@
<!doctype html>
<html lang="en" data-bs-theme="dark">
<head>
<title>{% block title %}Interim{% endblock %}</title>
{% include "meta_tags.html" %}
<link rel="stylesheet" href="{{ base_path }}/main.css">
</head>
<body>
{% include "nav.html" %}
{% block main %}{% endblock main %}
</body>
</html>

4
templates/meta_tags.html Normal file
View file

@ -0,0 +1,4 @@
<meta charset="UTF-8">
<meta name="robots" content="noindex,nofollow">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="icon" href="{{ base_path }}/favicon.ico">

7
templates/nav.html Normal file
View file

@ -0,0 +1,7 @@
<nav>
<ul>
<li>
<a href="{{ base_path }}/auth/login">Login</a>
</li>
</ul>
</nav>

18
templates/tmp.html Normal file
View file

@ -0,0 +1,18 @@
{% extends "base.html" %}
{% block main %}
<table>
<thead>
<tr>
<th>Name</th>
</tr>
</thead>
<tbody>
{% for relation in relations %}
<tr>
<td>{{ relation.relname }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endblock %}