Compare commits

...

6 commits

Author SHA1 Message Date
Brent Schroeter
3a07c42f02 update readme 2026-02-16 15:40:35 +00:00
Brent Schroeter
b2257ab1c0 add support for any() and all() array comparisons 2026-02-16 04:36:18 +00:00
Brent Schroeter
928f6cb759 replace bespoke expression constructs with sql parsing 2026-02-14 03:24:37 +00:00
Brent Schroeter
36a0c27ad4 move relevant common types to phono-pestgros 2026-02-13 08:27:47 +00:00
Brent Schroeter
17ccd80764 revert to stable rust release channel 2026-02-13 08:11:21 +00:00
Brent Schroeter
a4ffb44f4d add phono-pestgros crate 2026-02-13 08:09:17 +00:00
55 changed files with 1662 additions and 1099 deletions

145
Cargo.lock generated
View file

@ -364,6 +364,15 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "backtrace-ext"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
dependencies = [
"backtrace",
]
[[package]]
name = "base64"
version = "0.13.1"
@ -1222,7 +1231,7 @@ version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df"
dependencies = [
"unicode-width",
"unicode-width 0.2.2",
]
[[package]]
@ -1756,6 +1765,12 @@ version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
[[package]]
name = "is_ci"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
@ -1916,6 +1931,36 @@ version = "2.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
[[package]]
name = "miette"
version = "7.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7"
dependencies = [
"backtrace",
"backtrace-ext",
"cfg-if 1.0.3",
"miette-derive",
"owo-colors",
"supports-color",
"supports-hyperlinks",
"supports-unicode",
"terminal_size",
"textwrap",
"unicode-width 0.1.14",
]
[[package]]
name = "miette-derive"
version = "7.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "mime"
version = "0.3.17"
@ -2174,6 +2219,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "4.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
[[package]]
name = "parking"
version = "2.2.1"
@ -2226,20 +2277,22 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "pest"
version = "2.8.0"
version = "2.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6"
checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662"
dependencies = [
"memchr",
"thiserror 2.0.12",
"miette",
"serde",
"serde_json",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.8.0"
version = "2.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5"
checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77"
dependencies = [
"pest",
"pest_generator",
@ -2247,9 +2300,9 @@ dependencies = [
[[package]]
name = "pest_generator"
version = "2.8.0"
version = "2.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841"
checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f"
dependencies = [
"pest",
"pest_meta",
@ -2260,11 +2313,10 @@ dependencies = [
[[package]]
name = "pest_meta"
version = "2.8.0"
version = "2.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0"
checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220"
dependencies = [
"once_cell",
"pest",
"sha2 0.10.9",
]
@ -2328,6 +2380,7 @@ dependencies = [
"chrono",
"derive_builder",
"nom 8.0.0",
"phono-pestgros",
"regex",
"serde",
"sqlx",
@ -2345,6 +2398,7 @@ dependencies = [
"derive_builder",
"futures",
"phono-backends",
"phono-pestgros",
"redact",
"regex",
"serde",
@ -2366,6 +2420,21 @@ dependencies = [
"thiserror 2.0.12",
]
[[package]]
name = "phono-pestgros"
version = "0.0.1"
dependencies = [
"bigdecimal",
"chrono",
"pest",
"pest_derive",
"serde",
"serde_json",
"sqlx",
"thiserror 2.0.12",
"uuid",
]
[[package]]
name = "phono-server"
version = "0.0.1"
@ -2389,6 +2458,7 @@ dependencies = [
"phono-backends",
"phono-models",
"phono-namegen",
"phono-pestgros",
"rand 0.8.5",
"redact",
"regex",
@ -3489,6 +3559,27 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "supports-color"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6"
dependencies = [
"is_ci",
]
[[package]]
name = "supports-hyperlinks"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91"
[[package]]
name = "supports-unicode"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2"
[[package]]
name = "syn"
version = "2.0.101"
@ -3592,6 +3683,26 @@ dependencies = [
"utf-8",
]
[[package]]
name = "terminal_size"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
dependencies = [
"rustix",
"windows-sys 0.59.0",
]
[[package]]
name = "textwrap"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057"
dependencies = [
"unicode-linebreak",
"unicode-width 0.2.2",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@ -4020,6 +4131,12 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-linebreak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-normalization"
version = "0.1.24"
@ -4041,6 +4158,12 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-width"
version = "0.2.2"

View file

@ -1,5 +1,3 @@
cargo-features = ["codegen-backend"]
[workspace]
resolver = "3"
members = ["phono-*"]
@ -19,6 +17,7 @@ futures = "0.3.31"
phono-backends = { path = "./phono-backends" }
phono-models = { path = "./phono-models" }
phono-namegen = { path = "./phono-namegen" }
phono-pestgros = { path = "./phono-pestgros" }
rand = "0.8.5"
redact = { version = "0.1.11", features = ["serde", "zeroize"] }
regex = "1.11.1"
@ -35,7 +34,5 @@ uuid = { version = "1.11.0", features = ["serde", "v4", "v7"] }
validator = { version = "0.20.0", features = ["derive"] }
[profile.dev]
# Use experimental compiler backend for ~30% faster dev builds.
codegen-backend = "cranelift"
# Skip generating debug info for ~10% faster dev builds.
debug = false

View file

@ -1,25 +1,38 @@
# Phonograph
Phonograph is a friendly, collaborative data platform for nerds of all stripes,
built around PostgreSQL.
Phonograph is a friendly, collaborative data platform for nerds of all stripes.
It's for teams who wish they used Postgres, but actually use:
Contemporary browser-centric platforms like Airtable open new frontiers for data
engineering, but they suffer from limitations that conventional databases have
long since addressed: relational data models are an afterthought; row-level
security is missing; third party integrations must be implemented piecemeal for
lack of a standardized API. Phonograph addresses these shortfalls by
implementing an accessible front-end interface backed by an existing, mature
database management system.
- Airtable
- Google Sheets
- Excel
- `inventory_export-2026-02-15_v3-FINAL.csv`
![Screenshot of multi-cursor editing, as well as a dropdown menu with a highlighted option labeled "PostgreSQL credentials"](./docs/screenshot_multi_cursor.png)
Phonograph is built on top of Postgres, exposing a curated subset of its
features and adding a familiar user interface for developers and end users alike.
# Browser Compatibility Note
# Features and Design
[CSS anchor positioning](https://developer.mozilla.org/en-US/docs/Web/CSS/Guides/Anchor_positioning)
is a relatively new API, and Phonograph uses it extensively. Browser support is
now fairly good, but
[Firefox users in particular](https://bugzilla.mozilla.org/show_bug.cgi?id=1988225)
should upgrade to version 147 or newer for the best experience.
- Leverages Postgres RBAC for robust authorization, including RLS (planned),
while adding support for invite-by-email and a familiar permissions model for
collaboration.
- Integrates with effectively any third party software with a Postgres driver.
- Powerful user interface inspired by your favorite text editor features, like
multi-cursor editing.
- Write filters and generated column specs (planned) as PostgreSQL—made
possible by a custom SQL parser with support for advanced syntax like
`column = ANY(array)`.
![Screenshot of a spreadsheet-like user interface. A sidebar lists "Tables" and "Portals", and a table on the right displays URL, numeric, and single-select data.](./docs/screenshot_collab.png)
![Screenshot of a terminal overlayed above another page listing credentials and associated permissions. The terminal runs `psql` and displays a query running on the data from the previous screenshot.](./docs/screenshot_psql.png)
# An Experiment from Second System Technologies
Phonograph is a proof-of-concept built by Second System Technologies to solve
real-world problems, and its bones are built to scale. It's a work in progress,
which means that some expected features are missing and we have yet to smooth
out many rough edges.
# Development Quickstart
@ -39,39 +52,3 @@ external OAuth2 provider is required to manage authentication.
# The Phonograph Authorization Model
Refer to documentation in [docs/auth.md](./docs/auth.md).
# Copyright and License
All original source code in this repository is copyright (C) 2025 Second System
Technologies LLC and distributed under the terms in
[the "LICENSE" file](./LICENSE). Certain third-party assets within
[the "static" directory](./static) may be governed by different licenses, for
example the Open Font License or MIT License, as stated by their original
authors. Copies of each relevant license have been included alongside these
files as needed.
# LLM Code Policy
Large language model code generation is permitted sparingly in very limited
cases, for example for completing clearly defined transformations which span
multiple files and are not supported by conventional code actions. All code
generated by LLMs is considered reflective of its author, and authors are
expected to thoroughly and frequently review before committing affected work.
As of this writing, models display a strong bias towards patterns which are well
represented in public open source projects. This can cause them to tend towards
suboptimal one-size-fits-most or simply outdated coding practices in certain
circumstances. LLM assistance should be sufficiently constrained to avoid
allowing outputs to dictate or implicitly guide significant design decisions.
Furthermore, current language models broadly behave adversarially, in the sense
that they are optimized to make perceiving model outputs versus non-model
outputs as difficult as possible. This can make generated code uniquely
challenging to review effectively. In this context, non-trivial business logic,
particularly logic with security implications, may not be implemented with
direct assistance from LLM tools.
Examples of LLM-assisted changes in practice:
- Replacing SVG icons with similar webfont icons from a different icon pack.
(Revision `ztrnxzqv` (Git `a8dd49f7`))

BIN
docs/screenshot_collab.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 449 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 805 KiB

BIN
docs/screenshot_psql.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 796 KiB

View file

@ -1,7 +1,7 @@
[tools]
deno = "latest"
rebar = "latest"
rust = { version = "nightly", components = "rust-analyzer,clippy,rustc-codegen-cranelift-preview" }
rust = { version = "1.93", components = "rust-analyzer,clippy" }
watchexec = "latest"
"cargo:sqlx-cli" = "0.8.6"

View file

@ -7,6 +7,7 @@ version.workspace = true
chrono = { workspace = true }
derive_builder = { workspace = true }
nom = "8.0.0"
phono-pestgros = { workspace = true }
regex = { workspace = true }
serde = { workspace = true }
sqlx = { workspace = true }

View file

@ -1,7 +1,6 @@
use phono_pestgros::escape_identifier;
use sqlx::{PgConnection, Postgres, Row as _, pool::PoolConnection, query};
use crate::escape_identifier;
/// Newtype to differentiate between workspace and application database
/// connections.
#[derive(Debug)]

View file

@ -22,6 +22,3 @@ pub mod pg_database;
pub mod pg_namespace;
pub mod pg_role;
pub mod rolnames;
mod utils;
pub use utils::escape_identifier;

View file

@ -1,10 +1,9 @@
use std::fmt::Display;
use phono_pestgros::escape_identifier;
use sqlx::{Encode, Postgres, postgres::types::Oid, query_as, query_as_unchecked};
use crate::{
client::WorkspaceClient, escape_identifier, pg_acl::PgAclItem, pg_namespace::PgNamespace,
};
use crate::{client::WorkspaceClient, pg_acl::PgAclItem, pg_namespace::PgNamespace};
#[derive(Clone, Debug)]
pub struct PgClass {

View file

@ -1,25 +0,0 @@
/// Escape a raw identifier (such as a table name, column name, etc.) so that
/// it can be safely interpolated into a SQL query.
///
/// Escaping identifiers for Postgres is fairly easy, provided that the input
/// is already known to contain no invalid multi-byte sequences (guaranteed
/// here, since `&str` is always valid UTF-8). Backslashes may remain as-is;
/// embedded double quotes are escaped simply by doubling them (`"` becomes
/// `""`), and the whole identifier is wrapped in double quotes. Refer to the
/// PQescapeInternal() function in libpq (fe-exec.c) and Diesel's
/// PgQueryBuilder::push_identifier().
pub fn escape_identifier(identifier: &str) -> String {
    // Build the quoted form in a single pass; +2 covers the surrounding
    // quotes in the common case of no embedded quotes.
    let mut escaped = String::with_capacity(identifier.len() + 2);
    escaped.push('"');
    for ch in identifier.chars() {
        if ch == '"' {
            // Double any embedded quote.
            escaped.push('"');
        }
        escaped.push(ch);
    }
    escaped.push('"');
    escaped
}
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises the three interesting inputs: a plain identifier, one
    // containing whitespace, and one containing embedded double quotes
    // (each `"` must be doubled per Postgres quoting rules).
    #[test]
    fn test_escape_identifier() {
        assert_eq!(escape_identifier("hello"), r#""hello""#);
        assert_eq!(escape_identifier("hello world"), r#""hello world""#);
        assert_eq!(
            escape_identifier(r#""hello" "world""#),
            r#""""hello"" ""world""""#
        );
    }
}

View file

@ -9,6 +9,7 @@ chrono = { workspace = true }
derive_builder = { workspace = true }
futures = { workspace = true }
phono-backends = { workspace = true }
phono-pestgros = { workspace = true }
redact = { workspace = true }
regex = { workspace = true }
serde = { workspace = true }

View file

@ -0,0 +1,2 @@
-- Restore the jsonb-encoded `table_filter` column and drop the text-based
-- `filter` column. NOTE(review): presumably the reverse ("down") half of the
-- migration that swaps `table_filter` for `filter` — confirm against the
-- companion migration file and the migration tool's up/down naming convention.
alter table portals add column if not exists table_filter jsonb not null default 'null';
alter table portals drop column if exists filter;

View file

@ -0,0 +1,5 @@
alter table portals add column if not exists filter text not null default '';
-- This is irreversible and ordinarily should be run in a later migration, but
-- it's being rolled out while manually verifying that there will be negligible
-- impact to users, so I'm folding it into this migration for convenience.
alter table portals drop column if exists table_filter;

View file

@ -93,18 +93,20 @@ impl<'a> Accessor<Portal> for PortalAccessor<'a> {
AccessError::NotFound
})?;
spec.verify_workspace_id
.is_none_or(|value| portal.workspace_id == value)
.ok_or_else(|| {
debug!("workspace_id check failed for portal");
AccessError::NotFound
})?;
spec.verify_rel_oid
.is_none_or(|value| portal.class_oid == value)
.ok_or_else(|| {
debug!("rel_oid check failed for portal");
AccessError::NotFound
})?;
if spec
.verify_workspace_id
.is_some_and(|value| portal.workspace_id != value)
{
debug!("workspace_id check failed for portal");
return Err(AccessError::NotFound);
}
if spec
.verify_rel_oid
.is_some_and(|value| portal.class_oid != value)
{
debug!("rel_oid check failed for portal");
return Err(AccessError::NotFound);
}
let rel = if let Some(value) = spec.using_rel {
value

View file

@ -1,164 +0,0 @@
use std::fmt::Display;
use phono_backends::escape_identifier;
use serde::{Deserialize, Serialize};
use crate::{datum::Datum, query_builders::QueryFragment};
/// Building block of a syntax tree for a constrained subset of SQL that can be
/// statically analyzed, to validate that user-provided expressions perform only
/// operations that are read-only and otherwise safe to execute.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "t", content = "c")]
pub enum PgExpressionAny {
    /// Infix comparison or a postfix `is null` / `is not null` test.
    Comparison(PgComparisonExpression),
    /// Possibly-qualified identifier reference.
    Identifier(PgIdentifierExpression),
    /// Constant value, bound as a query parameter (except SQL `null`).
    Literal(Datum),
    /// `to_json(...)` projection of named sub-expressions.
    ToJson(PgToJsonExpression),
}
impl PgExpressionAny {
    /// Render this expression tree into a parameterized [`QueryFragment`].
    pub fn into_query_fragment(self) -> QueryFragment {
        match self {
            Self::Comparison(expr) => expr.into_query_fragment(),
            Self::Identifier(expr) => expr.into_query_fragment(),
            Self::Literal(expr) => {
                // SQL `null` is rendered inline rather than bound as a
                // parameter; a bare `null` literal is safe to interpolate.
                if expr.is_none() {
                    QueryFragment::from_sql("null")
                } else {
                    QueryFragment::from_param(expr)
                }
            }
            Self::ToJson(expr) => expr.into_query_fragment(),
        }
    }
}
/// Boolean-valued comparison forms: infix operators plus the postfix
/// `is null` / `is not null` tests.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "t", content = "c")]
pub enum PgComparisonExpression {
    Infix(PgInfixExpression<PgComparisonOperator>),
    IsNull(PgIsNullExpression),
    IsNotNull(PgIsNotNullExpression),
}
impl PgComparisonExpression {
    /// Delegate SQL rendering to the concrete variant.
    fn into_query_fragment(self) -> QueryFragment {
        match self {
            Self::Infix(expr) => expr.into_query_fragment(),
            Self::IsNull(expr) => expr.into_query_fragment(),
            Self::IsNotNull(expr) => expr.into_query_fragment(),
        }
    }
}
/// Infix expression `lhs <op> rhs`, generic over the operator type `T`, whose
/// `Display` impl supplies the literal SQL operator text.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PgInfixExpression<T: Display> {
    pub operator: T,
    pub lhs: Box<PgExpressionAny>,
    pub rhs: Box<PgExpressionAny>,
}
impl<T: Display> PgInfixExpression<T> {
    fn into_query_fragment(self) -> QueryFragment {
        // Fully parenthesized as `((lhs) op (rhs))` so operator precedence in
        // the generated SQL can never differ from the tree structure.
        QueryFragment::concat([
            QueryFragment::from_sql("(("),
            self.lhs.into_query_fragment(),
            QueryFragment::from_sql(&format!(") {} (", self.operator)),
            self.rhs.into_query_fragment(),
            QueryFragment::from_sql("))"),
        ])
    }
}
/// Whitelisted infix operators. Each variant's `strum` attribute is the exact
/// SQL token emitted through the `Display`-based rendering in
/// [`PgInfixExpression`].
#[derive(Clone, Debug, strum::Display, Deserialize, PartialEq, Serialize)]
pub enum PgComparisonOperator {
    #[strum(to_string = "and")]
    And,
    #[strum(to_string = "=")]
    Eq,
    #[strum(to_string = ">")]
    Gt,
    #[strum(to_string = "<")]
    Lt,
    #[strum(to_string = "<>")]
    Neq,
    #[strum(to_string = "or")]
    Or,
}
/// Postfix null test; renders as `((lhs) is null)`.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PgIsNullExpression {
    lhs: Box<PgExpressionAny>,
}
impl PgIsNullExpression {
    fn into_query_fragment(self) -> QueryFragment {
        QueryFragment::concat([
            QueryFragment::from_sql("(("),
            self.lhs.into_query_fragment(),
            QueryFragment::from_sql(") is null)"),
        ])
    }
}
/// Postfix non-null test; renders as `((lhs) is not null)`.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PgIsNotNullExpression {
    lhs: Box<PgExpressionAny>,
}
impl PgIsNotNullExpression {
    fn into_query_fragment(self) -> QueryFragment {
        QueryFragment::concat([
            QueryFragment::from_sql("(("),
            self.lhs.into_query_fragment(),
            QueryFragment::from_sql(") is not null)"),
        ])
    }
}
/// Possibly-qualified identifier such as `schema.table.column`.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PgIdentifierExpression {
    /// Raw (unescaped, unquoted) identifier segments, ordered from the
    /// outermost qualifier to the target name.
    pub parts_raw: Vec<String>,
}
impl PgIdentifierExpression {
    fn into_query_fragment(self) -> QueryFragment {
        // Each segment is individually quote-escaped, then the segments are
        // joined with `.`.
        QueryFragment::join(
            self.parts_raw
                .iter()
                .map(|part| QueryFragment::from_sql(&escape_identifier(part))),
            QueryFragment::from_sql("."),
        )
    }
}
/// Expression producing a JSON object from named sub-expressions.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PgToJsonExpression {
    /// Ordered `(key, value-expression)` pairs; each key becomes a JSON field
    /// name (escaped as an identifier).
    entries: Vec<(String, PgExpressionAny)>,
}
impl PgToJsonExpression {
    /// Generates a query fragment to the effect of:
    /// `to_json((select ($expr) as "ident", ($expr2) as "ident2"))`
    fn into_query_fragment(self) -> QueryFragment {
        if self.entries.is_empty() {
            // A select list cannot be empty, so emit an empty JSON object
            // literal directly.
            QueryFragment::from_sql("'{}'")
        } else {
            QueryFragment::concat([
                QueryFragment::from_sql("to_json((select "),
                QueryFragment::join(
                    self.entries.into_iter().map(|(key, value)| {
                        QueryFragment::concat([
                            QueryFragment::from_sql("("),
                            value.into_query_fragment(),
                            QueryFragment::from_sql(&format!(") as {}", escape_identifier(&key))),
                        ])
                    }),
                    QueryFragment::from_sql(", "),
                ),
                QueryFragment::from_sql("))"),
            ])
        }
    }
}

View file

@ -2,6 +2,7 @@ use bigdecimal::BigDecimal;
use chrono::{DateTime, Utc};
use derive_builder::Builder;
use phono_backends::pg_attribute::PgAttribute;
use phono_pestgros::Datum;
use serde::{Deserialize, Serialize};
use sqlx::Acquire as _;
use sqlx::{
@ -11,9 +12,7 @@ use sqlx::{
use thiserror::Error;
use uuid::Uuid;
use crate::client::AppDbClient;
use crate::datum::Datum;
use crate::presentation::Presentation;
use crate::{client::AppDbClient, presentation::Presentation};
/// A materialization of a database column, fit for consumption by an end user.
///

View file

@ -14,20 +14,15 @@
// received a copy of the GNU Affero General Public License along with this
// program. If not, see <http://www.gnu.org/licenses/>.
#![feature(bool_to_result)] // Enable support for `ok_or()` on bools.
pub mod accessors;
pub mod client;
pub mod cluster;
pub mod datum;
pub mod errors;
pub mod expression;
pub mod field;
pub mod language;
mod macros;
pub mod portal;
pub mod presentation;
pub mod query_builders;
pub mod service_cred;
pub mod user;
pub mod workspace;

View file

@ -3,13 +3,11 @@ use std::sync::LazyLock;
use derive_builder::Builder;
use regex::Regex;
use serde::Serialize;
use sqlx::{postgres::types::Oid, query, query_as, types::Json};
use sqlx::{postgres::types::Oid, query, query_as};
use uuid::Uuid;
use validator::Validate;
use crate::{
client::AppDbClient, errors::QueryError, expression::PgExpressionAny, macros::with_id_query,
};
use crate::{client::AppDbClient, errors::QueryError, macros::with_id_query};
pub static RE_PORTAL_NAME: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9][()a-zA-Z0-9 _-]*[a-zA-Z0-9()_-]$").unwrap());
@ -36,7 +34,7 @@ pub struct Portal {
/// JSONB-encoded expression to use for filtering rows in the web-based
/// table view.
pub table_filter: Json<Option<PgExpressionAny>>,
pub filter: String,
}
impl Portal {
@ -65,7 +63,7 @@ select
workspace_id,
class_oid,
form_public,
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
filter
from portals
where id = $1
"#,
@ -87,7 +85,7 @@ select
workspace_id,
class_oid,
form_public,
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
filter
from portals
where workspace_id = $1
"#,
@ -122,7 +120,7 @@ select
workspace_id,
class_oid,
form_public,
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
filter
from portals
where workspace_id = $1 and class_oid = $2
"#,
@ -161,7 +159,7 @@ returning
workspace_id,
class_oid,
form_public,
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
filter
"#,
self.workspace_id,
self.class_oid,
@ -180,7 +178,7 @@ pub struct Update {
form_public: Option<bool>,
#[builder(default, setter(strip_option = true))]
table_filter: Option<Option<PgExpressionAny>>,
filter: Option<String>,
#[builder(default, setter(strip_option = true))]
#[validate(regex(path = *RE_PORTAL_NAME))]
@ -196,16 +194,16 @@ impl Update {
query!(
"update portals set form_public = $1 where id = $2",
form_public,
self.id
self.id,
)
.execute(app_db.get_conn())
.await?;
}
if let Some(table_filter) = self.table_filter {
if let Some(filter) = self.filter {
query!(
"update portals set table_filter = $1 where id = $2",
Json(table_filter) as Json<Option<PgExpressionAny>>,
self.id
"update portals set filter = $1 where id = $2",
filter,
self.id,
)
.execute(app_db.get_conn())
.await?;

View file

@ -1,171 +0,0 @@
//! Assorted utilities for dynamically constructing and manipulating [`sqlx`]
//! queries.
use sqlx::{Postgres, QueryBuilder};
use crate::datum::Datum;
/// Representation of a partial, parameterized SQL query. Allows callers to
/// build queries iteratively and dynamically, handling parameter numbering
/// (`$1`, `$2`, `$3`, ...) automatically.
///
/// This is similar to [`sqlx::QueryBuilder`], except that [`QueryFragment`]
/// objects are composable and may be concatenated to each other.
#[derive(Clone, Debug, PartialEq)]
pub struct QueryFragment {
    /// SQL string, split wherever there is a query parameter. For example,
    /// `select * from foo where id = $1 and status = $2` is represented along
    /// the lines of `["select * from foo where id = ", " and status = ", ""]`.
    /// `plain_sql` should always have exactly one more element than `params`.
    plain_sql: Vec<String>,
    /// Parameter values in order; `params[i]` is bound between
    /// `plain_sql[i]` and `plain_sql[i + 1]`.
    params: Vec<Datum>,
}
impl QueryFragment {
    /// Validate invariants. Should be run immediately before returning any
    /// useful output.
    fn gut_checks(&self) {
        assert!(self.plain_sql.len() == self.params.len() + 1);
    }

    /// Parse from a SQL string with no parameters.
    pub fn from_sql(sql: &str) -> Self {
        Self {
            plain_sql: vec![sql.to_owned()],
            params: vec![],
        }
    }

    /// Convenience function to construct an empty value.
    pub fn empty() -> Self {
        Self::from_sql("")
    }

    /// Parse from a parameter value with no additional SQL. (Renders as `$n`,
    /// where `n` is the appropriate parameter index.)
    pub fn from_param(param: Datum) -> Self {
        Self {
            // Two empty SQL pieces surrounding the single parameter, keeping
            // the `plain_sql.len() == params.len() + 1` invariant.
            plain_sql: vec!["".to_owned(), "".to_owned()],
            params: vec![param],
        }
    }

    /// Append another query fragment to this one.
    pub fn push(&mut self, mut other: QueryFragment) {
        // The last SQL piece of `self` and the first piece of `other` are
        // adjacent in the combined query (no parameter between them), so they
        // merge into a single piece; everything after `other`'s first piece
        // follows verbatim, preserving the invariant.
        let tail = self
            .plain_sql
            .pop()
            .expect("already asserted that vec contains at least 1 item");
        let head = other
            .plain_sql
            .first()
            .expect("already asserted that vec contains at least 1 item");
        self.plain_sql.push(format!("{tail}{head}"));
        for value in other.plain_sql.drain(1..) {
            self.plain_sql.push(value);
        }
        self.params.append(&mut other.params);
    }

    /// Combine multiple QueryFragments with a separator, similar to
    /// [`Vec::join`].
    pub fn join<I: IntoIterator<Item = Self>>(fragments: I, sep: Self) -> Self {
        let mut acc = QueryFragment::from_sql("");
        let mut iter = fragments.into_iter();
        // Lag one fragment behind the iterator so the separator is inserted
        // only *between* fragments, never after the last one.
        let mut fragment = match iter.next() {
            Some(value) => value,
            None => return acc,
        };
        for next_fragment in iter {
            acc.push(fragment);
            acc.push(sep.clone());
            fragment = next_fragment;
        }
        acc.push(fragment);
        acc
    }

    /// Convenience method equivalent to:
    /// `QueryFragment::join(fragments, QueryFragment::from_sql(""))`
    pub fn concat<I: IntoIterator<Item = Self>>(fragments: I) -> Self {
        Self::join(fragments, Self::from_sql(""))
    }

    /// Checks whether value is empty. A value is considered empty if the
    /// resulting SQL code is 0 characters long.
    pub fn is_empty(&self) -> bool {
        self.gut_checks();
        // A fragment holding parameters renders at least "$1", so a fragment
        // can only be empty when it has exactly one, zero-length SQL piece.
        self.plain_sql.len() == 1
            && self
                .plain_sql
                .first()
                .expect("already checked that len == 1")
                .is_empty()
    }
}
impl From<QueryFragment> for QueryBuilder<'_, Postgres> {
    /// Lower the fragment into a concrete [`sqlx::QueryBuilder`], binding
    /// each parameter in order.
    fn from(value: QueryFragment) -> Self {
        value.gut_checks();
        let mut builder = QueryBuilder::new("");
        let mut param_iter = value.params.into_iter();
        // SQL pieces and parameters alternate; because there is exactly one
        // more SQL piece than parameters, the iterator runs dry on the final
        // piece and nothing is bound after it.
        for plain_sql in value.plain_sql {
            builder.push(plain_sql);
            if let Some(param) = param_iter.next() {
                param.push_bind_onto(&mut builder);
            }
        }
        builder
    }
}
/// Helper type to make it easier to build and reason about multiple related SQL
/// queries.
#[derive(Clone, Debug)]
pub struct SelectQuery {
/// Query fragment following (not including) "select ".
pub selection: QueryFragment,
/// Query fragment following (not including) "from ".
pub source: QueryFragment,
/// Query fragment following (not including) "where ", or empty if not
/// applicable.
pub filters: QueryFragment,
/// Query fragment following (not including) "order by ", or empty if not
/// applicable.
pub order: QueryFragment,
/// Query fragment following (not including) "limit ", or empty if not
/// applicable.
pub limit: QueryFragment,
}
impl From<SelectQuery> for QueryFragment {
    /// Assemble the full `select ... from ... [where ...] [order by ...]
    /// [limit ...]` statement, omitting each optional clause whose fragment
    /// is empty.
    fn from(value: SelectQuery) -> Self {
        let mut result = QueryFragment::from_sql("select ");
        result.push(value.selection);
        result.push(QueryFragment::from_sql(" from "));
        result.push(value.source);
        if !value.filters.is_empty() {
            result.push(QueryFragment::from_sql(" where "));
            result.push(value.filters);
        }
        if !value.order.is_empty() {
            result.push(QueryFragment::from_sql(" order by "));
            result.push(value.order);
        }
        if !value.limit.is_empty() {
            result.push(QueryFragment::from_sql(" limit "));
            result.push(value.limit);
        }
        result
    }
}
}
impl From<SelectQuery> for QueryBuilder<'_, Postgres> {
fn from(value: SelectQuery) -> Self {
QueryFragment::from(value).into()
}
}

15
phono-pestgros/Cargo.toml Normal file
View file

@ -0,0 +1,15 @@
[package]
name = "phono-pestgros"
edition.workspace = true
version.workspace = true
[dependencies]
bigdecimal = { workspace = true }
chrono = { workspace = true }
pest = { version = "2.8.6", features = ["miette-error"] }
pest_derive = "2.8.6"
serde = { workspace = true }
serde_json = { workspace = true }
sqlx = { workspace = true }
thiserror = { workspace = true }
uuid = { workspace = true }

View file

@ -0,0 +1,52 @@
use crate::{Datum, Expr, InfixOp};
// Parsing tests for array literals and the `any()` comparison modifier.

#[test]
fn empty_array_parses() {
    assert_eq!(Expr::try_from("array[]"), Ok(Expr::Array(vec![])));
}

#[test]
fn array_of_literals_parses() {
    assert_eq!(
        Expr::try_from("array[1, 2, 3]"),
        Ok(Expr::Array(vec![
            Expr::Literal(Datum::Numeric(Some(1.into()))),
            Expr::Literal(Datum::Numeric(Some(2.into()))),
            Expr::Literal(Datum::Numeric(Some(3.into()))),
        ])),
    );
}

#[test]
fn array_of_exprs_parses() {
    // Elements may be arbitrary expressions, not just literals; a
    // parenthesized literal collapses to the literal itself.
    assert_eq!(
        Expr::try_from("array[(1), 2 + 3]"),
        Ok(Expr::Array(vec![
            Expr::Literal(Datum::Numeric(Some(1.into()))),
            Expr::Infix {
                lhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
                op: InfixOp::Add,
                rhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
            },
        ])),
    );
}

#[test]
fn array_cmp_modifier_parses() {
    // `= any(...)` wraps the base operator in `WithCmpModifierAny`.
    assert_eq!(
        Expr::try_from("3 = any(array[3])"),
        Ok(Expr::Infix {
            lhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
            op: InfixOp::WithCmpModifierAny(Box::new(InfixOp::Eq)),
            rhs: Box::new(Expr::Array(vec![Expr::Literal(Datum::Numeric(Some(
                3.into()
            )))]))
        }),
    );
}

#[test]
fn non_parenthesized_array_cmp_modifier_fails() {
    // Parentheses after `any` are required; bare `any array[...]` must be
    // rejected.
    assert!(Expr::try_from("3 = any array[3]").is_err());
}

View file

@ -4,6 +4,9 @@ use serde::{Deserialize, Serialize};
use sqlx::{Postgres, QueryBuilder};
use uuid::Uuid;
/// Enum representing all supported literal types, providing convenience
/// methods for working with them in [`sqlx`] queries, and defining a [`serde`]
/// encoding for use across the application stack.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "t", content = "c")]
pub enum Datum {
@ -20,6 +23,7 @@ pub enum Datum {
Uuid(Option<Uuid>),
}
// TODO: Should sqlx helpers be moved to a separate crate?
impl Datum {
// TODO: Can something similar be achieved with a generic return type?
/// Bind this as a parameter to a sqlx query.

View file

@ -0,0 +1,25 @@
use std::error::Error;
use sqlx::{Postgres, QueryBuilder};
use crate::Expr;
// Literals become numbered bind parameters ($1, $2, ...); sub-expressions
// are re-parenthesized to preserve precedence in the generated SQL.
#[test]
fn sql_converts_to_query_builder() -> Result<(), Box<dyn Error>> {
    let expr = Expr::try_from("3 + 5 < 10")?;
    assert_eq!(
        QueryBuilder::<'_, Postgres>::from(expr).sql(),
        "(($1) + ($2)) < ($3)",
    );
    Ok(())
}

// An any/all comparison modifier survives a parse -> generate round trip.
#[test]
fn cmp_array_modifier_round_trips() -> Result<(), Box<dyn Error>> {
    let expr = Expr::try_from("1 = 2 and 3 < any(array[4])")?;
    assert_eq!(
        QueryBuilder::<'_, Postgres>::from(expr).sql(),
        "(($1) = ($2)) and (($3) < any (array[($4)]))",
    );
    Ok(())
}

View file

@ -0,0 +1,76 @@
use std::error::Error;
use crate::{Datum, Expr, FnArgs, InfixOp};
// Zero-argument invocation produces an empty, non-distinct argument list.
#[test]
fn parses_without_args() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from("now()")?,
        Expr::FnCall {
            name: vec!["now".to_owned()],
            args: FnArgs::Exprs {
                distinct_flag: false,
                exprs: vec![],
            },
        }
    );
    Ok(())
}

// Arguments may themselves be full expressions.
#[test]
fn parses_with_args() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from("repeat('hello!', 1 + 2)")?,
        Expr::FnCall {
            name: vec!["repeat".to_owned()],
            args: FnArgs::Exprs {
                distinct_flag: false,
                exprs: vec![
                    Expr::Literal(Datum::Text(Some("hello!".to_owned()))),
                    Expr::Infix {
                        lhs: Box::new(Expr::Literal(Datum::Numeric(Some(1.into())))),
                        op: InfixOp::Add,
                        rhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
                    }
                ],
            },
        }
    );
    Ok(())
}

// Qualified names keep one element per part; quoted parts keep their case.
#[test]
fn schema_qualified() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from(r#"my_schema."MyFunc"('hello!', 1)"#)?,
        Expr::FnCall {
            name: vec!["my_schema".to_owned(), "MyFunc".to_owned()],
            args: FnArgs::Exprs {
                distinct_flag: false,
                exprs: vec![
                    Expr::Literal(Datum::Text(Some("hello!".to_owned()))),
                    Expr::Literal(Datum::Numeric(Some(1.into()))),
                ],
            },
        }
    );
    Ok(())
}

// `DISTINCT` sets the flag; the unquoted function name is lowercased.
#[test]
fn distinct_aggregate() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from(r#"AGGREGATOR(DISTINCT a."Col 1", b."Col 2")"#)?,
        Expr::FnCall {
            name: vec!["aggregator".to_owned()],
            args: FnArgs::Exprs {
                distinct_flag: true,
                exprs: vec![
                    Expr::ObjName(vec!["a".to_owned(), "Col 1".to_owned()]),
                    Expr::ObjName(vec!["b".to_owned(), "Col 2".to_owned()]),
                ],
            },
        }
    );
    Ok(())
}

View file

@ -0,0 +1,230 @@
//! Based on
//! https://github.com/pest-parser/pest/blob/master/grammars/src/grammars/sql.pest.
//! (Original is dual-licensed under MIT/Apache-2.0.)
//!
//! PostgreSQL departs extensively from the SQLite flavored dialect captured in
//! the original grammar. For example, rules for identifiers/object names
//! differ, as do keywords, built-in types, and syntax for specifying function
//! arguments, type modifiers, CTEs, and so on.
//!
//! This grammar covers a larger subset of the Postgres SQL dialect, but it is a
//! work in progress and is far from complete. It should only be used to parse
//! input that is "PostgreSQL-esque", not input that expects spec compliance.
Command = _{ SOI ~ (Query | ExplainQuery | DDL | ACL) ~ EOF }
ACL = _{ DropRole | DropUser | CreateRole | CreateUser | AlterUser | GrantPrivilege | RevokePrivilege }
CreateUser = {
^"create" ~ ^"user" ~ Identifier ~ (^"with")? ~ ^"password" ~ SingleQuotedString ~
AuthMethod?
}
AlterUser = {
^"alter" ~ ^"user" ~ Identifier ~ (^"with")? ~ AlterOption
}
AlterOption = _{ AlterLogin | AlterNoLogin | AlterPassword }
AlterLogin = { ^"login" }
AlterNoLogin = { ^"nologin" }
AlterPassword = { ^"password" ~ SingleQuotedString ~ AuthMethod? }
AuthMethod = { ^"using" ~ (ChapSha1 | Md5 | Ldap) }
ChapSha1 = { ^"chap-sha1" }
Md5 = { ^"md5" }
Ldap = { ^"ldap" }
DropUser = { ^"drop" ~ ^"user" ~ Identifier }
CreateRole = { ^"create" ~ ^"role" ~ Identifier }
DropRole = { ^"drop" ~ ^"role" ~ Identifier }
GrantPrivilege = { ^"grant" ~ PrivBlock ~ ^"to" ~ Identifier }
RevokePrivilege = { ^"revoke" ~ PrivBlock ~ ^"from" ~ Identifier }
PrivBlock = _{ PrivBlockPrivilege | PrivBlockRolePass }
PrivBlockPrivilege = {Privilege ~ (PrivBlockUser | PrivBlockSpecificUser | PrivBlockRole
| PrivBlockSpecificRole | PrivBlockTable | PrivBlockSpecificTable)}
PrivBlockUser = { ^"user" }
PrivBlockSpecificUser = { ^"on" ~ ^"user" ~ Identifier }
PrivBlockRole = { ^"role" }
PrivBlockSpecificRole = { ^"on" ~ ^"role" ~ Identifier }
PrivBlockTable = { ^"table" }
PrivBlockSpecificTable = { ^"on" ~ ^"table" ~ Identifier }
PrivBlockRolePass = { Identifier }
Privilege = _{ PrivilegeRead | PrivilegeWrite | PrivilegeExecute |
PrivilegeCreate | PrivilegeAlter | PrivilegeDrop |
PrivilegeSession | PrivilegeUsage }
PrivilegeAlter = { ^"alter" }
PrivilegeCreate = { ^"create" }
PrivilegeDrop = { ^"drop" }
PrivilegeExecute = { ^"execute" }
PrivilegeRead = { ^"read" }
PrivilegeSession = { ^"session" }
PrivilegeUsage = { ^"usage" }
PrivilegeWrite = { ^"write" }
DDL = _{ CreateTable | DropTable | CreateProc }
CreateTable = {
^"create" ~ ^"table" ~ Identifier ~
"(" ~ Columns ~ "," ~ PrimaryKey ~ ")" ~
Distribution
}
Columns = { ColumnDef ~ ("," ~ ColumnDef)* }
ColumnDef = { Identifier ~ TypeCast ~ ColumnDefIsNull? }
ColumnDefIsNull = { NotFlag? ~ ^"null" }
PrimaryKey = {
^"primary" ~ ^"key" ~
"(" ~ Identifier ~ ("," ~ Identifier)* ~ ")"
}
Distribution = { ^"distributed" ~ (Global | Sharding) }
Global = { ^"globally" }
Sharding = { ^"by" ~ "(" ~ Identifier ~ ("," ~ Identifier)* ~ ")"}
DropTable = { ^"drop" ~ ^"table" ~ Identifier }
CreateProc = {
^"create" ~ ^"procedure" ~ Identifier ~
"(" ~ ProcParams? ~ ")" ~ (^"language" ~ ProcLanguage)? ~
((^"as" ~ "$$" ~ ProcBody ~ "$$") | (^"begin" ~ "atomic" ~ ProcBody ~ "end"))
}
ProcParams = { ProcParamDef ~ ("," ~ ProcParamDef)* }
ProcParamDef = { TypeCast }
ProcLanguage = { SQL }
SQL = { ^"sql" }
ProcBody = { (Insert | Update | Delete) }
ExplainQuery = _{ Explain }
Explain = { ^"explain" ~ Query }
Query = { (SelectWithOptionalContinuation | Values | Insert | Update | Delete) }
SelectWithOptionalContinuation = { Select ~ (ExceptContinuation | UnionAllContinuation)? }
ExceptContinuation = { ((^"except" ~ ^"distinct") | ^"except") ~ Select }
UnionAllContinuation = { ^"union" ~ ^"all" ~ Select }
Select = {
^"select" ~ Projection ~ ^"from" ~ Scan ~
Join? ~ WhereClause? ~
(^"group" ~ ^"by" ~ GroupBy)? ~
(^"having" ~ Having)?
}
Projection = { Distinct? ~ ProjectionElement ~ ("," ~ ProjectionElement)* }
ProjectionElement = _{ Asterisk | Column }
Column = { Expr ~ ((^"as")? ~ Identifier)? }
Asterisk = { "*" }
WhereClause = _{ ^"where" ~ Selection }
Selection = { Expr }
Scan = { (Identifier | SubQuery) ~ ((^"as")? ~ Identifier)? }
Join = { JoinKind? ~ ^"join" ~ Scan ~ ^"on" ~ Expr }
JoinKind = _{ ( InnerJoinKind | LeftJoinKind ) }
InnerJoinKind = { ^"inner" }
LeftJoinKind = { ^"left" ~ (^"outer")? }
GroupBy = { Expr ~ ("," ~ Expr)* }
Having = { Expr }
SubQuery = { "(" ~ (SelectWithOptionalContinuation | Values) ~ ")" }
Insert = { ^"insert" ~ ^"into" ~ Identifier ~ ("(" ~ TargetColumns ~ ")")? ~ (Values | Select) ~ OnConflict? }
TargetColumns = { Identifier ~ ("," ~ Identifier)* }
OnConflict = _{ ^"on conflict" ~ ^"do" ~ (DoNothing | DoReplace | DoFail) }
DoReplace = { ^"replace" }
DoNothing = { ^"nothing" }
DoFail = { ^"fail" }
Update = { ^"update" ~ Identifier ~ ^"set" ~ UpdateList ~ (UpdateFrom | WhereClause)? }
UpdateList = { UpdateItem ~ ("," ~ UpdateItem)* }
UpdateItem = { Identifier ~ "=" ~ Expr }
UpdateFrom = _{ ^"from" ~ Scan ~ (^"where" ~ Expr)? }
Values = { ^"values" ~ Row ~ ("," ~ Row)* }
Delete = { ^"delete" ~ ^"from" ~ Identifier ~ (^"where" ~ DeleteFilter)? }
DeleteFilter = { Expr }
Identifier = ${ DoubleQuotedIdentifier | UnquotedIdentifier }
DoubleQuotedIdentifier = @{ "\"" ~ ("\"\"" | '\u{01}'..'\u{21}' | '\u{23}'..'\u{10FFFF}')+ ~ "\"" }
UnquotedIdentifier = @{ !(Keyword ~ ("(" | "[" | WHITESPACE | "," | EOF)) ~ (UnquotedIdentifierStart ~ UnquotedIdentifierRemainder*) }
UnquotedIdentifierStart = _{ 'a'..'я' | 'A'..'Я' | "_" }
UnquotedIdentifierRemainder = _{ UnquotedIdentifierStart | "$" | ASCII_DIGIT }
Keyword = { ^"left" | ^"having" | ^"not" | ^"inner" | ^"group"
| ^"on" | ^"join" | ^"from" | ^"exists" | ^"except"
| ^"union" | ^"where" | ^"distinct" | ^"between" | ^"option"
| ^"values" | ^"with" | ^"as" | ^"array" | ^"any" | ^"some"
| ^"all" | ^"in" }
ExprRoot = _{ &SOI ~ Expr ~ &EOI }
Expr = { ExprAtomValue ~ (ExprInfixOp ~ ExprAtomValue)* }
ExprInfixOp = _{ Between | NonCmpInfixOp | CmpInfixOp | ConcatInfixOp | And | Or }
Between = { NotFlag? ~ ^"between" }
And = { ^"and" }
Or = { ^"or" }
ConcatInfixOp = { "||" }
// Ordered choice: multi-character operators (NotEq, GtEq, LtEq) must be tried
// before their single-character prefixes (Gt, Lt, Eq). The original rule
// listed `Lt` twice; the trailing duplicate was unreachable and is removed.
CmpInfixOp = { (NotEq | GtEq | Gt | LtEq | Lt | Eq) ~ (CmpArrayModifier ~ &ExpressionInParentheses)? }
Eq = { "=" }
Gt = { ">" }
GtEq = { ">=" }
Lt = { "<" }
LtEq = { "<=" }
NotEq = { "<>" | "!=" }
NonCmpInfixOp = _{ Add | Subtract | Multiply | Divide | In }
Add = { "+" }
Subtract = { "-" }
Multiply = { "*" }
Divide = { "/" }
In = { NotFlag? ~ ^"in" }
CmpArrayModifier = { CmpModifierAny | CmpModifierAll }
// `some` is a SQL synonym for `any`. The keyword literal must not contain
// trailing whitespace: `^"some "` would reject `= some(array[...])` (no space
// before the parenthesis), while the implicit WHITESPACE rule already skips
// any space that does follow the keyword.
CmpModifierAny = { ^"any" | ^"some" }
CmpModifierAll = { ^"all" }
ExprAtomValue = _{ UnaryNot* ~ AtomicExpr ~ IsNullPostfix? }
UnaryNot = @{ NotFlag }
IsNullPostfix = { ^"is" ~ NotFlag? ~ ^"null" }
AtomicExpr = _{ Literal | Parameter | IdentifierWithOptionalContinuation | ExpressionInParentheses | UnaryOperator | SubQuery | Row | SquareBracketArray }
// TODO: Empty arrays don't parse without the `!"]"` prefix in the
// optional sequence of sub-expressions, but the reason is not
// immediately clear: the ']' character doesn't seem like it should
// be compatible with the beginning of any `AtomicExpr`. This may
// be worth investigating.
SquareBracketArray = { ^"array" ~ "[" ~ (!"]" ~ (Expr ~ ("," ~ Expr)*))? ~ "]" }
Literal = _{ True | False | Null | Double | Decimal | Unsigned | Integer | SingleQuotedString }
True = { ^"true" }
False = { ^"false" }
Null = { ^"null" }
Decimal = @{ Integer ~ ("." ~ ASCII_DIGIT*) }
Double = @{ Integer ~ ("." ~ ASCII_DIGIT*)? ~ (^"e" ~ Integer) }
Integer = @{ ("+" | "-")? ~ ASCII_DIGIT+ }
Unsigned = @{ ASCII_DIGIT+ }
// TODO: Handle dollar-quoted string literals.
SingleQuotedString = @{ "'" ~ ("''" | (!("'") ~ ANY))* ~ "'" }
Parameter = @{ "$" ~ Unsigned }
// Postgres permits qualified object names with a single identifier
// part, 2 parts plus a function invocation, 3 parts, or 3 parts
// plus a function invocation. For simplicity, assume that an
// arbitrary number of qualifications (e.g. "a.b.c.d[...]") are
// supported.
// TODO: Disallow whitespace where it shouldn't be.
IdentifierWithOptionalContinuation = { Identifier ~ QualifiedIdentifierContinuation* ~ FunctionInvocationContinuation? }
QualifiedIdentifierContinuation = ${ "." ~ Identifier }
FunctionInvocationContinuation = { "(" ~ (CountAsterisk | FunctionArgs)? ~ ")" }
// TODO: Support named argument notation
// (`my_func(name => value)`).
// TODO: Support keywords within args list as applicable.
FunctionArgs = { Distinct? ~ (Expr ~ ("," ~ Expr)*)? }
CountAsterisk = { "*" }
ExpressionInParentheses = { "(" ~ Expr ~ ")" }
CastInfix = { Expr ~ "::" ~ TypeCast }
TypeCast = {
TypeBool
| TypeDecimal
| TypeDouble
| TypeInt
| TypeNumeric
| TypeText
| TypeVarchar
}
TypeBool = { (^"boolean" | ^"bool") }
TypeDecimal = { ^"decimal" }
TypeDouble = { ^"double" }
TypeInt = { (^"integer" | ^"int") }
TypeNumeric = { ^"numeric" }
TypeText = { ^"text" }
TypeVarchar = { ^"varchar" ~ "(" ~ Unsigned ~ ")" }
TypeDate = { ^"date" }
TypeTime = { ^"time" ~ Unsigned? ~ (WithTimeZone | WithoutTimeZone)? }
TypeTimestamp = { ^"timestamp" ~ Unsigned? ~ (WithTimeZone | WithoutTimeZone)? }
WithTimeZone = { ^"with" ~ ^"time" ~ ^"zone" }
WithoutTimeZone = { ^"without" ~ ^"time" ~ ^"zone" }
UnaryOperator = _{ Exists }
Exists = { NotFlag? ~ ^"exists" ~ SubQuery }
Row = { "(" ~ Expr ~ ("," ~ Expr)* ~ ")" }
Distinct = { ^"distinct" }
NotFlag = { ^"not" }
EOF = { EOI | ";" }
WHITESPACE = _{ " " | "\t" | "\n" | "\r\n" }

View file

@ -0,0 +1,37 @@
//! Unit tests for identifier and object name parsing within expressions.
use crate::{Expr, escape_identifier};
// Escaping wraps in double quotes and doubles embedded double quotes.
#[test]
fn escaper_escapes() {
    assert_eq!(escape_identifier("hello"), r#""hello""#);
    assert_eq!(escape_identifier("hello world"), r#""hello world""#);
    assert_eq!(
        escape_identifier(r#""hello" "world""#),
        r#""""hello"" ""world""""#
    );
}

// Quoted parts are unescaped (including non-ASCII) and split on the dot.
#[test]
fn qualified_obj_name_parses() {
    assert_eq!(
        Expr::try_from(r#""""Hello"", World! 四十二".deep_thought"#),
        Ok(Expr::ObjName(vec![
            r#""Hello", World! 四十二"#.to_owned(),
            "deep_thought".to_owned(),
        ])),
    );
}

// An unbalanced inner quote must be rejected, not silently repaired.
#[test]
fn misquoted_ident_fails_to_parse() {
    assert!(Expr::try_from(r#""Hello, "World!""#).is_err());
}

// Unquoted identifiers are case-insensitive, i.e. folded to lowercase.
#[test]
fn unquoted_ident_lowercased() {
    assert_eq!(
        Expr::try_from("HeLlO_WoRlD"),
        Ok(Expr::ObjName(vec!["hello_world".to_owned()])),
    );
}

433
phono-pestgros/src/lib.rs Normal file
View file

@ -0,0 +1,433 @@
//! Incomplete but useful parser and generator for Postgres flavored SQL
//! expressions and more, based on a modified version of the
//! [official Pest SQL grammar](https://github.com/pest-parser/pest/blob/79dd30d11aab6f0fba3cd79bd48f456209b966b3/grammars/src/grammars/sql.pest).
//!
//! This grammar covers a larger subset of the Postgres SQL dialect, but it is a
//! work in progress and is far from complete. It should only be used to parse
//! input that is "PostgreSQL-esque", not input that expects spec compliance.
//!
//! ## Example
//!
//! ```
//! use phono_pestgros::{Datum, Expr, InfixOp};
//! use sqlx::{Postgres, QueryBuilder};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let expr = Expr::try_from("3 + 5 < 10")?;
//!
//! assert_eq!(expr, Expr::Infix {
//!     lhs: Box::new(Expr::Infix {
//!         lhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
//!         op: InfixOp::Add,
//!         rhs: Box::new(Expr::Literal(Datum::Numeric(Some(5.into())))),
//!     }),
//!     op: InfixOp::Lt,
//!     rhs: Box::new(Expr::Literal(Datum::Numeric(Some(10.into())))),
//! });
//!
//! assert_eq!(QueryBuilder::<'_, Postgres>::from(expr).sql(), "(($1) + ($2)) < ($3)");
//! # Ok(())
//! # }
//! ```
use std::{str::FromStr, sync::LazyLock};
use bigdecimal::BigDecimal;
use pest::{
Parser as _,
iterators::{Pair, Pairs},
pratt_parser::PrattParser,
};
use pest_derive::Parser;
pub use crate::{datum::Datum, query_builders::QueryFragment};
mod datum;
mod query_builders;
#[cfg(test)]
mod array_tests;
#[cfg(test)]
mod fragment_tests;
#[cfg(test)]
mod func_invocation_tests;
#[cfg(test)]
mod identifier_tests;
#[cfg(test)]
mod literal_tests;
#[cfg(test)]
mod op_tests;
/// Given a raw identifier (such as a table name, column name, etc.), format it
/// so that it may be safely interpolated into a SQL query.
///
/// Note that in PostgreSQL, unquoted identifiers are case-insensitive (or,
/// rather, they are always implicitly converted to lowercase), while quoted
/// identifiers are case-sensitive. The caller of this function is responsible
/// for performing conversion to lowercase as appropriate.
pub fn escape_identifier(identifier: &str) -> String {
    // Escaping identifiers for Postgres is fairly easy, provided that the
    // input is already known to contain no invalid multi-byte sequences.
    // Backslashes may remain as-is, and embedded double quotes are escaped
    // simply by doubling them (`"` becomes `""`). Refer to the
    // PQescapeInternal() function in libpq (fe-exec.c) and Diesel's
    // PgQueryBuilder::push_identifier().
    let doubled = identifier.replace('"', r#""""#);
    let mut escaped = String::with_capacity(doubled.len() + 2);
    escaped.push('"');
    escaped.push_str(&doubled);
    escaped.push('"');
    escaped
}
/// Decodes a SQL representation of an identifier. If the input is unquoted, it
/// is converted to lowercase. If it is double quoted, the surrounding quotes
/// are stripped and escaped inner double quotes (double-double quotes, if you
/// will) are converted to single-double quotes. The opposite of
/// [`escape_identifier`], sort of.
///
/// Assumes that the provided identifier is well-formed. Basic gut checks are
/// performed, but they are non-exhaustive.
///
/// `U&"..."`-style escaped Unicode identifiers are not yet supported.
fn parse_ident(value: &str) -> String {
    assert!(
        !value.to_lowercase().starts_with("u&"),
        "escaped Unicode identifiers are not supported"
    );
    if value.starts_with('"') {
        // A well-formed quoted identifier contains at least the two
        // surrounding quote characters. Without the length check, a lone `"`
        // would pass the `ends_with` test (it is both the first and last
        // character) and be silently decoded as an empty identifier.
        assert!(
            value.len() >= 2 && value.ends_with('"'),
            "malformed double-quoted identifier"
        );
        {
            // Strip first and last characters.
            let mut chars = value.chars();
            chars.next();
            chars.next_back();
            chars.as_str()
        }
        .replace(r#""""#, r#"""#)
    } else {
        // TODO: assert validity with regex
        value.to_lowercase()
    }
}
/// Decodes a single-quoted string literal. Removes surrounding quotes and
/// replaces embedded single quotes (double-single quotes) with single-single
/// quotes.
///
/// Assumes that the provided identifier is well-formed. Basic gut checks are
/// performed, but they are non-exhaustive.
///
/// `E'...'`-style, dollar-quoted, and other (relatively) uncommon formats for
/// text literals are not yet supported.
fn parse_text_literal(value: &str) -> String {
    // The length check prevents a lone `'` from slipping through: that input
    // both starts and ends with a quote character, yet is not a well-formed
    // literal and would otherwise decode to an empty string.
    assert!(value.len() >= 2 && value.starts_with('\'') && value.ends_with('\''));
    {
        // Strip first and last characters.
        let mut chars = value.chars();
        chars.next();
        chars.next_back();
        chars.as_str()
    }
    .replace("''", "'")
}
/// Primary parser and code generation for [`Rule`] types.
#[derive(Parser)]
#[grammar = "src/grammar.pest"]
struct PsqlParser;

/// Secondary parser configuration for handling operator precedence.
///
/// Operators registered later bind tighter: `or` is the loosest, the
/// `is null` postfix the tightest. This ordering is what makes
/// `3 + 5 < 10` parse as `(3 + 5) < 10`.
static PRATT_PARSER: LazyLock<PrattParser<Rule>> = LazyLock::new(|| {
    use pest::pratt_parser::{
        Assoc::{Left, Right},
        Op,
    };
    PrattParser::new()
        .op(Op::infix(Rule::Or, Left))
        .op(Op::infix(Rule::Between, Left))
        .op(Op::infix(Rule::And, Left))
        .op(Op::prefix(Rule::UnaryNot))
        .op(Op::infix(Rule::CmpInfixOp, Right))
        // Official Pest example overstates the concat operator's precedence. It
        // should be lower precedence than add/subtract.
        .op(Op::infix(Rule::ConcatInfixOp, Left))
        .op(Op::infix(Rule::Add, Left) | Op::infix(Rule::Subtract, Left))
        .op(Op::infix(Rule::Multiply, Left) | Op::infix(Rule::Divide, Left))
        // NOTE(review): `CastInfix` does not appear among the grammar's
        // `ExprInfixOp` alternatives, so this entry looks unreachable from
        // `Expr` pairs — confirm whether `::` casts are actually routed
        // through the Pratt parser.
        .op(Op::infix(Rule::CastInfix, Left))
        .op(Op::postfix(Rule::IsNullPostfix))
});
/// Represents a SQL expression. An expression is a collection of values and
/// operators that theoretically evaluates to some value, such as a boolean
/// condition, an object name, or a string dynamically derived from other
/// values. An expression is *not* a complete SQL statement, command, or query.
#[non_exhaustive]
#[derive(Clone, Debug, PartialEq)]
pub enum Expr {
    /// Binary operation, e.g. `a + b` or `a and b`.
    Infix {
        lhs: Box<Expr>,
        op: InfixOp,
        rhs: Box<Expr>,
    },
    /// Literal value, e.g. `42` or `'hello'`.
    Literal(Datum),
    /// Possibly-qualified object name, one element per identifier part;
    /// e.g. `my_schema.my_table` becomes `["my_schema", "my_table"]`.
    ObjName(Vec<String>),
    /// Function invocation, e.g. `now()` or `my_schema.repeat('x', 3)`.
    FnCall {
        name: Vec<String>,
        args: FnArgs,
    },
    /// Unary boolean negation: `not <expr>`.
    Not(Box<Expr>),
    /// `is null` / `is not null` postfix test.
    Nullness {
        is_null: bool,
        expr: Box<Expr>,
    },
    /// Array constructor: `array[...]`.
    Array(Vec<Expr>),
}

impl TryFrom<&str> for Expr {
    type Error = ParseError;

    /// Parse a single SQL expression (not a full statement) into a tree.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        // `ExprRoot` is a silent rule which simply dictates that the inner
        // `Expr` rule must consume the entire input.
        let pairs = PsqlParser::parse(Rule::ExprRoot, value)?;
        parse_expr_pairs(pairs)
    }
}

/// Binary (infix) operators usable within [`Expr::Infix`].
#[non_exhaustive]
#[derive(Clone, Debug, PartialEq)]
pub enum InfixOp {
    // Arithmetic ops:
    Add,
    Concat,
    Div,
    Mult,
    Sub,
    // Boolean ops:
    And,
    Or,
    Eq,
    Gt,
    Gte,
    Lt,
    Lte,
    Neq,
    // Miscellaneous ops:
    Cast,
    // Array comparison modifiers (such as `= any(array[])`):
    // TODO: This is an awkward pattern, which is capable of representing
    // invalid expressions (such as `3 + any(array[])`). I expect it'll need to
    // be rewritten at some point anyways to handle other keyword-driven infix
    // syntax, but for expediency I'm leaving a more robust solution as a
    // challenge for another day.
    WithCmpModifierAny(Box<Self>),
    WithCmpModifierAll(Box<Self>),
}

/// Argument list of an [`Expr::FnCall`].
#[derive(Clone, Debug, PartialEq)]
pub enum FnArgs {
    /// The special `count(*)` argument form.
    CountAsterisk,
    /// An ordinary (possibly empty) list of argument expressions.
    Exprs {
        /// `true` for aggregator invocations with the `DISTINCT` keyword
        /// specified.
        distinct_flag: bool,
        exprs: Vec<Expr>,
    },
}
/// Recursive helper, which does most of the work to convert [`pest`]'s pattern
/// matching output to a usable syntax tree.
fn parse_expr_pairs(expr_pairs: Pairs<'_, Rule>) -> Result<Expr, ParseError> {
    PRATT_PARSER
        .map_primary(|pair| match pair.as_rule() {
            // Nested and parenthesized expressions recurse directly.
            Rule::Expr | Rule::ExpressionInParentheses => parse_expr_pairs(pair.into_inner()),
            // All numeric literal flavors collapse into a single Numeric datum.
            Rule::Decimal | Rule::Double | Rule::Integer | Rule::Unsigned => Ok(Expr::Literal(
                Datum::Numeric(Some(BigDecimal::from_str(pair.as_str()).expect(
                    "parsed numeric values should always be convertible to BigDecimal",
                ))),
            )),
            Rule::SingleQuotedString => Ok(Expr::Literal(Datum::Text(Some(parse_text_literal(pair.as_str()))))),
            Rule::IdentifierWithOptionalContinuation => {
                let mut name: Vec<String> = vec![];
                let mut fn_args: Option<FnArgs> = None;
                let inner = pair.into_inner();
                for inner_pair in inner {
                    match inner_pair.as_rule() {
                        Rule::Identifier => {
                            name.push(parse_ident(inner_pair.as_str()));
                        }
                        Rule::QualifiedIdentifierContinuation => {
                            let ident_cont = inner_pair.as_str();
                            assert!(
                                ident_cont.starts_with('.'),
                                "QualifiedIdentifierContinuation should always start with the infix dot",
                            );
                            name.push(parse_ident({
                                // Strip leading dot.
                                let mut chars = ident_cont.chars();
                                chars.next();
                                chars.as_str()
                            }));
                        }
                        Rule::FunctionInvocationContinuation => {
                            fn_args = Some(parse_function_invocation_continuation(inner_pair)?);
                        }
                        _ => unreachable!(
                            "IdentifierWithOptionalContinuation has only 3 valid child rules",
                        ),
                    }
                }
                // A trailing `(...)` makes this a function call; otherwise it
                // is a plain (possibly qualified) object name.
                Ok(if let Some(fn_args) = fn_args {
                    Expr::FnCall { name, args: fn_args }
                } else {
                    Expr::ObjName(name)
                })
            }
            Rule::SquareBracketArray => {
                let mut arr_items: Vec<Expr> = vec![];
                for inner_pair in pair.into_inner() {
                    match inner_pair.as_rule() {
                        Rule::Expr => {arr_items.push(parse_expr_pairs(inner_pair.into_inner())?);}
                        _ => unreachable!(
                            "SquareBracketArray has only Exprs as direct child rules",
                        ),
                    }
                }
                Ok(Expr::Array(arr_items))
            }
            rule => Err(ParseError::UnknownRule(rule)),
        })
        // NOTE(review): `Between` (registered with the Pratt parser) and `In`
        // (allowed by the grammar) are not mapped below and will surface as
        // `ParseError::UnknownRule` — confirm whether that is intentional.
        .map_infix(|lhs, op, rhs| Ok(Expr::Infix {
            lhs: Box::new(lhs?),
            op: match op.as_rule() {
                Rule::Add => InfixOp::Add,
                Rule::ConcatInfixOp => InfixOp::Concat,
                Rule::Divide => InfixOp::Div,
                Rule::Multiply => InfixOp::Mult,
                Rule::Subtract => InfixOp::Sub,
                Rule::And => InfixOp::And,
                // Comparison operators carry an optional any/all modifier and
                // need their own sub-parse.
                Rule::CmpInfixOp => parse_cmp_op(op)?,
                Rule::Or => InfixOp::Or,
                Rule::CastInfix => InfixOp::Cast,
                rule => Err(ParseError::UnknownRule(rule))?,
            },
            rhs: Box::new(rhs?),
        }))
        .map_prefix(|op, child| Ok(match op.as_rule() {
            Rule::UnaryNot => Expr::Not(Box::new(child?)),
            rule => Err(ParseError::UnknownRule(rule))?,
        }))
        .map_postfix(|child, op| Ok(match op.as_rule() {
            Rule::IsNullPostfix => Expr::Nullness {
                // `IsNullPostfix` contains a `NotFlag` pair exactly when the
                // input read `is not null`.
                is_null: op
                    .into_inner()
                    .next()
                    .map(|inner| inner.as_rule()) != Some(Rule::NotFlag),
                expr: Box::new(child?),
            },
            rule => Err(ParseError::UnknownRule(rule))?,
        }))
        .parse(expr_pairs)
}
/// Translates a `CmpInfixOp` pair (a comparison operator plus an optional
/// `any`/`all` array-comparison modifier) into an [`InfixOp`].
fn parse_cmp_op(op: Pair<'_, Rule>) -> Result<InfixOp, ParseError> {
    // Per the grammar, the base comparison operator appears first; the
    // modifier, when present, always follows it.
    let mut base_op: Option<InfixOp> = None;
    for inner in op.into_inner() {
        match inner.as_rule() {
            Rule::Eq => {
                base_op = Some(InfixOp::Eq);
            }
            Rule::Gt => {
                base_op = Some(InfixOp::Gt);
            }
            Rule::GtEq => {
                base_op = Some(InfixOp::Gte);
            }
            Rule::Lt => {
                base_op = Some(InfixOp::Lt);
            }
            Rule::LtEq => {
                base_op = Some(InfixOp::Lte);
            }
            Rule::NotEq => {
                base_op = Some(InfixOp::Neq);
            }
            Rule::CmpArrayModifier => {
                if let Some(base_op) = base_op {
                    // Wrap the base operator in the matching modifier variant.
                    return Ok(
                        match inner
                            .into_inner()
                            .next()
                            .expect("CmpArrayModifier should be a simple enumeration")
                            .as_rule()
                        {
                            Rule::CmpModifierAny => InfixOp::WithCmpModifierAny(Box::new(base_op)),
                            Rule::CmpModifierAll => InfixOp::WithCmpModifierAll(Box::new(base_op)),
                            rule => Err(ParseError::UnknownRule(rule))?,
                        },
                    );
                } else {
                    // Modifier with no preceding comparison operator; the
                    // grammar should make this unreachable.
                    return Err(ParseError::UnknownRule(Rule::CmpArrayModifier));
                }
            }
            rule => Err(ParseError::UnknownRule(rule))?,
        }
    }
    Ok(base_op.expect("CmpInfixOp always has at least one child"))
}
/// Translates a `FunctionInvocationContinuation` pair (the `(...)` following a
/// function name) into a [`FnArgs`] value. An empty pair means a call with no
/// arguments, e.g. `now()`.
fn parse_function_invocation_continuation(pair: Pair<'_, Rule>) -> Result<FnArgs, ParseError> {
    let mut cont_inner_iter = pair.into_inner();
    let fn_args = if let Some(cont_inner) = cont_inner_iter.next() {
        match cont_inner.as_rule() {
            Rule::FunctionArgs => {
                // An optional leading `DISTINCT` followed by zero or more
                // argument expressions.
                let mut distinct_flag = false;
                let mut exprs: Vec<Expr> = vec![];
                for arg_inner in cont_inner.into_inner() {
                    match arg_inner.as_rule() {
                        Rule::Distinct => {
                            distinct_flag = true;
                        }
                        Rule::Expr => {
                            exprs.push(parse_expr_pairs(arg_inner.into_inner())?);
                        }
                        _ => unreachable!(
                            "only valid children of FunctionArgs are Distinct and Expr"
                        ),
                    }
                }
                FnArgs::Exprs {
                    distinct_flag,
                    exprs,
                }
            }
            Rule::CountAsterisk => FnArgs::CountAsterisk,
            _ => unreachable!(
                "only valid children of FunctionInvocationContinuation are FunctionArgs and CountAsterisk"
            ),
        }
    } else {
        // Empty parentheses: a zero-argument invocation.
        FnArgs::Exprs {
            distinct_flag: false,
            exprs: vec![],
        }
    };
    assert!(
        cont_inner_iter.next().is_none(),
        "function should have consumed entire FunctionInvocationContinuation pair",
    );
    Ok(fn_args)
}
#[derive(Clone, Debug, PartialEq, thiserror::Error)]
#[error("parse error")]
pub enum ParseError {
#[error("unknown rule")]
UnknownRule(Rule),
#[error("pest failed to parse: {0}")]
Pest(pest::error::Error<Rule>),
}
impl From<pest::error::Error<Rule>> for ParseError {
fn from(value: pest::error::Error<Rule>) -> Self {
Self::Pest(value)
}
}

View file

@ -0,0 +1,30 @@
use std::error::Error;
use crate::{Datum, Expr};
// A plain single-quoted string becomes a Text datum without the quotes.
#[test]
fn text_parses() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from("'Hello, World!'")?,
        Expr::Literal(Datum::Text(Some("Hello, World!".to_owned())))
    );
    Ok(())
}

// Doubled single quotes decode to literal single quotes.
#[test]
fn escaped_quotes_parse() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from("'''Hello, World!'''")?,
        Expr::Literal(Datum::Text(Some("'Hello, World!'".to_owned())))
    );
    Ok(())
}

// Decimal literals are preserved exactly as Numeric datums.
#[test]
fn numeric_parses() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Expr::try_from("1234.56")?,
        Expr::Literal(Datum::Numeric(Some("1234.56".parse()?)))
    );
    Ok(())
}

View file

@ -0,0 +1,106 @@
//! Unit tests for infix operator parsing within expressions.
use crate::{Datum, Expr, InfixOp};
// An identifier plus a literal parse into a single Add infix node.
#[test]
fn add_op_parses() {
    assert_eq!(
        // https://xkcd.com/3184/
        Expr::try_from("six + 7"),
        Ok(Expr::Infix {
            lhs: Box::new(Expr::ObjName(vec!["six".to_owned()])),
            op: InfixOp::Add,
            rhs: Box::new(Expr::Literal(Datum::Numeric(Some(7.into())))),
        })
    );
}

// Same shape for multiplication.
#[test]
fn mult_op_parses() {
    assert_eq!(
        Expr::try_from("six * 7"),
        Ok(Expr::Infix {
            lhs: Box::new(Expr::ObjName(vec!["six".to_owned()])),
            op: InfixOp::Mult,
            rhs: Box::new(Expr::Literal(Datum::Numeric(Some(7.into())))),
        })
    );
}

// Parentheses override the usual precedence; the tree nesting reflects
// evaluation order even though the parentheses themselves are discarded.
#[test]
fn arith_precedence() {
    assert_eq!(
        Expr::try_from("(1 + 2) * 3 + 4"),
        Ok(Expr::Infix {
            lhs: Box::new(Expr::Infix {
                lhs: Box::new(Expr::Infix {
                    lhs: Box::new(Expr::Literal(Datum::Numeric(Some(1.into())))),
                    op: InfixOp::Add,
                    rhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
                }),
                op: InfixOp::Mult,
                rhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
            }),
            op: InfixOp::Add,
            rhs: Box::new(Expr::Literal(Datum::Numeric(Some(4.into())))),
        })
    );
    assert_eq!(
        Expr::try_from("1 - 2 / (3 - 4)"),
        Ok(Expr::Infix {
            lhs: Box::new(Expr::Literal(Datum::Numeric(Some(1.into())))),
            op: InfixOp::Sub,
            rhs: Box::new(Expr::Infix {
                lhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
                op: InfixOp::Div,
                rhs: Box::new(Expr::Infix {
                    lhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
                    op: InfixOp::Sub,
                    rhs: Box::new(Expr::Literal(Datum::Numeric(Some(4.into())))),
                }),
            })
        })
    );
}

// `is null` produces a Nullness node with is_null = true.
#[test]
fn is_null_parses() {
    assert_eq!(
        Expr::try_from("my_var is null"),
        Ok(Expr::Nullness {
            is_null: true,
            expr: Box::new(Expr::ObjName(vec!["my_var".to_owned()]))
        }),
    );
}

// `is not null` flips the flag rather than wrapping in a Not node.
#[test]
fn is_not_null_parses() {
    assert_eq!(
        Expr::try_from("my_var is not null"),
        Ok(Expr::Nullness {
            is_null: false,
            expr: Box::new(Expr::ObjName(vec!["my_var".to_owned()]))
        }),
    );
}

// Prefix `not` wraps the operand in a Not node.
#[test]
fn not_parses() {
    assert_eq!(
        Expr::try_from("not my_var"),
        Ok(Expr::Not(Box::new(Expr::ObjName(vec![
            "my_var".to_owned()
        ])))),
    );
}

// Repeated `not`s nest rather than cancel; no simplification is applied.
#[test]
fn repeated_nots_parse() {
    assert_eq!(
        Expr::try_from("not not my_var"),
        Ok(Expr::Not(Box::new(Expr::Not(Box::new(Expr::ObjName(
            vec!["my_var".to_owned()]
        )))))),
    );
}

View file

@ -0,0 +1,241 @@
//! Assorted utilities for dynamically constructing and manipulating [`sqlx`]
//! queries.
use sqlx::{Postgres, QueryBuilder};
use crate::{Datum, Expr, FnArgs, InfixOp, escape_identifier};
/// Representation of a partial, parameterized SQL query. Allows callers to
/// build queries iteratively and dynamically, handling parameter numbering
/// (`$1`, `$2`, `$3`, ...) automatically.
///
/// This is similar to [`sqlx::QueryBuilder`], except that [`QueryFragment`]
/// objects are composable and may be concatenated to each other.
#[derive(Clone, Debug, PartialEq)]
pub struct QueryFragment {
    /// SQL string, split wherever there is a query parameter. For example,
    /// `select * from foo where id = $1 and status = $2` is represented along
    /// the lines of `["select * from foo where id = ", " and status = ", ""]`.
    /// `plain_sql` should always have exactly one more element than `params`.
    plain_sql: Vec<String>,
    params: Vec<Datum>,
}

impl QueryFragment {
    /// Validate invariants. Should be run immediately before returning any
    /// useful output.
    fn gut_checks(&self) {
        assert!(self.plain_sql.len() == self.params.len() + 1);
    }

    /// Parse from a SQL string with no parameters.
    pub fn from_sql(sql: &str) -> Self {
        Self {
            plain_sql: vec![sql.to_owned()],
            params: vec![],
        }
    }

    /// Convenience function to construct an empty value.
    pub fn empty() -> Self {
        Self::from_sql("")
    }

    /// Parse from a parameter value with no additional SQL. (Renders as `$n`,
    /// where`n` is the appropriate parameter index.)
    pub fn from_param(param: Datum) -> Self {
        Self {
            plain_sql: vec!["".to_owned(), "".to_owned()],
            params: vec![param],
        }
    }

    /// Append another query fragment to this one.
    pub fn push(&mut self, mut other: QueryFragment) {
        // The struct invariant (`plain_sql.len() == params.len() + 1`)
        // guarantees both vecs are non-empty here.
        let tail = self
            .plain_sql
            .pop()
            .expect("already asserted that vec contains at least 1 item");
        let head = other
            .plain_sql
            .first()
            .expect("already asserted that vec contains at least 1 item");
        // Fuse this fragment's last plain-SQL chunk with the other's first,
        // then carry over the remaining chunks and all parameters.
        self.plain_sql.push(format!("{tail}{head}"));
        for value in other.plain_sql.drain(1..) {
            self.plain_sql.push(value);
        }
        self.params.append(&mut other.params);
    }

    /// Combine multiple QueryFragments with a separator, similar to
    /// [`Vec::join`].
    pub fn join<I: IntoIterator<Item = Self>>(fragments: I, sep: Self) -> Self {
        let mut acc = QueryFragment::from_sql("");
        let mut iter = fragments.into_iter();
        let mut fragment = match iter.next() {
            Some(value) => value,
            None => return acc,
        };
        for next_fragment in iter {
            acc.push(fragment);
            acc.push(sep.clone());
            fragment = next_fragment;
        }
        acc.push(fragment);
        acc
    }

    /// Convenience method equivalent to:
    /// `QueryFragment::join(fragments, QueryFragment::from_sql(""))`
    pub fn concat<I: IntoIterator<Item = Self>>(fragments: I) -> Self {
        Self::join(fragments, Self::from_sql(""))
    }

    /// Checks whether value is empty. A value is considered empty if the
    /// resulting SQL code is 0 characters long.
    pub fn is_empty(&self) -> bool {
        self.gut_checks();
        // A fragment with any parameter renders at least `$1`, so emptiness
        // means a single, zero-length plain-SQL chunk.
        self.plain_sql.len() == 1
            && self
                .plain_sql
                .first()
                .expect("already checked that len == 1")
                .is_empty()
    }
}
/// Renders an [`Expr`] tree into parameterized SQL: literals become bind
/// parameters, identifiers are escaped, and sub-expressions are wrapped in
/// parentheses to preserve the precedence encoded by the tree structure.
impl From<Expr> for QueryFragment {
    fn from(value: Expr) -> Self {
        match value {
            Expr::Infix { lhs, op, rhs } => Self::concat([
                // RHS and LHS must be explicitly wrapped in parentheses to
                // ensure correct precedence, because parentheses are taken
                // into account **but not preserved** when parsing.
                Self::from_sql("("),
                (*lhs).into(),
                Self::from_sql(") "),
                op.into(),
                // The RHS expression **must** be parenthesized to correctly
                // reconstruct syntax like `= any (array[...])`.
                Self::from_sql(" ("),
                (*rhs).into(),
                Self::from_sql(")"),
            ]),
            Expr::Literal(datum) => Self::from_param(datum),
            Expr::ObjName(idents) => Self::join(
                idents
                    .iter()
                    .map(|ident| Self::from_sql(&escape_identifier(ident))),
                Self::from_sql("."),
            ),
            Expr::Not(expr) => {
                Self::concat([Self::from_sql("not ("), (*expr).into(), Self::from_sql(")")])
            }
            Expr::Nullness { is_null, expr } => Self::concat([
                Self::from_sql("("),
                (*expr).into(),
                Self::from_sql(if is_null {
                    ") is null"
                } else {
                    ") is not null"
                }),
            ]),
            Expr::FnCall { name, args } => {
                // Emit the (possibly schema-qualified) name, then the
                // parenthesized argument list.
                let mut fragment = Self::empty();
                fragment.push(Self::join(
                    name.iter()
                        .map(|ident| Self::from_sql(&escape_identifier(ident))),
                    Self::from_sql("."),
                ));
                fragment.push(Self::from_sql("("));
                match args {
                    FnArgs::CountAsterisk => {
                        fragment.push(Self::from_sql("*"));
                    }
                    FnArgs::Exprs {
                        distinct_flag,
                        exprs,
                    } => {
                        if distinct_flag {
                            fragment.push(Self::from_sql("distinct "));
                        }
                        fragment.push(Self::join(
                            exprs.into_iter().map(|expr| {
                                // Wrap arguments in parentheses to ensure they
                                // are appropriately distinguishable from each
                                // other regardless of the presence of extra
                                // commas.
                                Self::concat([
                                    Self::from_sql("("),
                                    expr.into(),
                                    Self::from_sql(")"),
                                ])
                            }),
                            Self::from_sql(", "),
                        ));
                    }
                }
                fragment.push(Self::from_sql(")"));
                fragment
            }
            Expr::Array(arr_items) => Self::concat([
                Self::from_sql("array["),
                Self::join(
                    arr_items.into_iter().map(|item| {
                        Self::concat([Self::from_sql("("), item.into(), Self::from_sql(")")])
                    }),
                    Self::from_sql(", "),
                ),
                Self::from_sql("]"),
            ]),
        }
    }
}
impl From<InfixOp> for QueryFragment {
    /// Renders an infix operator as its SQL token.
    fn from(value: InfixOp) -> Self {
        match value {
            // Comparison modifiers emit the wrapped operator followed by
            // the `any` / `all` keyword.
            InfixOp::WithCmpModifierAny(inner) => {
                Self::concat([(*inner).into(), Self::from_sql(" any")])
            }
            InfixOp::WithCmpModifierAll(inner) => {
                Self::concat([(*inner).into(), Self::from_sql(" all")])
            }
            // Arithmetic and concatenation.
            InfixOp::Add => Self::from_sql("+"),
            InfixOp::Sub => Self::from_sql("-"),
            InfixOp::Mult => Self::from_sql("*"),
            InfixOp::Div => Self::from_sql("/"),
            InfixOp::Concat => Self::from_sql("||"),
            // Boolean connectives.
            InfixOp::And => Self::from_sql("and"),
            InfixOp::Or => Self::from_sql("or"),
            // Comparisons.
            InfixOp::Eq => Self::from_sql("="),
            InfixOp::Neq => Self::from_sql("<>"),
            InfixOp::Gt => Self::from_sql(">"),
            InfixOp::Gte => Self::from_sql(">="),
            InfixOp::Lt => Self::from_sql("<"),
            InfixOp::Lte => Self::from_sql("<="),
            // Type cast.
            InfixOp::Cast => Self::from_sql("::"),
        }
    }
}
impl From<QueryFragment> for QueryBuilder<'_, Postgres> {
    /// Lowers a fragment into a sqlx builder by interleaving plain-SQL
    /// chunks with bound parameters: chunk, bind, chunk, bind, ... with the
    /// final chunk carrying no bind.
    fn from(value: QueryFragment) -> Self {
        value.gut_checks();
        let mut qb = QueryBuilder::new("");
        let mut params = value.params.into_iter();
        for chunk in value.plain_sql {
            qb.push(chunk);
            if let Some(param) = params.next() {
                param.push_bind_onto(&mut qb);
            }
        }
        qb
    }
}
impl From<Expr> for QueryBuilder<'_, Postgres> {
    /// Converts an expression via its fragment representation.
    fn from(value: Expr) -> Self {
        QueryFragment::from(value).into()
    }
}

View file

@ -23,6 +23,7 @@ percent-encoding = "2.3.1"
phono-backends = { workspace = true }
phono-models = { workspace = true }
phono-namegen = { workspace = true }
phono-pestgros = { workspace = true }
rand = { workspace = true }
redact = { workspace = true }
regex = { workspace = true }

View file

@ -7,7 +7,6 @@ use anyhow::anyhow;
use askama::Template;
use phono_backends::{
client::WorkspaceClient,
escape_identifier,
pg_acl::{PgAclItem, PgPrivilegeType},
pg_class::PgClass,
pg_role::RoleTree,
@ -17,6 +16,7 @@ use phono_backends::{
},
};
use phono_models::{accessors::Actor, service_cred::ServiceCred, user::User};
use phono_pestgros::escape_identifier;
use serde::{Deserialize, Serialize};
use sqlx::{postgres::types::Oid, prelude::FromRow, query, query_as};
use tracing::{Instrument, info_span};

View file

@ -6,12 +6,13 @@ use axum::{
// [`axum_extra`]'s form extractor is preferred:
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
use axum_extra::extract::Form;
use phono_backends::{escape_identifier, pg_class::PgClass};
use phono_backends::pg_class::PgClass;
use phono_models::{
accessors::{Accessor as _, Actor, portal::PortalAccessor},
field::Field,
presentation::Presentation,
};
use phono_pestgros::escape_identifier;
use serde::Deserialize;
use sqlx::{postgres::types::Oid, query};
use uuid::Uuid;

View file

@ -5,22 +5,17 @@ use axum::{
extract::{Path, State},
response::{IntoResponse as _, Response},
};
use phono_backends::{
escape_identifier, pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass,
};
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass};
use phono_models::{
accessors::{Accessor, Actor, portal::PortalAccessor},
datum::Datum,
expression::PgExpressionAny,
field::Field,
query_builders::{QueryFragment, SelectQuery},
};
use phono_pestgros::{Datum, Expr, FnArgs, InfixOp, QueryFragment, escape_identifier};
use serde::{Deserialize, Serialize};
use sqlx::{
QueryBuilder,
Postgres, QueryBuilder,
postgres::{PgRow, types::Oid},
};
use tracing::debug;
use uuid::Uuid;
use validator::Validate;
@ -42,7 +37,8 @@ pub(super) struct PathParams {
#[derive(Debug, Deserialize, Validate)]
pub(super) struct FormBody {
subfilter: Option<String>,
#[serde(default)]
subfilter: String,
}
const FRONTEND_ROW_LIMIT: i64 = 1000;
@ -128,24 +124,8 @@ pub(super) async fn get(
)),
filters: QueryFragment::join(
[
portal
.table_filter
.0
.map(|filter| filter.into_query_fragment()),
form.subfilter
.and_then(|value| {
if value.is_empty() {
None
} else {
serde_json::from_str::<Option<PgExpressionAny>>(&value)
// Ignore invalid input. A user likely pasted incorrectly
// or made a typo.
.inspect_err(|_| debug!("ignoring invalid subfilter expression"))
.ok()
.flatten()
}
})
.map(|filter| filter.into_query_fragment()),
into_safe_filter_sql(&portal.filter),
into_safe_filter_sql(&form.subfilter),
]
.into_iter()
.flatten(),
@ -215,3 +195,157 @@ pub(super) async fn get(
})
.into_response())
}
/// Helper type to make it easier to build and reason about multiple related SQL
/// queries.
///
/// Each field holds the body of one clause; the `From<SelectQuery>`
/// conversions glue them together with the corresponding keywords, skipping
/// any optional clause whose fragment is empty.
#[derive(Clone, Debug)]
pub struct SelectQuery {
    /// Query fragment following (not including) "select ".
    pub selection: QueryFragment,
    /// Query fragment following (not including) "from ".
    pub source: QueryFragment,
    /// Query fragment following (not including) "where ", or empty if not
    /// applicable.
    pub filters: QueryFragment,
    /// Query fragment following (not including) "order by ", or empty if not
    /// applicable.
    pub order: QueryFragment,
    /// Query fragment following (not including) "limit ", or empty if not
    /// applicable.
    pub limit: QueryFragment,
}
impl From<SelectQuery> for QueryFragment {
    /// Assembles the full `select ... from ... [where ...] [order by ...]
    /// [limit ...]` statement from the per-clause fragments.
    fn from(value: SelectQuery) -> Self {
        let mut out = QueryFragment::from_sql("select ");
        out.push(value.selection);
        out.push(QueryFragment::from_sql(" from "));
        out.push(value.source);
        // Optional clauses are emitted only when their fragment is
        // non-empty, in the fixed order where / order by / limit.
        for (keyword, clause) in [
            (" where ", value.filters),
            (" order by ", value.order),
            (" limit ", value.limit),
        ] {
            if !clause.is_empty() {
                out.push(QueryFragment::from_sql(keyword));
                out.push(clause);
            }
        }
        out
    }
}
impl From<SelectQuery> for QueryBuilder<'_, Postgres> {
    /// Lowers the query to a fragment first, then to a sqlx builder.
    fn from(value: SelectQuery) -> Self {
        Self::from(QueryFragment::from(value))
    }
}
/// Users are allowed to put any text they want in the `Portal.filter` field.
/// This needs to be either transformed into a SQL expression that we trust to
/// be injected into a `WHERE` clause or disregarded if no such expression can
/// be generated.
///
/// Given the known (not to mention unknown) limitations of [`phono_pestgros`]'s
/// homegrown PostgreSQL grammar, trying to positively establish the correctness
/// and trustworthiness of a filter expression exactly as written would be
/// impractical and dangerous. Instead, we validate the syntax tree as parsed by
/// [`phono_pestgros`] (even if the parsing logic isn't spec-compliant), and use
/// the SQL expression only after it has been converted back from parsed form.
fn into_safe_filter_sql(expr_text: &str) -> Option<QueryFragment> {
if let Ok(expr) = Expr::try_from(expr_text)
&& is_safe_filter_expr(&expr)
{
Some(expr.into())
} else {
None
}
}
/// Returns `true` when `expr` is on the allowlist of expression forms we are
/// willing to regenerate into a `WHERE` clause.
///
/// The check is recursive: a composite node is safe only if every child node
/// is safe. Function calls are limited to a fixed allowlist of unqualified
/// names with non-`distinct` argument lists. Anything not explicitly allowed
/// is rejected.
fn is_safe_filter_expr(expr: &Expr) -> bool {
    match expr {
        // Leaf nodes. (Fixed: the original or-pattern mixed binding modes
        // — `Expr::Literal(_) | &Expr::ObjName(_)` — for no reason; both
        // sides now rely on match ergonomics consistently.)
        Expr::Literal(_) | Expr::ObjName(_) => true,
        Expr::Infix { lhs, op, rhs } => {
            is_safe_filter_infix_op(op) && is_safe_filter_expr(lhs) && is_safe_filter_expr(rhs)
        }
        Expr::FnCall { name, args } => match name
            .iter()
            .map(|s| s.as_str())
            .collect::<Vec<_>>()
            .as_slice()
        {
            // Math:
            &["abs"]
            | &["ceil"]
            | &["floor"]
            | &["ln"]
            | &["log"]
            | &["mod"]
            | &["power"]
            | &["pi"]
            | &["round"]
            | &["sqrt"]
            | &["trunc"]
            // Timestamp:
            | &["now"]
            | &["to_timestamp"]
            // Strings:
            | &["upper"]
            | &["lower"]
            | &["replace"]
            | &["btrim"]
            | &["length"]
            | &["concat_ws"]
            | &["regexp_replace"]
            | &["regexp_matches"]
            | &["lpad"]
            | &["rpad"]
            | &["to_char"]
            // Misc:
            | &["any"] => match args {
                FnArgs::Exprs {
                    distinct_flag,
                    exprs,
                } => !distinct_flag && exprs.iter().all(is_safe_filter_expr),
                // `count(*)`-style argument lists are not allowed here.
                _ => false,
            },
            // Qualified or unrecognized function names are rejected.
            _ => false,
        },
        Expr::Not(inner) => is_safe_filter_expr(inner),
        Expr::Nullness {
            is_null: _,
            expr: inner,
        } => is_safe_filter_expr(inner),
        Expr::Array(arr_items) => arr_items.iter().all(is_safe_filter_expr),
        // Default-deny for any future expression variants.
        _ => false,
    }
}
/// Operator allowlist backing [`is_safe_filter_expr`]. Anything not listed
/// here is rejected.
fn is_safe_filter_infix_op(op: &InfixOp) -> bool {
    match op {
        // `any` / `all` modifiers are exactly as safe as the operator they
        // wrap, so recurse into the inner operator.
        InfixOp::WithCmpModifierAny(inner) | InfixOp::WithCmpModifierAll(inner) => {
            is_safe_filter_infix_op(inner)
        }
        // Arithmetic, concatenation, boolean logic, comparisons, and casts
        // are all allowed.
        InfixOp::Add
        | InfixOp::Sub
        | InfixOp::Mult
        | InfixOp::Div
        | InfixOp::Concat
        | InfixOp::And
        | InfixOp::Or
        | InfixOp::Eq
        | InfixOp::Neq
        | InfixOp::Gt
        | InfixOp::Gte
        | InfixOp::Lt
        | InfixOp::Lte
        | InfixOp::Cast => true,
        // Default-deny for anything else.
        _ => false,
    }
}

View file

@ -8,11 +8,9 @@ use axum::{
// [`axum_extra`]'s form extractor is required to support repeated keys:
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
use axum_extra::extract::Form;
use phono_backends::{escape_identifier, pg_acl::PgPrivilegeType, pg_class::PgClass};
use phono_models::{
accessors::{Accessor as _, Actor, portal::PortalAccessor},
datum::Datum,
};
use phono_backends::{pg_acl::PgPrivilegeType, pg_class::PgClass};
use phono_models::accessors::{Accessor as _, Actor, portal::PortalAccessor};
use phono_pestgros::{Datum, escape_identifier};
use serde::Deserialize;
use sqlx::{postgres::types::Oid, query};
use uuid::Uuid;

View file

@ -6,7 +6,6 @@ use axum::{
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute};
use phono_models::{
accessors::{Accessor, Actor, portal::PortalAccessor},
expression::PgExpressionAny,
workspace::Workspace,
};
use serde::{Deserialize, Serialize};
@ -98,7 +97,7 @@ pub(super) async fn get(
struct ResponseTemplate {
columns: Vec<ColumnInfo>,
attr_names: Vec<String>,
filter: Option<PgExpressionAny>,
filter: String,
settings: Settings,
subfilter_str: String,
navbar: WorkspaceNav,
@ -107,7 +106,7 @@ pub(super) async fn get(
ResponseTemplate {
columns,
attr_names,
filter: portal.table_filter.0,
filter: portal.filter,
navbar: WorkspaceNav::builder()
.navigator(navigator)
.workspace(workspace)

View file

@ -3,11 +3,12 @@ use axum::{
extract::{Path, State},
response::Response,
};
use phono_backends::{escape_identifier, pg_class::PgClass};
use phono_backends::pg_class::PgClass;
use phono_models::{
accessors::{Accessor, Actor, portal::PortalAccessor},
field::Field,
};
use phono_pestgros::escape_identifier;
use serde::Deserialize;
use sqlx::{postgres::types::Oid, query};
use uuid::Uuid;

View file

@ -8,7 +8,6 @@ use axum::{
use axum_extra::extract::Form;
use phono_models::{
accessors::{Accessor, Actor, portal::PortalAccessor},
expression::PgExpressionAny,
portal::Portal,
};
use serde::Deserialize;
@ -32,11 +31,10 @@ pub(super) struct PathParams {
#[derive(Debug, Deserialize)]
pub(super) struct FormBody {
filter_expression: Option<String>,
filter: String,
}
/// HTTP POST handler for applying a [`PgExpressionAny`] filter to a portal's
/// table viewer.
/// HTTP POST handler for applying a filter to a portal's table viewer.
///
/// This handler expects 3 path parameters with the structure described by
/// [`PathParams`].
@ -51,7 +49,7 @@ pub(super) async fn post(
rel_oid,
workspace_id,
}): Path<PathParams>,
Form(form): Form<FormBody>,
Form(FormBody { filter }): Form<FormBody>,
) -> Result<Response, AppError> {
// FIXME: csrf
@ -70,12 +68,9 @@ pub(super) async fn post(
.fetch_one()
.await?;
let filter: Option<PgExpressionAny> =
serde_json::from_str(&form.filter_expression.unwrap_or("null".to_owned()))?;
Portal::update()
.id(portal.id)
.table_filter(filter)
.filter(filter)
.build()?
.execute(&mut app_db)
.await?;

View file

@ -5,7 +5,8 @@ use axum::{
extract::{Path, State},
response::Response,
};
use phono_backends::{escape_identifier, pg_class::PgClass};
use phono_backends::pg_class::PgClass;
use phono_pestgros::escape_identifier;
use regex::Regex;
use serde::Deserialize;
use sqlx::{postgres::types::Oid, query};

View file

@ -5,13 +5,9 @@ use axum::{
extract::{Path, State},
response::{IntoResponse as _, Response},
};
use phono_backends::{
escape_identifier, pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass,
};
use phono_models::{
accessors::{Accessor, Actor, portal::PortalAccessor},
datum::Datum,
};
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass};
use phono_models::accessors::{Accessor, Actor, portal::PortalAccessor};
use phono_pestgros::{Datum, escape_identifier};
use serde::Deserialize;
use serde_json::json;
use sqlx::{Acquire as _, postgres::types::Oid, query};

View file

@ -1,9 +1,10 @@
use axum::{extract::State, response::IntoResponse};
use phono_backends::{client::WorkspaceClient, escape_identifier, rolnames::ROLE_PREFIX_USER};
use phono_backends::{client::WorkspaceClient, rolnames::ROLE_PREFIX_USER};
use phono_models::{
client::AppDbClient, cluster::Cluster, user::User, workspace::Workspace,
workspace_user_perm::WorkspaceMembership,
};
use phono_pestgros::escape_identifier;
use sqlx::{Connection as _, PgConnection, query};
use crate::{

View file

@ -3,14 +3,14 @@ use axum::{
extract::{Path, State},
response::IntoResponse,
};
use phono_backends::{
escape_identifier,
rolnames::{ROLE_PREFIX_SERVICE_CRED, SERVICE_CRED_CONN_LIMIT, SERVICE_CRED_SUFFIX_LEN},
use phono_backends::rolnames::{
ROLE_PREFIX_SERVICE_CRED, SERVICE_CRED_CONN_LIMIT, SERVICE_CRED_SUFFIX_LEN,
};
use phono_models::{
accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor},
service_cred::ServiceCred,
};
use phono_pestgros::escape_identifier;
use rand::distributions::{Alphanumeric, DistString};
use redact::Secret;
use serde::Deserialize;

View file

@ -2,14 +2,11 @@ use axum::{
extract::{Path, State},
response::IntoResponse,
};
use phono_backends::{
escape_identifier,
rolnames::{
ROLE_PREFIX_TABLE_OWNER, ROLE_PREFIX_TABLE_READER, ROLE_PREFIX_TABLE_WRITER,
ROLE_PREFIX_USER,
},
use phono_backends::rolnames::{
ROLE_PREFIX_TABLE_OWNER, ROLE_PREFIX_TABLE_READER, ROLE_PREFIX_TABLE_WRITER, ROLE_PREFIX_USER,
};
use phono_models::accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor};
use phono_pestgros::escape_identifier;
use serde::Deserialize;
use sqlx::{Acquire as _, query};
use uuid::Uuid;

View file

@ -3,12 +3,13 @@ use axum::{
extract::{Path, State},
response::IntoResponse,
};
use phono_backends::{escape_identifier, pg_database::PgDatabase, rolnames::ROLE_PREFIX_USER};
use phono_backends::{pg_database::PgDatabase, rolnames::ROLE_PREFIX_USER};
use phono_models::{
accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor},
user::User,
workspace_user_perm::WorkspaceMembership,
};
use phono_pestgros::escape_identifier;
use serde::Deserialize;
use sqlx::query;
use uuid::Uuid;

View file

@ -9,8 +9,7 @@
Portal Settings
</a>
<filter-menu
identifier-hints="{{ attr_names | json }}"
initial-value="{{ filter | json }}"
initial-value="{{ filter }}"
></filter-menu>
{% endblock %}

View file

@ -1,78 +0,0 @@
.expression-editor__container {
background: #eee;
border-radius: var(--default-border-radius--rounded);
display: flex;
}
.expression-editor__sidebar {
display: grid;
grid-template:
'padding-top' 1fr
'operator-selector' max-content
'actions' minmax(max-content, 1fr);
}
.expression-editor__main {
background: #fff;
border-radius: var(--default-border-radius--rounded);
border: solid 1px var(--default-border-color);
flex: 1;
padding: var(--default-padding);
}
.expression-editor__action-button {
padding: var(--default-padding);
svg path {
fill: currentColor;
}
}
.expression-editor__params {
display: flex;
flex-direction: column;
gap: var(--default-padding);
}
.expression-selector {
grid-area: operator-selector;
}
.expression-selector__expression-button {
align-items: center;
display: flex;
justify-content: center;
height: 2.5rem;
padding: 0;
width: 2.5rem;
svg path {
fill: currentColor;
}
}
.expression-selector__popover:popover-open {
top: anchor(bottom);
margin-top: 0.25rem;
position: absolute;
display: flex;
flex-direction: column;
padding: 0;
background: #fff;
}
.expression-selector__section {
align-items: center;
display: grid;
grid-template-columns: repeat(3, 1fr);
justify-content: center;
list-style-type: none;
margin: var(--default-padding);
padding: 0;
}
.expression-selector__li {
align-items: center;
display: flex;
justify-content: center;
}

View file

@ -1,8 +1,3 @@
/*
@use 'forms';
@use 'condition-editor';
*/
/* ======== Theming ======== */
:root {

View file

@ -1,5 +1,3 @@
@import "./expression-editor.css";
:root {
--table-header-border-color: var(--default-border-color);
--table-cell-border-color: oklch(from var(--default-border-color) calc(l * 1.15) c h);

View file

@ -1,91 +0,0 @@
<svelte:options
customElement={{
props: {
identifier_hints: { attribute: "identifier-hints", type: "Array" },
value: { reflect: true, type: "Object" },
},
shadow: "none",
tag: "expression-editor",
}}
/>
<script lang="ts">
import DatumEditor from "./datum-editor.svelte";
import ExpressionSelector from "./expression-selector.svelte";
import { type PgExpressionAny } from "./expression.svelte";
import ExpressionEditor from "./expression-editor.webc.svelte";
import { RFC_3339_S, type Presentation } from "./presentation.svelte";
import type { Datum } from "./datum.svelte";
const POTENTIAL_PRESENTATIONS: Presentation[] = [
{ t: "Numeric", c: {} },
{ t: "Text", c: { input_mode: { t: "MultiLine", c: {} } } },
{ t: "Timestamp", c: { format: RFC_3339_S } },
{ t: "Uuid", c: {} },
];
type Props = {
identifier_hints?: string[];
value?: PgExpressionAny;
};
let { identifier_hints = [], value = $bindable() }: Props = $props();
// Dynamic state to bind to datum editor.
let editor_value = $state<Datum | undefined>();
let editor_presentation = $state<Presentation>(POTENTIAL_PRESENTATIONS[0]);
$effect(() => {
editor_value = value?.t === "Literal" ? value.c : undefined;
});
// Writes the identifier chosen in the <select> back into the bound
// expression, but only while the current expression is an Identifier node.
function handle_identifier_selector_change(
ev: Event & { currentTarget: HTMLSelectElement },
) {
if (value?.t === "Identifier") {
// parts_raw is a list of name segments; this editor only ever writes a
// single-segment name.
value.c.parts_raw = [ev.currentTarget.value];
}
}
// Propagates an edit from the datum editor into the bound expression, but
// only while the current expression is a Literal node.
function handle_editor_change(datum_value: Datum) {
if (value?.t === "Literal") {
value.c = datum_value;
}
}
</script>
<div class="expression-editor__container">
<div class="expression-editor__sidebar">
<ExpressionSelector bind:value />
</div>
{#if value !== undefined}
<div class="expression-editor__main">
<div class="expression-editor__params">
{#if value.t === "Comparison"}
{#if value.c.t === "Infix"}
<ExpressionEditor bind:value={value.c.c.lhs} {identifier_hints} />
<ExpressionEditor bind:value={value.c.c.rhs} {identifier_hints} />
{:else if value.c.t === "IsNull" || value.c.t === "IsNotNull"}
<ExpressionEditor bind:value={value.c.c.lhs} {identifier_hints} />
{/if}
{:else if value.t === "Identifier"}
<select
onchange={handle_identifier_selector_change}
value={value.c.parts_raw[0]}
>
{#each identifier_hints as hint}
<option value={hint}>{hint}</option>
{/each}
</select>
{:else if value.t === "Literal"}
<DatumEditor
bind:current_presentation={editor_presentation}
bind:value={editor_value}
potential_presentations={POTENTIAL_PRESENTATIONS}
on_change={handle_editor_change}
/>
{/if}
</div>
</div>
{/if}
</div>

View file

@ -1,185 +0,0 @@
<!--
@component
Dropdown menu with grid of buttons for quickly selecting a Postgres expression
type. Used by `<ExpressionEditor />`.
-->
<script lang="ts">
import { type PgExpressionAny, expression_icon } from "./expression.svelte";
type Props = {
on_change?(new_value: PgExpressionAny): void;
value?: PgExpressionAny;
};
let { on_change, value = $bindable() }: Props = $props();
let menu_button_element = $state<HTMLButtonElement | undefined>();
let popover_element = $state<HTMLDivElement | undefined>();
// Hacky workaround because as of September 2025 implicit anchor association
// is still pretty broken, at least in Firefox.
let anchor_name = $state(`--anchor-${Math.floor(Math.random() * 1000000)}`);
const expressions: ReadonlyArray<{
section_label: string;
expressions: ReadonlyArray<PgExpressionAny>;
}> = [
{
section_label: "Comparisons",
expressions: [
{
t: "Comparison",
c: {
t: "Infix",
c: {
lhs: { t: "Identifier", c: { parts_raw: [] } },
operator: "Eq",
rhs: { t: "Literal", c: { t: "Text", c: "" } },
},
},
},
{
t: "Comparison",
c: {
t: "Infix",
c: {
lhs: { t: "Identifier", c: { parts_raw: [] } },
operator: "Neq",
rhs: { t: "Literal", c: { t: "Text", c: "" } },
},
},
},
{
t: "Comparison",
c: {
t: "Infix",
c: {
lhs: { t: "Identifier", c: { parts_raw: [] } },
operator: "Lt",
rhs: { t: "Literal", c: { t: "Text", c: "" } },
},
},
},
{
t: "Comparison",
c: {
t: "Infix",
c: {
lhs: { t: "Identifier", c: { parts_raw: [] } },
operator: "Gt",
rhs: { t: "Literal", c: { t: "Text", c: "" } },
},
},
},
{
t: "Comparison",
c: {
t: "IsNull",
c: {
lhs: { t: "Identifier", c: { parts_raw: [] } },
},
},
},
{
t: "Comparison",
c: {
t: "IsNotNull",
c: {
lhs: { t: "Identifier", c: { parts_raw: [] } },
},
},
},
],
},
{
section_label: "Conjunctions",
expressions: [
{
t: "Comparison",
c: { t: "Infix", c: { operator: "And" } },
},
{
t: "Comparison",
c: { t: "Infix", c: { operator: "Or" } },
},
],
},
{
section_label: "Values",
expressions: [
{
t: "Identifier",
c: { parts_raw: [] },
},
{
t: "Literal",
c: { t: "Text", c: "" },
},
],
},
{
section_label: "Transformations",
expressions: [
{
t: "ToJson",
c: { entries: [] },
},
],
},
];
let iconography_current = $derived(value && expression_icon(value));
// Toggles the expression-type popover open/closed.
function handle_menu_button_click() {
popover_element?.togglePopover();
}
// Applies the chosen expression template, closes the popover, returns focus
// to the menu button, and notifies the parent via the on_change callback.
function handle_expression_button_click(expr: PgExpressionAny) {
value = expr;
popover_element?.hidePopover();
menu_button_element?.focus();
on_change?.(value);
}
</script>
<div class="expression-selector">
<button
aria-label={`Select expression type (current: ${iconography_current?.label ?? "None"})`}
bind:this={menu_button_element}
class="expression-selector__expression-button"
onclick={handle_menu_button_click}
style:anchor-name={anchor_name}
title={iconography_current?.label}
type="button"
>
{#if value}
{@html iconography_current?.html}
{:else}
<i class="ti ti-circle-plus"></i>
{/if}
</button>
<div
bind:this={popover_element}
class="popover expression-selector__popover"
popover="auto"
style:position-anchor={anchor_name}
>
{#each expressions as section}
<ul class="expression-selector__section">
{#each section.expressions as expr}
{@const iconography = expression_icon(expr)}
<li class="expression-selector__li">
<button
class="expression-selector__expression-button"
onclick={() => handle_expression_button_click(expr)}
title={iconography.label}
type="button"
>
{@html iconography.html}
</button>
</li>
{/each}
</ul>
{/each}
</div>
</div>

View file

@ -1,175 +0,0 @@
import { z } from "zod";
import { datum_schema } from "./datum.svelte.ts";
// Tag strings for every supported expression variant. Kept as a runtime
// array (not just a type) so the zod enum below can validate input.
export const all_expression_types = [
  "Comparison",
  "Identifier",
  "Literal",
  "ToJson",
] as const;
// Type checking to ensure that all valid enum tags are included.
type Assert<_T extends true> = void;
type _ = Assert<PgExpressionAny["t"] extends PgExpressionType ? true : false>;
export const expression_type_schema = z.enum(all_expression_types);
// Operators accepted inside an infix comparison expression.
export const all_infix_comparison_operators = [
  "Eq",
  "Neq",
  "Gt",
  "Lt",
  "And",
  "Or",
] as const;
const pg_comparison_operator_schema = z.enum(all_infix_comparison_operators);
// NOTE: `lhs`/`rhs` are getters so the schema can refer to
// `pg_expression_any_schema` lazily, breaking the circular definition.
const pg_infix_expression_schema = z.object({
  operator: z.union([pg_comparison_operator_schema]),
  get lhs() {
    return pg_expression_any_schema.optional();
  },
  get rhs() {
    return pg_expression_any_schema.optional();
  },
});
const pg_comparison_expression_infix_schema = z.object({
  t: z.literal("Infix"),
  c: pg_infix_expression_schema,
});
const pg_is_null_expression_schema = z.object({
  get lhs() {
    return pg_expression_any_schema.optional();
  },
});
const pg_comparison_expression_is_null_schema = z.object({
  t: z.literal("IsNull"),
  c: pg_is_null_expression_schema,
});
const pg_is_not_null_expression_schema = z.object({
  get lhs() {
    return pg_expression_any_schema.optional();
  },
});
const pg_comparison_expression_is_not_null_schema = z.object({
  t: z.literal("IsNotNull"),
  c: pg_is_not_null_expression_schema,
});
// A comparison is one of: infix operator, `is null`, or `is not null`.
const pg_comparison_expression_schema = z.union([
  pg_comparison_expression_infix_schema,
  pg_comparison_expression_is_null_schema,
  pg_comparison_expression_is_not_null_schema,
]);
const pg_expression_any_comparison_schema = z.object({
  t: z.literal("Comparison"),
  c: pg_comparison_expression_schema,
});
// An identifier carried as its raw name segments.
const pg_identifier_expression_schema = z.object({
  parts_raw: z.array(z.string()),
});
const pg_expression_any_identifier_schema = z.object({
  t: z.literal("Identifier"),
  c: pg_identifier_expression_schema,
});
const pg_expression_any_literal_schema = z.object({
  t: z.literal("Literal"),
  c: datum_schema,
});
// Entry pairs for the `ToJson` variant; the getter defers the recursive
// reference, as above.
const pg_to_json_expression_schema = z.object({
  get entries() {
    return z.array(z.tuple([z.string(), pg_expression_any_schema.optional()]));
  },
});
const pg_expression_any_to_json_expression_schema = z.object({
  t: z.literal("ToJson"),
  c: pg_to_json_expression_schema,
});
// Top-level tagged union over every expression variant.
export const pg_expression_any_schema = z.union([
  pg_expression_any_comparison_schema,
  pg_expression_any_identifier_schema,
  pg_expression_any_literal_schema,
  pg_expression_any_to_json_expression_schema,
]);
export type PgExpressionAny = z.infer<typeof pg_expression_any_schema>;
export type PgExpressionType = z.infer<typeof expression_type_schema>;
export function expression_human_name(expr_type: PgExpressionType): string {
if (expr_type === "Comparison") {
return "Condition";
}
if (expr_type === "Identifier") {
return "Identifier";
}
if (expr_type === "Literal") {
return "Literal";
}
if (expr_type === "ToJson") {
return "JSON";
}
// Type guard to check for exhaustive matching.
type _ = Assert<typeof expr_type extends never ? true : false>;
throw new Error("this should be unreachable");
}
// Picks the icon markup and accessible label for an expression node.
export function expression_icon(expr: PgExpressionAny): {
  html: string;
  label: string;
} {
  switch (expr.t) {
    case "Comparison":
      switch (expr.c.t) {
        case "Infix":
          switch (expr.c.c.operator) {
            case "And":
              return { html: "&&", label: "And" };
            case "Eq":
              return { html: "=", label: "Is Equal To" };
            case "Gt":
              return { html: ">", label: "Is Greater Than" };
            case "Lt":
              return { html: "<", label: "Is Less Than" };
            case "Or":
              return { html: "||", label: "Or" };
            case "Neq":
              return { html: "\u2260", label: "Is Not Equal To" };
          }
          // Runtime guard for malformed input; unreachable per the types.
          throw new Error("this should be unreachable");
        case "IsNull":
          return {
            html: '<i class="ti ti-cube-3d-sphere-off"></i>',
            label: "Is Null",
          };
        case "IsNotNull":
          return { html: '<i class="ti ti-cube"></i>', label: "Is Not Null" };
      }
      // Runtime guard for malformed input; unreachable per the types.
      throw new Error("this should be unreachable");
    case "Identifier":
      return { html: '<i class="ti ti-variable"></i>', label: "Dynamic Value" };
    case "Literal":
      return { html: '<i class="ti ti-hash"></i>', label: "Static Value" };
    case "ToJson":
      return { html: '<i class="ti ti-code"></i>', label: "JSON String" };
  }
  // Runtime guard for malformed input; unreachable per the types.
  throw new Error("this should be unreachable");
}

View file

@ -1,51 +1,42 @@
<svelte:options
customElement={{
props: {
identifier_hints: { attribute: "identifier-hints", type: "Array" },
initialValue: { attribute: "initial-value", type: "Object" },
},
props: { initialValue: { attribute: "initial-value" } },
shadow: "none",
tag: "filter-menu",
}}
/>
<script lang="ts">
import { type PgExpressionAny } from "./expression.svelte";
import BasicDropdown from "./basic-dropdown.webc.svelte";
import ExpressionEditor from "./expression-editor.webc.svelte";
type Props = {
identifier_hints?: string[];
initialValue?: PgExpressionAny | null;
initialValue?: string;
};
let { identifier_hints = [], initialValue }: Props = $props();
let { initialValue = "" }: Props = $props();
let expr = $state<PgExpressionAny | undefined>(initialValue ?? undefined);
function handle_clear_button_click() {
expr = undefined;
}
let expr = $state(initialValue);
</script>
<div class="filter-menu toolbar-item">
<BasicDropdown>
<span slot="button-contents">Filter</span>
<form action="set-filter" class="padded" method="post" slot="popover">
<ExpressionEditor bind:value={expr} {identifier_hints} />
<div class="form__label">Filter expression (SQL)</div>
<textarea
class="form__input"
name="filter"
rows="8"
cols="60"
placeholder="For example: LOWER(&quot;my_column&quot;) = 'hello world'"
>{expr}</textarea
>
<div class="form__buttons">
<input
name="filter_expression"
type="hidden"
value={JSON.stringify(expr)}
/>
<button
class="button button--secondary"
onclick={handle_clear_button_click}
type="button"
>
Clear
</button>
<button class="button button--primary" type="submit">Apply</button>
</div>
</form>

View file

@ -32,7 +32,7 @@ component.
subfilter?: string;
};
let { columns = [], subfilter = "null" }: Props = $props();
let { columns = [], subfilter = "" }: Props = $props();
type LazyData = {
count: number;
@ -71,7 +71,7 @@ component.
{columns}
fields={lazy_data.fields}
rows_main={lazy_data.rows}
subfilter_active={!!subfilter && subfilter !== "null"}
subfilter_active={!!subfilter}
total_count={lazy_data.count}
/>
{/if}