Compare commits
No commits in common. "main" and "main" have entirely different histories.
59 changed files with 1253 additions and 1732 deletions
145
Cargo.lock
generated
145
Cargo.lock
generated
|
|
@ -364,15 +364,6 @@ dependencies = [
|
||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "backtrace-ext"
|
|
||||||
version = "0.2.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
|
|
||||||
dependencies = [
|
|
||||||
"backtrace",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "base64"
|
name = "base64"
|
||||||
version = "0.13.1"
|
version = "0.13.1"
|
||||||
|
|
@ -1231,7 +1222,7 @@ version = "0.2.24"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df"
|
checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-width 0.2.2",
|
"unicode-width",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -1765,12 +1756,6 @@ version = "2.11.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
|
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "is_ci"
|
|
||||||
version = "1.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "is_terminal_polyfill"
|
name = "is_terminal_polyfill"
|
||||||
version = "1.70.1"
|
version = "1.70.1"
|
||||||
|
|
@ -1931,36 +1916,6 @@ version = "2.7.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
|
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "miette"
|
|
||||||
version = "7.6.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7"
|
|
||||||
dependencies = [
|
|
||||||
"backtrace",
|
|
||||||
"backtrace-ext",
|
|
||||||
"cfg-if 1.0.3",
|
|
||||||
"miette-derive",
|
|
||||||
"owo-colors",
|
|
||||||
"supports-color",
|
|
||||||
"supports-hyperlinks",
|
|
||||||
"supports-unicode",
|
|
||||||
"terminal_size",
|
|
||||||
"textwrap",
|
|
||||||
"unicode-width 0.1.14",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "miette-derive"
|
|
||||||
version = "7.6.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mime"
|
name = "mime"
|
||||||
version = "0.3.17"
|
version = "0.3.17"
|
||||||
|
|
@ -2219,12 +2174,6 @@ version = "0.1.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "owo-colors"
|
|
||||||
version = "4.2.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "parking"
|
name = "parking"
|
||||||
version = "2.2.1"
|
version = "2.2.1"
|
||||||
|
|
@ -2277,22 +2226,20 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pest"
|
name = "pest"
|
||||||
version = "2.8.6"
|
version = "2.8.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662"
|
checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr",
|
"memchr",
|
||||||
"miette",
|
"thiserror 2.0.12",
|
||||||
"serde",
|
|
||||||
"serde_json",
|
|
||||||
"ucd-trie",
|
"ucd-trie",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pest_derive"
|
name = "pest_derive"
|
||||||
version = "2.8.6"
|
version = "2.8.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77"
|
checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"pest",
|
"pest",
|
||||||
"pest_generator",
|
"pest_generator",
|
||||||
|
|
@ -2300,9 +2247,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pest_generator"
|
name = "pest_generator"
|
||||||
version = "2.8.6"
|
version = "2.8.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f"
|
checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"pest",
|
"pest",
|
||||||
"pest_meta",
|
"pest_meta",
|
||||||
|
|
@ -2313,10 +2260,11 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pest_meta"
|
name = "pest_meta"
|
||||||
version = "2.8.6"
|
version = "2.8.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220"
|
checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"once_cell",
|
||||||
"pest",
|
"pest",
|
||||||
"sha2 0.10.9",
|
"sha2 0.10.9",
|
||||||
]
|
]
|
||||||
|
|
@ -2380,7 +2328,6 @@ dependencies = [
|
||||||
"chrono",
|
"chrono",
|
||||||
"derive_builder",
|
"derive_builder",
|
||||||
"nom 8.0.0",
|
"nom 8.0.0",
|
||||||
"phono-pestgros",
|
|
||||||
"regex",
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
"sqlx",
|
"sqlx",
|
||||||
|
|
@ -2398,7 +2345,6 @@ dependencies = [
|
||||||
"derive_builder",
|
"derive_builder",
|
||||||
"futures",
|
"futures",
|
||||||
"phono-backends",
|
"phono-backends",
|
||||||
"phono-pestgros",
|
|
||||||
"redact",
|
"redact",
|
||||||
"regex",
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
|
|
@ -2420,21 +2366,6 @@ dependencies = [
|
||||||
"thiserror 2.0.12",
|
"thiserror 2.0.12",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "phono-pestgros"
|
|
||||||
version = "0.0.1"
|
|
||||||
dependencies = [
|
|
||||||
"bigdecimal",
|
|
||||||
"chrono",
|
|
||||||
"pest",
|
|
||||||
"pest_derive",
|
|
||||||
"serde",
|
|
||||||
"serde_json",
|
|
||||||
"sqlx",
|
|
||||||
"thiserror 2.0.12",
|
|
||||||
"uuid",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "phono-server"
|
name = "phono-server"
|
||||||
version = "0.0.1"
|
version = "0.0.1"
|
||||||
|
|
@ -2458,7 +2389,6 @@ dependencies = [
|
||||||
"phono-backends",
|
"phono-backends",
|
||||||
"phono-models",
|
"phono-models",
|
||||||
"phono-namegen",
|
"phono-namegen",
|
||||||
"phono-pestgros",
|
|
||||||
"rand 0.8.5",
|
"rand 0.8.5",
|
||||||
"redact",
|
"redact",
|
||||||
"regex",
|
"regex",
|
||||||
|
|
@ -3559,27 +3489,6 @@ version = "2.6.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "supports-color"
|
|
||||||
version = "3.0.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6"
|
|
||||||
dependencies = [
|
|
||||||
"is_ci",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "supports-hyperlinks"
|
|
||||||
version = "3.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "supports-unicode"
|
|
||||||
version = "3.0.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.101"
|
version = "2.0.101"
|
||||||
|
|
@ -3683,26 +3592,6 @@ dependencies = [
|
||||||
"utf-8",
|
"utf-8",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "terminal_size"
|
|
||||||
version = "0.4.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
|
|
||||||
dependencies = [
|
|
||||||
"rustix",
|
|
||||||
"windows-sys 0.59.0",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "textwrap"
|
|
||||||
version = "0.16.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057"
|
|
||||||
dependencies = [
|
|
||||||
"unicode-linebreak",
|
|
||||||
"unicode-width 0.2.2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror"
|
name = "thiserror"
|
||||||
version = "1.0.69"
|
version = "1.0.69"
|
||||||
|
|
@ -4131,12 +4020,6 @@ version = "1.0.18"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-linebreak"
|
|
||||||
version = "0.1.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-normalization"
|
name = "unicode-normalization"
|
||||||
version = "0.1.24"
|
version = "0.1.24"
|
||||||
|
|
@ -4158,12 +4041,6 @@ version = "1.12.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
|
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-width"
|
|
||||||
version = "0.1.14"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-width"
|
name = "unicode-width"
|
||||||
version = "0.2.2"
|
version = "0.2.2"
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
cargo-features = ["codegen-backend"]
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
resolver = "3"
|
resolver = "3"
|
||||||
members = ["phono-*"]
|
members = ["phono-*"]
|
||||||
|
|
@ -17,7 +19,6 @@ futures = "0.3.31"
|
||||||
phono-backends = { path = "./phono-backends" }
|
phono-backends = { path = "./phono-backends" }
|
||||||
phono-models = { path = "./phono-models" }
|
phono-models = { path = "./phono-models" }
|
||||||
phono-namegen = { path = "./phono-namegen" }
|
phono-namegen = { path = "./phono-namegen" }
|
||||||
phono-pestgros = { path = "./phono-pestgros" }
|
|
||||||
rand = "0.8.5"
|
rand = "0.8.5"
|
||||||
redact = { version = "0.1.11", features = ["serde", "zeroize"] }
|
redact = { version = "0.1.11", features = ["serde", "zeroize"] }
|
||||||
regex = "1.11.1"
|
regex = "1.11.1"
|
||||||
|
|
@ -34,5 +35,7 @@ uuid = { version = "1.11.0", features = ["serde", "v4", "v7"] }
|
||||||
validator = { version = "0.20.0", features = ["derive"] }
|
validator = { version = "0.20.0", features = ["derive"] }
|
||||||
|
|
||||||
[profile.dev]
|
[profile.dev]
|
||||||
|
# Use experimental compiler backend for ~30% faster dev builds.
|
||||||
|
codegen-backend = "cranelift"
|
||||||
# Skip generating debug info for ~10% faster dev builds.
|
# Skip generating debug info for ~10% faster dev builds.
|
||||||
debug = false
|
debug = false
|
||||||
|
|
|
||||||
85
README.md
85
README.md
|
|
@ -1,42 +1,25 @@
|
||||||
# Phonograph
|
# Phonograph
|
||||||
|
|
||||||
Phonograph is a friendly, collaborative data platform for nerds of all stripes.
|
Phonograph is a friendly, collaborative data platform for nerds of all stripes,
|
||||||
It's for teams who wish they used Postgres, but actually use:
|
built around PostgreSQL.
|
||||||
|
|
||||||
- Airtable
|
Contemporary browser-centric platforms like Airtable open new frontiers for data
|
||||||
- Google Sheets
|
engineering, but they suffer from limitations that conventional databases have
|
||||||
- Excel
|
long since addressed: relational data models are an afterthought; row-level
|
||||||
- `inventory_export-2026-02-15_v3-FINAL.csv`
|
security is missing; third party integrations must be implemented piecemeal for
|
||||||
|
lack of a standardized API. Phonograph addresses these shortfalls by
|
||||||
|
implementing an accessible front-end interface backed by an existing, mature
|
||||||
|
database management system.
|
||||||
|
|
||||||
Phonograph is built on top of Postgres, exposing a curated subset of features
|

|
||||||
and adds a familiar user interface for developers and end users alike.
|
|
||||||
|
|
||||||
# Demo
|
# Browser Compatibility Note
|
||||||
|
|
||||||
Try the hosted demo at [phono.dev](https://phono.dev)!
|
[CSS anchor positioning](https://developer.mozilla.org/en-US/docs/Web/CSS/Guides/Anchor_positioning)
|
||||||
|
is a relatively new API, and Phonograph uses it extensively. Browser support is
|
||||||
# Features and Design
|
now fairly good, but
|
||||||
|
[Firefox users in particular](https://bugzilla.mozilla.org/show_bug.cgi?id=1988225)
|
||||||
- Leverages Postgres RBAC for robust authorization, including RLS (planned),
|
should upgrade to version 147 or newer for best experience.
|
||||||
while adding support for invite-by-email and a familiar permissions model for
|
|
||||||
collaboration.
|
|
||||||
- Integrates with effectively any third party software with a Postgres driver.
|
|
||||||
- Powerful user interface inspired by your favorite text editor features, like
|
|
||||||
multi-cursor editing.
|
|
||||||
- Write filters and generated column specs (planned) as PostgreSQL—made
|
|
||||||
possible by a custom SQL parser with support for advanced syntax like
|
|
||||||
`column = ANY(array)`.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
# An Experiment from Second System Technologies
|
|
||||||
|
|
||||||
Phonograph is a proof-of-concept built by Second System Technologies, to solve
|
|
||||||
real world problems, and its bones are built to scale. It's a work in progress,
|
|
||||||
which means that some expected features are missing and we have yet to sand out
|
|
||||||
many rough edges.
|
|
||||||
|
|
||||||
# Development Quickstart
|
# Development Quickstart
|
||||||
|
|
||||||
|
|
@ -56,3 +39,39 @@ external OAuth2 provider is required to manage authentication.
|
||||||
# The Phonograph Authorization Model
|
# The Phonograph Authorization Model
|
||||||
|
|
||||||
Refer to documentation in [docs/auth.md](./docs/auth.md).
|
Refer to documentation in [docs/auth.md](./docs/auth.md).
|
||||||
|
|
||||||
|
# Copyright and License
|
||||||
|
|
||||||
|
All original source code in this repository is copyright (C) 2025 Second System
|
||||||
|
Technologies LLC and distributed under the terms in
|
||||||
|
[the "LICENSE" file](./LICENSE). Certain third-party assets within
|
||||||
|
[the "static" directory](./static) may be governed by different licenses, for
|
||||||
|
example the Open Font License or MIT License, as stated by their original
|
||||||
|
authors. Copies of each relevant license have been included alongside these
|
||||||
|
files as needed.
|
||||||
|
|
||||||
|
# LLM Code Policy
|
||||||
|
|
||||||
|
Large language model code generation is permitted sparingly in very limited
|
||||||
|
cases, for example for completing clearly defined transformations which span
|
||||||
|
multiple files and are not supported by conventional code actions. All code
|
||||||
|
generated by LLMs is considered reflective of its author, and authors are
|
||||||
|
expected to thoroughly and frequently review before committing affected work.
|
||||||
|
|
||||||
|
As of this writing, models display a strong bias towards patterns which are well
|
||||||
|
represented in public open source projects. This can cause them to tend towards
|
||||||
|
suboptimal one-size-fits-most or simply outdated coding practices in certain
|
||||||
|
circumstances. LLM assistance should be sufficiently constrained to avoid
|
||||||
|
allowing outputs to dictate or implicitly guide significant design decisions.
|
||||||
|
|
||||||
|
Furthermore, current language models broadly behave adversarily, in the sense
|
||||||
|
that they are optimized to make perceiving model outputs versus non-model
|
||||||
|
outputs as difficult as possible. This can make generated code uniquely
|
||||||
|
challenging to review effectively. In this context, non-trivial business logic,
|
||||||
|
particularly logic with security implications, may not be implemented with
|
||||||
|
direct assistance from LLM tools.
|
||||||
|
|
||||||
|
Examples of LLM-assisted changes in practice:
|
||||||
|
|
||||||
|
- Replacing SVG icons with similar webfont icons from a different icon pack.
|
||||||
|
(Revision `ztrnxzqv` (Git `a8dd49f7`))
|
||||||
|
|
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 216 KiB |
BIN
docs/screenshot_multi_cursor.png
Normal file
BIN
docs/screenshot_multi_cursor.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 805 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 369 KiB |
|
|
@ -1,7 +1,7 @@
|
||||||
[tools]
|
[tools]
|
||||||
deno = "latest"
|
deno = "latest"
|
||||||
rebar = "latest"
|
rebar = "latest"
|
||||||
rust = { version = "1.93", components = "rust-analyzer,clippy" }
|
rust = { version = "nightly", components = "rust-analyzer,clippy,rustc-codegen-cranelift-preview" }
|
||||||
watchexec = "latest"
|
watchexec = "latest"
|
||||||
"cargo:sqlx-cli" = "0.8.6"
|
"cargo:sqlx-cli" = "0.8.6"
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,6 @@ version.workspace = true
|
||||||
chrono = { workspace = true }
|
chrono = { workspace = true }
|
||||||
derive_builder = { workspace = true }
|
derive_builder = { workspace = true }
|
||||||
nom = "8.0.0"
|
nom = "8.0.0"
|
||||||
phono-pestgros = { workspace = true }
|
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
sqlx = { workspace = true }
|
sqlx = { workspace = true }
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use sqlx::{PgConnection, Postgres, Row as _, pool::PoolConnection, query};
|
use sqlx::{PgConnection, Postgres, Row as _, pool::PoolConnection, query};
|
||||||
|
|
||||||
|
use crate::escape_identifier;
|
||||||
|
|
||||||
/// Newtype to differentiate between workspace and application database
|
/// Newtype to differentiate between workspace and application database
|
||||||
/// connections.
|
/// connections.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
|
|
||||||
|
|
@ -22,3 +22,6 @@ pub mod pg_database;
|
||||||
pub mod pg_namespace;
|
pub mod pg_namespace;
|
||||||
pub mod pg_role;
|
pub mod pg_role;
|
||||||
pub mod rolnames;
|
pub mod rolnames;
|
||||||
|
mod utils;
|
||||||
|
|
||||||
|
pub use utils::escape_identifier;
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,10 @@
|
||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
|
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use sqlx::{Encode, Postgres, postgres::types::Oid, query_as, query_as_unchecked};
|
use sqlx::{Encode, Postgres, postgres::types::Oid, query_as, query_as_unchecked};
|
||||||
|
|
||||||
use crate::{client::WorkspaceClient, pg_acl::PgAclItem, pg_namespace::PgNamespace};
|
use crate::{
|
||||||
|
client::WorkspaceClient, escape_identifier, pg_acl::PgAclItem, pg_namespace::PgNamespace,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct PgClass {
|
pub struct PgClass {
|
||||||
|
|
|
||||||
25
phono-backends/src/utils.rs
Normal file
25
phono-backends/src/utils.rs
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
/// Given a raw identifier (such as a table name, column name, etc.), format it
|
||||||
|
/// so that it may be safely interpolated into a SQL query.
|
||||||
|
pub fn escape_identifier(identifier: &str) -> String {
|
||||||
|
// Escaping identifiers for Postgres is fairly easy, provided that the input is
|
||||||
|
// already known to contain no invalid multi-byte sequences. Backslashes may
|
||||||
|
// remain as-is, and embedded double quotes are escaped simply by doubling
|
||||||
|
// them (`"` becomes `""`). Refer to the PQescapeInternal() function in
|
||||||
|
// libpq (fe-exec.c) and Diesel's PgQueryBuilder::push_identifier().
|
||||||
|
format!("\"{}\"", identifier.replace('"', "\"\""))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_escape_identifier() {
|
||||||
|
assert_eq!(escape_identifier("hello"), r#""hello""#);
|
||||||
|
assert_eq!(escape_identifier("hello world"), r#""hello world""#);
|
||||||
|
assert_eq!(
|
||||||
|
escape_identifier(r#""hello" "world""#),
|
||||||
|
r#""""hello"" ""world""""#
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -9,7 +9,6 @@ chrono = { workspace = true }
|
||||||
derive_builder = { workspace = true }
|
derive_builder = { workspace = true }
|
||||||
futures = { workspace = true }
|
futures = { workspace = true }
|
||||||
phono-backends = { workspace = true }
|
phono-backends = { workspace = true }
|
||||||
phono-pestgros = { workspace = true }
|
|
||||||
redact = { workspace = true }
|
redact = { workspace = true }
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
|
|
|
||||||
|
|
@ -1,2 +0,0 @@
|
||||||
alter table portals add column if not exists table_filter jsonb not null default 'null';
|
|
||||||
alter table portals drop column if exists filter;
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
alter table portals add column if not exists filter text not null default '';
|
|
||||||
-- This is irreversible and ordinarily should be run in a later migration, but
|
|
||||||
-- it's being rolled out while manually verifying that there will be negligible
|
|
||||||
-- impact to users, so I'm folding it into this migration for convenience.
|
|
||||||
alter table portals drop column if exists table_filter;
|
|
||||||
|
|
@ -93,20 +93,18 @@ impl<'a> Accessor<Portal> for PortalAccessor<'a> {
|
||||||
AccessError::NotFound
|
AccessError::NotFound
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
if spec
|
spec.verify_workspace_id
|
||||||
.verify_workspace_id
|
.is_none_or(|value| portal.workspace_id == value)
|
||||||
.is_some_and(|value| portal.workspace_id != value)
|
.ok_or_else(|| {
|
||||||
{
|
|
||||||
debug!("workspace_id check failed for portal");
|
debug!("workspace_id check failed for portal");
|
||||||
return Err(AccessError::NotFound);
|
AccessError::NotFound
|
||||||
}
|
})?;
|
||||||
if spec
|
spec.verify_rel_oid
|
||||||
.verify_rel_oid
|
.is_none_or(|value| portal.class_oid == value)
|
||||||
.is_some_and(|value| portal.class_oid != value)
|
.ok_or_else(|| {
|
||||||
{
|
|
||||||
debug!("rel_oid check failed for portal");
|
debug!("rel_oid check failed for portal");
|
||||||
return Err(AccessError::NotFound);
|
AccessError::NotFound
|
||||||
}
|
})?;
|
||||||
|
|
||||||
let rel = if let Some(value) = spec.using_rel {
|
let rel = if let Some(value) = spec.using_rel {
|
||||||
value
|
value
|
||||||
|
|
|
||||||
|
|
@ -4,9 +4,6 @@ use serde::{Deserialize, Serialize};
|
||||||
use sqlx::{Postgres, QueryBuilder};
|
use sqlx::{Postgres, QueryBuilder};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
/// Enum representing all supported literal types, providing convenience
|
|
||||||
/// methods for working with them in [`sqlx`] queries, and defining a [`serde`]
|
|
||||||
/// encoding for use across the application stack.
|
|
||||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
#[serde(tag = "t", content = "c")]
|
#[serde(tag = "t", content = "c")]
|
||||||
pub enum Datum {
|
pub enum Datum {
|
||||||
|
|
@ -23,7 +20,6 @@ pub enum Datum {
|
||||||
Uuid(Option<Uuid>),
|
Uuid(Option<Uuid>),
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Should sqlx helpers be moved to a separate crate?
|
|
||||||
impl Datum {
|
impl Datum {
|
||||||
// TODO: Can something similar be achieved with a generic return type?
|
// TODO: Can something similar be achieved with a generic return type?
|
||||||
/// Bind this as a parameter to a sqlx query.
|
/// Bind this as a parameter to a sqlx query.
|
||||||
164
phono-models/src/expression.rs
Normal file
164
phono-models/src/expression.rs
Normal file
|
|
@ -0,0 +1,164 @@
|
||||||
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
use phono_backends::escape_identifier;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::{datum::Datum, query_builders::QueryFragment};
|
||||||
|
|
||||||
|
/// Building block of a syntax tree for a constrained subset of SQL that can be
|
||||||
|
/// statically analyzed, to validate that user-provided expressions perform only
|
||||||
|
/// operations that are read-only and otherwise safe to execute.
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
#[serde(tag = "t", content = "c")]
|
||||||
|
pub enum PgExpressionAny {
|
||||||
|
Comparison(PgComparisonExpression),
|
||||||
|
Identifier(PgIdentifierExpression),
|
||||||
|
Literal(Datum),
|
||||||
|
ToJson(PgToJsonExpression),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PgExpressionAny {
|
||||||
|
pub fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
match self {
|
||||||
|
Self::Comparison(expr) => expr.into_query_fragment(),
|
||||||
|
Self::Identifier(expr) => expr.into_query_fragment(),
|
||||||
|
Self::Literal(expr) => {
|
||||||
|
if expr.is_none() {
|
||||||
|
QueryFragment::from_sql("null")
|
||||||
|
} else {
|
||||||
|
QueryFragment::from_param(expr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Self::ToJson(expr) => expr.into_query_fragment(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
#[serde(tag = "t", content = "c")]
|
||||||
|
pub enum PgComparisonExpression {
|
||||||
|
Infix(PgInfixExpression<PgComparisonOperator>),
|
||||||
|
IsNull(PgIsNullExpression),
|
||||||
|
IsNotNull(PgIsNotNullExpression),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PgComparisonExpression {
|
||||||
|
fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
match self {
|
||||||
|
Self::Infix(expr) => expr.into_query_fragment(),
|
||||||
|
Self::IsNull(expr) => expr.into_query_fragment(),
|
||||||
|
Self::IsNotNull(expr) => expr.into_query_fragment(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
pub struct PgInfixExpression<T: Display> {
|
||||||
|
pub operator: T,
|
||||||
|
pub lhs: Box<PgExpressionAny>,
|
||||||
|
pub rhs: Box<PgExpressionAny>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: Display> PgInfixExpression<T> {
|
||||||
|
fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
QueryFragment::concat([
|
||||||
|
QueryFragment::from_sql("(("),
|
||||||
|
self.lhs.into_query_fragment(),
|
||||||
|
QueryFragment::from_sql(&format!(") {} (", self.operator)),
|
||||||
|
self.rhs.into_query_fragment(),
|
||||||
|
QueryFragment::from_sql("))"),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, strum::Display, Deserialize, PartialEq, Serialize)]
|
||||||
|
pub enum PgComparisonOperator {
|
||||||
|
#[strum(to_string = "and")]
|
||||||
|
And,
|
||||||
|
#[strum(to_string = "=")]
|
||||||
|
Eq,
|
||||||
|
#[strum(to_string = ">")]
|
||||||
|
Gt,
|
||||||
|
#[strum(to_string = "<")]
|
||||||
|
Lt,
|
||||||
|
#[strum(to_string = "<>")]
|
||||||
|
Neq,
|
||||||
|
#[strum(to_string = "or")]
|
||||||
|
Or,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
pub struct PgIsNullExpression {
|
||||||
|
lhs: Box<PgExpressionAny>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PgIsNullExpression {
|
||||||
|
fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
QueryFragment::concat([
|
||||||
|
QueryFragment::from_sql("(("),
|
||||||
|
self.lhs.into_query_fragment(),
|
||||||
|
QueryFragment::from_sql(") is null)"),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
pub struct PgIsNotNullExpression {
|
||||||
|
lhs: Box<PgExpressionAny>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PgIsNotNullExpression {
|
||||||
|
fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
QueryFragment::concat([
|
||||||
|
QueryFragment::from_sql("(("),
|
||||||
|
self.lhs.into_query_fragment(),
|
||||||
|
QueryFragment::from_sql(") is not null)"),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
pub struct PgIdentifierExpression {
|
||||||
|
pub parts_raw: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PgIdentifierExpression {
|
||||||
|
fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
QueryFragment::join(
|
||||||
|
self.parts_raw
|
||||||
|
.iter()
|
||||||
|
.map(|part| QueryFragment::from_sql(&escape_identifier(part))),
|
||||||
|
QueryFragment::from_sql("."),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||||
|
pub struct PgToJsonExpression {
|
||||||
|
entries: Vec<(String, PgExpressionAny)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PgToJsonExpression {
|
||||||
|
/// Generates a query fragment to the effect of:
|
||||||
|
/// `to_json((select ($expr) as "ident", ($expr2) as "ident2"))`
|
||||||
|
fn into_query_fragment(self) -> QueryFragment {
|
||||||
|
if self.entries.is_empty() {
|
||||||
|
QueryFragment::from_sql("'{}'")
|
||||||
|
} else {
|
||||||
|
QueryFragment::concat([
|
||||||
|
QueryFragment::from_sql("to_json((select "),
|
||||||
|
QueryFragment::join(
|
||||||
|
self.entries.into_iter().map(|(key, value)| {
|
||||||
|
QueryFragment::concat([
|
||||||
|
QueryFragment::from_sql("("),
|
||||||
|
value.into_query_fragment(),
|
||||||
|
QueryFragment::from_sql(&format!(") as {}", escape_identifier(&key))),
|
||||||
|
])
|
||||||
|
}),
|
||||||
|
QueryFragment::from_sql(", "),
|
||||||
|
),
|
||||||
|
QueryFragment::from_sql("))"),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -2,7 +2,6 @@ use bigdecimal::BigDecimal;
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use derive_builder::Builder;
|
use derive_builder::Builder;
|
||||||
use phono_backends::pg_attribute::PgAttribute;
|
use phono_backends::pg_attribute::PgAttribute;
|
||||||
use phono_pestgros::Datum;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::Acquire as _;
|
use sqlx::Acquire as _;
|
||||||
use sqlx::{
|
use sqlx::{
|
||||||
|
|
@ -12,7 +11,9 @@ use sqlx::{
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
use crate::{client::AppDbClient, presentation::Presentation};
|
use crate::client::AppDbClient;
|
||||||
|
use crate::datum::Datum;
|
||||||
|
use crate::presentation::Presentation;
|
||||||
|
|
||||||
/// A materialization of a database column, fit for consumption by an end user.
|
/// A materialization of a database column, fit for consumption by an end user.
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
@ -14,15 +14,20 @@
|
||||||
// received a copy of the GNU Affero General Public License along with this
|
// received a copy of the GNU Affero General Public License along with this
|
||||||
// program. If not, see <http://www.gnu.org/licenses/>.
|
// program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
#![feature(bool_to_result)] // Enable support for `ok_or()` on bools.
|
||||||
|
|
||||||
pub mod accessors;
|
pub mod accessors;
|
||||||
pub mod client;
|
pub mod client;
|
||||||
pub mod cluster;
|
pub mod cluster;
|
||||||
|
pub mod datum;
|
||||||
pub mod errors;
|
pub mod errors;
|
||||||
|
pub mod expression;
|
||||||
pub mod field;
|
pub mod field;
|
||||||
pub mod language;
|
pub mod language;
|
||||||
mod macros;
|
mod macros;
|
||||||
pub mod portal;
|
pub mod portal;
|
||||||
pub mod presentation;
|
pub mod presentation;
|
||||||
|
pub mod query_builders;
|
||||||
pub mod service_cred;
|
pub mod service_cred;
|
||||||
pub mod user;
|
pub mod user;
|
||||||
pub mod workspace;
|
pub mod workspace;
|
||||||
|
|
|
||||||
|
|
@ -3,11 +3,13 @@ use std::sync::LazyLock;
|
||||||
use derive_builder::Builder;
|
use derive_builder::Builder;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use sqlx::{postgres::types::Oid, query, query_as};
|
use sqlx::{postgres::types::Oid, query, query_as, types::Json};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
use crate::{client::AppDbClient, errors::QueryError, macros::with_id_query};
|
use crate::{
|
||||||
|
client::AppDbClient, errors::QueryError, expression::PgExpressionAny, macros::with_id_query,
|
||||||
|
};
|
||||||
|
|
||||||
pub static RE_PORTAL_NAME: LazyLock<Regex> =
|
pub static RE_PORTAL_NAME: LazyLock<Regex> =
|
||||||
LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9][()a-zA-Z0-9 _-]*[a-zA-Z0-9()_-]$").unwrap());
|
LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9][()a-zA-Z0-9 _-]*[a-zA-Z0-9()_-]$").unwrap());
|
||||||
|
|
@ -34,7 +36,7 @@ pub struct Portal {
|
||||||
|
|
||||||
/// JSONB-encoded expression to use for filtering rows in the web-based
|
/// JSONB-encoded expression to use for filtering rows in the web-based
|
||||||
/// table view.
|
/// table view.
|
||||||
pub filter: String,
|
pub table_filter: Json<Option<PgExpressionAny>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Portal {
|
impl Portal {
|
||||||
|
|
@ -63,7 +65,7 @@ select
|
||||||
workspace_id,
|
workspace_id,
|
||||||
class_oid,
|
class_oid,
|
||||||
form_public,
|
form_public,
|
||||||
filter
|
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||||
from portals
|
from portals
|
||||||
where id = $1
|
where id = $1
|
||||||
"#,
|
"#,
|
||||||
|
|
@ -85,7 +87,7 @@ select
|
||||||
workspace_id,
|
workspace_id,
|
||||||
class_oid,
|
class_oid,
|
||||||
form_public,
|
form_public,
|
||||||
filter
|
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||||
from portals
|
from portals
|
||||||
where workspace_id = $1
|
where workspace_id = $1
|
||||||
"#,
|
"#,
|
||||||
|
|
@ -120,7 +122,7 @@ select
|
||||||
workspace_id,
|
workspace_id,
|
||||||
class_oid,
|
class_oid,
|
||||||
form_public,
|
form_public,
|
||||||
filter
|
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||||
from portals
|
from portals
|
||||||
where workspace_id = $1 and class_oid = $2
|
where workspace_id = $1 and class_oid = $2
|
||||||
"#,
|
"#,
|
||||||
|
|
@ -159,7 +161,7 @@ returning
|
||||||
workspace_id,
|
workspace_id,
|
||||||
class_oid,
|
class_oid,
|
||||||
form_public,
|
form_public,
|
||||||
filter
|
table_filter as "table_filter: Json<Option<PgExpressionAny>>"
|
||||||
"#,
|
"#,
|
||||||
self.workspace_id,
|
self.workspace_id,
|
||||||
self.class_oid,
|
self.class_oid,
|
||||||
|
|
@ -178,7 +180,7 @@ pub struct Update {
|
||||||
form_public: Option<bool>,
|
form_public: Option<bool>,
|
||||||
|
|
||||||
#[builder(default, setter(strip_option = true))]
|
#[builder(default, setter(strip_option = true))]
|
||||||
filter: Option<String>,
|
table_filter: Option<Option<PgExpressionAny>>,
|
||||||
|
|
||||||
#[builder(default, setter(strip_option = true))]
|
#[builder(default, setter(strip_option = true))]
|
||||||
#[validate(regex(path = *RE_PORTAL_NAME))]
|
#[validate(regex(path = *RE_PORTAL_NAME))]
|
||||||
|
|
@ -194,16 +196,16 @@ impl Update {
|
||||||
query!(
|
query!(
|
||||||
"update portals set form_public = $1 where id = $2",
|
"update portals set form_public = $1 where id = $2",
|
||||||
form_public,
|
form_public,
|
||||||
self.id,
|
self.id
|
||||||
)
|
)
|
||||||
.execute(app_db.get_conn())
|
.execute(app_db.get_conn())
|
||||||
.await?;
|
.await?;
|
||||||
}
|
}
|
||||||
if let Some(filter) = self.filter {
|
if let Some(table_filter) = self.table_filter {
|
||||||
query!(
|
query!(
|
||||||
"update portals set filter = $1 where id = $2",
|
"update portals set table_filter = $1 where id = $2",
|
||||||
filter,
|
Json(table_filter) as Json<Option<PgExpressionAny>>,
|
||||||
self.id,
|
self.id
|
||||||
)
|
)
|
||||||
.execute(app_db.get_conn())
|
.execute(app_db.get_conn())
|
||||||
.await?;
|
.await?;
|
||||||
|
|
|
||||||
171
phono-models/src/query_builders.rs
Normal file
171
phono-models/src/query_builders.rs
Normal file
|
|
@ -0,0 +1,171 @@
|
||||||
|
//! Assorted utilities for dynamically constructing and manipulating [`sqlx`]
|
||||||
|
//! queries.
|
||||||
|
|
||||||
|
use sqlx::{Postgres, QueryBuilder};
|
||||||
|
|
||||||
|
use crate::datum::Datum;
|
||||||
|
|
||||||
|
/// Representation of a partial, parameterized SQL query. Allows callers to
|
||||||
|
/// build queries iteratively and dynamically, handling parameter numbering
|
||||||
|
/// (`$1`, `$2`, `$3`, ...) automatically.
|
||||||
|
///
|
||||||
|
/// This is similar to [`sqlx::QueryBuilder`], except that [`QueryFragment`]
|
||||||
|
/// objects are composable and may be concatenated to each other.
|
||||||
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
|
pub struct QueryFragment {
|
||||||
|
/// SQL string, split wherever there is a query parameter. For example,
|
||||||
|
/// `select * from foo where id = $1 and status = $2` is represented along
|
||||||
|
/// the lines of `["select * from foo where id = ", " and status = ", ""]`.
|
||||||
|
/// `plain_sql` should always have exactly one more element than `params`.
|
||||||
|
plain_sql: Vec<String>,
|
||||||
|
params: Vec<Datum>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl QueryFragment {
|
||||||
|
/// Validate invariants. Should be run immediately before returning any
|
||||||
|
/// useful output.
|
||||||
|
fn gut_checks(&self) {
|
||||||
|
assert!(self.plain_sql.len() == self.params.len() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse from a SQL string with no parameters.
|
||||||
|
pub fn from_sql(sql: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
plain_sql: vec![sql.to_owned()],
|
||||||
|
params: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convenience function to construct an empty value.
|
||||||
|
pub fn empty() -> Self {
|
||||||
|
Self::from_sql("")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse from a parameter value with no additional SQL. (Renders as `$n`,
|
||||||
|
/// where`n` is the appropriate parameter index.)
|
||||||
|
pub fn from_param(param: Datum) -> Self {
|
||||||
|
Self {
|
||||||
|
plain_sql: vec!["".to_owned(), "".to_owned()],
|
||||||
|
params: vec![param],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Append another query fragment to this one.
|
||||||
|
pub fn push(&mut self, mut other: QueryFragment) {
|
||||||
|
let tail = self
|
||||||
|
.plain_sql
|
||||||
|
.pop()
|
||||||
|
.expect("already asserted that vec contains at least 1 item");
|
||||||
|
let head = other
|
||||||
|
.plain_sql
|
||||||
|
.first()
|
||||||
|
.expect("already asserted that vec contains at least 1 item");
|
||||||
|
self.plain_sql.push(format!("{tail}{head}"));
|
||||||
|
for value in other.plain_sql.drain(1..) {
|
||||||
|
self.plain_sql.push(value);
|
||||||
|
}
|
||||||
|
self.params.append(&mut other.params);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Combine multiple QueryFragments with a separator, similar to
|
||||||
|
/// [`Vec::join`].
|
||||||
|
pub fn join<I: IntoIterator<Item = Self>>(fragments: I, sep: Self) -> Self {
|
||||||
|
let mut acc = QueryFragment::from_sql("");
|
||||||
|
let mut iter = fragments.into_iter();
|
||||||
|
let mut fragment = match iter.next() {
|
||||||
|
Some(value) => value,
|
||||||
|
None => return acc,
|
||||||
|
};
|
||||||
|
for next_fragment in iter {
|
||||||
|
acc.push(fragment);
|
||||||
|
acc.push(sep.clone());
|
||||||
|
fragment = next_fragment;
|
||||||
|
}
|
||||||
|
acc.push(fragment);
|
||||||
|
acc
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convenience method equivalent to:
|
||||||
|
/// `QueryFragment::concat(fragments, QueryFragment::from_sql(""))`
|
||||||
|
pub fn concat<I: IntoIterator<Item = Self>>(fragments: I) -> Self {
|
||||||
|
Self::join(fragments, Self::from_sql(""))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks whether value is empty. A value is considered empty if the
|
||||||
|
/// resulting SQL code is 0 characters long.
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.gut_checks();
|
||||||
|
self.plain_sql.len() == 1
|
||||||
|
&& self
|
||||||
|
.plain_sql
|
||||||
|
.first()
|
||||||
|
.expect("already checked that len == 1")
|
||||||
|
.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<QueryFragment> for QueryBuilder<'_, Postgres> {
|
||||||
|
fn from(value: QueryFragment) -> Self {
|
||||||
|
value.gut_checks();
|
||||||
|
let mut builder = QueryBuilder::new("");
|
||||||
|
let mut param_iter = value.params.into_iter();
|
||||||
|
for plain_sql in value.plain_sql {
|
||||||
|
builder.push(plain_sql);
|
||||||
|
if let Some(param) = param_iter.next() {
|
||||||
|
param.push_bind_onto(&mut builder);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
builder
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper type to make it easier to build and reason about multiple related SQL
|
||||||
|
/// queries.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct SelectQuery {
|
||||||
|
/// Query fragment following (not including) "select ".
|
||||||
|
pub selection: QueryFragment,
|
||||||
|
|
||||||
|
/// Query fragment following (not including) "from ".
|
||||||
|
pub source: QueryFragment,
|
||||||
|
|
||||||
|
/// Query fragment following (not including) "where ", or empty if not
|
||||||
|
/// applicable.
|
||||||
|
pub filters: QueryFragment,
|
||||||
|
|
||||||
|
/// Query fragment following (not including) "order by ", or empty if not
|
||||||
|
/// applicable.
|
||||||
|
pub order: QueryFragment,
|
||||||
|
|
||||||
|
/// Query fragment following (not including) "limit ", or empty if not
|
||||||
|
/// applicable.
|
||||||
|
pub limit: QueryFragment,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<SelectQuery> for QueryFragment {
|
||||||
|
fn from(value: SelectQuery) -> Self {
|
||||||
|
let mut result = QueryFragment::from_sql("select ");
|
||||||
|
result.push(value.selection);
|
||||||
|
result.push(QueryFragment::from_sql(" from "));
|
||||||
|
result.push(value.source);
|
||||||
|
if !value.filters.is_empty() {
|
||||||
|
result.push(QueryFragment::from_sql(" where "));
|
||||||
|
result.push(value.filters);
|
||||||
|
}
|
||||||
|
if !value.order.is_empty() {
|
||||||
|
result.push(QueryFragment::from_sql(" order by "));
|
||||||
|
result.push(value.order);
|
||||||
|
}
|
||||||
|
if !value.limit.is_empty() {
|
||||||
|
result.push(QueryFragment::from_sql(" limit "));
|
||||||
|
result.push(value.limit);
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<SelectQuery> for QueryBuilder<'_, Postgres> {
|
||||||
|
fn from(value: SelectQuery) -> Self {
|
||||||
|
QueryFragment::from(value).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,15 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "phono-pestgros"
|
|
||||||
edition.workspace = true
|
|
||||||
version.workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
bigdecimal = { workspace = true }
|
|
||||||
chrono = { workspace = true }
|
|
||||||
pest = { version = "2.8.6", features = ["miette-error"] }
|
|
||||||
pest_derive = "2.8.6"
|
|
||||||
serde = { workspace = true }
|
|
||||||
serde_json = { workspace = true }
|
|
||||||
sqlx = { workspace = true }
|
|
||||||
thiserror = { workspace = true }
|
|
||||||
uuid = { workspace = true }
|
|
||||||
|
|
@ -1,52 +0,0 @@
|
||||||
use crate::{Datum, Expr, InfixOp};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn empty_array_parses() {
|
|
||||||
assert_eq!(Expr::try_from("array[]"), Ok(Expr::Array(vec![])));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn array_of_literals_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("array[1, 2, 3]"),
|
|
||||||
Ok(Expr::Array(vec![
|
|
||||||
Expr::Literal(Datum::Numeric(Some(1.into()))),
|
|
||||||
Expr::Literal(Datum::Numeric(Some(2.into()))),
|
|
||||||
Expr::Literal(Datum::Numeric(Some(3.into()))),
|
|
||||||
])),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn array_of_exprs_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("array[(1), 2 + 3]"),
|
|
||||||
Ok(Expr::Array(vec![
|
|
||||||
Expr::Literal(Datum::Numeric(Some(1.into()))),
|
|
||||||
Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
|
|
||||||
op: InfixOp::Add,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
|
|
||||||
},
|
|
||||||
])),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn array_cmp_modifier_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("3 = any(array[3])"),
|
|
||||||
Ok(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
|
|
||||||
op: InfixOp::WithCmpModifierAny(Box::new(InfixOp::Eq)),
|
|
||||||
rhs: Box::new(Expr::Array(vec![Expr::Literal(Datum::Numeric(Some(
|
|
||||||
3.into()
|
|
||||||
)))]))
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn non_parenthesized_array_cmp_modifier_fails() {
|
|
||||||
assert!(Expr::try_from("3 = any array[3]").is_err());
|
|
||||||
}
|
|
||||||
|
|
@ -1,25 +0,0 @@
|
||||||
use std::error::Error;
|
|
||||||
|
|
||||||
use sqlx::{Postgres, QueryBuilder};
|
|
||||||
|
|
||||||
use crate::Expr;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn sql_converts_to_query_builder() -> Result<(), Box<dyn Error>> {
|
|
||||||
let expr = Expr::try_from("3 + 5 < 10")?;
|
|
||||||
assert_eq!(
|
|
||||||
QueryBuilder::<'_, Postgres>::from(expr).sql(),
|
|
||||||
"(($1) + ($2)) < ($3)",
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn cmp_array_modifier_round_trips() -> Result<(), Box<dyn Error>> {
|
|
||||||
let expr = Expr::try_from("1 = 2 and 3 < any(array[4])")?;
|
|
||||||
assert_eq!(
|
|
||||||
QueryBuilder::<'_, Postgres>::from(expr).sql(),
|
|
||||||
"(($1) = ($2)) and (($3) < any (array[($4)]))",
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
@ -1,76 +0,0 @@
|
||||||
use std::error::Error;
|
|
||||||
|
|
||||||
use crate::{Datum, Expr, FnArgs, InfixOp};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parses_without_args() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("now()")?,
|
|
||||||
Expr::FnCall {
|
|
||||||
name: vec!["now".to_owned()],
|
|
||||||
args: FnArgs::Exprs {
|
|
||||||
distinct_flag: false,
|
|
||||||
exprs: vec![],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parses_with_args() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("repeat('hello!', 1 + 2)")?,
|
|
||||||
Expr::FnCall {
|
|
||||||
name: vec!["repeat".to_owned()],
|
|
||||||
args: FnArgs::Exprs {
|
|
||||||
distinct_flag: false,
|
|
||||||
exprs: vec![
|
|
||||||
Expr::Literal(Datum::Text(Some("hello!".to_owned()))),
|
|
||||||
Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(1.into())))),
|
|
||||||
op: InfixOp::Add,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
|
|
||||||
}
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn schema_qualified() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from(r#"my_schema."MyFunc"('hello!', 1)"#)?,
|
|
||||||
Expr::FnCall {
|
|
||||||
name: vec!["my_schema".to_owned(), "MyFunc".to_owned()],
|
|
||||||
args: FnArgs::Exprs {
|
|
||||||
distinct_flag: false,
|
|
||||||
exprs: vec![
|
|
||||||
Expr::Literal(Datum::Text(Some("hello!".to_owned()))),
|
|
||||||
Expr::Literal(Datum::Numeric(Some(1.into()))),
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn distinct_aggregate() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from(r#"AGGREGATOR(DISTINCT a."Col 1", b."Col 2")"#)?,
|
|
||||||
Expr::FnCall {
|
|
||||||
name: vec!["aggregator".to_owned()],
|
|
||||||
args: FnArgs::Exprs {
|
|
||||||
distinct_flag: true,
|
|
||||||
exprs: vec![
|
|
||||||
Expr::ObjName(vec!["a".to_owned(), "Col 1".to_owned()]),
|
|
||||||
Expr::ObjName(vec!["b".to_owned(), "Col 2".to_owned()]),
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
@ -1,230 +0,0 @@
|
||||||
//! Based on
|
|
||||||
//! https://github.com/pest-parser/pest/blob/master/grammars/src/grammars/sql.pest.
|
|
||||||
//! (Original is dual-licensed under MIT/Apache-2.0.)
|
|
||||||
//!
|
|
||||||
//! PostgreSQL departs extensively from the SQLite flavored dialect captured in
|
|
||||||
//! the original grammar. For example, rules for identifiers/object names
|
|
||||||
//! differ, as do keywords, built-in types, and syntax for specifying function
|
|
||||||
//! arguments, type modifiers, CTEs, and so on.
|
|
||||||
//!
|
|
||||||
//! This grammar covers a larger subset of the Postgres SQL dialect, but it is a
|
|
||||||
//! work in progress and is far from complete. It should only be used to parse
|
|
||||||
//! input that is "PostgreSQL-esque", not input that expects spec compliance.
|
|
||||||
|
|
||||||
Command = _{ SOI ~ (Query | ExplainQuery | DDL | ACL) ~ EOF }
|
|
||||||
|
|
||||||
ACL = _{ DropRole | DropUser | CreateRole | CreateUser | AlterUser | GrantPrivilege | RevokePrivilege }
|
|
||||||
CreateUser = {
|
|
||||||
^"create" ~ ^"user" ~ Identifier ~ (^"with")? ~ ^"password" ~ SingleQuotedString ~
|
|
||||||
AuthMethod?
|
|
||||||
}
|
|
||||||
AlterUser = {
|
|
||||||
^"alter" ~ ^"user" ~ Identifier ~ (^"with")? ~ AlterOption
|
|
||||||
}
|
|
||||||
AlterOption = _{ AlterLogin | AlterNoLogin | AlterPassword }
|
|
||||||
AlterLogin = { ^"login" }
|
|
||||||
AlterNoLogin = { ^"nologin" }
|
|
||||||
AlterPassword = { ^"password" ~ SingleQuotedString ~ AuthMethod? }
|
|
||||||
AuthMethod = { ^"using" ~ (ChapSha1 | Md5 | Ldap) }
|
|
||||||
ChapSha1 = { ^"chap-sha1" }
|
|
||||||
Md5 = { ^"md5" }
|
|
||||||
Ldap = { ^"ldap" }
|
|
||||||
DropUser = { ^"drop" ~ ^"user" ~ Identifier }
|
|
||||||
CreateRole = { ^"create" ~ ^"role" ~ Identifier }
|
|
||||||
DropRole = { ^"drop" ~ ^"role" ~ Identifier }
|
|
||||||
GrantPrivilege = { ^"grant" ~ PrivBlock ~ ^"to" ~ Identifier }
|
|
||||||
RevokePrivilege = { ^"revoke" ~ PrivBlock ~ ^"from" ~ Identifier }
|
|
||||||
PrivBlock = _{ PrivBlockPrivilege | PrivBlockRolePass }
|
|
||||||
PrivBlockPrivilege = {Privilege ~ (PrivBlockUser | PrivBlockSpecificUser | PrivBlockRole
|
|
||||||
| PrivBlockSpecificRole | PrivBlockTable | PrivBlockSpecificTable)}
|
|
||||||
PrivBlockUser = { ^"user" }
|
|
||||||
PrivBlockSpecificUser = { ^"on" ~ ^"user" ~ Identifier }
|
|
||||||
PrivBlockRole = { ^"role" }
|
|
||||||
PrivBlockSpecificRole = { ^"on" ~ ^"role" ~ Identifier }
|
|
||||||
PrivBlockTable = { ^"table" }
|
|
||||||
PrivBlockSpecificTable = { ^"on" ~ ^"table" ~ Identifier }
|
|
||||||
PrivBlockRolePass = { Identifier }
|
|
||||||
Privilege = _{ PrivilegeRead | PrivilegeWrite | PrivilegeExecute |
|
|
||||||
PrivilegeCreate | PrivilegeAlter | PrivilegeDrop |
|
|
||||||
PrivilegeSession | PrivilegeUsage }
|
|
||||||
PrivilegeAlter = { ^"alter" }
|
|
||||||
PrivilegeCreate = { ^"create" }
|
|
||||||
PrivilegeDrop = { ^"drop" }
|
|
||||||
PrivilegeExecute = { ^"execute" }
|
|
||||||
PrivilegeRead = { ^"read" }
|
|
||||||
PrivilegeSession = { ^"session" }
|
|
||||||
PrivilegeUsage = { ^"usage" }
|
|
||||||
PrivilegeWrite = { ^"write" }
|
|
||||||
|
|
||||||
DDL = _{ CreateTable | DropTable | CreateProc }
|
|
||||||
CreateTable = {
|
|
||||||
^"create" ~ ^"table" ~ Identifier ~
|
|
||||||
"(" ~ Columns ~ "," ~ PrimaryKey ~ ")" ~
|
|
||||||
Distribution
|
|
||||||
}
|
|
||||||
Columns = { ColumnDef ~ ("," ~ ColumnDef)* }
|
|
||||||
ColumnDef = { Identifier ~ TypeCast ~ ColumnDefIsNull? }
|
|
||||||
ColumnDefIsNull = { NotFlag? ~ ^"null" }
|
|
||||||
PrimaryKey = {
|
|
||||||
^"primary" ~ ^"key" ~
|
|
||||||
"(" ~ Identifier ~ ("," ~ Identifier)* ~ ")"
|
|
||||||
}
|
|
||||||
Distribution = { ^"distributed" ~ (Global | Sharding) }
|
|
||||||
Global = { ^"globally" }
|
|
||||||
Sharding = { ^"by" ~ "(" ~ Identifier ~ ("," ~ Identifier)* ~ ")"}
|
|
||||||
DropTable = { ^"drop" ~ ^"table" ~ Identifier }
|
|
||||||
|
|
||||||
CreateProc = {
|
|
||||||
^"create" ~ ^"procedure" ~ Identifier ~
|
|
||||||
"(" ~ ProcParams? ~ ")" ~ (^"language" ~ ProcLanguage)? ~
|
|
||||||
((^"as" ~ "$$" ~ ProcBody ~ "$$") | (^"begin" ~ "atomic" ~ ProcBody ~ "end"))
|
|
||||||
}
|
|
||||||
ProcParams = { ProcParamDef ~ ("," ~ ProcParamDef)* }
|
|
||||||
ProcParamDef = { TypeCast }
|
|
||||||
ProcLanguage = { SQL }
|
|
||||||
SQL = { ^"sql" }
|
|
||||||
ProcBody = { (Insert | Update | Delete) }
|
|
||||||
|
|
||||||
ExplainQuery = _{ Explain }
|
|
||||||
Explain = { ^"explain" ~ Query }
|
|
||||||
|
|
||||||
Query = { (SelectWithOptionalContinuation | Values | Insert | Update | Delete) }
|
|
||||||
SelectWithOptionalContinuation = { Select ~ (ExceptContinuation | UnionAllContinuation)? }
|
|
||||||
ExceptContinuation = { ((^"except" ~ ^"distinct") | ^"except") ~ Select }
|
|
||||||
UnionAllContinuation = { ^"union" ~ ^"all" ~ Select }
|
|
||||||
Select = {
|
|
||||||
^"select" ~ Projection ~ ^"from" ~ Scan ~
|
|
||||||
Join? ~ WhereClause? ~
|
|
||||||
(^"group" ~ ^"by" ~ GroupBy)? ~
|
|
||||||
(^"having" ~ Having)?
|
|
||||||
}
|
|
||||||
Projection = { Distinct? ~ ProjectionElement ~ ("," ~ ProjectionElement)* }
|
|
||||||
ProjectionElement = _{ Asterisk | Column }
|
|
||||||
Column = { Expr ~ ((^"as")? ~ Identifier)? }
|
|
||||||
Asterisk = { "*" }
|
|
||||||
WhereClause = _{ ^"where" ~ Selection }
|
|
||||||
Selection = { Expr }
|
|
||||||
Scan = { (Identifier | SubQuery) ~ ((^"as")? ~ Identifier)? }
|
|
||||||
Join = { JoinKind? ~ ^"join" ~ Scan ~ ^"on" ~ Expr }
|
|
||||||
JoinKind = _{ ( InnerJoinKind | LeftJoinKind ) }
|
|
||||||
InnerJoinKind = { ^"inner" }
|
|
||||||
LeftJoinKind = { ^"left" ~ (^"outer")? }
|
|
||||||
GroupBy = { Expr ~ ("," ~ Expr)* }
|
|
||||||
Having = { Expr }
|
|
||||||
SubQuery = { "(" ~ (SelectWithOptionalContinuation | Values) ~ ")" }
|
|
||||||
Insert = { ^"insert" ~ ^"into" ~ Identifier ~ ("(" ~ TargetColumns ~ ")")? ~ (Values | Select) ~ OnConflict? }
|
|
||||||
TargetColumns = { Identifier ~ ("," ~ Identifier)* }
|
|
||||||
OnConflict = _{ ^"on conflict" ~ ^"do" ~ (DoNothing | DoReplace | DoFail) }
|
|
||||||
DoReplace = { ^"replace" }
|
|
||||||
DoNothing = { ^"nothing" }
|
|
||||||
DoFail = { ^"fail" }
|
|
||||||
Update = { ^"update" ~ Identifier ~ ^"set" ~ UpdateList ~ (UpdateFrom | WhereClause)? }
|
|
||||||
UpdateList = { UpdateItem ~ ("," ~ UpdateItem)* }
|
|
||||||
UpdateItem = { Identifier ~ "=" ~ Expr }
|
|
||||||
UpdateFrom = _{ ^"from" ~ Scan ~ (^"where" ~ Expr)? }
|
|
||||||
Values = { ^"values" ~ Row ~ ("," ~ Row)* }
|
|
||||||
Delete = { ^"delete" ~ ^"from" ~ Identifier ~ (^"where" ~ DeleteFilter)? }
|
|
||||||
DeleteFilter = { Expr }
|
|
||||||
|
|
||||||
Identifier = ${ DoubleQuotedIdentifier | UnquotedIdentifier }
|
|
||||||
DoubleQuotedIdentifier = @{ "\"" ~ ("\"\"" | '\u{01}'..'\u{21}' | '\u{23}'..'\u{10FFFF}')+ ~ "\"" }
|
|
||||||
UnquotedIdentifier = @{ !(Keyword ~ ("(" | "[" | WHITESPACE | "," | EOF)) ~ (UnquotedIdentifierStart ~ UnquotedIdentifierRemainder*) }
|
|
||||||
UnquotedIdentifierStart = _{ 'a'..'я' | 'A'..'Я' | "_" }
|
|
||||||
UnquotedIdentifierRemainder = _{ UnquotedIdentifierStart | "$" | ASCII_DIGIT }
|
|
||||||
Keyword = { ^"left" | ^"having" | ^"not" | ^"inner" | ^"group"
|
|
||||||
| ^"on" | ^"join" | ^"from" | ^"exists" | ^"except"
|
|
||||||
| ^"union" | ^"where" | ^"distinct" | ^"between" | ^"option"
|
|
||||||
| ^"values" | ^"with" | ^"as" | ^"array" | ^"any" | ^"some"
|
|
||||||
| ^"all" | ^"in" }
|
|
||||||
|
|
||||||
ExprRoot = _{ &SOI ~ Expr ~ &EOI }
|
|
||||||
Expr = { ExprAtomValue ~ (ExprInfixOp ~ ExprAtomValue)* }
|
|
||||||
ExprInfixOp = _{ Between | NonCmpInfixOp | CmpInfixOp | ConcatInfixOp | And | Or }
|
|
||||||
Between = { NotFlag? ~ ^"between" }
|
|
||||||
And = { ^"and" }
|
|
||||||
Or = { ^"or" }
|
|
||||||
ConcatInfixOp = { "||" }
|
|
||||||
CmpInfixOp = { (NotEq | GtEq | Gt | LtEq | Lt | Eq | Lt) ~ (CmpArrayModifier ~ &ExpressionInParentheses)? }
|
|
||||||
Eq = { "=" }
|
|
||||||
Gt = { ">" }
|
|
||||||
GtEq = { ">=" }
|
|
||||||
Lt = { "<" }
|
|
||||||
LtEq = { "<=" }
|
|
||||||
NotEq = { "<>" | "!=" }
|
|
||||||
NonCmpInfixOp = _{ Add | Subtract | Multiply | Divide | In }
|
|
||||||
Add = { "+" }
|
|
||||||
Subtract = { "-" }
|
|
||||||
Multiply = { "*" }
|
|
||||||
Divide = { "/" }
|
|
||||||
In = { NotFlag? ~ ^"in" }
|
|
||||||
CmpArrayModifier = { CmpModifierAny | CmpModifierAll }
|
|
||||||
CmpModifierAny = { ^"any" | ^"some "}
|
|
||||||
CmpModifierAll = { ^"all" }
|
|
||||||
ExprAtomValue = _{ UnaryNot* ~ AtomicExpr ~ IsNullPostfix? }
|
|
||||||
UnaryNot = @{ NotFlag }
|
|
||||||
IsNullPostfix = { ^"is" ~ NotFlag? ~ ^"null" }
|
|
||||||
AtomicExpr = _{ Literal | Parameter | IdentifierWithOptionalContinuation | ExpressionInParentheses | UnaryOperator | SubQuery | Row | SquareBracketArray }
|
|
||||||
// TODO: Empty arrays don't parse without the `!"]"` prefix in the
|
|
||||||
// optional sequence of sub-expressions, but the reason is not
|
|
||||||
// immediately clear: the ']' character doesn't seem like it should
|
|
||||||
// be compatible with the beginning of any `AtomicExpr`. This may
|
|
||||||
// be worth investigating.
|
|
||||||
SquareBracketArray = { ^"array" ~ "[" ~ (!"]" ~ (Expr ~ ("," ~ Expr)*))? ~ "]" }
|
|
||||||
Literal = _{ True | False | Null | Double | Decimal | Unsigned | Integer | SingleQuotedString }
|
|
||||||
True = { ^"true" }
|
|
||||||
False = { ^"false" }
|
|
||||||
Null = { ^"null" }
|
|
||||||
Decimal = @{ Integer ~ ("." ~ ASCII_DIGIT*) }
|
|
||||||
Double = @{ Integer ~ ("." ~ ASCII_DIGIT*)? ~ (^"e" ~ Integer) }
|
|
||||||
Integer = @{ ("+" | "-")? ~ ASCII_DIGIT+ }
|
|
||||||
Unsigned = @{ ASCII_DIGIT+ }
|
|
||||||
// TODO: Handle dollar-quoted string literals.
|
|
||||||
SingleQuotedString = @{ "'" ~ ("''" | (!("'") ~ ANY))* ~ "'" }
|
|
||||||
Parameter = @{ "$" ~ Unsigned }
|
|
||||||
// Postgres permits qualified object names with a single identifier
|
|
||||||
// part, 2 parts plus a function invocation, 3 parts, or 3 parts
|
|
||||||
// plus a function invocation. For simplicity, assume that an
|
|
||||||
// arbitrary number of qualifications (e.g. "a.b.c.d[...]") are
|
|
||||||
// supported.
|
|
||||||
// TODO: Disallow whitespace where it shouldn't be.
|
|
||||||
IdentifierWithOptionalContinuation = { Identifier ~ QualifiedIdentifierContinuation* ~ FunctionInvocationContinuation? }
|
|
||||||
QualifiedIdentifierContinuation = ${ "." ~ Identifier }
|
|
||||||
FunctionInvocationContinuation = { "(" ~ (CountAsterisk | FunctionArgs)? ~ ")" }
|
|
||||||
// TODO: Support named argument notation
|
|
||||||
// (`my_func(name => value)`).
|
|
||||||
// TODO: Support keywords within args list as applicable.
|
|
||||||
FunctionArgs = { Distinct? ~ (Expr ~ ("," ~ Expr)*)? }
|
|
||||||
CountAsterisk = { "*" }
|
|
||||||
ExpressionInParentheses = { "(" ~ Expr ~ ")" }
|
|
||||||
CastInfix = { Expr ~ "::" ~ TypeCast }
|
|
||||||
TypeCast = {
|
|
||||||
TypeBool
|
|
||||||
| TypeDecimal
|
|
||||||
| TypeDouble
|
|
||||||
| TypeInt
|
|
||||||
| TypeNumeric
|
|
||||||
| TypeText
|
|
||||||
| TypeVarchar
|
|
||||||
}
|
|
||||||
TypeBool = { (^"boolean" | ^"bool") }
|
|
||||||
TypeDecimal = { ^"decimal" }
|
|
||||||
TypeDouble = { ^"double" }
|
|
||||||
TypeInt = { (^"integer" | ^"int") }
|
|
||||||
TypeNumeric = { ^"numeric" }
|
|
||||||
TypeText = { ^"text" }
|
|
||||||
TypeVarchar = { ^"varchar" ~ "(" ~ Unsigned ~ ")" }
|
|
||||||
|
|
||||||
TypeDate = { ^"date" }
|
|
||||||
TypeTime = { ^"time" ~ Unsigned? ~ (WithTimeZone | WithoutTimeZone)? }
|
|
||||||
TypeTimestamp = { ^"timestamp" ~ Unsigned? ~ (WithTimeZone | WithoutTimeZone)? }
|
|
||||||
WithTimeZone = { ^"with" ~ ^"time" ~ ^"zone" }
|
|
||||||
WithoutTimeZone = { ^"without" ~ ^"time" ~ ^"zone" }
|
|
||||||
|
|
||||||
UnaryOperator = _{ Exists }
|
|
||||||
Exists = { NotFlag? ~ ^"exists" ~ SubQuery }
|
|
||||||
Row = { "(" ~ Expr ~ ("," ~ Expr)* ~ ")" }
|
|
||||||
|
|
||||||
Distinct = { ^"distinct" }
|
|
||||||
NotFlag = { ^"not" }
|
|
||||||
EOF = { EOI | ";" }
|
|
||||||
WHITESPACE = _{ " " | "\t" | "\n" | "\r\n" }
|
|
||||||
|
|
@ -1,37 +0,0 @@
|
||||||
//! Unit tests for identifier and object name parsing within expressions.
|
|
||||||
|
|
||||||
use crate::{Expr, escape_identifier};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn escaper_escapes() {
|
|
||||||
assert_eq!(escape_identifier("hello"), r#""hello""#);
|
|
||||||
assert_eq!(escape_identifier("hello world"), r#""hello world""#);
|
|
||||||
assert_eq!(
|
|
||||||
escape_identifier(r#""hello" "world""#),
|
|
||||||
r#""""hello"" ""world""""#
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn qualified_obj_name_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from(r#""""Hello"", World! 四十二".deep_thought"#),
|
|
||||||
Ok(Expr::ObjName(vec![
|
|
||||||
r#""Hello", World! 四十二"#.to_owned(),
|
|
||||||
"deep_thought".to_owned(),
|
|
||||||
])),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn misquoted_ident_fails_to_parse() {
|
|
||||||
assert!(Expr::try_from(r#""Hello, "World!""#).is_err());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn unquoted_ident_lowercased() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("HeLlO_WoRlD"),
|
|
||||||
Ok(Expr::ObjName(vec!["hello_world".to_owned()])),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
@ -1,433 +0,0 @@
|
||||||
//! Incomplete but useful parser and generator for Postgres flavored SQL
|
|
||||||
//! expressions and more, based on a modified version of the
|
|
||||||
//! [official Pest SQL grammar](https://github.com/pest-parser/pest/blob/79dd30d11aab6f0fba3cd79bd48f456209b966b3/grammars/src/grammars/sql.pest).
|
|
||||||
//!
|
|
||||||
//! This grammar covers a larger subset of the Postgres SQL dialect, but it is a
|
|
||||||
//! work in progress and is far from complete. It should only be used to parse
|
|
||||||
//! input that is "PostgreSQL-esque", not input that expects spec compliance.
|
|
||||||
//!
|
|
||||||
//! ## Example
//!
//! ```
//! use phono_pestgros::{Datum, Expr, InfixOp};
//! use sqlx::{Postgres, QueryBuilder};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let expr = Expr::try_from("3 + 5 < 10")?;
//!
//! assert_eq!(expr, Expr::Infix {
//!     lhs: Box::new(Expr::Infix {
//!         lhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
//!         op: InfixOp::Add,
//!         rhs: Box::new(Expr::Literal(Datum::Numeric(Some(5.into())))),
//!     }),
//!     op: InfixOp::Lt,
//!     rhs: Box::new(Expr::Literal(Datum::Numeric(Some(10.into())))),
//! });
//!
//! let mut query = QueryBuilder::<Postgres>::from(expr);
//! assert_eq!(query.sql(), "(($1) + ($2)) < ($3)");
//! # Ok(())
//! # }
//! ```
|
|
||||||
|
|
||||||
use std::{str::FromStr, sync::LazyLock};
|
|
||||||
|
|
||||||
use bigdecimal::BigDecimal;
|
|
||||||
use pest::{
|
|
||||||
Parser as _,
|
|
||||||
iterators::{Pair, Pairs},
|
|
||||||
pratt_parser::PrattParser,
|
|
||||||
};
|
|
||||||
use pest_derive::Parser;
|
|
||||||
|
|
||||||
pub use crate::{datum::Datum, query_builders::QueryFragment};
|
|
||||||
|
|
||||||
mod datum;
|
|
||||||
mod query_builders;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod array_tests;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod fragment_tests;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod func_invocation_tests;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod identifier_tests;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod literal_tests;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod op_tests;
|
|
||||||
|
|
||||||
/// Given a raw identifier (such as a table name, column name, etc.), format it
/// so that it may be safely interpolated into a SQL query.
///
/// Note that in PostgreSQL, unquoted identifiers are case-insensitive (or,
/// rather, they are always implicitly converted to lowercase), while quoted
/// identifiers are case-sensitive. The caller of this function is responsible
/// for performing conversion to lowercase as appropriate.
pub fn escape_identifier(identifier: &str) -> String {
    // Postgres identifier quoting only requires doubling embedded double
    // quotes (`"` becomes `""`); backslashes may remain as-is, provided the
    // input contains no invalid multi-byte sequences. Refer to
    // PQescapeInternal() in libpq (fe-exec.c) and Diesel's
    // PgQueryBuilder::push_identifier().
    let doubled = identifier.replace('"', "\"\"");
    format!("\"{doubled}\"")
}
|
|
||||||
|
|
||||||
/// Decodes a SQL representation of an identifier. If the input is unquoted, it
/// is converted to lowercase. If it is double quoted, the surrounding quotes
/// are stripped and escaped inner double quotes (double-double quotes, if you
/// will) are converted to single-double quotes. The opposite of
/// [`escape_identifier`], sort of.
///
/// Assumes that the provided identifier is well-formed. Basic gut checks are
/// performed, but they are non-exhaustive.
///
/// `U&"..."`-style escaped Unicode identifiers are not yet supported.
fn parse_ident(value: &str) -> String {
    assert!(
        !value.to_lowercase().starts_with("u&"),
        "escaped Unicode identifiers are not supported"
    );
    if !value.starts_with('"') {
        // Unquoted: Postgres folds these to lowercase.
        // TODO: assert validity with regex
        return value.to_lowercase();
    }
    assert!(value.ends_with('"'), "malformed double-quoted identifier");
    // Drop the surrounding quotes, then un-double the escaped inner ones.
    let mut inner = value.chars();
    inner.next();
    inner.next_back();
    inner.as_str().replace(r#""""#, r#"""#)
}
|
|
||||||
|
|
||||||
/// Decodes a single-quoted string literal. Removes surrounding quotes and
/// replaces embedded single quotes (double-single quotes) with single-single
/// quotes.
///
/// Assumes that the provided identifier is well-formed. Basic gut checks are
/// performed, but they are non-exhaustive.
///
/// `E'...'`-style, dollar-quoted, and other (relatively) uncommon formats for
/// text literals are not yet supported.
fn parse_text_literal(value: &str) -> String {
    assert!(value.starts_with('\'') && value.ends_with('\''));
    // Drop the surrounding quotes, then collapse the doubled inner ones.
    let mut inner = value.chars();
    inner.next();
    inner.next_back();
    inner.as_str().replace("''", "'")
}
|
|
||||||
|
|
||||||
/// Primary parser and code generation for [`Rule`] types.
///
/// The `Rule` enum itself is generated by `pest_derive` from the grammar file
/// referenced below.
#[derive(Parser)]
#[grammar = "src/grammar.pest"]
struct PsqlParser;
|
|
||||||
|
|
||||||
/// Secondary parser configuration for handling operator precedence.
///
/// Per the [`PrattParser`] contract, operators registered first bind loosest:
/// the chain below runs from `or` (lowest precedence) up to the `is null`
/// postfix (highest).
static PRATT_PARSER: LazyLock<PrattParser<Rule>> = LazyLock::new(|| {
    use pest::pratt_parser::{
        Assoc::{Left, Right},
        Op,
    };

    PrattParser::new()
        .op(Op::infix(Rule::Or, Left))
        .op(Op::infix(Rule::Between, Left))
        .op(Op::infix(Rule::And, Left))
        .op(Op::prefix(Rule::UnaryNot))
        .op(Op::infix(Rule::CmpInfixOp, Right))
        // Official Pest example overstates the concat operator's precedence. It
        // should be lower precedence than add/subtract.
        .op(Op::infix(Rule::ConcatInfixOp, Left))
        .op(Op::infix(Rule::Add, Left) | Op::infix(Rule::Subtract, Left))
        .op(Op::infix(Rule::Multiply, Left) | Op::infix(Rule::Divide, Left))
        .op(Op::infix(Rule::CastInfix, Left))
        .op(Op::postfix(Rule::IsNullPostfix))
});
|
|
||||||
|
|
||||||
/// Represents a SQL expression. An expression is a collection of values and
/// operators that theoretically evaluates to some value, such as a boolean
/// condition, an object name, or a string dynamically derived from other
/// values. An expression is *not* a complete SQL statement, command, or query.
#[non_exhaustive]
#[derive(Clone, Debug, PartialEq)]
pub enum Expr {
    /// A binary operation: `lhs op rhs`.
    Infix {
        lhs: Box<Expr>,
        op: InfixOp,
        rhs: Box<Expr>,
    },
    /// A literal value (numeric, text, ...), carried as a [`Datum`].
    Literal(Datum),
    /// A possibly-qualified object name, one element per path segment
    /// (e.g. `schema.table` decodes to two entries).
    ObjName(Vec<String>),
    /// A function invocation, e.g. `count(*)` or `my_schema.my_func(1, 2)`.
    FnCall {
        /// Possibly-qualified function name, one element per path segment.
        name: Vec<String>,
        args: FnArgs,
    },
    /// Logical negation: `not <expr>`.
    Not(Box<Expr>),
    /// A null test: `<expr> is [not] null`.
    Nullness {
        /// `true` for `is null`, `false` for `is not null`.
        is_null: bool,
        expr: Box<Expr>,
    },
    /// An array literal: `array[...]`.
    Array(Vec<Expr>),
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for Expr {
    type Error = ParseError;

    /// Parses a complete SQL expression from `value`; the whole input must be
    /// consumed by the grammar.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        // `ExprRoot` is a silent rule which simply dictates that the inner
        // `Expr` rule must consume the entire input.
        let pairs = PsqlParser::parse(Rule::ExprRoot, value)?;
        parse_expr_pairs(pairs)
    }
}
|
|
||||||
|
|
||||||
/// The operator of an [`Expr::Infix`] node.
#[non_exhaustive]
#[derive(Clone, Debug, PartialEq)]
pub enum InfixOp {
    // Arithmetic ops:
    Add,
    /// The `||` string concatenation operator.
    Concat,
    Div,
    Mult,
    Sub,

    // Boolean ops:
    And,
    Or,
    Eq,
    Gt,
    Gte,
    Lt,
    Lte,
    Neq,

    // Miscellaneous ops:
    /// The `::` cast operator.
    Cast,

    // Array comparison modifiers (such as `= any(array[])`):
    // TODO: This is an awkward pattern, which is capable of representing
    // invalid expressions (such as `3 + any(array[])`). I expect it'll need to
    // be rewritten at some point anyways to handle other keyword-driven infix
    // syntax, but for expediency I'm leaving a more robust solution as a
    // challenge for another day.
    /// A base comparison op wrapped in `any`, as in `= any(...)`.
    WithCmpModifierAny(Box<Self>),
    /// A base comparison op wrapped in `all`, as in `= all(...)`.
    WithCmpModifierAll(Box<Self>),
}
|
|
||||||
|
|
||||||
/// Argument list of an [`Expr::FnCall`].
#[derive(Clone, Debug, PartialEq)]
pub enum FnArgs {
    /// The special `*` argument, as in `count(*)`.
    CountAsterisk,
    /// An ordinary (possibly empty) list of argument expressions.
    Exprs {
        /// `true` for aggregator invocations with the `DISTINCT` keyword
        /// specified.
        distinct_flag: bool,
        exprs: Vec<Expr>,
    },
}
|
|
||||||
|
|
||||||
/// Recursive helper, which does most of the work to convert [`pest`]'s pattern
/// matching output to a usable syntax tree.
///
/// Primaries (literals, identifiers/calls, arrays, parenthesized groups) are
/// turned into [`Expr`] leaves/subtrees, while infix/prefix/postfix operators
/// are folded around them according to [`PRATT_PARSER`]'s precedence table.
fn parse_expr_pairs(expr_pairs: Pairs<'_, Rule>) -> Result<Expr, ParseError> {
    PRATT_PARSER
        .map_primary(|pair| match pair.as_rule() {
            // Parentheses are honored for grouping but not preserved in the
            // resulting tree; simply recurse into the inner expression.
            Rule::Expr | Rule::ExpressionInParentheses => parse_expr_pairs(pair.into_inner()),
            Rule::Decimal | Rule::Double | Rule::Integer | Rule::Unsigned => Ok(Expr::Literal(
                Datum::Numeric(Some(BigDecimal::from_str(pair.as_str()).expect(
                    "parsed numeric values should always be convertible to BigDecimal",
                ))),
            )),
            Rule::SingleQuotedString => Ok(Expr::Literal(Datum::Text(Some(parse_text_literal(pair.as_str()))))),
            // An identifier, optionally followed by `.segment` continuations
            // and/or a function-call argument list.
            Rule::IdentifierWithOptionalContinuation => {
                let mut name: Vec<String> = vec![];
                let mut fn_args: Option<FnArgs> = None;
                let inner = pair.into_inner();
                for inner_pair in inner {
                    match inner_pair.as_rule() {
                        Rule::Identifier => {
                            name.push(parse_ident(inner_pair.as_str()));
                        }
                        Rule::QualifiedIdentifierContinuation => {
                            let ident_cont = inner_pair.as_str();
                            assert!(
                                ident_cont.starts_with('.'),
                                "QualifiedIdentifierContinuation should always start with the infix dot",
                            );
                            name.push(parse_ident({
                                // Strip leading dot.
                                let mut chars = ident_cont.chars();
                                chars.next();
                                chars.as_str()
                            }));
                        }
                        Rule::FunctionInvocationContinuation => {
                            fn_args = Some(parse_function_invocation_continuation(inner_pair)?);
                        }
                        _ => unreachable!(
                            "IdentifierWithOptionalContinuation has only 3 valid child rules",
                        ),
                    }
                }
                // The presence of an argument list is what distinguishes a
                // function call from a plain object name.
                Ok(if let Some(fn_args) = fn_args {
                    Expr::FnCall { name, args: fn_args }
                } else {
                    Expr::ObjName(name)
                })
            }
            Rule::SquareBracketArray => {
                let mut arr_items: Vec<Expr> = vec![];
                for inner_pair in pair.into_inner() {
                    match inner_pair.as_rule() {
                        // Each array element is itself a full expression.
                        Rule::Expr => {arr_items.push(parse_expr_pairs(inner_pair.into_inner())?);}
                        _ => unreachable!(
                            "SquareBracketArray has only Exprs as direct child rules",
                        ),
                    }
                }
                Ok(Expr::Array(arr_items))
            }
            rule => Err(ParseError::UnknownRule(rule)),
        })
        .map_infix(|lhs, op, rhs| Ok(Expr::Infix {
            lhs: Box::new(lhs?),
            op: match op.as_rule() {
                Rule::Add => InfixOp::Add,
                Rule::ConcatInfixOp => InfixOp::Concat,
                Rule::Divide => InfixOp::Div,
                Rule::Multiply => InfixOp::Mult,
                Rule::Subtract => InfixOp::Sub,
                Rule::And => InfixOp::And,
                // Comparison ops may carry an `any`/`all` array modifier; see
                // parse_cmp_op.
                Rule::CmpInfixOp => parse_cmp_op(op)?,
                Rule::Or => InfixOp::Or,
                Rule::CastInfix => InfixOp::Cast,
                rule => Err(ParseError::UnknownRule(rule))?,
            },
            rhs: Box::new(rhs?),
        }))
        .map_prefix(|op, child| Ok(match op.as_rule() {
            Rule::UnaryNot => Expr::Not(Box::new(child?)),
            rule => Err(ParseError::UnknownRule(rule))?,
        }))
        .map_postfix(|child, op| Ok(match op.as_rule() {
            Rule::IsNullPostfix => Expr::Nullness {
                // `is null` unless a NotFlag child is present (`is not null`).
                is_null: op
                    .into_inner()
                    .next()
                    .map(|inner| inner.as_rule()) != Some(Rule::NotFlag),
                expr: Box::new(child?),
            },
            rule => Err(ParseError::UnknownRule(rule))?,
        }))
        .parse(expr_pairs)
}
|
|
||||||
|
|
||||||
/// Converts a `CmpInfixOp` pair into an [`InfixOp`], folding an optional
/// trailing `any`/`all` array modifier into the base comparison operator.
fn parse_cmp_op(op: Pair<'_, Rule>) -> Result<InfixOp, ParseError> {
    // The base comparison operator seen so far, if any. A modifier child is
    // only valid once this has been set.
    let mut base_op: Option<InfixOp> = None;
    for inner in op.into_inner() {
        match inner.as_rule() {
            Rule::Eq => {
                base_op = Some(InfixOp::Eq);
            }
            Rule::Gt => {
                base_op = Some(InfixOp::Gt);
            }
            Rule::GtEq => {
                base_op = Some(InfixOp::Gte);
            }
            Rule::Lt => {
                base_op = Some(InfixOp::Lt);
            }
            Rule::LtEq => {
                base_op = Some(InfixOp::Lte);
            }
            Rule::NotEq => {
                base_op = Some(InfixOp::Neq);
            }
            Rule::CmpArrayModifier => {
                if let Some(base_op) = base_op {
                    // Wrap the base operator in the appropriate modifier and
                    // finish immediately.
                    return Ok(
                        match inner
                            .into_inner()
                            .next()
                            .expect("CmpArrayModifier should be a simple enumeration")
                            .as_rule()
                        {
                            Rule::CmpModifierAny => InfixOp::WithCmpModifierAny(Box::new(base_op)),
                            Rule::CmpModifierAll => InfixOp::WithCmpModifierAll(Box::new(base_op)),
                            rule => Err(ParseError::UnknownRule(rule))?,
                        },
                    );
                } else {
                    // A modifier with no preceding operator; surfaced as an
                    // unknown-rule error rather than a panic.
                    return Err(ParseError::UnknownRule(Rule::CmpArrayModifier));
                }
            }
            rule => Err(ParseError::UnknownRule(rule))?,
        }
    }
    Ok(base_op.expect("CmpInfixOp always has at least one child"))
}
|
|
||||||
|
|
||||||
/// Decodes a `FunctionInvocationContinuation` pair (the `(...)` tail of a
/// function call) into [`FnArgs`].
///
/// An empty continuation (no child pair at all) decodes as an empty,
/// non-distinct argument list — i.e. a plain `f()` call.
fn parse_function_invocation_continuation(pair: Pair<'_, Rule>) -> Result<FnArgs, ParseError> {
    let mut cont_inner_iter = pair.into_inner();
    let fn_args = if let Some(cont_inner) = cont_inner_iter.next() {
        match cont_inner.as_rule() {
            Rule::FunctionArgs => {
                // Collect an optional leading DISTINCT plus the argument
                // expressions.
                let mut distinct_flag = false;
                let mut exprs: Vec<Expr> = vec![];
                for arg_inner in cont_inner.into_inner() {
                    match arg_inner.as_rule() {
                        Rule::Distinct => {
                            distinct_flag = true;
                        }
                        Rule::Expr => {
                            exprs.push(parse_expr_pairs(arg_inner.into_inner())?);
                        }
                        _ => unreachable!(
                            "only valid children of FunctionArgs are Distinct and Expr"
                        ),
                    }
                }
                FnArgs::Exprs {
                    distinct_flag,
                    exprs,
                }
            }
            Rule::CountAsterisk => FnArgs::CountAsterisk,
            _ => unreachable!(
                "only valid children of FunctionInvocationContinuation are FunctionArgs and CountAsterisk"
            ),
        }
    } else {
        // Bare parentheses: `f()`.
        FnArgs::Exprs {
            distinct_flag: false,
            exprs: vec![],
        }
    };
    assert!(
        cont_inner_iter.next().is_none(),
        "function should have consumed entire FunctionInvocationContinuation pair",
    );
    Ok(fn_args)
}
|
|
||||||
|
|
||||||
/// Errors produced while converting a SQL string into an [`Expr`].
// NOTE(review): the enum-level `#[error("parse error")]` alongside
// per-variant messages looks redundant — confirm how `thiserror` treats the
// combination and whether the variant messages take precedence.
#[derive(Clone, Debug, PartialEq, thiserror::Error)]
#[error("parse error")]
pub enum ParseError {
    /// A grammar rule appeared somewhere the tree-builder did not expect it.
    #[error("unknown rule")]
    UnknownRule(Rule),
    /// The underlying pest parser rejected the input outright.
    #[error("pest failed to parse: {0}")]
    Pest(pest::error::Error<Rule>),
}
|
|
||||||
|
|
||||||
impl From<pest::error::Error<Rule>> for ParseError {
    /// Enables `?` on pest parse results inside this module.
    fn from(value: pest::error::Error<Rule>) -> Self {
        Self::Pest(value)
    }
}
|
|
||||||
|
|
@ -1,30 +0,0 @@
|
||||||
use std::error::Error;
|
|
||||||
|
|
||||||
use crate::{Datum, Expr};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn text_parses() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("'Hello, World!'")?,
|
|
||||||
Expr::Literal(Datum::Text(Some("Hello, World!".to_owned())))
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn escaped_quotes_parse() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("'''Hello, World!'''")?,
|
|
||||||
Expr::Literal(Datum::Text(Some("'Hello, World!'".to_owned())))
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn numeric_parses() -> Result<(), Box<dyn Error>> {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("1234.56")?,
|
|
||||||
Expr::Literal(Datum::Numeric(Some("1234.56".parse()?)))
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
@ -1,106 +0,0 @@
|
||||||
//! Unit tests for infix operator parsing within expressions.
|
|
||||||
|
|
||||||
use crate::{Datum, Expr, InfixOp};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_op_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
// https://xkcd.com/3184/
|
|
||||||
Expr::try_from("six + 7"),
|
|
||||||
Ok(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::ObjName(vec!["six".to_owned()])),
|
|
||||||
op: InfixOp::Add,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(7.into())))),
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn mult_op_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("six * 7"),
|
|
||||||
Ok(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::ObjName(vec!["six".to_owned()])),
|
|
||||||
op: InfixOp::Mult,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(7.into())))),
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn arith_precedence() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("(1 + 2) * 3 + 4"),
|
|
||||||
Ok(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(1.into())))),
|
|
||||||
op: InfixOp::Add,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
|
|
||||||
}),
|
|
||||||
op: InfixOp::Mult,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
|
|
||||||
}),
|
|
||||||
op: InfixOp::Add,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(4.into())))),
|
|
||||||
})
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("1 - 2 / (3 - 4)"),
|
|
||||||
Ok(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(1.into())))),
|
|
||||||
op: InfixOp::Sub,
|
|
||||||
rhs: Box::new(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(2.into())))),
|
|
||||||
op: InfixOp::Div,
|
|
||||||
rhs: Box::new(Expr::Infix {
|
|
||||||
lhs: Box::new(Expr::Literal(Datum::Numeric(Some(3.into())))),
|
|
||||||
op: InfixOp::Sub,
|
|
||||||
rhs: Box::new(Expr::Literal(Datum::Numeric(Some(4.into())))),
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn is_null_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("my_var is null"),
|
|
||||||
Ok(Expr::Nullness {
|
|
||||||
is_null: true,
|
|
||||||
expr: Box::new(Expr::ObjName(vec!["my_var".to_owned()]))
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn is_not_null_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("my_var is not null"),
|
|
||||||
Ok(Expr::Nullness {
|
|
||||||
is_null: false,
|
|
||||||
expr: Box::new(Expr::ObjName(vec!["my_var".to_owned()]))
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn not_parses() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("not my_var"),
|
|
||||||
Ok(Expr::Not(Box::new(Expr::ObjName(vec![
|
|
||||||
"my_var".to_owned()
|
|
||||||
])))),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn repeated_nots_parse() {
|
|
||||||
assert_eq!(
|
|
||||||
Expr::try_from("not not my_var"),
|
|
||||||
Ok(Expr::Not(Box::new(Expr::Not(Box::new(Expr::ObjName(
|
|
||||||
vec!["my_var".to_owned()]
|
|
||||||
)))))),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
@ -1,241 +0,0 @@
|
||||||
//! Assorted utilities for dynamically constructing and manipulating [`sqlx`]
|
|
||||||
//! queries.
|
|
||||||
|
|
||||||
use sqlx::{Postgres, QueryBuilder};
|
|
||||||
|
|
||||||
use crate::{Datum, Expr, FnArgs, InfixOp, escape_identifier};
|
|
||||||
|
|
||||||
/// Representation of a partial, parameterized SQL query. Allows callers to
/// build queries iteratively and dynamically, handling parameter numbering
/// (`$1`, `$2`, `$3`, ...) automatically.
///
/// This is similar to [`sqlx::QueryBuilder`], except that [`QueryFragment`]
/// objects are composable and may be concatenated to each other.
#[derive(Clone, Debug, PartialEq)]
pub struct QueryFragment {
    /// SQL string, split wherever there is a query parameter. For example,
    /// `select * from foo where id = $1 and status = $2` is represented along
    /// the lines of `["select * from foo where id = ", " and status = ", ""]`.
    /// `plain_sql` should always have exactly one more element than `params`.
    plain_sql: Vec<String>,
    /// Bind parameters, in order; element `i` is interpolated between
    /// `plain_sql[i]` and `plain_sql[i + 1]`.
    params: Vec<Datum>,
}
|
|
||||||
|
|
||||||
impl QueryFragment {
    /// Validate invariants. Should be run immediately before returning any
    /// useful output.
    fn gut_checks(&self) {
        // See the field docs: N params require N+1 plain-SQL segments.
        assert!(self.plain_sql.len() == self.params.len() + 1);
    }

    /// Parse from a SQL string with no parameters.
    pub fn from_sql(sql: &str) -> Self {
        Self {
            plain_sql: vec![sql.to_owned()],
            params: vec![],
        }
    }

    /// Convenience function to construct an empty value.
    pub fn empty() -> Self {
        Self::from_sql("")
    }

    /// Parse from a parameter value with no additional SQL. (Renders as `$n`,
    /// where `n` is the appropriate parameter index.)
    pub fn from_param(param: Datum) -> Self {
        Self {
            // Two empty segments bracket the single parameter.
            plain_sql: vec!["".to_owned(), "".to_owned()],
            params: vec![param],
        }
    }

    /// Append another query fragment to this one.
    pub fn push(&mut self, mut other: QueryFragment) {
        // Merge our last plain-SQL segment with the other fragment's first,
        // so the invariant (len == params + 1) is preserved after appending.
        let tail = self
            .plain_sql
            .pop()
            .expect("already asserted that vec contains at least 1 item");
        let head = other
            .plain_sql
            .first()
            .expect("already asserted that vec contains at least 1 item");
        self.plain_sql.push(format!("{tail}{head}"));
        for value in other.plain_sql.drain(1..) {
            self.plain_sql.push(value);
        }
        self.params.append(&mut other.params);
    }

    /// Combine multiple QueryFragments with a separator, similar to
    /// [`Vec::join`].
    pub fn join<I: IntoIterator<Item = Self>>(fragments: I, sep: Self) -> Self {
        let mut acc = QueryFragment::from_sql("");
        let mut iter = fragments.into_iter();
        // Hold one fragment back so the separator is only inserted *between*
        // fragments, never after the last one.
        let mut fragment = match iter.next() {
            Some(value) => value,
            None => return acc,
        };
        for next_fragment in iter {
            acc.push(fragment);
            acc.push(sep.clone());
            fragment = next_fragment;
        }
        acc.push(fragment);
        acc
    }

    /// Convenience method equivalent to:
    /// `QueryFragment::join(fragments, QueryFragment::from_sql(""))`
    pub fn concat<I: IntoIterator<Item = Self>>(fragments: I) -> Self {
        Self::join(fragments, Self::from_sql(""))
    }

    /// Checks whether value is empty. A value is considered empty if the
    /// resulting SQL code is 0 characters long.
    pub fn is_empty(&self) -> bool {
        self.gut_checks();
        // A single segment implies zero params, so an empty segment means the
        // whole fragment renders to nothing.
        self.plain_sql.len() == 1
            && self
                .plain_sql
                .first()
                .expect("already checked that len == 1")
                .is_empty()
    }
}
|
|
||||||
|
|
||||||
impl From<Expr> for QueryFragment {
    /// Renders an expression tree back into parameterized SQL. Literals
    /// become bind parameters; identifiers are re-escaped via
    /// [`escape_identifier`].
    fn from(value: Expr) -> Self {
        match value {
            Expr::Infix { lhs, op, rhs } => Self::concat([
                // RHS and LHS must be explicitly wrapped in parentheses to
                // ensure correct precedence, because parentheses are taken
                // into account **but not preserved** when parsing.
                Self::from_sql("("),
                (*lhs).into(),
                Self::from_sql(") "),
                op.into(),
                // The RHS expression **must** be parenthesized to correctly
                // reconstruct syntax like `= any (array[...])`.
                Self::from_sql(" ("),
                (*rhs).into(),
                Self::from_sql(")"),
            ]),
            // Literal values are always emitted as bind parameters, never
            // inlined into the SQL text.
            Expr::Literal(datum) => Self::from_param(datum),
            // Re-quote each path segment and rejoin with dots.
            Expr::ObjName(idents) => Self::join(
                idents
                    .iter()
                    .map(|ident| Self::from_sql(&escape_identifier(ident))),
                Self::from_sql("."),
            ),
            Expr::Not(expr) => {
                Self::concat([Self::from_sql("not ("), (*expr).into(), Self::from_sql(")")])
            }
            Expr::Nullness { is_null, expr } => Self::concat([
                Self::from_sql("("),
                (*expr).into(),
                Self::from_sql(if is_null {
                    ") is null"
                } else {
                    ") is not null"
                }),
            ]),
            Expr::FnCall { name, args } => {
                let mut fragment = Self::empty();
                // Qualified function name, then the parenthesized arg list.
                fragment.push(Self::join(
                    name.iter()
                        .map(|ident| Self::from_sql(&escape_identifier(ident))),
                    Self::from_sql("."),
                ));
                fragment.push(Self::from_sql("("));
                match args {
                    FnArgs::CountAsterisk => {
                        fragment.push(Self::from_sql("*"));
                    }
                    FnArgs::Exprs {
                        distinct_flag,
                        exprs,
                    } => {
                        if distinct_flag {
                            fragment.push(Self::from_sql("distinct "));
                        }
                        fragment.push(Self::join(
                            exprs.into_iter().map(|expr| {
                                // Wrap arguments in parentheses to ensure they
                                // are appropriately distinguishable from each
                                // other regardless of the presence of extra
                                // commas.
                                Self::concat([
                                    Self::from_sql("("),
                                    expr.into(),
                                    Self::from_sql(")"),
                                ])
                            }),
                            Self::from_sql(", "),
                        ));
                    }
                }
                fragment.push(Self::from_sql(")"));
                fragment
            }
            // `array[(elem), (elem), ...]`, each element parenthesized for
            // the same reason as function arguments above.
            Expr::Array(arr_items) => Self::concat([
                Self::from_sql("array["),
                Self::join(
                    arr_items.into_iter().map(|item| {
                        Self::concat([Self::from_sql("("), item.into(), Self::from_sql(")")])
                    }),
                    Self::from_sql(", "),
                ),
                Self::from_sql("]"),
            ]),
        }
    }
}
|
|
||||||
|
|
||||||
impl From<InfixOp> for QueryFragment {
    /// Renders an operator to its SQL spelling. All plain operators are
    /// parameter-free SQL text; the `any`/`all` wrappers emit the base
    /// operator followed by the modifier keyword.
    fn from(value: InfixOp) -> Self {
        match value {
            InfixOp::Add => Self::from_sql("+"),
            InfixOp::Concat => Self::from_sql("||"),
            InfixOp::Div => Self::from_sql("/"),
            InfixOp::Mult => Self::from_sql("*"),
            InfixOp::Sub => Self::from_sql("-"),
            InfixOp::And => Self::from_sql("and"),
            InfixOp::Or => Self::from_sql("or"),
            InfixOp::Eq => Self::from_sql("="),
            InfixOp::Gt => Self::from_sql(">"),
            InfixOp::Gte => Self::from_sql(">="),
            InfixOp::Lt => Self::from_sql("<"),
            InfixOp::Lte => Self::from_sql("<="),
            InfixOp::Neq => Self::from_sql("<>"),
            InfixOp::Cast => Self::from_sql("::"),
            // E.g. `= any` — the RHS parenthesization is handled by the
            // Expr::Infix rendering.
            InfixOp::WithCmpModifierAny(inner) => {
                Self::concat([(*inner).into(), Self::from_sql(" any")])
            }
            InfixOp::WithCmpModifierAll(inner) => {
                Self::concat([(*inner).into(), Self::from_sql(" all")])
            }
        }
    }
}
|
|
||||||
|
|
||||||
impl From<QueryFragment> for QueryBuilder<'_, Postgres> {
    /// Renders the fragment into an executable [`sqlx::QueryBuilder`],
    /// interleaving each bind parameter between the plain-SQL segments.
    fn from(value: QueryFragment) -> Self {
        value.gut_checks();
        let mut builder = QueryBuilder::new("");
        let mut param_iter = value.params.into_iter();
        for plain_sql in value.plain_sql {
            builder.push(plain_sql);
            // plain_sql has exactly one more element than params (asserted
            // above), so the final iteration simply binds nothing.
            if let Some(param) = param_iter.next() {
                param.push_bind_onto(&mut builder);
            }
        }
        builder
    }
}
|
|
||||||
|
|
||||||
impl From<Expr> for QueryBuilder<'_, Postgres> {
|
|
||||||
fn from(value: Expr) -> Self {
|
|
||||||
Self::from(QueryFragment::from(value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -23,7 +23,6 @@ percent-encoding = "2.3.1"
|
||||||
phono-backends = { workspace = true }
|
phono-backends = { workspace = true }
|
||||||
phono-models = { workspace = true }
|
phono-models = { workspace = true }
|
||||||
phono-namegen = { workspace = true }
|
phono-namegen = { workspace = true }
|
||||||
phono-pestgros = { workspace = true }
|
|
||||||
rand = { workspace = true }
|
rand = { workspace = true }
|
||||||
redact = { workspace = true }
|
redact = { workspace = true }
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ use anyhow::anyhow;
|
||||||
use askama::Template;
|
use askama::Template;
|
||||||
use phono_backends::{
|
use phono_backends::{
|
||||||
client::WorkspaceClient,
|
client::WorkspaceClient,
|
||||||
|
escape_identifier,
|
||||||
pg_acl::{PgAclItem, PgPrivilegeType},
|
pg_acl::{PgAclItem, PgPrivilegeType},
|
||||||
pg_class::PgClass,
|
pg_class::PgClass,
|
||||||
pg_role::RoleTree,
|
pg_role::RoleTree,
|
||||||
|
|
@ -16,7 +17,6 @@ use phono_backends::{
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
use phono_models::{accessors::Actor, service_cred::ServiceCred, user::User};
|
use phono_models::{accessors::Actor, service_cred::ServiceCred, user::User};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::{postgres::types::Oid, prelude::FromRow, query, query_as};
|
use sqlx::{postgres::types::Oid, prelude::FromRow, query, query_as};
|
||||||
use tracing::{Instrument, info_span};
|
use tracing::{Instrument, info_span};
|
||||||
|
|
|
||||||
|
|
@ -6,13 +6,12 @@ use axum::{
|
||||||
// [`axum_extra`]'s form extractor is preferred:
|
// [`axum_extra`]'s form extractor is preferred:
|
||||||
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
|
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
|
||||||
use axum_extra::extract::Form;
|
use axum_extra::extract::Form;
|
||||||
use phono_backends::pg_class::PgClass;
|
use phono_backends::{escape_identifier, pg_class::PgClass};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor as _, Actor, portal::PortalAccessor},
|
accessors::{Accessor as _, Actor, portal::PortalAccessor},
|
||||||
field::Field,
|
field::Field,
|
||||||
presentation::Presentation,
|
presentation::Presentation,
|
||||||
};
|
};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::{postgres::types::Oid, query};
|
use sqlx::{postgres::types::Oid, query};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
|
||||||
|
|
@ -5,17 +5,22 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::{IntoResponse as _, Response},
|
response::{IntoResponse as _, Response},
|
||||||
};
|
};
|
||||||
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass};
|
use phono_backends::{
|
||||||
|
escape_identifier, pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass,
|
||||||
|
};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor, Actor, portal::PortalAccessor},
|
accessors::{Accessor, Actor, portal::PortalAccessor},
|
||||||
|
datum::Datum,
|
||||||
|
expression::PgExpressionAny,
|
||||||
field::Field,
|
field::Field,
|
||||||
|
query_builders::{QueryFragment, SelectQuery},
|
||||||
};
|
};
|
||||||
use phono_pestgros::{Datum, Expr, FnArgs, InfixOp, QueryFragment, escape_identifier};
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::{
|
use sqlx::{
|
||||||
Postgres, QueryBuilder,
|
QueryBuilder,
|
||||||
postgres::{PgRow, types::Oid},
|
postgres::{PgRow, types::Oid},
|
||||||
};
|
};
|
||||||
|
use tracing::debug;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
|
|
@ -37,8 +42,7 @@ pub(super) struct PathParams {
|
||||||
|
|
||||||
#[derive(Debug, Deserialize, Validate)]
|
#[derive(Debug, Deserialize, Validate)]
|
||||||
pub(super) struct FormBody {
|
pub(super) struct FormBody {
|
||||||
#[serde(default)]
|
subfilter: Option<String>,
|
||||||
subfilter: String,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const FRONTEND_ROW_LIMIT: i64 = 1000;
|
const FRONTEND_ROW_LIMIT: i64 = 1000;
|
||||||
|
|
@ -124,8 +128,24 @@ pub(super) async fn get(
|
||||||
)),
|
)),
|
||||||
filters: QueryFragment::join(
|
filters: QueryFragment::join(
|
||||||
[
|
[
|
||||||
into_safe_filter_sql(&portal.filter),
|
portal
|
||||||
into_safe_filter_sql(&form.subfilter),
|
.table_filter
|
||||||
|
.0
|
||||||
|
.map(|filter| filter.into_query_fragment()),
|
||||||
|
form.subfilter
|
||||||
|
.and_then(|value| {
|
||||||
|
if value.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
serde_json::from_str::<Option<PgExpressionAny>>(&value)
|
||||||
|
// Ignore invalid input. A user likely pasted incorrectly
|
||||||
|
// or made a typo.
|
||||||
|
.inspect_err(|_| debug!("ignoring invalid subfilter expression"))
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.map(|filter| filter.into_query_fragment()),
|
||||||
]
|
]
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flatten(),
|
.flatten(),
|
||||||
|
|
@ -195,157 +215,3 @@ pub(super) async fn get(
|
||||||
})
|
})
|
||||||
.into_response())
|
.into_response())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Helper type to make it easier to build and reason about multiple related SQL
|
|
||||||
/// queries.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct SelectQuery {
|
|
||||||
/// Query fragment following (not including) "select ".
|
|
||||||
pub selection: QueryFragment,
|
|
||||||
|
|
||||||
/// Query fragment following (not including) "from ".
|
|
||||||
pub source: QueryFragment,
|
|
||||||
|
|
||||||
/// Query fragment following (not including) "where ", or empty if not
|
|
||||||
/// applicable.
|
|
||||||
pub filters: QueryFragment,
|
|
||||||
|
|
||||||
/// Query fragment following (not including) "order by ", or empty if not
|
|
||||||
/// applicable.
|
|
||||||
pub order: QueryFragment,
|
|
||||||
|
|
||||||
/// Query fragment following (not including) "limit ", or empty if not
|
|
||||||
/// applicable.
|
|
||||||
pub limit: QueryFragment,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<SelectQuery> for QueryFragment {
|
|
||||||
fn from(value: SelectQuery) -> Self {
|
|
||||||
let mut result = QueryFragment::from_sql("select ");
|
|
||||||
result.push(value.selection);
|
|
||||||
result.push(QueryFragment::from_sql(" from "));
|
|
||||||
result.push(value.source);
|
|
||||||
if !value.filters.is_empty() {
|
|
||||||
result.push(QueryFragment::from_sql(" where "));
|
|
||||||
result.push(value.filters);
|
|
||||||
}
|
|
||||||
if !value.order.is_empty() {
|
|
||||||
result.push(QueryFragment::from_sql(" order by "));
|
|
||||||
result.push(value.order);
|
|
||||||
}
|
|
||||||
if !value.limit.is_empty() {
|
|
||||||
result.push(QueryFragment::from_sql(" limit "));
|
|
||||||
result.push(value.limit);
|
|
||||||
}
|
|
||||||
result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<SelectQuery> for QueryBuilder<'_, Postgres> {
|
|
||||||
fn from(value: SelectQuery) -> Self {
|
|
||||||
QueryFragment::from(value).into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Users are allowed to put any text they want in the `Portal.filter` field.
|
|
||||||
/// This needs to be either transformed into a SQL expression that we trust to
|
|
||||||
/// be injected into a `WHERE` clause or disregarded if no such expression can
|
|
||||||
/// be generated.
|
|
||||||
///
|
|
||||||
/// Given the known (not to mention unknown) limitations of [`phono_pestgros`]'s
|
|
||||||
/// homegrown PostgreSQL grammar, trying to positively establish the correctness
|
|
||||||
/// and trustworthiness of a filter expression exactly as written would be
|
|
||||||
/// impractical and dangerous. Instead, we validate the syntax tree as parsed by
|
|
||||||
/// [`phono_pestgros`] (even if the parsing logic isn't spec-compliant), and use
|
|
||||||
/// the SQL expression only after it has been converted back from parsed form.
|
|
||||||
fn into_safe_filter_sql(expr_text: &str) -> Option<QueryFragment> {
|
|
||||||
if let Ok(expr) = Expr::try_from(expr_text)
|
|
||||||
&& is_safe_filter_expr(&expr)
|
|
||||||
{
|
|
||||||
Some(expr.into())
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_safe_filter_expr(expr: &Expr) -> bool {
|
|
||||||
match expr {
|
|
||||||
Expr::Literal(_) | &Expr::ObjName(_) => true,
|
|
||||||
Expr::Infix { lhs, op, rhs } => {
|
|
||||||
is_safe_filter_infix_op(op) && is_safe_filter_expr(lhs) && is_safe_filter_expr(rhs)
|
|
||||||
}
|
|
||||||
Expr::FnCall { name, args } => match name
|
|
||||||
.iter()
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.as_slice()
|
|
||||||
{
|
|
||||||
// Math:
|
|
||||||
&["abs"]
|
|
||||||
| &["ceil"]
|
|
||||||
| &["floor"]
|
|
||||||
| &["ln"]
|
|
||||||
| &["log"]
|
|
||||||
| &["mod"]
|
|
||||||
| &["power"]
|
|
||||||
| &["pi"]
|
|
||||||
| &["round"]
|
|
||||||
| &["sqrt"]
|
|
||||||
| &["trunc"]
|
|
||||||
// Timestamp:
|
|
||||||
| &["now"]
|
|
||||||
| &["to_timestamp"]
|
|
||||||
// Strings:
|
|
||||||
| &["upper"]
|
|
||||||
| &["lower"]
|
|
||||||
| &["replace"]
|
|
||||||
| &["btrim"]
|
|
||||||
| &["length"]
|
|
||||||
| &["concat_ws"]
|
|
||||||
| &["lpad"]
|
|
||||||
| &["rpad"]
|
|
||||||
| &["regexp_replace"]
|
|
||||||
| &["regexp_matches"]
|
|
||||||
| &["to_char"]
|
|
||||||
// Misc:
|
|
||||||
| &["any"] => match args {
|
|
||||||
FnArgs::Exprs {
|
|
||||||
distinct_flag,
|
|
||||||
exprs,
|
|
||||||
} => !distinct_flag && exprs.iter().all(is_safe_filter_expr),
|
|
||||||
_ => false,
|
|
||||||
},
|
|
||||||
_ => false,
|
|
||||||
},
|
|
||||||
Expr::Not(inner) => is_safe_filter_expr(inner),
|
|
||||||
Expr::Nullness {
|
|
||||||
is_null: _,
|
|
||||||
expr: inner,
|
|
||||||
} => is_safe_filter_expr(inner),
|
|
||||||
Expr::Array(arr_items) => arr_items.iter().all(is_safe_filter_expr),
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_safe_filter_infix_op(op: &InfixOp) -> bool {
|
|
||||||
match op {
|
|
||||||
InfixOp::Add
|
|
||||||
| InfixOp::Concat
|
|
||||||
| InfixOp::Div
|
|
||||||
| InfixOp::Mult
|
|
||||||
| InfixOp::Sub
|
|
||||||
// Boolean:
|
|
||||||
| InfixOp::And
|
|
||||||
| InfixOp::Or
|
|
||||||
| InfixOp::Eq
|
|
||||||
| InfixOp::Gt
|
|
||||||
| InfixOp::Gte
|
|
||||||
| InfixOp::Lt
|
|
||||||
| InfixOp::Lte
|
|
||||||
| InfixOp::Neq
|
|
||||||
// Miscellaneous:
|
|
||||||
| InfixOp::Cast => true,
|
|
||||||
InfixOp::WithCmpModifierAny(inner) | InfixOp::WithCmpModifierAll(inner) => is_safe_filter_infix_op(inner),
|
|
||||||
_ => false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -8,9 +8,11 @@ use axum::{
|
||||||
// [`axum_extra`]'s form extractor is required to support repeated keys:
|
// [`axum_extra`]'s form extractor is required to support repeated keys:
|
||||||
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
|
// https://docs.rs/axum-extra/0.10.1/axum_extra/extract/struct.Form.html#differences-from-axumextractform
|
||||||
use axum_extra::extract::Form;
|
use axum_extra::extract::Form;
|
||||||
use phono_backends::{pg_acl::PgPrivilegeType, pg_class::PgClass};
|
use phono_backends::{escape_identifier, pg_acl::PgPrivilegeType, pg_class::PgClass};
|
||||||
use phono_models::accessors::{Accessor as _, Actor, portal::PortalAccessor};
|
use phono_models::{
|
||||||
use phono_pestgros::{Datum, escape_identifier};
|
accessors::{Accessor as _, Actor, portal::PortalAccessor},
|
||||||
|
datum::Datum,
|
||||||
|
};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::{postgres::types::Oid, query};
|
use sqlx::{postgres::types::Oid, query};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,7 @@ use axum::{
|
||||||
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute};
|
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor, Actor, portal::PortalAccessor},
|
accessors::{Accessor, Actor, portal::PortalAccessor},
|
||||||
|
expression::PgExpressionAny,
|
||||||
workspace::Workspace,
|
workspace::Workspace,
|
||||||
};
|
};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
@ -97,7 +98,7 @@ pub(super) async fn get(
|
||||||
struct ResponseTemplate {
|
struct ResponseTemplate {
|
||||||
columns: Vec<ColumnInfo>,
|
columns: Vec<ColumnInfo>,
|
||||||
attr_names: Vec<String>,
|
attr_names: Vec<String>,
|
||||||
filter: String,
|
filter: Option<PgExpressionAny>,
|
||||||
settings: Settings,
|
settings: Settings,
|
||||||
subfilter_str: String,
|
subfilter_str: String,
|
||||||
navbar: WorkspaceNav,
|
navbar: WorkspaceNav,
|
||||||
|
|
@ -106,7 +107,7 @@ pub(super) async fn get(
|
||||||
ResponseTemplate {
|
ResponseTemplate {
|
||||||
columns,
|
columns,
|
||||||
attr_names,
|
attr_names,
|
||||||
filter: portal.filter,
|
filter: portal.table_filter.0,
|
||||||
navbar: WorkspaceNav::builder()
|
navbar: WorkspaceNav::builder()
|
||||||
.navigator(navigator)
|
.navigator(navigator)
|
||||||
.workspace(workspace)
|
.workspace(workspace)
|
||||||
|
|
|
||||||
|
|
@ -3,12 +3,11 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::Response,
|
response::Response,
|
||||||
};
|
};
|
||||||
use phono_backends::pg_class::PgClass;
|
use phono_backends::{escape_identifier, pg_class::PgClass};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor, Actor, portal::PortalAccessor},
|
accessors::{Accessor, Actor, portal::PortalAccessor},
|
||||||
field::Field,
|
field::Field,
|
||||||
};
|
};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::{postgres::types::Oid, query};
|
use sqlx::{postgres::types::Oid, query};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
|
||||||
|
|
@ -8,6 +8,7 @@ use axum::{
|
||||||
use axum_extra::extract::Form;
|
use axum_extra::extract::Form;
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor, Actor, portal::PortalAccessor},
|
accessors::{Accessor, Actor, portal::PortalAccessor},
|
||||||
|
expression::PgExpressionAny,
|
||||||
portal::Portal,
|
portal::Portal,
|
||||||
};
|
};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
|
@ -31,10 +32,11 @@ pub(super) struct PathParams {
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
pub(super) struct FormBody {
|
pub(super) struct FormBody {
|
||||||
filter: String,
|
filter_expression: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// HTTP POST handler for applying a filter to a portal's table viewer.
|
/// HTTP POST handler for applying a [`PgExpressionAny`] filter to a portal's
|
||||||
|
/// table viewer.
|
||||||
///
|
///
|
||||||
/// This handler expects 3 path parameters with the structure described by
|
/// This handler expects 3 path parameters with the structure described by
|
||||||
/// [`PathParams`].
|
/// [`PathParams`].
|
||||||
|
|
@ -49,7 +51,7 @@ pub(super) async fn post(
|
||||||
rel_oid,
|
rel_oid,
|
||||||
workspace_id,
|
workspace_id,
|
||||||
}): Path<PathParams>,
|
}): Path<PathParams>,
|
||||||
Form(FormBody { filter }): Form<FormBody>,
|
Form(form): Form<FormBody>,
|
||||||
) -> Result<Response, AppError> {
|
) -> Result<Response, AppError> {
|
||||||
// FIXME: csrf
|
// FIXME: csrf
|
||||||
|
|
||||||
|
|
@ -68,9 +70,12 @@ pub(super) async fn post(
|
||||||
.fetch_one()
|
.fetch_one()
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
|
let filter: Option<PgExpressionAny> =
|
||||||
|
serde_json::from_str(&form.filter_expression.unwrap_or("null".to_owned()))?;
|
||||||
|
|
||||||
Portal::update()
|
Portal::update()
|
||||||
.id(portal.id)
|
.id(portal.id)
|
||||||
.filter(filter)
|
.table_filter(filter)
|
||||||
.build()?
|
.build()?
|
||||||
.execute(&mut app_db)
|
.execute(&mut app_db)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
|
||||||
|
|
@ -5,8 +5,7 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::Response,
|
response::Response,
|
||||||
};
|
};
|
||||||
use phono_backends::pg_class::PgClass;
|
use phono_backends::{escape_identifier, pg_class::PgClass};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::{postgres::types::Oid, query};
|
use sqlx::{postgres::types::Oid, query};
|
||||||
|
|
|
||||||
|
|
@ -5,9 +5,13 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::{IntoResponse as _, Response},
|
response::{IntoResponse as _, Response},
|
||||||
};
|
};
|
||||||
use phono_backends::{pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass};
|
use phono_backends::{
|
||||||
use phono_models::accessors::{Accessor, Actor, portal::PortalAccessor};
|
escape_identifier, pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass,
|
||||||
use phono_pestgros::{Datum, escape_identifier};
|
};
|
||||||
|
use phono_models::{
|
||||||
|
accessors::{Accessor, Actor, portal::PortalAccessor},
|
||||||
|
datum::Datum,
|
||||||
|
};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use sqlx::{Acquire as _, postgres::types::Oid, query};
|
use sqlx::{Acquire as _, postgres::types::Oid, query};
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,9 @@
|
||||||
use axum::{extract::State, response::IntoResponse};
|
use axum::{extract::State, response::IntoResponse};
|
||||||
use phono_backends::{client::WorkspaceClient, rolnames::ROLE_PREFIX_USER};
|
use phono_backends::{client::WorkspaceClient, escape_identifier, rolnames::ROLE_PREFIX_USER};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
client::AppDbClient, cluster::Cluster, user::User, workspace::Workspace,
|
client::AppDbClient, cluster::Cluster, user::User, workspace::Workspace,
|
||||||
workspace_user_perm::WorkspaceMembership,
|
workspace_user_perm::WorkspaceMembership,
|
||||||
};
|
};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use sqlx::{Connection as _, PgConnection, query};
|
use sqlx::{Connection as _, PgConnection, query};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
|
|
||||||
|
|
@ -3,14 +3,14 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::IntoResponse,
|
response::IntoResponse,
|
||||||
};
|
};
|
||||||
use phono_backends::rolnames::{
|
use phono_backends::{
|
||||||
ROLE_PREFIX_SERVICE_CRED, SERVICE_CRED_CONN_LIMIT, SERVICE_CRED_SUFFIX_LEN,
|
escape_identifier,
|
||||||
|
rolnames::{ROLE_PREFIX_SERVICE_CRED, SERVICE_CRED_CONN_LIMIT, SERVICE_CRED_SUFFIX_LEN},
|
||||||
};
|
};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor},
|
accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor},
|
||||||
service_cred::ServiceCred,
|
service_cred::ServiceCred,
|
||||||
};
|
};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use rand::distributions::{Alphanumeric, DistString};
|
use rand::distributions::{Alphanumeric, DistString};
|
||||||
use redact::Secret;
|
use redact::Secret;
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,14 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::IntoResponse,
|
response::IntoResponse,
|
||||||
};
|
};
|
||||||
use phono_backends::rolnames::{
|
use phono_backends::{
|
||||||
ROLE_PREFIX_TABLE_OWNER, ROLE_PREFIX_TABLE_READER, ROLE_PREFIX_TABLE_WRITER, ROLE_PREFIX_USER,
|
escape_identifier,
|
||||||
|
rolnames::{
|
||||||
|
ROLE_PREFIX_TABLE_OWNER, ROLE_PREFIX_TABLE_READER, ROLE_PREFIX_TABLE_WRITER,
|
||||||
|
ROLE_PREFIX_USER,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
use phono_models::accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor};
|
use phono_models::accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::{Acquire as _, query};
|
use sqlx::{Acquire as _, query};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
|
||||||
|
|
@ -3,13 +3,12 @@ use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
response::IntoResponse,
|
response::IntoResponse,
|
||||||
};
|
};
|
||||||
use phono_backends::{pg_database::PgDatabase, rolnames::ROLE_PREFIX_USER};
|
use phono_backends::{escape_identifier, pg_database::PgDatabase, rolnames::ROLE_PREFIX_USER};
|
||||||
use phono_models::{
|
use phono_models::{
|
||||||
accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor},
|
accessors::{Accessor as _, Actor, workspace::WorkspaceAccessor},
|
||||||
user::User,
|
user::User,
|
||||||
workspace_user_perm::WorkspaceMembership,
|
workspace_user_perm::WorkspaceMembership,
|
||||||
};
|
};
|
||||||
use phono_pestgros::escape_identifier;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::query;
|
use sqlx::query;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,8 @@
|
||||||
Portal Settings
|
Portal Settings
|
||||||
</a>
|
</a>
|
||||||
<filter-menu
|
<filter-menu
|
||||||
initial-value="{{ filter }}"
|
identifier-hints="{{ attr_names | json }}"
|
||||||
|
initial-value="{{ filter | json }}"
|
||||||
></filter-menu>
|
></filter-menu>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
||||||
78
static/expression-editor.css
Normal file
78
static/expression-editor.css
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
.expression-editor__container {
|
||||||
|
background: #eee;
|
||||||
|
border-radius: var(--default-border-radius--rounded);
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-editor__sidebar {
|
||||||
|
display: grid;
|
||||||
|
grid-template:
|
||||||
|
'padding-top' 1fr
|
||||||
|
'operator-selector' max-content
|
||||||
|
'actions' minmax(max-content, 1fr);
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-editor__main {
|
||||||
|
background: #fff;
|
||||||
|
border-radius: var(--default-border-radius--rounded);
|
||||||
|
border: solid 1px var(--default-border-color);
|
||||||
|
flex: 1;
|
||||||
|
padding: var(--default-padding);
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-editor__action-button {
|
||||||
|
padding: var(--default-padding);
|
||||||
|
|
||||||
|
svg path {
|
||||||
|
fill: currentColor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-editor__params {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--default-padding);
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-selector {
|
||||||
|
grid-area: operator-selector;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-selector__expression-button {
|
||||||
|
align-items: center;
|
||||||
|
display: flex;
|
||||||
|
justify-content: center;
|
||||||
|
height: 2.5rem;
|
||||||
|
padding: 0;
|
||||||
|
width: 2.5rem;
|
||||||
|
|
||||||
|
svg path {
|
||||||
|
fill: currentColor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-selector__popover:popover-open {
|
||||||
|
top: anchor(bottom);
|
||||||
|
margin-top: 0.25rem;
|
||||||
|
position: absolute;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
padding: 0;
|
||||||
|
background: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-selector__section {
|
||||||
|
align-items: center;
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(3, 1fr);
|
||||||
|
justify-content: center;
|
||||||
|
list-style-type: none;
|
||||||
|
margin: var(--default-padding);
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-selector__li {
|
||||||
|
align-items: center;
|
||||||
|
display: flex;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
@ -1,3 +1,8 @@
|
||||||
|
/*
|
||||||
|
@use 'forms';
|
||||||
|
@use 'condition-editor';
|
||||||
|
*/
|
||||||
|
|
||||||
/* ======== Theming ======== */
|
/* ======== Theming ======== */
|
||||||
|
|
||||||
:root {
|
:root {
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
@import "./expression-editor.css";
|
||||||
|
|
||||||
:root {
|
:root {
|
||||||
--table-header-border-color: var(--default-border-color);
|
--table-header-border-color: var(--default-border-color);
|
||||||
--table-cell-border-color: oklch(from var(--default-border-color) calc(l * 1.15) c h);
|
--table-cell-border-color: oklch(from var(--default-border-color) calc(l * 1.15) c h);
|
||||||
|
|
|
||||||
47
svelte/README.md
Normal file
47
svelte/README.md
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
# Svelte + TS + Vite
|
||||||
|
|
||||||
|
This template should help get you started developing with Svelte and TypeScript in Vite.
|
||||||
|
|
||||||
|
## Recommended IDE Setup
|
||||||
|
|
||||||
|
[VS Code](https://code.visualstudio.com/) + [Svelte](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode).
|
||||||
|
|
||||||
|
## Need an official Svelte framework?
|
||||||
|
|
||||||
|
Check out [SvelteKit](https://github.com/sveltejs/kit#readme), which is also powered by Vite. Deploy anywhere with its serverless-first approach and adapt to various platforms, with out of the box support for TypeScript, SCSS, and Less, and easily-added support for mdsvex, GraphQL, PostCSS, Tailwind CSS, and more.
|
||||||
|
|
||||||
|
## Technical considerations
|
||||||
|
|
||||||
|
**Why use this over SvelteKit?**
|
||||||
|
|
||||||
|
- It brings its own routing solution which might not be preferable for some users.
|
||||||
|
- It is first and foremost a framework that just happens to use Vite under the hood, not a Vite app.
|
||||||
|
|
||||||
|
This template contains as little as possible to get started with Vite + TypeScript + Svelte, while taking into account the developer experience with regards to HMR and intellisense. It demonstrates capabilities on par with the other `create-vite` templates and is a good starting point for beginners dipping their toes into a Vite + Svelte project.
|
||||||
|
|
||||||
|
Should you later need the extended capabilities and extensibility provided by SvelteKit, the template has been structured similarly to SvelteKit so that it is easy to migrate.
|
||||||
|
|
||||||
|
**Why `global.d.ts` instead of `compilerOptions.types` inside `jsconfig.json` or `tsconfig.json`?**
|
||||||
|
|
||||||
|
Setting `compilerOptions.types` shuts out all other types not explicitly listed in the configuration. Using triple-slash references keeps the default TypeScript setting of accepting type information from the entire workspace, while also adding `svelte` and `vite/client` type information.
|
||||||
|
|
||||||
|
**Why include `.vscode/extensions.json`?**
|
||||||
|
|
||||||
|
Other templates indirectly recommend extensions via the README, but this file allows VS Code to prompt the user to install the recommended extension upon opening the project.
|
||||||
|
|
||||||
|
**Why enable `allowJs` in the TS template?**
|
||||||
|
|
||||||
|
While `allowJs: false` would indeed prevent the use of `.js` files in the project, it does not prevent the use of JavaScript syntax in `.svelte` files. In addition, it would force `checkJs: false`, bringing the worst of both worlds: not being able to guarantee the entire codebase is TypeScript, and also having worse typechecking for the existing JavaScript. In addition, there are valid use cases in which a mixed codebase may be relevant.
|
||||||
|
|
||||||
|
**Why is HMR not preserving my local component state?**
|
||||||
|
|
||||||
|
HMR state preservation comes with a number of gotchas! It has been disabled by default in both `svelte-hmr` and `@sveltejs/vite-plugin-svelte` due to its often surprising behavior. You can read the details [here](https://github.com/rixo/svelte-hmr#svelte-hmr).
|
||||||
|
|
||||||
|
If you have state that's important to retain within a component, consider creating an external store which would not be replaced by HMR.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// store.ts
|
||||||
|
// An extremely simple external store
|
||||||
|
import { writable } from 'svelte/store'
|
||||||
|
export default writable(0)
|
||||||
|
```
|
||||||
1
svelte/public/vite.svg
Normal file
1
svelte/public/vite.svg
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||||
|
After Width: | Height: | Size: 1.5 KiB |
91
svelte/src/expression-editor.webc.svelte
Normal file
91
svelte/src/expression-editor.webc.svelte
Normal file
|
|
@ -0,0 +1,91 @@
|
||||||
|
<svelte:options
|
||||||
|
customElement={{
|
||||||
|
props: {
|
||||||
|
identifier_hints: { attribute: "identifier-hints", type: "Array" },
|
||||||
|
value: { reflect: true, type: "Object" },
|
||||||
|
},
|
||||||
|
shadow: "none",
|
||||||
|
tag: "expression-editor",
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<script lang="ts">
|
||||||
|
import DatumEditor from "./datum-editor.svelte";
|
||||||
|
import ExpressionSelector from "./expression-selector.svelte";
|
||||||
|
import { type PgExpressionAny } from "./expression.svelte";
|
||||||
|
import ExpressionEditor from "./expression-editor.webc.svelte";
|
||||||
|
import { RFC_3339_S, type Presentation } from "./presentation.svelte";
|
||||||
|
import type { Datum } from "./datum.svelte";
|
||||||
|
|
||||||
|
const POTENTIAL_PRESENTATIONS: Presentation[] = [
|
||||||
|
{ t: "Numeric", c: {} },
|
||||||
|
{ t: "Text", c: { input_mode: { t: "MultiLine", c: {} } } },
|
||||||
|
{ t: "Timestamp", c: { format: RFC_3339_S } },
|
||||||
|
{ t: "Uuid", c: {} },
|
||||||
|
];
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
identifier_hints?: string[];
|
||||||
|
value?: PgExpressionAny;
|
||||||
|
};
|
||||||
|
|
||||||
|
let { identifier_hints = [], value = $bindable() }: Props = $props();
|
||||||
|
|
||||||
|
// Dynamic state to bind to datum editor.
|
||||||
|
let editor_value = $state<Datum | undefined>();
|
||||||
|
let editor_presentation = $state<Presentation>(POTENTIAL_PRESENTATIONS[0]);
|
||||||
|
|
||||||
|
$effect(() => {
|
||||||
|
editor_value = value?.t === "Literal" ? value.c : undefined;
|
||||||
|
});
|
||||||
|
|
||||||
|
function handle_identifier_selector_change(
|
||||||
|
ev: Event & { currentTarget: HTMLSelectElement },
|
||||||
|
) {
|
||||||
|
if (value?.t === "Identifier") {
|
||||||
|
value.c.parts_raw = [ev.currentTarget.value];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function handle_editor_change(datum_value: Datum) {
|
||||||
|
if (value?.t === "Literal") {
|
||||||
|
value.c = datum_value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<div class="expression-editor__container">
|
||||||
|
<div class="expression-editor__sidebar">
|
||||||
|
<ExpressionSelector bind:value />
|
||||||
|
</div>
|
||||||
|
{#if value !== undefined}
|
||||||
|
<div class="expression-editor__main">
|
||||||
|
<div class="expression-editor__params">
|
||||||
|
{#if value.t === "Comparison"}
|
||||||
|
{#if value.c.t === "Infix"}
|
||||||
|
<ExpressionEditor bind:value={value.c.c.lhs} {identifier_hints} />
|
||||||
|
<ExpressionEditor bind:value={value.c.c.rhs} {identifier_hints} />
|
||||||
|
{:else if value.c.t === "IsNull" || value.c.t === "IsNotNull"}
|
||||||
|
<ExpressionEditor bind:value={value.c.c.lhs} {identifier_hints} />
|
||||||
|
{/if}
|
||||||
|
{:else if value.t === "Identifier"}
|
||||||
|
<select
|
||||||
|
onchange={handle_identifier_selector_change}
|
||||||
|
value={value.c.parts_raw[0]}
|
||||||
|
>
|
||||||
|
{#each identifier_hints as hint}
|
||||||
|
<option value={hint}>{hint}</option>
|
||||||
|
{/each}
|
||||||
|
</select>
|
||||||
|
{:else if value.t === "Literal"}
|
||||||
|
<DatumEditor
|
||||||
|
bind:current_presentation={editor_presentation}
|
||||||
|
bind:value={editor_value}
|
||||||
|
potential_presentations={POTENTIAL_PRESENTATIONS}
|
||||||
|
on_change={handle_editor_change}
|
||||||
|
/>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
185
svelte/src/expression-selector.svelte
Normal file
185
svelte/src/expression-selector.svelte
Normal file
|
|
@ -0,0 +1,185 @@
|
||||||
|
<!--
|
||||||
|
@component
|
||||||
|
Dropdown menu with grid of buttons for quickly selecting a Postgres expression
|
||||||
|
type. Used by `<ExpressionEditor />`.
|
||||||
|
-->
|
||||||
|
|
||||||
|
<script lang="ts">
|
||||||
|
import { type PgExpressionAny, expression_icon } from "./expression.svelte";
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
on_change?(new_value: PgExpressionAny): void;
|
||||||
|
value?: PgExpressionAny;
|
||||||
|
};
|
||||||
|
|
||||||
|
let { on_change, value = $bindable() }: Props = $props();
|
||||||
|
|
||||||
|
let menu_button_element = $state<HTMLButtonElement | undefined>();
|
||||||
|
let popover_element = $state<HTMLDivElement | undefined>();
|
||||||
|
// Hacky workaround because as of September 2025 implicit anchor association
|
||||||
|
// is still pretty broken, at least in Firefox.
|
||||||
|
let anchor_name = $state(`--anchor-${Math.floor(Math.random() * 1000000)}`);
|
||||||
|
|
||||||
|
const expressions: ReadonlyArray<{
|
||||||
|
section_label: string;
|
||||||
|
expressions: ReadonlyArray<PgExpressionAny>;
|
||||||
|
}> = [
|
||||||
|
{
|
||||||
|
section_label: "Comparisons",
|
||||||
|
expressions: [
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: {
|
||||||
|
t: "Infix",
|
||||||
|
c: {
|
||||||
|
lhs: { t: "Identifier", c: { parts_raw: [] } },
|
||||||
|
operator: "Eq",
|
||||||
|
rhs: { t: "Literal", c: { t: "Text", c: "" } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: {
|
||||||
|
t: "Infix",
|
||||||
|
c: {
|
||||||
|
lhs: { t: "Identifier", c: { parts_raw: [] } },
|
||||||
|
operator: "Neq",
|
||||||
|
rhs: { t: "Literal", c: { t: "Text", c: "" } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: {
|
||||||
|
t: "Infix",
|
||||||
|
c: {
|
||||||
|
lhs: { t: "Identifier", c: { parts_raw: [] } },
|
||||||
|
operator: "Lt",
|
||||||
|
rhs: { t: "Literal", c: { t: "Text", c: "" } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: {
|
||||||
|
t: "Infix",
|
||||||
|
c: {
|
||||||
|
lhs: { t: "Identifier", c: { parts_raw: [] } },
|
||||||
|
operator: "Gt",
|
||||||
|
rhs: { t: "Literal", c: { t: "Text", c: "" } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: {
|
||||||
|
t: "IsNull",
|
||||||
|
c: {
|
||||||
|
lhs: { t: "Identifier", c: { parts_raw: [] } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: {
|
||||||
|
t: "IsNotNull",
|
||||||
|
c: {
|
||||||
|
lhs: { t: "Identifier", c: { parts_raw: [] } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
section_label: "Conjunctions",
|
||||||
|
expressions: [
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: { t: "Infix", c: { operator: "And" } },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Comparison",
|
||||||
|
c: { t: "Infix", c: { operator: "Or" } },
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
section_label: "Values",
|
||||||
|
expressions: [
|
||||||
|
{
|
||||||
|
t: "Identifier",
|
||||||
|
c: { parts_raw: [] },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
t: "Literal",
|
||||||
|
c: { t: "Text", c: "" },
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
section_label: "Transformations",
|
||||||
|
expressions: [
|
||||||
|
{
|
||||||
|
t: "ToJson",
|
||||||
|
c: { entries: [] },
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
let iconography_current = $derived(value && expression_icon(value));
|
||||||
|
|
||||||
|
function handle_menu_button_click() {
|
||||||
|
popover_element?.togglePopover();
|
||||||
|
}
|
||||||
|
|
||||||
|
function handle_expression_button_click(expr: PgExpressionAny) {
|
||||||
|
value = expr;
|
||||||
|
popover_element?.hidePopover();
|
||||||
|
menu_button_element?.focus();
|
||||||
|
on_change?.(value);
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<div class="expression-selector">
|
||||||
|
<button
|
||||||
|
aria-label={`Select expression type (current: ${iconography_current?.label ?? "None"})`}
|
||||||
|
bind:this={menu_button_element}
|
||||||
|
class="expression-selector__expression-button"
|
||||||
|
onclick={handle_menu_button_click}
|
||||||
|
style:anchor-name={anchor_name}
|
||||||
|
title={iconography_current?.label}
|
||||||
|
type="button"
|
||||||
|
>
|
||||||
|
{#if value}
|
||||||
|
{@html iconography_current?.html}
|
||||||
|
{:else}
|
||||||
|
<i class="ti ti-circle-plus"></i>
|
||||||
|
{/if}
|
||||||
|
</button>
|
||||||
|
<div
|
||||||
|
bind:this={popover_element}
|
||||||
|
class="popover expression-selector__popover"
|
||||||
|
popover="auto"
|
||||||
|
style:position-anchor={anchor_name}
|
||||||
|
>
|
||||||
|
{#each expressions as section}
|
||||||
|
<ul class="expression-selector__section">
|
||||||
|
{#each section.expressions as expr}
|
||||||
|
{@const iconography = expression_icon(expr)}
|
||||||
|
<li class="expression-selector__li">
|
||||||
|
<button
|
||||||
|
class="expression-selector__expression-button"
|
||||||
|
onclick={() => handle_expression_button_click(expr)}
|
||||||
|
title={iconography.label}
|
||||||
|
type="button"
|
||||||
|
>
|
||||||
|
{@html iconography.html}
|
||||||
|
</button>
|
||||||
|
</li>
|
||||||
|
{/each}
|
||||||
|
</ul>
|
||||||
|
{/each}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
175
svelte/src/expression.svelte.ts
Normal file
175
svelte/src/expression.svelte.ts
Normal file
|
|
@ -0,0 +1,175 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { datum_schema } from "./datum.svelte.ts";
|
||||||
|
|
||||||
|
export const all_expression_types = [
|
||||||
|
"Comparison",
|
||||||
|
"Identifier",
|
||||||
|
"Literal",
|
||||||
|
"ToJson",
|
||||||
|
] as const;
|
||||||
|
// Type checking to ensure that all valid enum tags are included.
|
||||||
|
type Assert<_T extends true> = void;
|
||||||
|
type _ = Assert<PgExpressionAny["t"] extends PgExpressionType ? true : false>;
|
||||||
|
|
||||||
|
export const expression_type_schema = z.enum(all_expression_types);
|
||||||
|
|
||||||
|
export const all_infix_comparison_operators = [
|
||||||
|
"Eq",
|
||||||
|
"Neq",
|
||||||
|
"Gt",
|
||||||
|
"Lt",
|
||||||
|
"And",
|
||||||
|
"Or",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
const pg_comparison_operator_schema = z.enum(all_infix_comparison_operators);
|
||||||
|
|
||||||
|
const pg_infix_expression_schema = z.object({
|
||||||
|
operator: z.union([pg_comparison_operator_schema]),
|
||||||
|
get lhs() {
|
||||||
|
return pg_expression_any_schema.optional();
|
||||||
|
},
|
||||||
|
get rhs() {
|
||||||
|
return pg_expression_any_schema.optional();
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_comparison_expression_infix_schema = z.object({
|
||||||
|
t: z.literal("Infix"),
|
||||||
|
c: pg_infix_expression_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_is_null_expression_schema = z.object({
|
||||||
|
get lhs() {
|
||||||
|
return pg_expression_any_schema.optional();
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_comparison_expression_is_null_schema = z.object({
|
||||||
|
t: z.literal("IsNull"),
|
||||||
|
c: pg_is_null_expression_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_is_not_null_expression_schema = z.object({
|
||||||
|
get lhs() {
|
||||||
|
return pg_expression_any_schema.optional();
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_comparison_expression_is_not_null_schema = z.object({
|
||||||
|
t: z.literal("IsNotNull"),
|
||||||
|
c: pg_is_not_null_expression_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_comparison_expression_schema = z.union([
|
||||||
|
pg_comparison_expression_infix_schema,
|
||||||
|
pg_comparison_expression_is_null_schema,
|
||||||
|
pg_comparison_expression_is_not_null_schema,
|
||||||
|
]);
|
||||||
|
|
||||||
|
const pg_expression_any_comparison_schema = z.object({
|
||||||
|
t: z.literal("Comparison"),
|
||||||
|
c: pg_comparison_expression_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_identifier_expression_schema = z.object({
|
||||||
|
parts_raw: z.array(z.string()),
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_expression_any_identifier_schema = z.object({
|
||||||
|
t: z.literal("Identifier"),
|
||||||
|
c: pg_identifier_expression_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_expression_any_literal_schema = z.object({
|
||||||
|
t: z.literal("Literal"),
|
||||||
|
c: datum_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_to_json_expression_schema = z.object({
|
||||||
|
get entries() {
|
||||||
|
return z.array(z.tuple([z.string(), pg_expression_any_schema.optional()]));
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const pg_expression_any_to_json_expression_schema = z.object({
|
||||||
|
t: z.literal("ToJson"),
|
||||||
|
c: pg_to_json_expression_schema,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const pg_expression_any_schema = z.union([
|
||||||
|
pg_expression_any_comparison_schema,
|
||||||
|
pg_expression_any_identifier_schema,
|
||||||
|
pg_expression_any_literal_schema,
|
||||||
|
pg_expression_any_to_json_expression_schema,
|
||||||
|
]);
|
||||||
|
|
||||||
|
export type PgExpressionAny = z.infer<typeof pg_expression_any_schema>;
|
||||||
|
export type PgExpressionType = z.infer<typeof expression_type_schema>;
|
||||||
|
|
||||||
|
export function expression_human_name(expr_type: PgExpressionType): string {
|
||||||
|
if (expr_type === "Comparison") {
|
||||||
|
return "Condition";
|
||||||
|
}
|
||||||
|
if (expr_type === "Identifier") {
|
||||||
|
return "Identifier";
|
||||||
|
}
|
||||||
|
if (expr_type === "Literal") {
|
||||||
|
return "Literal";
|
||||||
|
}
|
||||||
|
if (expr_type === "ToJson") {
|
||||||
|
return "JSON";
|
||||||
|
}
|
||||||
|
// Type guard to check for exhaustive matching.
|
||||||
|
type _ = Assert<typeof expr_type extends never ? true : false>;
|
||||||
|
throw new Error("this should be unreachable");
|
||||||
|
}
|
||||||
|
|
||||||
|
export function expression_icon(expr: PgExpressionAny): {
|
||||||
|
html: string;
|
||||||
|
label: string;
|
||||||
|
} {
|
||||||
|
if (expr.t === "Comparison") {
|
||||||
|
if (expr.c.t === "Infix") {
|
||||||
|
const op = expr.c.c.operator;
|
||||||
|
if (op === "And") {
|
||||||
|
return { html: "&&", label: "And" };
|
||||||
|
}
|
||||||
|
if (op === "Eq") {
|
||||||
|
return { html: "=", label: "Is Equal To" };
|
||||||
|
}
|
||||||
|
if (op === "Gt") {
|
||||||
|
return { html: ">", label: "Is Greater Than" };
|
||||||
|
}
|
||||||
|
if (op === "Lt") {
|
||||||
|
return { html: "<", label: "Is Less Than" };
|
||||||
|
}
|
||||||
|
if (op === "Or") {
|
||||||
|
return { html: "||", label: "Or" };
|
||||||
|
}
|
||||||
|
if (op === "Neq") {
|
||||||
|
return { html: "\u2260", label: "Is Not Equal To" };
|
||||||
|
}
|
||||||
|
// Type guard to check for exhaustive matching.
|
||||||
|
type _ = Assert<typeof op extends never ? true : false>;
|
||||||
|
throw new Error("this should be unreachable");
|
||||||
|
} else if (expr.c.t === "IsNull") {
|
||||||
|
return { html: '<i class="ti ti-cube-3d-sphere-off"></i>', label: "Is Null" };
|
||||||
|
} else if (expr.c.t === "IsNotNull") {
|
||||||
|
return { html: '<i class="ti ti-cube"></i>', label: "Is Not Null" };
|
||||||
|
}
|
||||||
|
// Type guard to check for exhaustive matching.
|
||||||
|
type _ = Assert<typeof expr.c extends never ? true : false>;
|
||||||
|
throw new Error("this should be unreachable");
|
||||||
|
} else if (expr.t === "Identifier") {
|
||||||
|
return { html: '<i class="ti ti-variable"></i>', label: "Dynamic Value" };
|
||||||
|
} else if (expr.t === "Literal") {
|
||||||
|
return { html: '<i class="ti ti-hash"></i>', label: "Static Value" };
|
||||||
|
} else if (expr.t === "ToJson") {
|
||||||
|
return { html: '<i class="ti ti-code"></i>', label: "JSON String" };
|
||||||
|
}
|
||||||
|
// Type guard to check for exhaustive matching.
|
||||||
|
type _ = Assert<typeof expr extends never ? true : false>;
|
||||||
|
throw new Error("this should be unreachable");
|
||||||
|
}
|
||||||
|
|
@ -1,42 +1,51 @@
|
||||||
<svelte:options
|
<svelte:options
|
||||||
customElement={{
|
customElement={{
|
||||||
props: { initialValue: { attribute: "initial-value" } },
|
props: {
|
||||||
|
identifier_hints: { attribute: "identifier-hints", type: "Array" },
|
||||||
|
initialValue: { attribute: "initial-value", type: "Object" },
|
||||||
|
},
|
||||||
shadow: "none",
|
shadow: "none",
|
||||||
tag: "filter-menu",
|
tag: "filter-menu",
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
import { type PgExpressionAny } from "./expression.svelte";
|
||||||
import BasicDropdown from "./basic-dropdown.webc.svelte";
|
import BasicDropdown from "./basic-dropdown.webc.svelte";
|
||||||
|
import ExpressionEditor from "./expression-editor.webc.svelte";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
initialValue?: string;
|
identifier_hints?: string[];
|
||||||
|
initialValue?: PgExpressionAny | null;
|
||||||
};
|
};
|
||||||
|
|
||||||
let { initialValue = "" }: Props = $props();
|
let { identifier_hints = [], initialValue }: Props = $props();
|
||||||
|
|
||||||
let expr = $state(initialValue);
|
let expr = $state<PgExpressionAny | undefined>(initialValue ?? undefined);
|
||||||
|
|
||||||
|
function handle_clear_button_click() {
|
||||||
|
expr = undefined;
|
||||||
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<div class="filter-menu toolbar-item">
|
<div class="filter-menu toolbar-item">
|
||||||
<BasicDropdown>
|
<BasicDropdown>
|
||||||
<span slot="button-contents">Filter</span>
|
<span slot="button-contents">Filter</span>
|
||||||
<form action="set-filter" class="padded" method="post" slot="popover">
|
<form action="set-filter" class="padded" method="post" slot="popover">
|
||||||
<div class="form__label">Filter expression (SQL)</div>
|
<ExpressionEditor bind:value={expr} {identifier_hints} />
|
||||||
<textarea
|
|
||||||
class="form__input"
|
|
||||||
name="filter"
|
|
||||||
rows="8"
|
|
||||||
cols="60"
|
|
||||||
placeholder="For example: LOWER("my_column") = 'hello world'"
|
|
||||||
>{expr}</textarea
|
|
||||||
>
|
|
||||||
<div class="form__buttons">
|
<div class="form__buttons">
|
||||||
<input
|
<input
|
||||||
name="filter_expression"
|
name="filter_expression"
|
||||||
type="hidden"
|
type="hidden"
|
||||||
value={JSON.stringify(expr)}
|
value={JSON.stringify(expr)}
|
||||||
/>
|
/>
|
||||||
|
<button
|
||||||
|
class="button button--secondary"
|
||||||
|
onclick={handle_clear_button_click}
|
||||||
|
type="button"
|
||||||
|
>
|
||||||
|
Clear
|
||||||
|
</button>
|
||||||
<button class="button button--primary" type="submit">Apply</button>
|
<button class="button button--primary" type="submit">Apply</button>
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
|
|
|
||||||
|
|
@ -6,13 +6,12 @@ commit queue, datum editor, and field headers.
|
||||||
-->
|
-->
|
||||||
|
|
||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { AsyncTaskQueue, withRetry } from "attq";
|
|
||||||
|
|
||||||
import { type Datum, parse_datum_from_text } from "../datum.svelte";
|
import { type Datum, parse_datum_from_text } from "../datum.svelte";
|
||||||
import DatumEditor from "../datum-editor.svelte";
|
import DatumEditor from "../datum-editor.svelte";
|
||||||
import { BLUR_DEBOUNCE_MS } from "../datum-editor-common.svelte";
|
import { BLUR_DEBOUNCE_MS } from "../datum-editor-common.svelte";
|
||||||
import { type Row, type FieldInfo } from "../field.svelte";
|
import { type Row, type FieldInfo } from "../field.svelte";
|
||||||
import { get_empty_datum_for } from "../presentation.svelte";
|
import { get_empty_datum_for } from "../presentation.svelte";
|
||||||
|
import { UndoStack, type Undoable } from "./undo-stack.svelte";
|
||||||
import {
|
import {
|
||||||
type Coords,
|
type Coords,
|
||||||
coords_eq,
|
coords_eq,
|
||||||
|
|
@ -22,7 +21,7 @@ commit queue, datum editor, and field headers.
|
||||||
import FieldAdder from "./field-adder.svelte";
|
import FieldAdder from "./field-adder.svelte";
|
||||||
import FieldHeader from "./field-header.svelte";
|
import FieldHeader from "./field-header.svelte";
|
||||||
import TableCell from "./table-cell.svelte";
|
import TableCell from "./table-cell.svelte";
|
||||||
import { invert_diff, type Undoable, UndoStack } from "./undo-stack.svelte";
|
import { AsyncTaskQueue, withRetry } from "attq";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
columns: {
|
columns: {
|
||||||
|
|
@ -43,9 +42,14 @@ commit queue, datum editor, and field headers.
|
||||||
total_count,
|
total_count,
|
||||||
}: Props = $props();
|
}: Props = $props();
|
||||||
|
|
||||||
|
type Selection = {
|
||||||
|
coords: Coords;
|
||||||
|
original_value: Datum;
|
||||||
|
};
|
||||||
|
|
||||||
type Delta = Undoable<Coords, Datum>;
|
type Delta = Undoable<Coords, Datum>;
|
||||||
|
|
||||||
let selections = $state<Delta[]>([]);
|
let selections = $state<Selection[]>([]);
|
||||||
let editor_value = $state<Datum | undefined>(undefined);
|
let editor_value = $state<Datum | undefined>(undefined);
|
||||||
let datum_editor = $state<DatumEditor | undefined>();
|
let datum_editor = $state<DatumEditor | undefined>();
|
||||||
let focus_cursor = $state<(() => unknown) | undefined>();
|
let focus_cursor = $state<(() => unknown) | undefined>();
|
||||||
|
|
@ -91,7 +95,15 @@ commit queue, datum editor, and field headers.
|
||||||
// the UI and as replayed to the server side.
|
// the UI and as replayed to the server side.
|
||||||
const undo_stack = new UndoStack<Delta>({
|
const undo_stack = new UndoStack<Delta>({
|
||||||
apply_diff: (diff) => {
|
apply_diff: (diff) => {
|
||||||
set_cell_values(diff);
|
for (const {
|
||||||
|
loc: { region, row_idx, field_idx },
|
||||||
|
value_updated,
|
||||||
|
} of diff) {
|
||||||
|
// TODO: Does reactivity work with a ternary on the lhs?
|
||||||
|
(region === "main" ? rows_main : rows_inserter)[row_idx].data[
|
||||||
|
field_idx
|
||||||
|
] = value_updated;
|
||||||
|
}
|
||||||
upload_queue.push(diff);
|
upload_queue.push(diff);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
@ -190,16 +202,6 @@ commit queue, datum editor, and field headers.
|
||||||
|
|
||||||
// -------- Updates and Effects -------- //
|
// -------- Updates and Effects -------- //
|
||||||
|
|
||||||
function set_cell_values(diff: Delta[]) {
|
|
||||||
for (const {
|
|
||||||
loc: { region, row_idx, field_idx },
|
|
||||||
value_updated,
|
|
||||||
} of diff) {
|
|
||||||
(region === "main" ? rows_main : rows_inserter)[row_idx].data[field_idx] =
|
|
||||||
value_updated;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function set_selections(arr: Coords[]) {
|
function set_selections(arr: Coords[]) {
|
||||||
selections = arr.map((coords) => {
|
selections = arr.map((coords) => {
|
||||||
let cell_data: Datum | undefined;
|
let cell_data: Datum | undefined;
|
||||||
|
|
@ -210,11 +212,7 @@ commit queue, datum editor, and field headers.
|
||||||
} else {
|
} else {
|
||||||
throw new Error(`invalid region: ${coords.region}`);
|
throw new Error(`invalid region: ${coords.region}`);
|
||||||
}
|
}
|
||||||
return {
|
return { coords, original_value: cell_data! };
|
||||||
loc: coords,
|
|
||||||
value_initial: cell_data!,
|
|
||||||
value_updated: cell_data!,
|
|
||||||
};
|
|
||||||
});
|
});
|
||||||
if (arr.length === 1) {
|
if (arr.length === 1) {
|
||||||
const [coords] = arr;
|
const [coords] = arr;
|
||||||
|
|
@ -243,22 +241,23 @@ commit queue, datum editor, and field headers.
|
||||||
if (
|
if (
|
||||||
additive &&
|
additive &&
|
||||||
first_selection !== undefined &&
|
first_selection !== undefined &&
|
||||||
!coords_eq(new_cursor, first_selection.loc)
|
!coords_eq(new_cursor, first_selection.coords)
|
||||||
) {
|
) {
|
||||||
// By convention, we keep the first selected cell at the end of the
|
// By convention, we keep the first selected cell at the end of the
|
||||||
// selections array, and the current cursor at the beginning. Everything
|
// selections array, and the current cursor at the beginning. Everything
|
||||||
// in the bounded box should be populated in between.
|
// in the bounded box should be populated in between.
|
||||||
set_selections([
|
set_selections([
|
||||||
new_cursor,
|
new_cursor,
|
||||||
...get_box(first_selection.loc, new_cursor, {
|
...get_box(first_selection.coords, new_cursor, {
|
||||||
n_fields: fields.length,
|
n_fields: fields.length,
|
||||||
n_rows_main: rows_main.length,
|
n_rows_main: rows_main.length,
|
||||||
n_rows_inserter: rows_inserter.length,
|
n_rows_inserter: rows_inserter.length,
|
||||||
}).filter(
|
}).filter(
|
||||||
(sel) =>
|
(sel) =>
|
||||||
!coords_eq(sel, new_cursor) && !coords_eq(sel, first_selection.loc),
|
!coords_eq(sel, new_cursor) &&
|
||||||
|
!coords_eq(sel, first_selection.coords),
|
||||||
),
|
),
|
||||||
first_selection.loc,
|
first_selection.coords,
|
||||||
]);
|
]);
|
||||||
} else {
|
} else {
|
||||||
set_selections([new_cursor]);
|
set_selections([new_cursor]);
|
||||||
|
|
@ -266,16 +265,28 @@ commit queue, datum editor, and field headers.
|
||||||
}
|
}
|
||||||
|
|
||||||
function try_sync_edit_to_cells() {
|
function try_sync_edit_to_cells() {
|
||||||
|
if (selections.length === 0) {
|
||||||
|
console.warn("preconditions for try_sync_edit_to_cells() not met");
|
||||||
|
return;
|
||||||
|
}
|
||||||
// Copy value locally so that it can be used intuitively in closures.
|
// Copy value locally so that it can be used intuitively in closures.
|
||||||
const editor_value_scoped = editor_value;
|
const editor_value_scoped = editor_value;
|
||||||
if (editor_value_scoped !== undefined) {
|
if (editor_value_scoped === undefined) {
|
||||||
// Editor state represents a valid cell value.
|
return;
|
||||||
selections = selections.map((sel) => ({
|
}
|
||||||
...sel,
|
for (const sel of selections) {
|
||||||
value_updated: editor_value_scoped,
|
// TODO: Refactor into `set_cell_values` function or similar to avoid
|
||||||
}));
|
// duplicating work with `apply_diffs()` callback of `undo_stack`.
|
||||||
|
if (sel.coords.region === "main") {
|
||||||
|
rows_main[sel.coords.row_idx].data[sel.coords.field_idx] =
|
||||||
|
editor_value_scoped;
|
||||||
|
} else if (sel.coords.region === "inserter") {
|
||||||
|
rows_inserter[sel.coords.row_idx].data[sel.coords.field_idx] =
|
||||||
|
editor_value_scoped;
|
||||||
|
} else {
|
||||||
|
throw new Error("Unknown region");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
set_cell_values(selections);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function try_queue_delta() {
|
function try_queue_delta() {
|
||||||
|
|
@ -290,9 +301,9 @@ commit queue, datum editor, and field headers.
|
||||||
} else {
|
} else {
|
||||||
if (selections.length > 0) {
|
if (selections.length > 0) {
|
||||||
undo_stack.push(
|
undo_stack.push(
|
||||||
selections.map(({ loc, value_initial }) => ({
|
selections.map(({ coords, original_value }) => ({
|
||||||
loc,
|
loc: coords,
|
||||||
value_initial,
|
value_initial: original_value,
|
||||||
value_updated: editor_value_scoped,
|
value_updated: editor_value_scoped,
|
||||||
})),
|
})),
|
||||||
);
|
);
|
||||||
|
|
@ -305,9 +316,17 @@ commit queue, datum editor, and field headers.
|
||||||
}
|
}
|
||||||
|
|
||||||
function cancel_edit() {
|
function cancel_edit() {
|
||||||
set_cell_values(invert_diff(selections));
|
selections.forEach(({ coords, original_value }) => {
|
||||||
|
if (coords.region === "main") {
|
||||||
|
rows_main[coords.row_idx].data[coords.field_idx] = original_value;
|
||||||
|
} else if (coords.region === "inserter") {
|
||||||
|
rows_inserter[coords.row_idx].data[coords.field_idx] = original_value;
|
||||||
|
} else {
|
||||||
|
throw new Error("Unknown region");
|
||||||
|
}
|
||||||
|
});
|
||||||
// Reset editor input value
|
// Reset editor input value
|
||||||
set_selections(selections.map(({ loc }) => loc));
|
set_selections(selections.map(({ coords }) => coords));
|
||||||
}
|
}
|
||||||
|
|
||||||
// -------- Event Handlers -------- //
|
// -------- Event Handlers -------- //
|
||||||
|
|
@ -321,11 +340,16 @@ commit queue, datum editor, and field headers.
|
||||||
const row_offset =
|
const row_offset =
|
||||||
arrow_direction === "Down" ? 1 : arrow_direction === "Up" ? -1 : 0;
|
arrow_direction === "Down" ? 1 : arrow_direction === "Up" ? -1 : 0;
|
||||||
const cursor = selections[0];
|
const cursor = selections[0];
|
||||||
const new_cursor = offset_coords(cursor.loc, row_offset, field_offset, {
|
const new_cursor = offset_coords(
|
||||||
|
cursor.coords,
|
||||||
|
row_offset,
|
||||||
|
field_offset,
|
||||||
|
{
|
||||||
n_fields: fields.length,
|
n_fields: fields.length,
|
||||||
n_rows_main: rows_main.length,
|
n_rows_main: rows_main.length,
|
||||||
n_rows_inserter: rows_inserter.length,
|
n_rows_inserter: rows_inserter.length,
|
||||||
});
|
},
|
||||||
|
);
|
||||||
if (ev.shiftKey) {
|
if (ev.shiftKey) {
|
||||||
move_cursor(new_cursor, { additive: true });
|
move_cursor(new_cursor, { additive: true });
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -346,19 +370,19 @@ commit queue, datum editor, and field headers.
|
||||||
const sel = selections[0];
|
const sel = selections[0];
|
||||||
if (sel) {
|
if (sel) {
|
||||||
editor_value = get_empty_datum_for(
|
editor_value = get_empty_datum_for(
|
||||||
fields[sel.loc.field_idx].field.presentation,
|
fields[sel.coords.field_idx].field.presentation,
|
||||||
);
|
);
|
||||||
datum_editor?.focus();
|
datum_editor?.focus();
|
||||||
try_sync_edit_to_cells();
|
try_sync_edit_to_cells();
|
||||||
}
|
}
|
||||||
} else if (ev.key === "Enter") {
|
} else if (ev.key === "Enter") {
|
||||||
if (ev.shiftKey) {
|
if (ev.shiftKey) {
|
||||||
if (selections[0]?.loc.region === "main") {
|
if (selections[0]?.coords.region === "main") {
|
||||||
set_selections([
|
set_selections([
|
||||||
{
|
{
|
||||||
region: "inserter",
|
region: "inserter",
|
||||||
row_idx: 0,
|
row_idx: 0,
|
||||||
field_idx: selections[0]?.loc.field_idx ?? 0,
|
field_idx: selections[0]?.coords.field_idx ?? 0,
|
||||||
},
|
},
|
||||||
]);
|
]);
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -406,17 +430,17 @@ commit queue, datum editor, and field headers.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const top_left: Coords = {
|
const top_left: Coords = {
|
||||||
region: selections.some(({ loc: { region } }) => region === "main")
|
region: selections.some(({ coords: { region } }) => region === "main")
|
||||||
? "main"
|
? "main"
|
||||||
: "inserter",
|
: "inserter",
|
||||||
field_idx: Math.min(
|
field_idx: Math.min(
|
||||||
...selections.map(({ loc: { field_idx } }) => field_idx),
|
...selections.map(({ coords: { field_idx } }) => field_idx),
|
||||||
),
|
),
|
||||||
row_idx: Math.min(
|
row_idx: Math.min(
|
||||||
...(selections.some(({ loc: { region } }) => region === "main")
|
...(selections.some(({ coords: { region } }) => region === "main")
|
||||||
? selections.filter(({ loc: { region } }) => region === "main")
|
? selections.filter(({ coords: { region } }) => region === "main")
|
||||||
: selections
|
: selections
|
||||||
).map(({ loc: { row_idx } }) => row_idx),
|
).map(({ coords: { row_idx } }) => row_idx),
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
const fields_to_right = fields.slice(top_left.field_idx);
|
const fields_to_right = fields.slice(top_left.field_idx);
|
||||||
|
|
@ -466,6 +490,22 @@ commit queue, datum editor, and field headers.
|
||||||
}),
|
}),
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
|
parsed_tsv.forEach((row, i) =>
|
||||||
|
row.map((value, j) => {
|
||||||
|
const coords = offset_coords(top_left, i, j, {
|
||||||
|
n_fields: fields.length,
|
||||||
|
n_rows_main: rows_main.length,
|
||||||
|
n_rows_inserter: rows_inserter.length,
|
||||||
|
});
|
||||||
|
if (coords.region === "main") {
|
||||||
|
rows_main[coords.row_idx].data[coords.field_idx] = value;
|
||||||
|
} else if (coords.region === "inserter") {
|
||||||
|
rows_inserter[coords.row_idx].data[coords.field_idx] = value;
|
||||||
|
} else {
|
||||||
|
throw new Error("Unknown region");
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
// TODO: pasting into multiple selections
|
// TODO: pasting into multiple selections
|
||||||
}
|
}
|
||||||
|
|
@ -497,7 +537,7 @@ commit queue, datum editor, and field headers.
|
||||||
<TableCell
|
<TableCell
|
||||||
coords={{ region, row_idx, field_idx }}
|
coords={{ region, row_idx, field_idx }}
|
||||||
cursor={selections.length !== 0 &&
|
cursor={selections.length !== 0 &&
|
||||||
coords_eq(selections[0].loc, {
|
coords_eq(selections[0].coords, {
|
||||||
region,
|
region,
|
||||||
row_idx,
|
row_idx,
|
||||||
field_idx,
|
field_idx,
|
||||||
|
|
@ -512,8 +552,8 @@ commit queue, datum editor, and field headers.
|
||||||
onpaste={handle_cell_paste}
|
onpaste={handle_cell_paste}
|
||||||
selected={selections.some(
|
selected={selections.some(
|
||||||
(sel) =>
|
(sel) =>
|
||||||
sel.loc.region === region &&
|
sel.coords.region === region &&
|
||||||
coords_eq(sel.loc, { region, row_idx, field_idx }),
|
coords_eq(sel.coords, { region, row_idx, field_idx }),
|
||||||
)}
|
)}
|
||||||
table_region={region}
|
table_region={region}
|
||||||
value={row.data[field_idx]}
|
value={row.data[field_idx]}
|
||||||
|
|
@ -573,8 +613,8 @@ commit queue, datum editor, and field headers.
|
||||||
set_selections([
|
set_selections([
|
||||||
coords,
|
coords,
|
||||||
...selections
|
...selections
|
||||||
.filter(({ loc }) => !coords_eq(loc, coords))
|
.filter((sel) => !coords_eq(sel.coords, coords))
|
||||||
.map(({ loc }) => loc),
|
.map((sel) => sel.coords),
|
||||||
]);
|
]);
|
||||||
} else if (ev.shiftKey) {
|
} else if (ev.shiftKey) {
|
||||||
move_cursor(coords, { additive: true });
|
move_cursor(coords, { additive: true });
|
||||||
|
|
@ -610,8 +650,8 @@ commit queue, datum editor, and field headers.
|
||||||
set_selections([
|
set_selections([
|
||||||
coords,
|
coords,
|
||||||
...selections
|
...selections
|
||||||
.filter(({ loc }) => !coords_eq(loc, coords))
|
.filter((sel) => !coords_eq(sel.coords, coords))
|
||||||
.map(({ loc }) => loc),
|
.map((sel) => sel.coords),
|
||||||
]);
|
]);
|
||||||
} else if (ev.shiftKey) {
|
} else if (ev.shiftKey) {
|
||||||
move_cursor(coords, { additive: true });
|
move_cursor(coords, { additive: true });
|
||||||
|
|
@ -648,15 +688,17 @@ commit queue, datum editor, and field headers.
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
<div class="datum-editor">
|
<div class="datum-editor">
|
||||||
{#if selections.length !== 0 && selections.every(({ loc: { field_idx } }) => field_idx === selections[0]?.loc.field_idx)}
|
{#if selections.length !== 0 && selections.every(({ coords: { field_idx } }) => field_idx === selections[0]?.coords.field_idx)}
|
||||||
<DatumEditor
|
<DatumEditor
|
||||||
bind:this={datum_editor}
|
bind:this={datum_editor}
|
||||||
bind:value={editor_value}
|
bind:value={editor_value}
|
||||||
current_presentation={fields[selections[0].loc.field_idx].field
|
current_presentation={fields[selections[0].coords.field_idx].field
|
||||||
.presentation}
|
.presentation}
|
||||||
on_blur={try_queue_delta}
|
on_blur={() => try_queue_delta()}
|
||||||
on_cancel_edit={cancel_edit}
|
on_cancel_edit={cancel_edit}
|
||||||
on_change={try_sync_edit_to_cells}
|
on_change={() => {
|
||||||
|
try_sync_edit_to_cells();
|
||||||
|
}}
|
||||||
on_restore_focus={handle_restore_focus}
|
on_restore_focus={handle_restore_focus}
|
||||||
/>
|
/>
|
||||||
{/if}
|
{/if}
|
||||||
|
|
|
||||||
|
|
@ -32,7 +32,7 @@ component.
|
||||||
subfilter?: string;
|
subfilter?: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
let { columns = [], subfilter = "" }: Props = $props();
|
let { columns = [], subfilter = "null" }: Props = $props();
|
||||||
|
|
||||||
type LazyData = {
|
type LazyData = {
|
||||||
count: number;
|
count: number;
|
||||||
|
|
@ -71,7 +71,7 @@ component.
|
||||||
{columns}
|
{columns}
|
||||||
fields={lazy_data.fields}
|
fields={lazy_data.fields}
|
||||||
rows_main={lazy_data.rows}
|
rows_main={lazy_data.rows}
|
||||||
subfilter_active={!!subfilter}
|
subfilter_active={!!subfilter && subfilter !== "null"}
|
||||||
total_count={lazy_data.count}
|
total_count={lazy_data.count}
|
||||||
/>
|
/>
|
||||||
{/if}
|
{/if}
|
||||||
|
|
|
||||||
|
|
@ -55,7 +55,7 @@ export class UndoStack<T extends Undoable<unknown, unknown>> {
|
||||||
|
|
||||||
// Call `_apply_diff()` after shifting cursor, in case it recursively
|
// Call `_apply_diff()` after shifting cursor, in case it recursively
|
||||||
// mutates this UndoStack.
|
// mutates this UndoStack.
|
||||||
this._apply_diff(invert_diff(this._diffs[this._cursor + 1]));
|
this._apply_diff(this._diffs[this._cursor + 1].map(invert));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -73,12 +73,10 @@ export class UndoStack<T extends Undoable<unknown, unknown>> {
|
||||||
/**
|
/**
|
||||||
* Returns a copy of the parameter with initial and updated values swapped.
|
* Returns a copy of the parameter with initial and updated values swapped.
|
||||||
*/
|
*/
|
||||||
export function invert_diff<T extends Undoable<unknown, unknown>>(
|
function invert<T extends Undoable<unknown, unknown>>(undoable: T): T {
|
||||||
diff: T[],
|
return {
|
||||||
): T[] {
|
|
||||||
return diff.map((undoable) => ({
|
|
||||||
loc: undoable.loc,
|
loc: undoable.loc,
|
||||||
value_initial: undoable.value_updated,
|
value_initial: undoable.value_updated,
|
||||||
value_updated: undoable.value_initial,
|
value_updated: undoable.value_initial,
|
||||||
} as T));
|
} as T;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue