//! HTTP GET route handler returning portal data for a single relation.
use std::collections::HashMap;
use axum::{
Json,
extract::{Path, State},
response::{IntoResponse as _, Response},
};
use phono_backends::{
escape_identifier, pg_acl::PgPrivilegeType, pg_attribute::PgAttribute, pg_class::PgClass,
};
use phono_models::{
accessors::{Accessor, Actor, portal::PortalAccessor},
datum::Datum,
2026-01-13 18:10:44 +00:00
expression::QueryFragment,
field::Field,
};
use serde::{Deserialize, Serialize};
use sqlx::{
postgres::{PgRow, types::Oid},
query,
};
use uuid::Uuid;
use crate::{
app::AppDbConn,
errors::AppError,
2025-10-01 22:36:19 -07:00
field_info::TableFieldInfo,
user::CurrentUser,
workspace_pooler::{RoleAssignment, WorkspacePooler},
};
/// Path parameters extracted from the request URL for [`get`].
///
/// Field names double as the serde deserialization keys, so they must match
/// the route's path-segment names exactly.
#[derive(Clone, Debug, Deserialize)]
pub(super) struct PathParams {
    // ID of the portal whose data is being fetched.
    portal_id: Uuid,
    // Postgres OID of the relation (table/view) backing the portal.
    rel_oid: u32,
    // ID of the workspace that owns the portal.
    workspace_id: Uuid,
}
/// Maximum number of rows fetched from the workspace database and returned to
/// the frontend in a single request.
const FRONTEND_ROW_LIMIT: i64 = 1000;
/// HTTP GET handler for an API endpoint returning a JSON encoding of portal
/// data to display in a table or similar form.
///
/// Only queries up to the first [`FRONTEND_ROW_LIMIT`] rows.
pub(super) async fn get(
State(mut workspace_pooler): State<WorkspacePooler>,
AppDbConn(mut app_db): AppDbConn,
CurrentUser(user): CurrentUser,
Path(PathParams {
portal_id,
rel_oid,
workspace_id,
}): Path<PathParams>,
) -> Result<Response, AppError> {
let mut workspace_client = workspace_pooler
.acquire_for(workspace_id, RoleAssignment::User(user.id))
.await?;
let rel = PgClass::with_oid(Oid(rel_oid))
.fetch_one(&mut workspace_client)
.await?;
let portal = PortalAccessor::default()
.id(portal_id)
.as_actor(Actor::User(user.id))
.verify_workspace_id(workspace_id)
.verify_rel_oid(Oid(rel_oid))
.verify_rel_permissions([PgPrivilegeType::Select])
.using_rel(&rel)
.using_workspace_client(&mut workspace_client)
.using_app_db(&mut app_db)
.fetch_one()
.await?;
let attrs = PgAttribute::all_for_rel(portal.class_oid)
.fetch_all(&mut workspace_client)
.await?;
let pkey_attrs = PgAttribute::pkeys_for_rel(portal.class_oid)
.fetch_all(&mut workspace_client)
.await?;
2025-10-01 22:36:19 -07:00
let fields: Vec<TableFieldInfo> = {
let fields: Vec<Field> = Field::belonging_to_portal(portal.id)
.fetch_all(&mut app_db)
.await?;
2025-10-01 22:36:19 -07:00
let mut field_info: Vec<TableFieldInfo> = Vec::with_capacity(fields.len());
for field in fields {
if let Some(attr) = attrs.iter().find(|attr| attr.attname == field.name) {
2025-10-01 22:36:19 -07:00
field_info.push(TableFieldInfo {
field,
2025-10-01 22:36:19 -07:00
column_present: true,
has_default: attr.atthasdef,
not_null: attr.attnotnull.unwrap_or_default(),
});
}
}
field_info
};
2026-01-13 18:10:44 +00:00
let sql_fragment = {
// Defensive programming: Make `sql_fragment` immutable once built.
let mut sql_fragment = QueryFragment::from_sql(&format!(
"select {0} from {1}",
pkey_attrs
.iter()
.chain(attrs.iter())
.map(|attr| escape_identifier(&attr.attname))
.collect::<Vec<_>>()
.join(", "),
rel.get_identifier(),
));
if let Some(filter_expr) = portal.table_filter.0 {
sql_fragment.push(QueryFragment::from_sql(" where "));
sql_fragment.push(filter_expr.into_query_fragment());
}
2026-01-13 18:10:44 +00:00
sql_fragment.push(QueryFragment::from_sql(" order by _id limit "));
sql_fragment.push(QueryFragment::from_param(Datum::Numeric(Some(
FRONTEND_ROW_LIMIT.into(),
))));
sql_fragment
};
2026-01-13 18:10:44 +00:00
let sql_raw = sql_fragment.to_sql(1);
let mut q = query(&sql_raw);
for param in sql_fragment.to_params() {
q = param.bind_onto(q);
}
q = q.bind(FRONTEND_ROW_LIMIT);
let rows: Vec<PgRow> = q.fetch_all(workspace_client.get_conn()).await?;
#[derive(Serialize)]
struct DataRow {
pkey: String,
data: Vec<Datum>,
}
let mut data_rows: Vec<DataRow> = vec![];
let mut pkeys: Vec<String> = vec![];
for row in rows.iter() {
let mut pkey_values: HashMap<String, Datum> = HashMap::new();
for attr in pkey_attrs.clone() {
let field = Field::default_from_attr(&attr)
.ok_or(anyhow::anyhow!("unsupported primary key column type"))?;
pkey_values.insert(field.name.clone(), field.get_datum(row)?);
}
let pkey = serde_json::to_string(&pkey_values)?;
pkeys.push(pkey.clone());
let mut row_data: Vec<Datum> = vec![];
for field in fields.iter() {
row_data.push(field.field.get_datum(row)?);
}
data_rows.push(DataRow {
pkey,
data: row_data,
});
}
#[derive(Serialize)]
struct ResponseBody {
rows: Vec<DataRow>,
2025-10-01 22:36:19 -07:00
fields: Vec<TableFieldInfo>,
pkeys: Vec<String>,
}
Ok(Json(ResponseBody {
rows: data_rows,
fields,
pkeys,
})
.into_response())
}