phonograph/interim-server/src/routes/relations_single/get_data_handler.rs
2025-10-09 08:01:01 +00:00

146 lines
4.6 KiB
Rust

use std::collections::HashMap;
use axum::{
Json,
extract::{Path, State},
response::{IntoResponse as _, Response},
};
use interim_models::{datum::Datum, field::Field, portal::Portal};
use interim_pgtypes::{escape_identifier, pg_attribute::PgAttribute, pg_class::PgClass};
use serde::{Deserialize, Serialize};
use sqlx::{postgres::PgRow, query};
use uuid::Uuid;
use crate::{
app::AppDbConn,
errors::AppError,
field_info::TableFieldInfo,
user::CurrentUser,
workspace_pooler::{RoleAssignment, WorkspacePooler},
};
/// Path parameters for the single-relation data endpoint
/// (`portal_id` identifies the [`Portal`] whose data is requested).
#[derive(Clone, Debug, Deserialize)]
pub(super) struct PathParams {
    // UUID of the portal to fetch data for.
    portal_id: Uuid,
}
/// Maximum number of rows fetched per request; appended to the generated
/// SQL as a bound `limit` parameter in [`get`].
const FRONTEND_ROW_LIMIT: i64 = 1000;
/// HTTP GET handler for an API endpoint returning a JSON encoding of portal
/// data to display in a table or similar form.
///
/// Only queries up to the first [`FRONTEND_ROW_LIMIT`] rows.
pub(super) async fn get(
    State(mut workspace_pooler): State<WorkspacePooler>,
    AppDbConn(mut app_db): AppDbConn,
    CurrentUser(current_user): CurrentUser,
    Path(PathParams { portal_id }): Path<PathParams>,
) -> Result<Response, AppError> {
    // FIXME auth
    let portal = Portal::with_id(portal_id).fetch_one(&mut app_db).await?;
    // Acquire a workspace connection running as the current user, so the
    // query below executes under that user's database role.
    let mut workspace_client = workspace_pooler
        .acquire_for(portal.workspace_id, RoleAssignment::User(current_user.id))
        .await?;
    // Catalog metadata for the backing relation: the relation itself, all of
    // its columns, and its primary-key columns.
    let rel = PgClass::with_oid(portal.class_oid)
        .fetch_one(&mut workspace_client)
        .await?;
    let attrs = PgAttribute::all_for_rel(portal.class_oid)
        .fetch_all(&mut workspace_client)
        .await?;
    let pkey_attrs = PgAttribute::pkeys_for_rel(portal.class_oid)
        .fetch_all(&mut workspace_client)
        .await?;
    // Keep only the portal's fields whose column still exists on the
    // relation, annotated with column-level metadata for the frontend.
    let fields: Vec<TableFieldInfo> = {
        let fields: Vec<Field> = Field::belonging_to_portal(portal.id)
            .fetch_all(&mut app_db)
            .await?;
        let mut field_info: Vec<TableFieldInfo> = Vec::with_capacity(fields.len());
        for field in fields {
            if let Some(attr) = attrs.iter().find(|attr| attr.attname == field.name) {
                field_info.push(TableFieldInfo {
                    field,
                    column_present: true,
                    has_default: attr.atthasdef,
                    not_null: attr.attnotnull.unwrap_or_default(),
                });
            }
        }
        field_info
    };
    // Select the primary-key columns first (used to build row identifiers),
    // then all columns. Identifiers are escaped here; values are bound as
    // query parameters below, never interpolated.
    let mut sql_raw = format!(
        "select {0} from {1}.{2}",
        pkey_attrs
            .iter()
            .chain(attrs.iter())
            .map(|attr| escape_identifier(&attr.attname))
            .collect::<Vec<_>>()
            .join(", "),
        escape_identifier(&rel.regnamespace),
        escape_identifier(&rel.relname),
    );
    let rows: Vec<PgRow> = if let Some(filter_expr) = portal.table_filter.0 {
        let filter_fragment = filter_expr.into_query_fragment();
        let filter_params = filter_fragment.to_params();
        // Filter placeholders start at $1; the row limit binds as the next
        // placeholder after all filter parameters.
        sql_raw = format!(
            "{sql_raw} where {0} limit ${1}",
            filter_fragment.to_sql(1),
            filter_params.len() + 1
        );
        let mut q = query(&sql_raw);
        for param in filter_params {
            q = param.bind_onto(q);
        }
        q = q.bind(FRONTEND_ROW_LIMIT);
        q.fetch_all(workspace_client.get_conn()).await?
    } else {
        sql_raw = format!("{sql_raw} limit $1");
        query(&sql_raw)
            .bind(FRONTEND_ROW_LIMIT)
            .fetch_all(workspace_client.get_conn())
            .await?
    };
    /// One row of portal data: a serialized primary-key identifier plus one
    /// datum per entry of `fields`, in the same order.
    #[derive(Serialize)]
    struct DataRow {
        pkey: String,
        data: Vec<Datum>,
    }
    let mut data_rows: Vec<DataRow> = Vec::with_capacity(rows.len());
    let mut pkeys: Vec<String> = Vec::with_capacity(rows.len());
    for row in &rows {
        // Build a JSON object mapping primary-key column name -> datum; its
        // string encoding serves as the row's identifier.
        let mut pkey_values: HashMap<String, Datum> = HashMap::new();
        // Iterate by reference: the original cloned the whole `pkey_attrs`
        // Vec on every row, which was pure overhead.
        for attr in &pkey_attrs {
            let field = Field::default_from_attr(attr)
                // `ok_or_else` so the error is only constructed on failure,
                // not eagerly on every primary-key column of every row.
                .ok_or_else(|| anyhow::anyhow!("unsupported primary key column type"))?;
            pkey_values.insert(field.name.clone(), field.get_datum(row)?);
        }
        let pkey = serde_json::to_string(&pkey_values)?;
        pkeys.push(pkey.clone());
        let mut row_data: Vec<Datum> = Vec::with_capacity(fields.len());
        for field in &fields {
            row_data.push(field.field.get_datum(row)?);
        }
        data_rows.push(DataRow {
            pkey,
            data: row_data,
        });
    }
    /// JSON response body: row data, field metadata, and the flat list of
    /// row identifiers (duplicating each `DataRow::pkey`).
    #[derive(Serialize)]
    struct ResponseBody {
        rows: Vec<DataRow>,
        fields: Vec<TableFieldInfo>,
        pkeys: Vec<String>,
    }
    Ok(Json(ResponseBody {
        rows: data_rows,
        fields,
        pkeys,
    })
    .into_response())
}