fix(saas): add ::bigint cast to all SUM() aggregates for PG NUMERIC compat
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

PostgreSQL SUM() on bigint returns NUMERIC, causing sqlx decode errors
when Rust expects i64/Option<i64>. Root cause: key_pool.rs
select_best_key() token_count SUM was missing ::bigint, causing
DATABASE_ERROR on every relay request.

Fixed in 4 files:
- relay/key_pool.rs: SUM(token_count) — root cause of relay failure
- relay/service.rs: SUM(remaining_rpm) in sort_candidates_by_quota
- account/handlers.rs: SUM(input/output_tokens) in dashboard stats
- workers/aggregate_usage.rs: SUM(input/output_tokens) in aggregation
This commit is contained in:
iven
2026-04-09 22:16:27 +08:00
parent 0054b32c61
commit bd6cf8e05f
4 changed files with 6 additions and 6 deletions

View File

@@ -194,8 +194,8 @@ pub async fn dashboard_stats(
     let today_row: DashboardTodayRow = sqlx::query_as(
         "SELECT
             (SELECT COUNT(*) FROM relay_tasks WHERE created_at >= $1 AND created_at < $2) as tasks_today,
-            COALESCE((SELECT SUM(input_tokens) FROM usage_records WHERE created_at >= $1 AND created_at < $2), 0) as tokens_input,
-            COALESCE((SELECT SUM(output_tokens) FROM usage_records WHERE created_at >= $1 AND created_at < $2), 0) as tokens_output"
+            COALESCE((SELECT SUM(input_tokens) FROM usage_records WHERE created_at >= $1 AND created_at < $2), 0)::bigint as tokens_input,
+            COALESCE((SELECT SUM(output_tokens) FROM usage_records WHERE created_at >= $1 AND created_at < $2), 0)::bigint as tokens_output"
     ).bind(&today_start).bind(&tomorrow_start).fetch_one(&state.db).await?;
     Ok(Json(serde_json::json!({

View File

@@ -83,7 +83,7 @@ pub async fn select_best_key(db: &PgPool, provider_id: &str, enc_key: &[u8; 32])
     sqlx::query_as(
         "SELECT pk.id, pk.key_value, pk.priority, pk.max_rpm, pk.max_tpm,
                 COALESCE(SUM(uw.request_count), 0)::bigint,
-                COALESCE(SUM(uw.token_count), 0)
+                COALESCE(SUM(uw.token_count), 0)::bigint
          FROM provider_keys pk
          LEFT JOIN key_usage_window uw ON pk.id = uw.key_id
              AND uw.window_minute >= to_char(NOW() - INTERVAL '1 minute', 'YYYY-MM-DDTHH24:MI')

View File

@@ -648,7 +648,7 @@ pub async fn sort_candidates_by_quota(
     let quota_rows: Vec<(String, i64)> = match sqlx::query_as(
         r#"
         SELECT pk.provider_id,
-               SUM(COALESCE(pk.max_rpm, 999999) - COALESCE(uw.request_count, 0)) AS remaining_rpm
+               SUM(COALESCE(pk.max_rpm, 999999) - COALESCE(uw.request_count, 0))::bigint AS remaining_rpm
         FROM provider_keys pk
         LEFT JOIN key_usage_window uw ON pk.id = uw.key_id
             AND uw.window_minute >= to_char(NOW() - INTERVAL '1 minute', 'YYYY-MM-DDTHH24:MI')

View File

@@ -56,8 +56,8 @@ async fn aggregate_single_account(db: &PgPool, account_id: &str) -> SaasResult<(
         .with_nanosecond(0).unwrap_or(now);
     let aggregated: Option<(i64, i64, i64)> = sqlx::query_as(
-        "SELECT COALESCE(SUM(input_tokens), 0), \
-         COALESCE(SUM(output_tokens), 0), \
+        "SELECT COALESCE(SUM(input_tokens), 0)::bigint, \
+         COALESCE(SUM(output_tokens), 0)::bigint, \
          COUNT(*) \
          FROM usage_records \
          WHERE account_id = $1 AND created_at >= $2 AND status = 'success'"