From 318b16a5ce456014f93435991041428e6c3c7ca7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Tue, 19 Aug 2025 01:04:42 +0200 Subject: [PATCH 001/104] chore: fix typo in status message --- apps/frontend/nuxt.config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/nuxt.config.ts b/apps/frontend/nuxt.config.ts index beaf5a9982..fb136f6676 100644 --- a/apps/frontend/nuxt.config.ts +++ b/apps/frontend/nuxt.config.ts @@ -154,7 +154,7 @@ export default defineNuxtConfig({ (state.errors ?? []).length === 0 ) { console.log( - 'Tags already recently generated. Delete apps/frontend/generated/state.json to force regeneration.', + 'Tags already recently generated. Delete apps/src/frontend/generated/state.json to force regeneration.', ) return } From a5e9797e61526d18e4d92cbeb98bd7634e60314a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Mon, 18 Aug 2025 23:25:24 +0200 Subject: [PATCH 002/104] feat(labrinth): overhaul malware scanner report storage and routes --- ...bc457a08e70dcde320c6852074819e41f8ad9.json | 24 ++ ...f9530c311eef084abb6fce35de5f37d79bcea.json | 34 ++ ...724e9a4d5b9765d52305f99f859f939c2e854.json | 63 ++++ ...3153f5e9796b55ae753ab57b14f37708b400d.json | 24 ++ ...d0a1658c6ddf7a486082cdb847fab06150328.json | 164 +++++++++ ...5d818fde0499d8e5a08e9e22bee42014877f3.json | 20 ++ .../20250810155316_delphi-reports.sql | 64 ++++ .../src/database/models/delphi_report_item.rs | 334 ++++++++++++++++++ apps/labrinth/src/database/models/ids.rs | 31 +- apps/labrinth/src/database/models/mod.rs | 1 + .../src/database/models/version_item.rs | 10 + apps/labrinth/src/routes/internal/admin.rs | 103 +----- apps/labrinth/src/routes/internal/delphi.rs | 265 ++++++++++++++ apps/labrinth/src/routes/internal/mod.rs | 4 +- apps/labrinth/src/routes/mod.rs | 4 + .../src/routes/v3/project_creation.rs | 4 - .../src/routes/v3/version_creation.rs | 44 +-- 17 files changed, 1032 insertions(+), 161 
deletions(-) create mode 100644 apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json create mode 100644 apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json create mode 100644 apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json create mode 100644 apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json create mode 100644 apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json create mode 100644 apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json create mode 100644 apps/labrinth/migrations/20250810155316_delphi-reports.sql create mode 100644 apps/labrinth/src/database/models/delphi_report_item.rs create mode 100644 apps/labrinth/src/routes/internal/delphi.rs diff --git a/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json b/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json new file mode 100644 index 0000000000..37dcad2943 --- /dev/null +++ b/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issue_java_classes (issue_id, internal_class_name, decompiled_source)\n VALUES ($1, $2, $3)\n ON CONFLICT (issue_id, internal_class_name) DO UPDATE SET decompiled_source = $3\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + "Text" + ] + }, + "nullable": [ + false + ] + }, + "hash": "0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9" +} diff --git a/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json 
b/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json new file mode 100644 index 0000000000..6f7b991949 --- /dev/null +++ b/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n version_id AS \"version_id: crate::database::models::DBVersionId\",\n versions.mod_id AS \"project_id: crate::database::models::DBProjectId\",\n files.url AS \"url\"\n FROM files INNER JOIN versions ON files.version_id = versions.id\n WHERE files.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "version_id: crate::database::models::DBVersionId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_id: crate::database::models::DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "url", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea" +} diff --git a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json new file mode 100644 index 0000000000..963ea430b4 --- /dev/null +++ b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json @@ -0,0 +1,63 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issues (report_id, issue_type, status)\n VALUES ($1, $2, $3)\n ON CONFLICT (report_id, issue_type) DO UPDATE SET status = $3\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + { + "Custom": { + "name": "delphi_report_issue_type", + "kind": { + "Enum": [ + "reflection_indirection", + "xor_obfuscation", + "included_libraries", + 
"suspicious_binaries", + "corrupt_classes", + "suspicious_classes", + "url_usage", + "classloader_usage", + "processbuilder_usage", + "runtime_exec_usage", + "jni_usage", + "main_method", + "native_loading", + "malformed_jar", + "nested_jar_too_deep", + "failed_decompilation", + "analysis_failure", + "malware_easyforme", + "malware_simplyloader", + "unknown" + ] + } + } + }, + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854" +} diff --git a/apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json b/apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json new file mode 100644 index 0000000000..de31a078f0 --- /dev/null +++ b/apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_reports (file_id, delphi_version, artifact_url)\n VALUES ($1, $2, $3)\n ON CONFLICT (file_id, delphi_version) DO UPDATE SET\n delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Varchar" + ] + }, + "nullable": [ + false + ] + }, + "hash": "8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d" +} diff --git a/apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json b/apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json new file mode 100644 index 0000000000..54969cea41 --- /dev/null +++ b/apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json @@ -0,0 +1,164 @@ +{ + "db_name": 
"PostgreSQL", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type AS \"issue_type: DelphiReportIssueType\",\n delphi_report_issues.status as \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created,\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC\n OFFSET $5\n LIMIT $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "report_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "issue_type: DelphiReportIssueType", + "type_info": { + "Custom": { + "name": "delphi_report_issue_type", + "kind": { + "Enum": [ + "reflection_indirection", + "xor_obfuscation", + "included_libraries", + "suspicious_binaries", + "corrupt_classes", + "suspicious_classes", + "url_usage", + "classloader_usage", + "processbuilder_usage", + "runtime_exec_usage", + "jni_usage", + "main_method", + "native_loading", + "malformed_jar", + "nested_jar_too_deep", + "failed_decompilation", + "analysis_failure", + "malware_easyforme", + "malware_simplyloader", + "unknown" + ] + } + } + } + }, + { 
+ "ordinal": 3, + "name": "status: DelphiReportIssueStatus", + "type_info": { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "file_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "delphi_version", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "artifact_url", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "classes: sqlx::types::Json>", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "project_id?", + "type_info": "Int8" + }, + { + "ordinal": 10, + "name": "project_published?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "delphi_report_issue_type", + "kind": { + "Enum": [ + "reflection_indirection", + "xor_obfuscation", + "included_libraries", + "suspicious_binaries", + "corrupt_classes", + "suspicious_classes", + "url_usage", + "classloader_usage", + "processbuilder_usage", + "runtime_exec_usage", + "jni_usage", + "main_method", + "native_loading", + "malformed_jar", + "nested_jar_too_deep", + "failed_decompilation", + "analysis_failure", + "malware_easyforme", + "malware_simplyloader", + "unknown" + ] + } + } + }, + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + }, + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + false, + false, + false, + null, + true, + true + ] + }, + "hash": "c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328" +} diff --git a/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json b/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json new file mode 100644 index 0000000000..38db606828 --- /dev/null +++ 
b/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT MAX(delphi_version) FROM delphi_reports", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3" +} diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql new file mode 100644 index 0000000000..4bc15e705b --- /dev/null +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -0,0 +1,64 @@ +CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'approved', 'rejected'); + +CREATE TYPE delphi_report_issue_type AS ENUM ( + 'reflection_indirection', + 'xor_obfuscation', + 'included_libraries', + 'suspicious_binaries', + 'corrupt_classes', + 'suspicious_classes', + 'url_usage', + 'classloader_usage', + 'processbuilder_usage', + 'runtime_exec_usage', + 'jni_usage', + 'main_method', + 'native_loading', + 'malformed_jar', + 'nested_jar_too_deep', + 'failed_decompilation', + 'analysis_failure', + 'malware_easyforme', + 'malware_simplyloader', + 'unknown' +); + +-- A Delphi analysis report for a project version +CREATE TABLE delphi_reports ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + file_id BIGINT REFERENCES files (id) + ON DELETE SET NULL + ON UPDATE CASCADE, + delphi_version INTEGER NOT NULL, + artifact_url VARCHAR(2048) NOT NULL, + created TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + UNIQUE (file_id, delphi_version) +); +CREATE INDEX delphi_version ON delphi_reports (delphi_version); + +-- An issue found in a Delphi report. 
Every issue belongs to a report, +-- and a report can have zero, one, or more issues attached to it +CREATE TABLE delphi_report_issues ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + report_id BIGINT NOT NULL REFERENCES delphi_reports (id) + ON DELETE CASCADE + ON UPDATE CASCADE, + issue_type DELPHI_REPORT_ISSUE_TYPE NOT NULL, + status DELPHI_REPORT_ISSUE_STATUS NOT NULL, + UNIQUE (report_id, issue_type) +); +CREATE INDEX delphi_report_issue_by_status_and_type ON delphi_report_issues (status, issue_type); + +-- A Java class affected by a Delphi report issue. Every affected +-- Java class belongs to a specific issue, and an issue can have zero, +-- one, or more affected classes. (Some issues may be artifact-wide, +-- or otherwise not really specific to any particular class.) +CREATE TABLE delphi_report_issue_java_classes ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + issue_id BIGINT NOT NULL REFERENCES delphi_report_issues (id) + ON DELETE CASCADE + ON UPDATE CASCADE, + internal_class_name TEXT NOT NULL, + decompiled_source TEXT NOT NULL, + UNIQUE (issue_id, internal_class_name) +); diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs new file mode 100644 index 0000000000..8d83bc7dd2 --- /dev/null +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -0,0 +1,334 @@ +use std::{ + fmt::{self, Display, Formatter}, + ops::Deref, +}; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use crate::database::models::{ + DBFileId, DBProjectId, DatabaseError, DelphiReportId, DelphiReportIssueId, + DelphiReportIssueJavaClassId, +}; + +/// A Delphi malware analysis report for a project version file. +/// +/// Malware analysis reports usually belong to a specific project file, +/// but they can get orphaned if the versions they belong to are deleted. +/// Thus, deleting versions does not delete these reports. 
+#[derive(Serialize)] +pub struct DBDelphiReport { + pub id: DelphiReportId, + pub file_id: Option, + /// A sequential, monotonically increasing version number for the + /// Delphi version that generated this report. + pub delphi_version: i32, + pub artifact_url: String, + pub created: DateTime, +} + +impl DBDelphiReport { + pub async fn upsert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportId(sqlx::query_scalar!( + " + INSERT INTO delphi_reports (file_id, delphi_version, artifact_url) + VALUES ($1, $2, $3) + ON CONFLICT (file_id, delphi_version) DO UPDATE SET + delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP + RETURNING id + ", + self.file_id as Option, + self.delphi_version, + self.artifact_url, + ) + .fetch_one(&mut **transaction) + .await?)) + } +} + +/// An issue found in a Delphi report. Every issue belongs to a report, +/// and a report can have zero, one, or more issues attached to it. +#[derive(Deserialize, Serialize)] +pub struct DBDelphiReportIssue { + pub id: DelphiReportIssueId, + pub report_id: DelphiReportId, + pub issue_type: DelphiReportIssueType, + pub status: DelphiReportIssueStatus, +} + +/// An status a Delphi report issue can have. +#[derive( + Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(rename_all = "snake_case")] +#[sqlx(type_name = "delphi_report_issue_status")] +#[sqlx(rename_all = "snake_case")] +pub enum DelphiReportIssueStatus { + /// The issue is pending review by the moderation team. + Pending, + /// The issue has been approved (i.e., reviewed as a valid, true positive). + /// The affected artifact has thus been verified to be potentially malicious. + Approved, + /// The issue has been rejected (i.e., reviewed as a false positive). + /// The affected artifact has thus been verified to be clean, other issues + /// with it notwithstanding. 
+ Rejected, +} + +impl Display for DelphiReportIssueStatus { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + self.serialize(f) + } +} + +/// An order in which Delphi report issues can be sorted during queries. +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[serde(rename_all = "snake_case")] +pub enum DelphiReportListOrder { + CreatedAsc, + CreatedDesc, + PendingStatusFirst, +} + +impl Display for DelphiReportListOrder { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + self.serialize(f) + } +} + +/// A result returned from a Delphi report issue query, slightly +/// denormalized with related entity information for ease of +/// consumption by clients. +#[derive(Serialize)] +pub struct DelphiReportIssueResult { + pub issue: DBDelphiReportIssue, + pub report: DBDelphiReport, + pub java_classes: Vec, + pub project_id: Option, + pub project_published: Option>, +} + +impl DBDelphiReportIssue { + pub async fn upsert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportIssueId( + sqlx::query_scalar!( + " + INSERT INTO delphi_report_issues (report_id, issue_type, status) + VALUES ($1, $2, $3) + ON CONFLICT (report_id, issue_type) DO UPDATE SET status = $3 + RETURNING id + ", + self.report_id as DelphiReportId, + self.issue_type as DelphiReportIssueType, + self.status as DelphiReportIssueStatus, + ) + .fetch_one(&mut **transaction) + .await?, + )) + } + + pub async fn find_all_by( + ty: Option, + status: Option, + order_by: Option, + count: Option, + offset: Option, + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + ) -> Result, DatabaseError> { + Ok(sqlx::query!( + r#" + SELECT + delphi_report_issues.id AS "id", report_id, + issue_type AS "issue_type: DelphiReportIssueType", + delphi_report_issues.status as "status: DelphiReportIssueStatus", + + file_id, delphi_version, artifact_url, created, + json_array(SELECT to_jsonb(delphi_report_issue_java_classes) + FROM 
delphi_report_issue_java_classes + WHERE issue_id = delphi_report_issues.id + ) AS "classes: sqlx::types::Json>", + versions.mod_id AS "project_id?", mods.published AS "project_published?" + FROM delphi_report_issues + INNER JOIN delphi_reports ON delphi_reports.id = report_id + LEFT OUTER JOIN files ON files.id = file_id + LEFT OUTER JOIN versions ON versions.id = files.version_id + LEFT OUTER JOIN mods ON mods.id = versions.mod_id + WHERE + (issue_type = $1 OR $1 IS NULL) + AND (delphi_report_issues.status = $2 OR $2 IS NULL) + ORDER BY + CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC, + CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC, + CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC + OFFSET $5 + LIMIT $4 + "#, + ty as Option, + status as Option, + order_by.map(|order_by| order_by.to_string()), + count.map(|count| count as i64), + offset, + ) + .map(|row| DelphiReportIssueResult { + issue: DBDelphiReportIssue { + id: DelphiReportIssueId(row.id), + report_id: DelphiReportId(row.report_id), + issue_type: row.issue_type, + status: row.status, + }, + report: DBDelphiReport { + id: DelphiReportId(row.report_id), + file_id: row.file_id.map(DBFileId), + delphi_version: row.delphi_version, + artifact_url: row.artifact_url, + created: row.created, + }, + java_classes: row + .classes + .into_iter() + .flat_map(|class_list| class_list.0) + .collect(), + project_id: row.project_id.map(DBProjectId), + project_published: row.project_published, + }) + .fetch_all(exec) + .await?) + } +} + +/// A type of issue found by Delphi for an artifact. 
+#[derive( + Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(rename_all = "snake_case")] +#[sqlx(type_name = "delphi_report_issue_type")] +#[sqlx(rename_all = "snake_case")] +pub enum DelphiReportIssueType { + ReflectionIndirection, + XorObfuscation, + IncludedLibraries, + SuspiciousBinaries, + CorruptClasses, + SuspiciousClasses, + + UrlUsage, + ClassloaderUsage, + ProcessbuilderUsage, + RuntimeExecUsage, + #[serde(rename = "jni_usage")] + #[sqlx(rename = "jni_usage")] + JNIUsage, + + MainMethod, + NativeLoading, + + MalformedJar, + NestedJarTooDeep, + FailedDecompilation, + #[serde(alias = "ANALYSIS FAILURE!")] + AnalysisFailure, + + MalwareEasyforme, + MalwareSimplyloader, + + /// An issue reported by Delphi but not known by labrinth yet. + #[serde(other)] + Unknown, +} + +impl Display for DelphiReportIssueType { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + self.serialize(f) + } +} + +/// A Java class affected by a Delphi report issue. Every affected +/// Java class belongs to a specific issue, and an issue can have zero, +/// one, or more affected classes. (Some issues may be artifact-wide, +/// or otherwise not really specific to any particular class.) 
+#[derive(Debug, Deserialize, Serialize)] +pub struct DBDelphiReportIssueJavaClass { + pub id: DelphiReportIssueJavaClassId, + pub issue_id: DelphiReportIssueId, + pub internal_class_name: InternalJavaClassName, + pub decompiled_source: DecompiledJavaClassSource, +} + +impl DBDelphiReportIssueJavaClass { + pub async fn upsert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportIssueJavaClassId(sqlx::query_scalar!( + " + INSERT INTO delphi_report_issue_java_classes (issue_id, internal_class_name, decompiled_source) + VALUES ($1, $2, $3) + ON CONFLICT (issue_id, internal_class_name) DO UPDATE SET decompiled_source = $3 + RETURNING id + ", + self.issue_id as DelphiReportIssueId, + self.internal_class_name.0, + self.decompiled_source.0, + ) + .fetch_one(&mut **transaction) + .await?)) + } +} + +/// A [Java class name] with dots replaced by forward slashes (/). +/// +/// Because class names are usually the [binary names] passed to a classloader, top level interfaces and classes +/// have a binary name that matches its canonical, fully qualified name, such canonical names are prefixed by the +/// package path the class is in, and packages usually match the directory structure within a JAR for typical +/// classloaders, this usually (but not necessarily) corresponds to the path to the class file within its JAR. 
+/// +/// [Java class name]: https://docs.oracle.com/en/java/javase/21/docs/api/java.base/java/lang/Class.html#getName() +/// [binary names]: https://docs.oracle.com/javase/specs/jls/se21/html/jls-13.html#jls-13.1 +#[derive( + Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(transparent)] +#[sqlx(transparent)] +pub struct InternalJavaClassName(String); + +impl Deref for InternalJavaClassName { + type Target = String; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Display for InternalJavaClassName { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +/// The decompiled source code of a Java class. +#[derive( + Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(transparent)] +#[sqlx(transparent)] +pub struct DecompiledJavaClassSource(String); + +impl Deref for DecompiledJavaClassSource { + type Target = String; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Display for DecompiledJavaClassSource { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/apps/labrinth/src/database/models/ids.rs b/apps/labrinth/src/database/models/ids.rs index 483f6841e5..6ec97476fe 100644 --- a/apps/labrinth/src/database/models/ids.rs +++ b/apps/labrinth/src/database/models/ids.rs @@ -140,8 +140,8 @@ macro_rules! db_id_interface { }; } -macro_rules! short_id_type { - ($name:ident) => { +macro_rules! id_type { + ($name:ident as $type:ty) => { #[derive( Copy, Clone, @@ -154,7 +154,7 @@ macro_rules! 
short_id_type { Hash, )] #[sqlx(transparent)] - pub struct $name(pub i32); + pub struct $name(pub $type); }; } @@ -268,14 +268,17 @@ db_id_interface!( generator: generate_affiliate_code_id @ "affiliate_codes", ); -short_id_type!(CategoryId); -short_id_type!(GameId); -short_id_type!(LinkPlatformId); -short_id_type!(LoaderFieldEnumId); -short_id_type!(LoaderFieldEnumValueId); -short_id_type!(LoaderFieldId); -short_id_type!(LoaderId); -short_id_type!(NotificationActionId); -short_id_type!(ProjectTypeId); -short_id_type!(ReportTypeId); -short_id_type!(StatusId); +id_type!(CategoryId as i32); +id_type!(GameId as i32); +id_type!(LinkPlatformId as i32); +id_type!(LoaderFieldEnumId as i32); +id_type!(LoaderFieldEnumValueId as i32); +id_type!(LoaderFieldId as i32); +id_type!(LoaderId as i32); +id_type!(NotificationActionId as i32); +id_type!(ProjectTypeId as i32); +id_type!(ReportTypeId as i32); +id_type!(StatusId as i32); +id_type!(DelphiReportId as i64); +id_type!(DelphiReportIssueId as i64); +id_type!(DelphiReportIssueJavaClassId as i64); diff --git a/apps/labrinth/src/database/models/mod.rs b/apps/labrinth/src/database/models/mod.rs index 0e5f31cdf8..0d78310cee 100644 --- a/apps/labrinth/src/database/models/mod.rs +++ b/apps/labrinth/src/database/models/mod.rs @@ -4,6 +4,7 @@ pub mod affiliate_code_item; pub mod categories; pub mod charge_item; pub mod collection_item; +pub mod delphi_report_item; pub mod flow_item; pub mod friend_item; pub mod ids; diff --git a/apps/labrinth/src/database/models/version_item.rs b/apps/labrinth/src/database/models/version_item.rs index c97a43e50c..d69ac2e531 100644 --- a/apps/labrinth/src/database/models/version_item.rs +++ b/apps/labrinth/src/database/models/version_item.rs @@ -6,6 +6,7 @@ use crate::database::models::loader_fields::{ }; use crate::database::redis::RedisPool; use crate::models::projects::{FileType, VersionStatus}; +use crate::routes::internal::delphi::DelphiRunParameters; use chrono::{DateTime, Utc}; use 
dashmap::{DashMap, DashSet}; use futures::TryStreamExt; @@ -164,6 +165,15 @@ impl VersionFileBuilder { .await?; } + if let Err(err) = crate::routes::internal::delphi::run( + &mut **transaction, + DelphiRunParameters { file_id }, + ) + .await + { + tracing::error!("Error submitting new file to Delphi: {err}"); + } + Ok(file_id) } } diff --git a/apps/labrinth/src/routes/internal/admin.rs b/apps/labrinth/src/routes/internal/admin.rs index 518a315aa8..c4c64b5bf6 100644 --- a/apps/labrinth/src/routes/internal/admin.rs +++ b/apps/labrinth/src/routes/internal/admin.rs @@ -1,12 +1,9 @@ use crate::auth::validate::get_user_record_from_bearer_token; -use crate::database::models::thread_item::ThreadMessageBuilder; use crate::database::redis::RedisPool; use crate::models::analytics::Download; use crate::models::ids::ProjectId; use crate::models::pats::Scopes; -use crate::models::threads::MessageBody; use crate::queue::analytics::AnalyticsQueue; -use crate::queue::moderation::AUTOMOD_ID; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::search::SearchConfig; @@ -17,17 +14,14 @@ use modrinth_maxmind::MaxMind; use serde::Deserialize; use sqlx::PgPool; use std::collections::HashMap; -use std::fmt::Write; use std::net::Ipv4Addr; use std::sync::Arc; -use tracing::info; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("admin") .service(count_download) - .service(force_reindex) - .service(delphi_result_ingest), + .service(force_reindex), ); } @@ -163,98 +157,3 @@ pub async fn force_reindex( index_projects(pool.as_ref().clone(), redis.clone(), &config).await?; Ok(HttpResponse::NoContent().finish()) } - -#[derive(Deserialize)] -pub struct DelphiIngest { - pub url: String, - pub project_id: crate::models::ids::ProjectId, - pub version_id: crate::models::ids::VersionId, - pub issues: HashMap>, -} - -#[post("/_delphi", guard = "admin_key_guard")] -pub async fn delphi_result_ingest( - pool: web::Data, - redis: web::Data, - body: web::Json, -) 
-> Result { - if body.issues.is_empty() { - info!("No issues found for file {}", body.url); - return Ok(HttpResponse::NoContent().finish()); - } - - let webhook_url = dotenvy::var("DELPHI_SLACK_WEBHOOK")?; - - let project = crate::database::models::DBProject::get_id( - body.project_id.into(), - &**pool, - &redis, - ) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Project {} does not exist", - body.project_id - )) - })?; - - let mut header = format!("Suspicious traces found at {}", body.url); - - for (issue, trace) in &body.issues { - for (path, code) in trace { - write!( - &mut header, - "\n issue {issue} found at file {path}: \n ```\n{code}\n```" - ) - .unwrap(); - } - } - - crate::util::webhook::send_slack_project_webhook( - body.project_id, - &pool, - &redis, - webhook_url, - Some(header), - ) - .await - .ok(); - - let mut thread_header = format!( - "Suspicious traces found at [version {}](https://modrinth.com/project/{}/version/{})", - body.version_id, body.project_id, body.version_id - ); - - for (issue, trace) in &body.issues { - for path in trace.keys() { - write!( - &mut thread_header, - "\n\n- issue {issue} found at file {path}" - ) - .unwrap(); - } - - if trace.is_empty() { - write!(&mut thread_header, "\n\n- issue {issue} found").unwrap(); - } - } - - let mut transaction = pool.begin().await?; - ThreadMessageBuilder { - author_id: Some(crate::database::models::DBUserId(AUTOMOD_ID)), - body: MessageBody::Text { - body: thread_header, - private: true, - replying_to: None, - associated_images: vec![], - }, - thread_id: project.thread_id, - hide_identity: false, - } - .insert(&mut transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().finish()) -} diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs new file mode 100644 index 0000000000..9e88d19d01 --- /dev/null +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -0,0 +1,265 @@ +use 
std::{collections::HashMap, fmt::Write, io, sync::LazyLock}; + +use actix_web::{HttpResponse, get, post, put, web}; +use chrono::{DateTime, Utc}; +use serde::Deserialize; +use sqlx::PgPool; +use tracing::info; + +use crate::{ + database::{ + models::{ + DBFileId, DelphiReportId, DelphiReportIssueId, + DelphiReportIssueJavaClassId, + delphi_report_item::{ + DBDelphiReport, DBDelphiReportIssue, + DBDelphiReportIssueJavaClass, DecompiledJavaClassSource, + DelphiReportIssueStatus, DelphiReportIssueType, + DelphiReportListOrder, InternalJavaClassName, + }, + }, + redis::RedisPool, + }, + routes::ApiError, + util::guards::admin_key_guard, +}; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("delphi") + .service(ingest_report) + .service(_run) + .service(version) + .service(issues) + .service(update_issue), + ); +} + +#[derive(Deserialize)] +struct DelphiReport { + pub url: String, + pub project_id: crate::models::ids::ProjectId, + #[serde(rename = "version_id")] + pub _version_id: crate::models::ids::VersionId, + pub file_id: crate::models::ids::FileId, + /// A sequential, monotonically increasing version number for the + /// Delphi version that generated this report. 
+ pub delphi_version: i32, + pub issues: HashMap< + DelphiReportIssueType, + HashMap, + >, +} + +impl DelphiReport { + async fn send_to_slack( + &self, + pool: &PgPool, + redis: &RedisPool, + ) -> Result<(), ApiError> { + let webhook_url = dotenvy::var("DELPHI_SLACK_WEBHOOK")?; + + let mut message_header = + format!("⚠️ Suspicious traces found at {}", self.url); + + for (issue, trace) in &self.issues { + for (path, code) in trace { + write!( + &mut message_header, + "\n issue {issue} found at file {path}:\n```\n{code}\n```" + ) + .ok(); + } + } + + crate::util::webhook::send_slack_project_webhook( + self.project_id, + pool, + redis, + webhook_url, + Some(message_header), + ) + .await + } +} + +#[derive(Deserialize)] +pub struct DelphiRunParameters { + pub file_id: crate::database::models::ids::DBFileId, +} + +#[post("ingest", guard = "admin_key_guard")] +async fn ingest_report( + pool: web::Data, + redis: web::Data, + web::Json(report): web::Json, +) -> Result { + if report.issues.is_empty() { + info!("No issues found for file {}", report.url); + return Ok(HttpResponse::NoContent().finish()); + } + + report.send_to_slack(&pool, &redis).await.ok(); + + let mut transaction = pool.begin().await?; + + let report_id = DBDelphiReport { + id: DelphiReportId(0), // This will be set by the database + file_id: Some(DBFileId(report.file_id.0 as i64)), + delphi_version: report.delphi_version, + artifact_url: report.url.clone(), + created: DateTime::::MIN_UTC, // This will be set by the database + } + .upsert(&mut transaction) + .await?; + + for (issue_type, issue_java_classes) in report.issues { + let issue_id = DBDelphiReportIssue { + id: DelphiReportIssueId(0), // This will be set by the database + report_id, + issue_type, + status: DelphiReportIssueStatus::Pending, + } + .upsert(&mut transaction) + .await?; + + for (internal_class_name, decompiled_source) in issue_java_classes { + DBDelphiReportIssueJavaClass { + id: DelphiReportIssueJavaClassId(0), // This will be set by 
the database + issue_id, + internal_class_name, + decompiled_source, + } + .upsert(&mut transaction) + .await?; + } + } + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().finish()) +} + +pub async fn run( + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + run_parameters: DelphiRunParameters, +) -> Result { + let file_data = sqlx::query!( + r#" + SELECT + version_id AS "version_id: crate::database::models::DBVersionId", + versions.mod_id AS "project_id: crate::database::models::DBProjectId", + files.url AS "url" + FROM files INNER JOIN versions ON files.version_id = versions.id + WHERE files.id = $1 + "#, + run_parameters.file_id.0 + ) + .fetch_one(exec) + .await?; + + static DELPHI_CLIENT: LazyLock = + LazyLock::new(reqwest::Client::new); + + tracing::debug!( + "Running Delphi for project {}, version {}, file {}", + file_data.project_id.0, + file_data.version_id.0, + run_parameters.file_id.0 + ); + + DELPHI_CLIENT + .post(dotenvy::var("DELPHI_URL")?) + .json(&serde_json::json!({ + "url": file_data.url, + "project_id": file_data.project_id, + "version_id": file_data.version_id, + "file_id": run_parameters.file_id, + })) + .send() + .await + .and_then(|res| res.error_for_status()) + .map_err(ApiError::Delphi)?; + + Ok(HttpResponse::NoContent().finish()) +} + +#[post("run", guard = "admin_key_guard")] +async fn _run( + pool: web::Data, + run_parameters: web::Query, +) -> Result { + run(&**pool, run_parameters.into_inner()).await +} + +#[get("version", guard = "admin_key_guard")] +async fn version(pool: web::Data) -> Result { + Ok(HttpResponse::Ok().json( + sqlx::query_scalar!("SELECT MAX(delphi_version) FROM delphi_reports") + .fetch_one(&**pool) + .await?, + )) +} + +#[derive(Deserialize)] +struct DelphiIssuesSearchOptions { + #[serde(rename = "type")] + ty: Option, + status: Option, + order_by: Option, + count: Option, + offset: Option, +} + +#[get("issues", guard = "admin_key_guard")] +async fn issues( + pool: web::Data, + search_options: 
web::Query, +) -> Result { + Ok(HttpResponse::Ok().json( + DBDelphiReportIssue::find_all_by( + search_options.ty, + search_options.status, + search_options.order_by, + search_options.count, + search_options + .offset + .map(|offset| offset.try_into()) + .transpose() + .map_err(|err| { + io::Error::other(format!("Invalid offset: {err}")) + })?, + &**pool, + ) + .await?, + )) +} + +#[put("issue/{issue_id}", guard = "admin_key_guard")] +async fn update_issue( + pool: web::Data, + issue_id: web::Path, + web::Json(update_data): web::Json, +) -> Result { + let new_id = issue_id.into_inner(); + + let mut transaction = pool.begin().await?; + + let modified_same_issue = (DBDelphiReportIssue { + id: new_id, // Doesn't matter, upsert done for values of other fields + report_id: update_data.report_id, + issue_type: update_data.issue_type, + status: update_data.status, + }) + .upsert(&mut transaction) + .await? + == new_id; + + transaction.commit().await?; + + if modified_same_issue { + Ok(HttpResponse::NoContent().finish()) + } else { + Ok(HttpResponse::Created().finish()) + } +} diff --git a/apps/labrinth/src/routes/internal/mod.rs b/apps/labrinth/src/routes/internal/mod.rs index f15da09f0b..af79ed3408 100644 --- a/apps/labrinth/src/routes/internal/mod.rs +++ b/apps/labrinth/src/routes/internal/mod.rs @@ -1,6 +1,7 @@ pub(crate) mod admin; pub mod affiliate; pub mod billing; +pub mod delphi; pub mod external_notifications; pub mod flows; pub mod gdpr; @@ -32,7 +33,8 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) { .configure(medal::config) .configure(external_notifications::config) .configure(affiliate::config) - .configure(mural::config), + .configure(mural::config) + .configure(delphi::config), ); } diff --git a/apps/labrinth/src/routes/mod.rs b/apps/labrinth/src/routes/mod.rs index ca55240b8e..2ba78fa632 100644 --- a/apps/labrinth/src/routes/mod.rs +++ b/apps/labrinth/src/routes/mod.rs @@ -161,6 +161,8 @@ pub enum ApiError { RateLimitError(u128, u32), 
#[error("Error while interacting with payment processor: {0}")] Stripe(#[from] stripe::StripeError), + #[error("Error while interacting with Delphi: {0}")] + Delphi(reqwest::Error), } impl ApiError { @@ -201,6 +203,7 @@ impl ApiError { Self::Stripe(..) => "stripe_error", Self::TaxProcessor(..) => "tax_processor_error", Self::Slack(..) => "slack_error", + Self::Delphi(..) => "delphi_error", }, description: match self { Self::Internal(e) => format!("{e:#?}"), @@ -249,6 +252,7 @@ impl actix_web::ResponseError for ApiError { Self::Stripe(..) => StatusCode::FAILED_DEPENDENCY, Self::TaxProcessor(..) => StatusCode::INTERNAL_SERVER_ERROR, Self::Slack(..) => StatusCode::INTERNAL_SERVER_ERROR, + Self::Delphi(..) => StatusCode::INTERNAL_SERVER_ERROR, } } diff --git a/apps/labrinth/src/routes/v3/project_creation.rs b/apps/labrinth/src/routes/v3/project_creation.rs index e03d2dd58e..dad60fec85 100644 --- a/apps/labrinth/src/routes/v3/project_creation.rs +++ b/apps/labrinth/src/routes/v3/project_creation.rs @@ -339,9 +339,6 @@ async fn project_create_inner( redis: &RedisPool, session_queue: &AuthQueue, ) -> Result { - // The base URL for files uploaded to S3 - let cdn_url = dotenvy::var("CDN_URL")?; - // The currently logged in user let (_, current_user) = get_user_from_headers( &req, @@ -577,7 +574,6 @@ async fn project_create_inner( uploaded_files, &mut created_version.files, &mut created_version.dependencies, - &cdn_url, &content_disposition, project_id, created_version.version_id.into(), diff --git a/apps/labrinth/src/routes/v3/version_creation.rs b/apps/labrinth/src/routes/v3/version_creation.rs index 396395e69c..e70ffd7c79 100644 --- a/apps/labrinth/src/routes/v3/version_creation.rs +++ b/apps/labrinth/src/routes/v3/version_creation.rs @@ -38,7 +38,6 @@ use sha1::Digest; use sqlx::postgres::PgPool; use std::collections::{HashMap, HashSet}; use std::sync::Arc; -use tracing::error; use validator::Validate; fn default_requested_status() -> VersionStatus { @@ -158,8 +157,6 @@ 
async fn version_create_inner( session_queue: &AuthQueue, moderation_queue: &AutomatedModerationQueue, ) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - let mut initial_version_data = None; let mut version_builder = None; let mut selected_loaders = None; @@ -355,7 +352,6 @@ async fn version_create_inner( uploaded_files, &mut version.files, &mut version.dependencies, - &cdn_url, &content_disposition, version.project_id.into(), version.version_id.into(), @@ -590,8 +586,6 @@ async fn upload_file_to_version_inner( version_id: models::DBVersionId, session_queue: &AuthQueue, ) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - let mut initial_file_data: Option = None; let mut file_builders: Vec = Vec::new(); @@ -741,7 +735,6 @@ async fn upload_file_to_version_inner( uploaded_files, &mut file_builders, &mut dependencies, - &cdn_url, &content_disposition, project_id, version_id.into(), @@ -795,7 +788,6 @@ pub async fn upload_file( uploaded_files: &mut Vec, version_files: &mut Vec, dependencies: &mut Vec, - cdn_url: &str, content_disposition: &actix_web::http::header::ContentDisposition, project_id: ProjectId, version_id: VersionId, @@ -942,21 +934,17 @@ pub async fn upload_file( || force_primary || total_files_len == 1; - let file_path_encode = format!( - "data/{}/versions/{}/{}", - project_id, - version_id, + let file_path = format!( + "data/{project_id}/versions/{version_id}/{}", urlencoding::encode(file_name) ); - let file_path = - format!("data/{}/versions/{}/{}", project_id, version_id, &file_name); let upload_data = file_host .upload_file(content_type, &file_path, FileHostPublicity::Public, data) .await?; uploaded_files.push(UploadedFile { - name: file_path, + name: file_path.clone(), publicity: FileHostPublicity::Public, }); @@ -980,33 +968,9 @@ pub async fn upload_file( return Err(CreateError::InvalidInput(msg.to_string())); } - let url = format!("{cdn_url}/{file_path_encode}"); - - let client = reqwest::Client::new(); - let delphi_url = 
dotenvy::var("DELPHI_URL")?; - match client - .post(delphi_url) - .json(&serde_json::json!({ - "url": url, - "project_id": project_id, - "version_id": version_id, - })) - .send() - .await - { - Ok(res) => { - if !res.status().is_success() { - error!("Failed to upload file to Delphi: {url}"); - } - } - Err(e) => { - error!("Failed to upload file to Delphi: {url}: {e}"); - } - } - version_files.push(VersionFileBuilder { filename: file_name.to_string(), - url: format!("{cdn_url}/{file_path_encode}"), + url: format!("{}/{file_path}", dotenvy::var("CDN_URL")?), hashes: vec![ models::version_item::HashBuilder { algorithm: "sha1".to_string(), From 14518f331b9754b3377e35f5fca270868fb613ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Fri, 22 Aug 2025 22:37:04 +0200 Subject: [PATCH 003/104] chore: address some review comments --- apps/labrinth/src/routes/internal/delphi.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 9e88d19d01..4e8f588e98 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -63,10 +63,10 @@ impl DelphiReport { format!("⚠️ Suspicious traces found at {}", self.url); for (issue, trace) in &self.issues { - for (path, code) in trace { + for (class, code) in trace { write!( &mut message_header, - "\n issue {issue} found at file {path}:\n```\n{code}\n```" + "\n issue {issue} found at class `{class}`:\n```\n{code}\n```" ) .ok(); } @@ -227,7 +227,7 @@ async fn issues( .map(|offset| offset.try_into()) .transpose() .map_err(|err| { - io::Error::other(format!("Invalid offset: {err}")) + ApiError::InvalidInput(format!("Invalid offset: {err}")) })?, &**pool, ) From de68c4012353998cecbe8136cc13e0917f4ebb0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Fri, 22 Aug 2025 22:50:05 +0200 Subject: [PATCH 004/104] feat: add Delphi to Docker 
Compose `with-delphi` profile --- apps/labrinth/.env.docker-compose | 2 +- docker-compose.yml | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/apps/labrinth/.env.docker-compose b/apps/labrinth/.env.docker-compose index 6271bd3cb1..7da2b7f764 100644 --- a/apps/labrinth/.env.docker-compose +++ b/apps/labrinth/.env.docker-compose @@ -129,7 +129,7 @@ PYRO_API_KEY=none BREX_API_URL=https://platform.brexapis.com/v2/ BREX_API_KEY=none -DELPHI_URL=none +DELPHI_URL=http://labrinth-delphi:59999 DELPHI_SLACK_WEBHOOK=none AVALARA_1099_API_URL=https://www.track1099.com/api diff --git a/docker-compose.yml b/docker-compose.yml index 9ded6c6311..01074d4aa5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -114,6 +114,21 @@ services: watch: - path: ./apps/labrinth action: rebuild + delphi: + profiles: + - with-delphi + image: ghcr.io/modrinth/delphi:main + container_name: labrinth-delphi + ports: + - '59999:59999' + environment: + LABRINTH_ENDPOINT: http://labrinth:8000/_internal/delphi/ingest + LABRINTH_ADMIN_KEY: feedbeef + healthcheck: + test: ['CMD', 'wget', '-q', '-O/dev/null', 'http://localhost:59999/health'] + interval: 3s + timeout: 5s + retries: 3 volumes: meilisearch-data: db-data: From 8c7aa47d7b6f6b3eecd006abc30f67c27bde2c37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Fri, 22 Aug 2025 22:52:23 +0200 Subject: [PATCH 005/104] chore: fix unused import Clippy lint --- apps/labrinth/src/routes/internal/delphi.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 4e8f588e98..b5205434f0 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, fmt::Write, io, sync::LazyLock}; +use std::{collections::HashMap, fmt::Write, sync::LazyLock}; use actix_web::{HttpResponse, get, post, put, web}; use 
chrono::{DateTime, Utc}; From a8166ef674ab55719f9e4f592a8f5fe7c06ca855 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Fri, 22 Aug 2025 23:13:48 +0200 Subject: [PATCH 006/104] feat(labrinth/delphi): use PAT token authorization with project read scopes --- apps/labrinth/src/routes/internal/delphi.rs | 65 +++++++++++++++++++-- 1 file changed, 59 insertions(+), 6 deletions(-) diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index b5205434f0..e9c36ae213 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -1,12 +1,13 @@ use std::{collections::HashMap, fmt::Write, sync::LazyLock}; -use actix_web::{HttpResponse, get, post, put, web}; +use actix_web::{HttpRequest, HttpResponse, get, post, put, web}; use chrono::{DateTime, Utc}; use serde::Deserialize; use sqlx::PgPool; use tracing::info; use crate::{ + auth::check_is_moderator_from_headers, database::{ models::{ DBFileId, DelphiReportId, DelphiReportIssueId, @@ -20,6 +21,8 @@ use crate::{ }, redis::RedisPool, }, + models::pats::Scopes, + queue::session::AuthQueue, routes::ApiError, util::guards::admin_key_guard, }; @@ -184,16 +187,42 @@ pub async fn run( Ok(HttpResponse::NoContent().finish()) } -#[post("run", guard = "admin_key_guard")] +#[post("run")] async fn _run( + req: HttpRequest, pool: web::Data, + redis: web::Data, + session_queue: web::Data, run_parameters: web::Query, ) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + run(&**pool, run_parameters.into_inner()).await } -#[get("version", guard = "admin_key_guard")] -async fn version(pool: web::Data) -> Result { +#[get("version")] +async fn version( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + 
Scopes::PROJECT_READ, + ) + .await?; + Ok(HttpResponse::Ok().json( sqlx::query_scalar!("SELECT MAX(delphi_version) FROM delphi_reports") .fetch_one(&**pool) @@ -211,11 +240,23 @@ struct DelphiIssuesSearchOptions { offset: Option, } -#[get("issues", guard = "admin_key_guard")] +#[get("issues")] async fn issues( + req: HttpRequest, pool: web::Data, + redis: web::Data, + session_queue: web::Data, search_options: web::Query, ) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + Ok(HttpResponse::Ok().json( DBDelphiReportIssue::find_all_by( search_options.ty, @@ -235,12 +276,24 @@ async fn issues( )) } -#[put("issue/{issue_id}", guard = "admin_key_guard")] +#[put("issue/{issue_id}")] async fn update_issue( + req: HttpRequest, pool: web::Data, + redis: web::Data, + session_queue: web::Data, issue_id: web::Path, web::Json(update_data): web::Json, ) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + let new_id = issue_id.into_inner(); let mut transaction = pool.begin().await?; From 2589cc851bf0ac95f477f37122883ea20fa3428e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Fri, 22 Aug 2025 23:55:42 +0200 Subject: [PATCH 007/104] chore: expose file IDs in version queries --- apps/labrinth/src/models/v3/projects.rs | 5 +++++ apps/labrinth/src/routes/v3/version_creation.rs | 1 + 2 files changed, 6 insertions(+) diff --git a/apps/labrinth/src/models/v3/projects.rs b/apps/labrinth/src/models/v3/projects.rs index f70d806af1..559e76d002 100644 --- a/apps/labrinth/src/models/v3/projects.rs +++ b/apps/labrinth/src/models/v3/projects.rs @@ -731,6 +731,7 @@ impl From for Version { .files .into_iter() .map(|f| VersionFile { + id: Some(f.id.0), url: f.url, filename: f.filename, hashes: f.hashes, @@ -855,6 +856,10 @@ impl VersionStatus { /// A single project file, with a url for the file and the 
file's hash #[derive(Serialize, Deserialize, Clone)] pub struct VersionFile { + /// The ID of the file. Every file has an ID once created, but it + /// is not known until it indeed has been created. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub id: Option, /// A map of hashes of the file. The key is the hashing algorithm /// and the value is the string version of the hash. pub hashes: std::collections::HashMap, diff --git a/apps/labrinth/src/routes/v3/version_creation.rs b/apps/labrinth/src/routes/v3/version_creation.rs index e70ffd7c79..03ebb81283 100644 --- a/apps/labrinth/src/routes/v3/version_creation.rs +++ b/apps/labrinth/src/routes/v3/version_creation.rs @@ -447,6 +447,7 @@ async fn version_create_inner( .files .iter() .map(|file| VersionFile { + id: None, hashes: file .hashes .iter() From 9d1f09eb2e32e45468f834b387414cc93f10f922 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Sat, 23 Aug 2025 22:01:21 +0200 Subject: [PATCH 008/104] fix: accept null decompiled source payloads from Delphi --- apps/labrinth/migrations/20250810155316_delphi-reports.sql | 2 +- apps/labrinth/src/database/models/delphi_report_item.rs | 5 ++--- apps/labrinth/src/routes/internal/delphi.rs | 6 ++++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql index 4bc15e705b..003511bf49 100644 --- a/apps/labrinth/migrations/20250810155316_delphi-reports.sql +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -59,6 +59,6 @@ CREATE TABLE delphi_report_issue_java_classes ( ON DELETE CASCADE ON UPDATE CASCADE, internal_class_name TEXT NOT NULL, - decompiled_source TEXT NOT NULL, + decompiled_source TEXT, UNIQUE (issue_id, internal_class_name) ); diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index 8d83bc7dd2..89c1c0c89f 
100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -231,7 +231,6 @@ pub enum DelphiReportIssueType { MalformedJar, NestedJarTooDeep, FailedDecompilation, - #[serde(alias = "ANALYSIS FAILURE!")] AnalysisFailure, MalwareEasyforme, @@ -257,7 +256,7 @@ pub struct DBDelphiReportIssueJavaClass { pub id: DelphiReportIssueJavaClassId, pub issue_id: DelphiReportIssueId, pub internal_class_name: InternalJavaClassName, - pub decompiled_source: DecompiledJavaClassSource, + pub decompiled_source: Option, } impl DBDelphiReportIssueJavaClass { @@ -274,7 +273,7 @@ impl DBDelphiReportIssueJavaClass { ", self.issue_id as DelphiReportIssueId, self.internal_class_name.0, - self.decompiled_source.0, + self.decompiled_source.as_ref().map(|decompiled_source| &decompiled_source.0), ) .fetch_one(&mut **transaction) .await?)) diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index e9c36ae213..7276306c51 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -50,7 +50,7 @@ struct DelphiReport { pub delphi_version: i32, pub issues: HashMap< DelphiReportIssueType, - HashMap, + HashMap>, >, } @@ -67,9 +67,11 @@ impl DelphiReport { for (issue, trace) in &self.issues { for (class, code) in trace { + let code = code.as_deref().map(|code| &**code); write!( &mut message_header, - "\n issue {issue} found at class `{class}`:\n```\n{code}\n```" + "\n issue {issue} found at class `{class}`:\n```\n{}\n```", + code.unwrap_or("No decompiled source available") ) .ok(); } From 10987dde8c76b329632174a2695a54bd11b5698a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Sat, 23 Aug 2025 22:02:09 +0200 Subject: [PATCH 009/104] tweak(labrinth): expose base62 file IDs more consistently for Delphi --- apps/labrinth/src/database/models/version_item.rs | 4 +++- apps/labrinth/src/models/v3/projects.rs | 6 
+++--- apps/labrinth/src/routes/internal/delphi.rs | 13 ++++++++----- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/apps/labrinth/src/database/models/version_item.rs b/apps/labrinth/src/database/models/version_item.rs index d69ac2e531..d0ba9c0b97 100644 --- a/apps/labrinth/src/database/models/version_item.rs +++ b/apps/labrinth/src/database/models/version_item.rs @@ -167,7 +167,9 @@ impl VersionFileBuilder { if let Err(err) = crate::routes::internal::delphi::run( &mut **transaction, - DelphiRunParameters { file_id }, + DelphiRunParameters { + file_id: file_id.into(), + }, ) .await { diff --git a/apps/labrinth/src/models/v3/projects.rs b/apps/labrinth/src/models/v3/projects.rs index 559e76d002..c6c1101ffe 100644 --- a/apps/labrinth/src/models/v3/projects.rs +++ b/apps/labrinth/src/models/v3/projects.rs @@ -5,7 +5,7 @@ use crate::database::models::loader_fields::VersionField; use crate::database::models::project_item::{LinkUrl, ProjectQueryResult}; use crate::database::models::version_item::VersionQueryResult; use crate::models::ids::{ - OrganizationId, ProjectId, TeamId, ThreadId, VersionId, + FileId, OrganizationId, ProjectId, TeamId, ThreadId, VersionId, }; use ariadne::ids::UserId; use chrono::{DateTime, Utc}; @@ -731,7 +731,7 @@ impl From for Version { .files .into_iter() .map(|f| VersionFile { - id: Some(f.id.0), + id: Some(FileId(f.id.0 as u64)), url: f.url, filename: f.filename, hashes: f.hashes, @@ -859,7 +859,7 @@ pub struct VersionFile { /// The ID of the file. Every file has an ID once created, but it /// is not known until it indeed has been created. #[serde(default, skip_serializing_if = "Option::is_none")] - pub id: Option, + pub id: Option, /// A map of hashes of the file. The key is the hashing algorithm /// and the value is the string version of the hash. 
pub hashes: std::collections::HashMap, diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 7276306c51..96c55d97ef 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -21,7 +21,10 @@ use crate::{ }, redis::RedisPool, }, - models::pats::Scopes, + models::{ + ids::{ProjectId, VersionId}, + pats::Scopes, + }, queue::session::AuthQueue, routes::ApiError, util::guards::admin_key_guard, @@ -90,7 +93,7 @@ impl DelphiReport { #[derive(Deserialize)] pub struct DelphiRunParameters { - pub file_id: crate::database::models::ids::DBFileId, + pub file_id: crate::models::ids::FileId, } #[post("ingest", guard = "admin_key_guard")] @@ -158,7 +161,7 @@ pub async fn run( FROM files INNER JOIN versions ON files.version_id = versions.id WHERE files.id = $1 "#, - run_parameters.file_id.0 + run_parameters.file_id.0 as i64 ) .fetch_one(exec) .await?; @@ -177,8 +180,8 @@ pub async fn run( .post(dotenvy::var("DELPHI_URL")?) 
.json(&serde_json::json!({ "url": file_data.url, - "project_id": file_data.project_id, - "version_id": file_data.version_id, + "project_id": ProjectId(file_data.project_id.0 as u64), + "version_id": VersionId(file_data.version_id.0 as u64), "file_id": run_parameters.file_id, })) .send() From cd6ab8fea1f8c8453e1456f353ec80e1ea5aa0fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Sat, 23 Aug 2025 22:42:59 +0200 Subject: [PATCH 010/104] feat(labrinth/delphi): support new Delphi report severity field --- .../20250810155316_delphi-reports.sql | 3 ++ .../src/database/models/delphi_report_item.rs | 38 ++++++++++++++----- apps/labrinth/src/routes/internal/delphi.rs | 5 ++- 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql index 003511bf49..141bdc5fdd 100644 --- a/apps/labrinth/migrations/20250810155316_delphi-reports.sql +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -1,3 +1,5 @@ +CREATE TYPE delphi_report_severity AS ENUM ('low', 'medium', 'high', 'severe'); + CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'approved', 'rejected'); CREATE TYPE delphi_report_issue_type AS ENUM ( @@ -32,6 +34,7 @@ CREATE TABLE delphi_reports ( delphi_version INTEGER NOT NULL, artifact_url VARCHAR(2048) NOT NULL, created TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + severity DELPHI_REPORT_SEVERITY NOT NULL, UNIQUE (file_id, delphi_version) ); CREATE INDEX delphi_version ON delphi_reports (delphi_version); diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index 89c1c0c89f..6bc155953f 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -25,6 +25,7 @@ pub struct DBDelphiReport { pub delphi_version: i32, pub artifact_url: String, pub created: 
DateTime, + pub severity: DelphiReportSeverity, } impl DBDelphiReport { @@ -34,21 +35,35 @@ impl DBDelphiReport { ) -> Result { Ok(DelphiReportId(sqlx::query_scalar!( " - INSERT INTO delphi_reports (file_id, delphi_version, artifact_url) - VALUES ($1, $2, $3) + INSERT INTO delphi_reports (file_id, delphi_version, artifact_url, severity) + VALUES ($1, $2, $3, $4) ON CONFLICT (file_id, delphi_version) DO UPDATE SET - delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP + delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP, severity = $4 RETURNING id ", self.file_id as Option, self.delphi_version, self.artifact_url, + self.severity as DelphiReportSeverity, ) .fetch_one(&mut **transaction) .await?)) } } +/// A severity level for a Delphi report. +#[derive( + Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(rename_all = "UPPERCASE")] +#[sqlx(type_name = "delphi_report_severity", rename_all = "snake_case")] +pub enum DelphiReportSeverity { + Low, + Medium, + High, + Severe, +} + /// An issue found in a Delphi report. Every issue belongs to a report, /// and a report can have zero, one, or more issues attached to it. #[derive(Deserialize, Serialize)] @@ -64,8 +79,7 @@ pub struct DBDelphiReportIssue { Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, )] #[serde(rename_all = "snake_case")] -#[sqlx(type_name = "delphi_report_issue_status")] -#[sqlx(rename_all = "snake_case")] +#[sqlx(type_name = "delphi_report_issue_status", rename_all = "snake_case")] pub enum DelphiReportIssueStatus { /// The issue is pending review by the moderation team. 
Pending, @@ -91,6 +105,8 @@ pub enum DelphiReportListOrder { CreatedAsc, CreatedDesc, PendingStatusFirst, + SeverityAsc, + SeverityDesc, } impl Display for DelphiReportListOrder { @@ -146,9 +162,9 @@ impl DBDelphiReportIssue { SELECT delphi_report_issues.id AS "id", report_id, issue_type AS "issue_type: DelphiReportIssueType", - delphi_report_issues.status as "status: DelphiReportIssueStatus", + delphi_report_issues.status AS "status: DelphiReportIssueStatus", - file_id, delphi_version, artifact_url, created, + file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiReportSeverity", json_array(SELECT to_jsonb(delphi_report_issue_java_classes) FROM delphi_report_issue_java_classes WHERE issue_id = delphi_report_issues.id @@ -165,7 +181,9 @@ impl DBDelphiReportIssue { ORDER BY CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC, CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC, - CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC + CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC, + CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC, + CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC OFFSET $5 LIMIT $4 "#, @@ -188,6 +206,7 @@ impl DBDelphiReportIssue { delphi_version: row.delphi_version, artifact_url: row.artifact_url, created: row.created, + severity: row.severity, }, java_classes: row .classes @@ -207,8 +226,7 @@ impl DBDelphiReportIssue { Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, )] #[serde(rename_all = "snake_case")] -#[sqlx(type_name = "delphi_report_issue_type")] -#[sqlx(rename_all = "snake_case")] +#[sqlx(type_name = "delphi_report_issue_type", rename_all = "snake_case")] pub enum 
DelphiReportIssueType { ReflectionIndirection, XorObfuscation, diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 96c55d97ef..7297e8579b 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -16,7 +16,8 @@ use crate::{ DBDelphiReport, DBDelphiReportIssue, DBDelphiReportIssueJavaClass, DecompiledJavaClassSource, DelphiReportIssueStatus, DelphiReportIssueType, - DelphiReportListOrder, InternalJavaClassName, + DelphiReportListOrder, DelphiReportSeverity, + InternalJavaClassName, }, }, redis::RedisPool, @@ -55,6 +56,7 @@ struct DelphiReport { DelphiReportIssueType, HashMap>, >, + pub severity: DelphiReportSeverity, } impl DelphiReport { @@ -117,6 +119,7 @@ async fn ingest_report( delphi_version: report.delphi_version, artifact_url: report.url.clone(), created: DateTime::::MIN_UTC, // This will be set by the database + severity: report.severity, } .upsert(&mut transaction) .await?; From 6c0fe22555bbd9747a36c0fc349d3d93d5b52ed1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Mon, 25 Aug 2025 14:41:15 +0200 Subject: [PATCH 011/104] chore(labrinth): run `cargo sqlx prepare` to fix Docker build errors --- ...87c2db6cf840fc9284a8d8b47f42be741b03.json} | 26 +++++++++++-- ...3153f5e9796b55ae753ab57b14f37708b400d.json | 24 ------------ ...de1e7cddd68ac956143bef994104280a8dc07.json | 37 +++++++++++++++++++ 3 files changed, 59 insertions(+), 28 deletions(-) rename apps/labrinth/.sqlx/{query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json => query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json} (65%) delete mode 100644 apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json create mode 100644 apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json diff --git 
a/apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json b/apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json similarity index 65% rename from apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json rename to apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json index 54969cea41..078e1fce41 100644 --- a/apps/labrinth/.sqlx/query-c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328.json +++ b/apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type AS \"issue_type: DelphiReportIssueType\",\n delphi_report_issues.status as \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created,\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC\n OFFSET $5\n LIMIT $4\n ", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type AS 
\"issue_type: DelphiReportIssueType\",\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiReportSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC\n OFFSET $5\n LIMIT $4\n ", "describe": { "columns": [ { @@ -84,16 +84,33 @@ }, { "ordinal": 8, + "name": "severity: DelphiReportSeverity", + "type_info": { + "Custom": { + "name": "delphi_report_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + }, + { + "ordinal": 9, "name": "classes: sqlx::types::Json>", "type_info": "Jsonb" }, { - "ordinal": 9, + "ordinal": 10, "name": "project_id?", "type_info": "Int8" }, { - "ordinal": 10, + "ordinal": 11, "name": "project_published?", "type_info": "Timestamptz" } @@ -155,10 +172,11 @@ false, false, false, + false, null, true, true ] }, - "hash": 
"c1cd83ddcd112e46477a195e8bed0a1658c6ddf7a486082cdb847fab06150328" + "hash": "2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03" } diff --git a/apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json b/apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json deleted file mode 100644 index de31a078f0..0000000000 --- a/apps/labrinth/.sqlx/query-8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO delphi_reports (file_id, delphi_version, artifact_url)\n VALUES ($1, $2, $3)\n ON CONFLICT (file_id, delphi_version) DO UPDATE SET\n delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP\n RETURNING id\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int4", - "Varchar" - ] - }, - "nullable": [ - false - ] - }, - "hash": "8f1f75d9c52a5a340aae2b3fd863153f5e9796b55ae753ab57b14f37708b400d" -} diff --git a/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json new file mode 100644 index 0000000000..cc1c7b84f3 --- /dev/null +++ b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_reports (file_id, delphi_version, artifact_url, severity)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (file_id, delphi_version) DO UPDATE SET\n delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP, severity = $4\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Varchar", + { + "Custom": { + "name": 
"delphi_report_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07" +} From 4065acc62784f00e7a4560c5228ed0d1678e3eaa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Sat, 6 Sep 2025 21:28:38 +0200 Subject: [PATCH 012/104] tweak: add route for fetching Delphi issue type schema, abstract Labrinth away from issue types --- .../20250810155316_delphi-reports.sql | 25 +----- .../src/database/models/delphi_report_item.rs | 54 ++---------- apps/labrinth/src/routes/internal/delphi.rs | 83 ++++++++++++++++--- 3 files changed, 78 insertions(+), 84 deletions(-) diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql index 141bdc5fdd..d717d09f9d 100644 --- a/apps/labrinth/migrations/20250810155316_delphi-reports.sql +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -2,29 +2,6 @@ CREATE TYPE delphi_report_severity AS ENUM ('low', 'medium', 'high', 'severe'); CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'approved', 'rejected'); -CREATE TYPE delphi_report_issue_type AS ENUM ( - 'reflection_indirection', - 'xor_obfuscation', - 'included_libraries', - 'suspicious_binaries', - 'corrupt_classes', - 'suspicious_classes', - 'url_usage', - 'classloader_usage', - 'processbuilder_usage', - 'runtime_exec_usage', - 'jni_usage', - 'main_method', - 'native_loading', - 'malformed_jar', - 'nested_jar_too_deep', - 'failed_decompilation', - 'analysis_failure', - 'malware_easyforme', - 'malware_simplyloader', - 'unknown' -); - -- A Delphi analysis report for a project version CREATE TABLE delphi_reports ( id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, @@ -46,7 +23,7 @@ CREATE TABLE delphi_report_issues ( report_id BIGINT NOT NULL REFERENCES delphi_reports (id) ON DELETE CASCADE ON UPDATE CASCADE, - 
issue_type DELPHI_REPORT_ISSUE_TYPE NOT NULL, + issue_type TEXT NOT NULL, status DELPHI_REPORT_ISSUE_STATUS NOT NULL, UNIQUE (report_id, issue_type) ); diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index 6bc155953f..d7757be92c 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -70,7 +70,7 @@ pub enum DelphiReportSeverity { pub struct DBDelphiReportIssue { pub id: DelphiReportIssueId, pub report_id: DelphiReportId, - pub issue_type: DelphiReportIssueType, + pub issue_type: String, pub status: DelphiReportIssueStatus, } @@ -141,7 +141,7 @@ impl DBDelphiReportIssue { RETURNING id ", self.report_id as DelphiReportId, - self.issue_type as DelphiReportIssueType, + self.issue_type, self.status as DelphiReportIssueStatus, ) .fetch_one(&mut **transaction) @@ -150,7 +150,7 @@ impl DBDelphiReportIssue { } pub async fn find_all_by( - ty: Option, + ty: Option, status: Option, order_by: Option, count: Option, @@ -161,7 +161,7 @@ impl DBDelphiReportIssue { r#" SELECT delphi_report_issues.id AS "id", report_id, - issue_type AS "issue_type: DelphiReportIssueType", + issue_type, delphi_report_issues.status AS "status: DelphiReportIssueStatus", file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiReportSeverity", @@ -187,7 +187,7 @@ impl DBDelphiReportIssue { OFFSET $5 LIMIT $4 "#, - ty as Option, + ty, status as Option, order_by.map(|order_by| order_by.to_string()), count.map(|count| count as i64), @@ -221,50 +221,6 @@ impl DBDelphiReportIssue { } } -/// A type of issue found by Delphi for an artifact. 
-#[derive( - Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, -)] -#[serde(rename_all = "snake_case")] -#[sqlx(type_name = "delphi_report_issue_type", rename_all = "snake_case")] -pub enum DelphiReportIssueType { - ReflectionIndirection, - XorObfuscation, - IncludedLibraries, - SuspiciousBinaries, - CorruptClasses, - SuspiciousClasses, - - UrlUsage, - ClassloaderUsage, - ProcessbuilderUsage, - RuntimeExecUsage, - #[serde(rename = "jni_usage")] - #[sqlx(rename = "jni_usage")] - JNIUsage, - - MainMethod, - NativeLoading, - - MalformedJar, - NestedJarTooDeep, - FailedDecompilation, - AnalysisFailure, - - MalwareEasyforme, - MalwareSimplyloader, - - /// An issue reported by Delphi but not known by labrinth yet. - #[serde(other)] - Unknown, -} - -impl Display for DelphiReportIssueType { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - self.serialize(f) - } -} - /// A Java class affected by a Delphi report issue. Every affected /// Java class belongs to a specific issue, and an issue can have zero, /// one, or more affected classes. 
(Some issues may be artifact-wide, diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 7297e8579b..bc5eb64fd4 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -1,9 +1,11 @@ -use std::{collections::HashMap, fmt::Write, sync::LazyLock}; +use std::{collections::HashMap, fmt::Write, sync::LazyLock, time::Instant}; use actix_web::{HttpRequest, HttpResponse, get, post, put, web}; use chrono::{DateTime, Utc}; +use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT}; use serde::Deserialize; use sqlx::PgPool; +use tokio::sync::Mutex; use tracing::info; use crate::{ @@ -15,9 +17,8 @@ use crate::{ delphi_report_item::{ DBDelphiReport, DBDelphiReportIssue, DBDelphiReportIssueJavaClass, DecompiledJavaClassSource, - DelphiReportIssueStatus, DelphiReportIssueType, - DelphiReportListOrder, DelphiReportSeverity, - InternalJavaClassName, + DelphiReportIssueStatus, DelphiReportListOrder, + DelphiReportSeverity, InternalJavaClassName, }, }, redis::RedisPool, @@ -38,10 +39,26 @@ pub fn config(cfg: &mut web::ServiceConfig) { .service(_run) .service(version) .service(issues) - .service(update_issue), + .service(update_issue) + .service(issue_type_schema), ); } +static DELPHI_CLIENT: LazyLock = LazyLock::new(|| { + reqwest::Client::builder() + .default_headers({ + HeaderMap::from_iter([( + USER_AGENT, + HeaderValue::from_static(concat!( + "Labrinth/", + env!("COMPILATION_DATE") + )), + )]) + }) + .build() + .unwrap() +}); + #[derive(Deserialize)] struct DelphiReport { pub url: String, @@ -53,7 +70,7 @@ struct DelphiReport { /// Delphi version that generated this report. 
pub delphi_version: i32, pub issues: HashMap< - DelphiReportIssueType, + String, HashMap>, >, pub severity: DelphiReportSeverity, @@ -169,9 +186,6 @@ pub async fn run( .fetch_one(exec) .await?; - static DELPHI_CLIENT: LazyLock = - LazyLock::new(reqwest::Client::new); - tracing::debug!( "Running Delphi for project {}, version {}, file {}", file_data.project_id.0, @@ -241,7 +255,7 @@ async fn version( #[derive(Deserialize)] struct DelphiIssuesSearchOptions { #[serde(rename = "type")] - ty: Option, + ty: Option, status: Option, order_by: Option, count: Option, @@ -254,7 +268,7 @@ async fn issues( pool: web::Data, redis: web::Data, session_queue: web::Data, - search_options: web::Query, + web::Query(search_options): web::Query, ) -> Result { check_is_moderator_from_headers( &req, @@ -324,3 +338,50 @@ async fn update_issue( Ok(HttpResponse::Created().finish()) } } + +#[get("issue_type/schema")] +async fn issue_type_schema( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + // This route is expected to be called often by the frontend, and Delphi is not necessarily + // built to scale beyond malware analysis, so cache the result of its quasi-constant-valued + // schema route to alleviate the load on it + + static CACHED_ISSUE_TYPE_SCHEMA: Mutex< + Option<(serde_json::Map, Instant)>, + > = Mutex::const_new(None); + + match &mut *CACHED_ISSUE_TYPE_SCHEMA.lock().await { + Some((schema, last_fetch)) if last_fetch.elapsed().as_secs() < 60 => { + Ok(HttpResponse::Ok().json(schema)) + } + cache_entry => Ok(HttpResponse::Ok().json( + &cache_entry + .insert(( + DELPHI_CLIENT + .get(format!("{}/schema", dotenvy::var("DELPHI_URL")?)) + .send() + .await + .and_then(|res| res.error_for_status()) + .map_err(ApiError::Delphi)? 
+ .json::>() + .await + .map_err(ApiError::Delphi)?, + Instant::now(), + )) + .0, + )), + } +} From 2e118d4d791919afb3644d12ae98fa868bbc8711 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Sat, 6 Sep 2025 21:39:02 +0200 Subject: [PATCH 013/104] chore: run `cargo sqlx prepare` --- ...724e9a4d5b9765d52305f99f859f939c2e854.json | 30 +-- ...a87c2db6cf840fc9284a8d8b47f42be741b03.json | 182 ------------------ ...d299224bea59fb593310cfae59e41cc322da5.json | 126 ++++++++++++ 3 files changed, 127 insertions(+), 211 deletions(-) delete mode 100644 apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json create mode 100644 apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json diff --git a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json index 963ea430b4..31ff65e350 100644 --- a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json +++ b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json @@ -12,35 +12,7 @@ "parameters": { "Left": [ "Int8", - { - "Custom": { - "name": "delphi_report_issue_type", - "kind": { - "Enum": [ - "reflection_indirection", - "xor_obfuscation", - "included_libraries", - "suspicious_binaries", - "corrupt_classes", - "suspicious_classes", - "url_usage", - "classloader_usage", - "processbuilder_usage", - "runtime_exec_usage", - "jni_usage", - "main_method", - "native_loading", - "malformed_jar", - "nested_jar_too_deep", - "failed_decompilation", - "analysis_failure", - "malware_easyforme", - "malware_simplyloader", - "unknown" - ] - } - } - }, + "Text", { "Custom": { "name": "delphi_report_issue_status", diff --git a/apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json 
b/apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json deleted file mode 100644 index 078e1fce41..0000000000 --- a/apps/labrinth/.sqlx/query-2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03.json +++ /dev/null @@ -1,182 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type AS \"issue_type: DelphiReportIssueType\",\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiReportSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC\n OFFSET $5\n LIMIT $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "report_id", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "issue_type: 
DelphiReportIssueType", - "type_info": { - "Custom": { - "name": "delphi_report_issue_type", - "kind": { - "Enum": [ - "reflection_indirection", - "xor_obfuscation", - "included_libraries", - "suspicious_binaries", - "corrupt_classes", - "suspicious_classes", - "url_usage", - "classloader_usage", - "processbuilder_usage", - "runtime_exec_usage", - "jni_usage", - "main_method", - "native_loading", - "malformed_jar", - "nested_jar_too_deep", - "failed_decompilation", - "analysis_failure", - "malware_easyforme", - "malware_simplyloader", - "unknown" - ] - } - } - } - }, - { - "ordinal": 3, - "name": "status: DelphiReportIssueStatus", - "type_info": { - "Custom": { - "name": "delphi_report_issue_status", - "kind": { - "Enum": [ - "pending", - "approved", - "rejected" - ] - } - } - } - }, - { - "ordinal": 4, - "name": "file_id", - "type_info": "Int8" - }, - { - "ordinal": 5, - "name": "delphi_version", - "type_info": "Int4" - }, - { - "ordinal": 6, - "name": "artifact_url", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "created", - "type_info": "Timestamptz" - }, - { - "ordinal": 8, - "name": "severity: DelphiReportSeverity", - "type_info": { - "Custom": { - "name": "delphi_report_severity", - "kind": { - "Enum": [ - "low", - "medium", - "high", - "severe" - ] - } - } - } - }, - { - "ordinal": 9, - "name": "classes: sqlx::types::Json>", - "type_info": "Jsonb" - }, - { - "ordinal": 10, - "name": "project_id?", - "type_info": "Int8" - }, - { - "ordinal": 11, - "name": "project_published?", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - { - "Custom": { - "name": "delphi_report_issue_type", - "kind": { - "Enum": [ - "reflection_indirection", - "xor_obfuscation", - "included_libraries", - "suspicious_binaries", - "corrupt_classes", - "suspicious_classes", - "url_usage", - "classloader_usage", - "processbuilder_usage", - "runtime_exec_usage", - "jni_usage", - "main_method", - "native_loading", - "malformed_jar", - "nested_jar_too_deep", - 
"failed_decompilation", - "analysis_failure", - "malware_easyforme", - "malware_simplyloader", - "unknown" - ] - } - } - }, - { - "Custom": { - "name": "delphi_report_issue_status", - "kind": { - "Enum": [ - "pending", - "approved", - "rejected" - ] - } - } - }, - "Text", - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - true, - false, - false, - false, - false, - null, - true, - true - ] - }, - "hash": "2fd9af40467ca5c7ac81d921ca0a87c2db6cf840fc9284a8d8b47f42be741b03" -} diff --git a/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json b/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json new file mode 100644 index 0000000000..2349d6c90a --- /dev/null +++ b/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json @@ -0,0 +1,126 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiReportSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 
= 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC\n OFFSET $5\n LIMIT $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "report_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "issue_type", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "status: DelphiReportIssueStatus", + "type_info": { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "file_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "delphi_version", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "artifact_url", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "severity: DelphiReportSeverity", + "type_info": { + "Custom": { + "name": "delphi_report_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + }, + { + "ordinal": 9, + "name": "classes: sqlx::types::Json>", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "project_id?", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "project_published?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text", + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + }, + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + false, + false, + false, + false, + null, + true, + true + ] + }, + "hash": "54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5" +} From 
d614e9cbddc7d9e73118b9fe938dde1345374e0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Sun, 5 Oct 2025 13:18:38 +0200 Subject: [PATCH 014/104] chore: fix typo on frontend generated state file message --- apps/frontend/nuxt.config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/nuxt.config.ts b/apps/frontend/nuxt.config.ts index fb136f6676..5e2f726cfe 100644 --- a/apps/frontend/nuxt.config.ts +++ b/apps/frontend/nuxt.config.ts @@ -154,7 +154,7 @@ export default defineNuxtConfig({ (state.errors ?? []).length === 0 ) { console.log( - 'Tags already recently generated. Delete apps/src/frontend/generated/state.json to force regeneration.', + 'Tags already recently generated. Delete apps/frontend/src/generated/state.json to force regeneration.', ) return } From 8c887c701c2a7a5759b50c1b8e088cd2a04a1d36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20Gonz=C3=A1lez?= Date: Fri, 24 Oct 2025 23:02:26 +0200 Subject: [PATCH 015/104] feat: update to use new Delphi issue schema --- ...bc457a08e70dcde320c6852074819e41f8ad9.json | 24 ------ ...01763de4c51bd16c100549261b11c5a4142b8.json | 38 +++++++++ ...c053676964380451b3f461e3276f3a26bbff.json} | 10 +-- ...adf1de8aaf214b32a2aab299a0d87fd2dc453.json | 14 ++++ ...de1e7cddd68ac956143bef994104280a8dc07.json | 2 +- .../20250810155316_delphi-reports.sql | 16 ++-- .../src/database/models/delphi_report_item.rs | 77 ++++++++++++------- apps/labrinth/src/database/models/ids.rs | 2 +- apps/labrinth/src/routes/internal/delphi.rs | 63 ++++++++++----- 9 files changed, 158 insertions(+), 88 deletions(-) delete mode 100644 apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json create mode 100644 apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json rename apps/labrinth/.sqlx/{query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json => 
query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json} (57%) create mode 100644 apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json diff --git a/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json b/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json deleted file mode 100644 index 37dcad2943..0000000000 --- a/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO delphi_report_issue_java_classes (issue_id, internal_class_name, decompiled_source)\n VALUES ($1, $2, $3)\n ON CONFLICT (issue_id, internal_class_name) DO UPDATE SET decompiled_source = $3\n RETURNING id\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Text", - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9" -} diff --git a/apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json b/apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json new file mode 100644 index 0000000000..8806320d11 --- /dev/null +++ b/apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json @@ -0,0 +1,38 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issue_details (issue_id, internal_class_name, decompiled_source, data, severity)\n VALUES ($1, $2, $3, $4, $5)\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + "Text", + "Jsonb", + { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", 
+ "severe" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8" +} diff --git a/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json b/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json similarity index 57% rename from apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json rename to apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json index 2349d6c90a..f31751fe42 100644 --- a/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json +++ b/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiReportSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN 
delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC\n OFFSET $5\n LIMIT $4\n ", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_details)\n FROM delphi_report_issue_details\n WHERE issue_id = delphi_report_issues.id\n ) AS \"details: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END DESC\n OFFSET $5\n LIMIT $4\n ", "describe": { "columns": [ { @@ -56,10 +56,10 @@ }, { "ordinal": 8, - "name": "severity: DelphiReportSeverity", + "name": "severity: DelphiSeverity", "type_info": { "Custom": { - "name": "delphi_report_severity", + "name": "delphi_severity", "kind": { "Enum": [ "low", @@ -73,7 +73,7 @@ }, { 
"ordinal": 9, - "name": "classes: sqlx::types::Json>", + "name": "details: sqlx::types::Json>", "type_info": "Jsonb" }, { @@ -122,5 +122,5 @@ true ] }, - "hash": "54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5" + "hash": "c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff" } diff --git a/apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json b/apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json new file mode 100644 index 0000000000..db0d075672 --- /dev/null +++ b/apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM delphi_report_issue_details WHERE issue_id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453" +} diff --git a/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json index cc1c7b84f3..8cbe94abd5 100644 --- a/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json +++ b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json @@ -16,7 +16,7 @@ "Varchar", { "Custom": { - "name": "delphi_report_severity", + "name": "delphi_severity", "kind": { "Enum": [ "low", diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql index d717d09f9d..e2851c18d0 100644 --- a/apps/labrinth/migrations/20250810155316_delphi-reports.sql +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -1,4 +1,4 @@ -CREATE TYPE delphi_report_severity AS ENUM ('low', 'medium', 'high', 'severe'); +CREATE TYPE delphi_severity AS ENUM 
('low', 'medium', 'high', 'severe'); CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'approved', 'rejected'); @@ -11,7 +11,7 @@ CREATE TABLE delphi_reports ( delphi_version INTEGER NOT NULL, artifact_url VARCHAR(2048) NOT NULL, created TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, - severity DELPHI_REPORT_SEVERITY NOT NULL, + severity DELPHI_SEVERITY NOT NULL, UNIQUE (file_id, delphi_version) ); CREATE INDEX delphi_version ON delphi_reports (delphi_version); @@ -29,16 +29,18 @@ CREATE TABLE delphi_report_issues ( ); CREATE INDEX delphi_report_issue_by_status_and_type ON delphi_report_issues (status, issue_type); --- A Java class affected by a Delphi report issue. Every affected --- Java class belongs to a specific issue, and an issue can have zero, --- one, or more affected classes. (Some issues may be artifact-wide, +-- The details of a Delphi report issue, which contain data about a +-- Java class affected by it. Every Delphi report issue details object +-- belongs to a specific issue, and an issue can have zero, one, or +-- more details attached to it. (Some issues may be artifact-wide, -- or otherwise not really specific to any particular class.) 
-CREATE TABLE delphi_report_issue_java_classes ( +CREATE TABLE delphi_report_issue_details ( id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, issue_id BIGINT NOT NULL REFERENCES delphi_report_issues (id) ON DELETE CASCADE ON UPDATE CASCADE, internal_class_name TEXT NOT NULL, decompiled_source TEXT, - UNIQUE (issue_id, internal_class_name) + data JSONB NOT NULL, + severity DELPHI_SEVERITY NOT NULL ); diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index d7757be92c..c1bbacd58c 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -1,14 +1,16 @@ use std::{ + collections::HashMap, fmt::{self, Display, Formatter}, ops::Deref, }; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; +use sqlx::types::Json; use crate::database::models::{ - DBFileId, DBProjectId, DatabaseError, DelphiReportId, DelphiReportIssueId, - DelphiReportIssueJavaClassId, + DBFileId, DBProjectId, DatabaseError, DelphiReportId, + DelphiReportIssueDetailsId, DelphiReportIssueId, }; /// A Delphi malware analysis report for a project version file. @@ -25,7 +27,7 @@ pub struct DBDelphiReport { pub delphi_version: i32, pub artifact_url: String, pub created: DateTime, - pub severity: DelphiReportSeverity, + pub severity: DelphiSeverity, } impl DBDelphiReport { @@ -44,20 +46,20 @@ impl DBDelphiReport { self.file_id as Option, self.delphi_version, self.artifact_url, - self.severity as DelphiReportSeverity, + self.severity as DelphiSeverity, ) .fetch_one(&mut **transaction) .await?)) } } -/// A severity level for a Delphi report. +/// A severity level reported by Delphi. 
#[derive( Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, )] #[serde(rename_all = "UPPERCASE")] -#[sqlx(type_name = "delphi_report_severity", rename_all = "snake_case")] -pub enum DelphiReportSeverity { +#[sqlx(type_name = "delphi_severity", rename_all = "snake_case")] +pub enum DelphiSeverity { Low, Medium, High, @@ -122,7 +124,7 @@ impl Display for DelphiReportListOrder { pub struct DelphiReportIssueResult { pub issue: DBDelphiReportIssue, pub report: DBDelphiReport, - pub java_classes: Vec, + pub details: Vec, pub project_id: Option, pub project_published: Option>, } @@ -164,11 +166,11 @@ impl DBDelphiReportIssue { issue_type, delphi_report_issues.status AS "status: DelphiReportIssueStatus", - file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiReportSeverity", - json_array(SELECT to_jsonb(delphi_report_issue_java_classes) - FROM delphi_report_issue_java_classes + file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiSeverity", + json_array(SELECT to_jsonb(delphi_report_issue_details) + FROM delphi_report_issue_details WHERE issue_id = delphi_report_issues.id - ) AS "classes: sqlx::types::Json>", + ) AS "details: sqlx::types::Json>", versions.mod_id AS "project_id?", mods.published AS "project_published?" 
FROM delphi_report_issues INNER JOIN delphi_reports ON delphi_reports.id = report_id @@ -182,8 +184,8 @@ impl DBDelphiReportIssue { CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC, CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC, CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC, - CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC, - CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC + CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC, + CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END DESC OFFSET $5 LIMIT $4 "#, @@ -208,10 +210,10 @@ impl DBDelphiReportIssue { created: row.created, severity: row.severity, }, - java_classes: row - .classes + details: row + .details .into_iter() - .flat_map(|class_list| class_list.0) + .flat_map(|details_list| details_list.0) .collect(), project_id: row.project_id.map(DBProjectId), project_published: row.project_published, @@ -221,37 +223,54 @@ impl DBDelphiReportIssue { } } -/// A Java class affected by a Delphi report issue. Every affected -/// Java class belongs to a specific issue, and an issue can have zero, -/// one, or more affected classes. (Some issues may be artifact-wide, +/// The details of a Delphi report issue, which contain data about a +/// Java class affected by it. Every Delphi report issue details object +/// belongs to a specific issue, and an issue can have zero, one, or +/// more details attached to it. (Some issues may be artifact-wide, /// or otherwise not really specific to any particular class.) 
#[derive(Debug, Deserialize, Serialize)] -pub struct DBDelphiReportIssueJavaClass { - pub id: DelphiReportIssueJavaClassId, +pub struct DBDelphiReportIssueDetails { + pub id: DelphiReportIssueDetailsId, pub issue_id: DelphiReportIssueId, pub internal_class_name: InternalJavaClassName, pub decompiled_source: Option, + pub data: Json>, + pub severity: DelphiSeverity, } -impl DBDelphiReportIssueJavaClass { - pub async fn upsert( +impl DBDelphiReportIssueDetails { + pub async fn insert( &self, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - ) -> Result { - Ok(DelphiReportIssueJavaClassId(sqlx::query_scalar!( + ) -> Result { + Ok(DelphiReportIssueDetailsId(sqlx::query_scalar!( " - INSERT INTO delphi_report_issue_java_classes (issue_id, internal_class_name, decompiled_source) - VALUES ($1, $2, $3) - ON CONFLICT (issue_id, internal_class_name) DO UPDATE SET decompiled_source = $3 + INSERT INTO delphi_report_issue_details (issue_id, internal_class_name, decompiled_source, data, severity) + VALUES ($1, $2, $3, $4, $5) RETURNING id ", self.issue_id as DelphiReportIssueId, self.internal_class_name.0, self.decompiled_source.as_ref().map(|decompiled_source| &decompiled_source.0), + &self.data as &Json>, + self.severity as DelphiSeverity, ) .fetch_one(&mut **transaction) .await?)) } + + pub async fn remove_all_by_issue_id( + issue_id: DelphiReportIssueId, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(sqlx::query!( + "DELETE FROM delphi_report_issue_details WHERE issue_id = $1", + issue_id as DelphiReportIssueId, + ) + .execute(&mut **transaction) + .await? + .rows_affected()) + } } /// A [Java class name] with dots replaced by forward slashes (/). 
diff --git a/apps/labrinth/src/database/models/ids.rs b/apps/labrinth/src/database/models/ids.rs index 6ec97476fe..5c2c37cc42 100644 --- a/apps/labrinth/src/database/models/ids.rs +++ b/apps/labrinth/src/database/models/ids.rs @@ -281,4 +281,4 @@ id_type!(ReportTypeId as i32); id_type!(StatusId as i32); id_type!(DelphiReportId as i64); id_type!(DelphiReportIssueId as i64); -id_type!(DelphiReportIssueJavaClassId as i64); +id_type!(DelphiReportIssueDetailsId as i64); diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index bc5eb64fd4..6960dfbcd3 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -12,13 +12,13 @@ use crate::{ auth::check_is_moderator_from_headers, database::{ models::{ - DBFileId, DelphiReportId, DelphiReportIssueId, - DelphiReportIssueJavaClassId, + DBFileId, DelphiReportId, DelphiReportIssueDetailsId, + DelphiReportIssueId, delphi_report_item::{ DBDelphiReport, DBDelphiReportIssue, - DBDelphiReportIssueJavaClass, DecompiledJavaClassSource, - DelphiReportIssueStatus, DelphiReportListOrder, - DelphiReportSeverity, InternalJavaClassName, + DBDelphiReportIssueDetails, DecompiledJavaClassSource, + DelphiReportIssueStatus, DelphiReportListOrder, DelphiSeverity, + InternalJavaClassName, }, }, redis::RedisPool, @@ -59,6 +59,14 @@ static DELPHI_CLIENT: LazyLock = LazyLock::new(|| { .unwrap() }); +#[derive(Deserialize)] +struct DelphiReportIssueDetails { + pub internal_class_name: InternalJavaClassName, + pub decompiled_source: Option, + pub data: HashMap, + pub severity: DelphiSeverity, +} + #[derive(Deserialize)] struct DelphiReport { pub url: String, @@ -69,11 +77,8 @@ struct DelphiReport { /// A sequential, monotonically increasing version number for the /// Delphi version that generated this report. 
pub delphi_version: i32, - pub issues: HashMap< - String, - HashMap>, - >, - pub severity: DelphiReportSeverity, + pub issues: HashMap>, + pub severity: DelphiSeverity, } impl DelphiReport { @@ -88,12 +93,19 @@ impl DelphiReport { format!("⚠️ Suspicious traces found at {}", self.url); for (issue, trace) in &self.issues { - for (class, code) in trace { - let code = code.as_deref().map(|code| &**code); + for DelphiReportIssueDetails { + internal_class_name, + decompiled_source, + .. + } in trace + { write!( &mut message_header, - "\n issue {issue} found at class `{class}`:\n```\n{}\n```", - code.unwrap_or("No decompiled source available") + "\n issue {issue} found at class `{internal_class_name}`:\n```\n{}\n```", + decompiled_source.as_ref().map_or( + "No decompiled source available", + |decompiled_source| &**decompiled_source + ) ) .ok(); } @@ -141,7 +153,7 @@ async fn ingest_report( .upsert(&mut transaction) .await?; - for (issue_type, issue_java_classes) in report.issues { + for (issue_type, issue_details) in report.issues { let issue_id = DBDelphiReportIssue { id: DelphiReportIssueId(0), // This will be set by the database report_id, @@ -151,14 +163,23 @@ async fn ingest_report( .upsert(&mut transaction) .await?; - for (internal_class_name, decompiled_source) in issue_java_classes { - DBDelphiReportIssueJavaClass { - id: DelphiReportIssueJavaClassId(0), // This will be set by the database + // This is required to handle the case where the same Delphi version is re-run on the same file + DBDelphiReportIssueDetails::remove_all_by_issue_id( + issue_id, + &mut transaction, + ) + .await?; + + for issue_detail in issue_details { + DBDelphiReportIssueDetails { + id: DelphiReportIssueDetailsId(0), // This will be set by the database issue_id, - internal_class_name, - decompiled_source, + internal_class_name: issue_detail.internal_class_name, + decompiled_source: issue_detail.decompiled_source, + data: issue_detail.data.into(), + severity: issue_detail.severity, } - 
.upsert(&mut transaction) + .insert(&mut transaction) .await?; } } From 67203345ff0b9d14031c9a8c3f0af27516e5a2e3 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Fri, 7 Nov 2025 10:01:39 +0000 Subject: [PATCH 016/104] wip: tech review endpoints --- .../{moderation.rs => moderation/mod.rs} | 8 +++- .../routes/internal/moderation/tech_review.rs | 43 +++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) rename apps/labrinth/src/routes/internal/{moderation.rs => moderation/mod.rs} (98%) create mode 100644 apps/labrinth/src/routes/internal/moderation/tech_review.rs diff --git a/apps/labrinth/src/routes/internal/moderation.rs b/apps/labrinth/src/routes/internal/moderation/mod.rs similarity index 98% rename from apps/labrinth/src/routes/internal/moderation.rs rename to apps/labrinth/src/routes/internal/moderation/mod.rs index 973183820b..e5968d67e6 100644 --- a/apps/labrinth/src/routes/internal/moderation.rs +++ b/apps/labrinth/src/routes/internal/moderation/mod.rs @@ -15,10 +15,16 @@ use serde::{Deserialize, Serialize}; use sqlx::PgPool; use std::collections::HashMap; +mod tech_review; + pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { cfg.service(get_projects) .service(get_project_meta) - .service(set_project_meta); + .service(set_project_meta) + .service( + utoipa_actix_web::scope("/tech-review") + .configure(tech_review::config), + ); } #[derive(Deserialize, utoipa::ToSchema)] diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs new file mode 100644 index 0000000000..bd9551ed8a --- /dev/null +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -0,0 +1,43 @@ +use actix_web::{HttpRequest, get, web}; +use sqlx::PgPool; + +use crate::{ + auth::check_is_moderator_from_headers, + database::redis::RedisPool, + models::pats::Scopes, + queue::session::AuthQueue, + routes::{ApiError, internal::moderation::ProjectsRequestOptions}, +}; + +pub fn 
config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { + cfg.service(get_projects); +} + +/// Gets all projects which are awaiting technical review. +#[utoipa::path] +#[get("")] +async fn get_projects( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + request_opts: web::Query, + session_queue: web::Data, +) -> Result<(), ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + sqlx::query!( + " + SELECT id FROM delphi_reports + ORDER BY created + + " + ) + .fetch(&**pool) +} From 356d76a321b2831761d37ae9e55941d6eacb5482 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Mon, 10 Nov 2025 16:46:54 +0000 Subject: [PATCH 017/104] wip: add ToSchema for dependent types --- CLAUDE.md | 2 +- apps/labrinth/src/auth/mod.rs | 10 +- .../src/database/models/delphi_report_item.rs | 11 +- apps/labrinth/src/database/models/ids.rs | 1 + apps/labrinth/src/models/v3/threads.rs | 10 +- apps/labrinth/src/models/v3/users.rs | 27 ++- .../routes/internal/moderation/tech_review.rs | 225 ++++++++++++++++-- 7 files changed, 260 insertions(+), 26 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 2f6a226977..0ab19bc52c 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -56,7 +56,7 @@ Use `docker exec labrinth-clickhouse clickhouse-client` to access the Clickhouse ### Postgres -Use `docker exec labrinth-postgres psql -U postgres` to access the PostgreSQL instance. +Use `docker exec labrinth-postgres psql -U labrinth -d labrinth -c "SELECT 1"` to access the PostgreSQL instance, replacing the `SELECT 1` with your query. 
# Guidelines diff --git a/apps/labrinth/src/auth/mod.rs b/apps/labrinth/src/auth/mod.rs index 953d978c52..8131c77f5f 100644 --- a/apps/labrinth/src/auth/mod.rs +++ b/apps/labrinth/src/auth/mod.rs @@ -112,7 +112,15 @@ impl AuthenticationError { } #[derive( - Serialize, Deserialize, Default, Eq, PartialEq, Clone, Copy, Debug, + Debug, + Clone, + Copy, + PartialEq, + Eq, + Default, + Serialize, + Deserialize, + utoipa::ToSchema, )] #[serde(rename_all = "lowercase")] pub enum AuthProvider { diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index c1bbacd58c..1a3eff7fe9 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -55,7 +55,16 @@ impl DBDelphiReport { /// A severity level reported by Delphi. #[derive( - Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, + Deserialize, + Serialize, + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + sqlx::Type, + utoipa::ToSchema, )] #[serde(rename_all = "UPPERCASE")] #[sqlx(type_name = "delphi_severity", rename_all = "snake_case")] diff --git a/apps/labrinth/src/database/models/ids.rs b/apps/labrinth/src/database/models/ids.rs index 5c2c37cc42..266cafa2e0 100644 --- a/apps/labrinth/src/database/models/ids.rs +++ b/apps/labrinth/src/database/models/ids.rs @@ -152,6 +152,7 @@ macro_rules! 
id_type { Eq, PartialEq, Hash, + utoipa::ToSchema, )] #[sqlx(transparent)] pub struct $name(pub $type); diff --git a/apps/labrinth/src/models/v3/threads.rs b/apps/labrinth/src/models/v3/threads.rs index a1a32214bd..5b918899d2 100644 --- a/apps/labrinth/src/models/v3/threads.rs +++ b/apps/labrinth/src/models/v3/threads.rs @@ -7,7 +7,7 @@ use ariadne::ids::UserId; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct Thread { pub id: ThreadId, #[serde(rename = "type")] @@ -18,7 +18,7 @@ pub struct Thread { pub members: Vec, } -#[derive(Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct ThreadMessage { pub id: ThreadMessageId, pub author_id: Option, @@ -27,7 +27,7 @@ pub struct ThreadMessage { pub hide_identity: bool, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] #[serde(tag = "type", rename_all = "snake_case")] pub enum MessageBody { Text { @@ -50,7 +50,9 @@ pub enum MessageBody { }, } -#[derive(Serialize, Deserialize, Eq, PartialEq, Copy, Clone)] +#[derive( + Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, utoipa::ToSchema, +)] #[serde(rename_all = "snake_case")] pub enum ThreadType { Report, diff --git a/apps/labrinth/src/models/v3/users.rs b/apps/labrinth/src/models/v3/users.rs index d8b0a2e822..0f276fc8fa 100644 --- a/apps/labrinth/src/models/v3/users.rs +++ b/apps/labrinth/src/models/v3/users.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; pub const DELETED_USER: UserId = UserId(127155982985829); bitflags::bitflags! { - #[derive(Copy, Clone, Debug)] + #[derive(Debug, Clone, Copy)] pub struct Badges: u64 { const MIDAS = 1 << 0; const EARLY_MODPACK_ADOPTER = 1 << 1; @@ -21,6 +21,23 @@ bitflags::bitflags! 
{ } } +impl utoipa::PartialSchema for Badges { + fn schema() -> utoipa::openapi::RefOr { + u64::schema() + } +} + +impl utoipa::ToSchema for Badges { + fn schemas( + schemas: &mut Vec<( + String, + utoipa::openapi::RefOr, + )>, + ) { + u64::schemas(schemas); + } +} + bitflags_serde_impl!(Badges, u64); impl Default for Badges { @@ -29,7 +46,7 @@ impl Default for Badges { } } -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct User { pub id: UserId, pub username: String, @@ -52,7 +69,7 @@ pub struct User { pub github_id: Option, } -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct UserPayoutData { pub paypal_address: Option, pub paypal_country: Option, @@ -137,7 +154,9 @@ impl User { } } -#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] +#[derive( + Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, utoipa::ToSchema, +)] #[serde(rename_all = "lowercase")] pub enum Role { Developer, diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index bd9551ed8a..436595dca5 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -1,28 +1,151 @@ -use actix_web::{HttpRequest, get, web}; +use std::collections::HashMap; + +use actix_web::{HttpRequest, get, post, web}; +use serde::{Deserialize, Serialize}; use sqlx::PgPool; +use tokio_stream::StreamExt; use crate::{ auth::check_is_moderator_from_headers, - database::redis::RedisPool, - models::pats::Scopes, + database::{ + models::{ + DelphiReportId, DelphiReportIssueDetailsId, DelphiReportIssueId, + ProjectTypeId, categories::ProjectType, + delphi_report_item::DelphiSeverity, + }, + redis::RedisPool, + }, + models::{pats::Scopes, projects::Project, threads::Thread}, queue::session::AuthQueue, 
routes::{ApiError, internal::moderation::ProjectsRequestOptions}, + util::error::Context, }; pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { - cfg.service(get_projects); + cfg.service(search_projects); +} + +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SearchProjects { + #[serde(default = "default_limit")] + pub limit: u64, + #[serde(default)] + pub page: u64, + #[serde(default)] + pub filter: SearchProjectsFilter, + #[serde(default = "default_sort_by")] + pub sort_by: SearchProjectsSort, +} + +fn default_limit() -> u64 { + 20 +} + +fn default_sort_by() -> SearchProjectsSort { + SearchProjectsSort::Oldest +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SearchProjectsFilter { + pub project_type: Vec, +} + +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + utoipa::ToSchema, +)] +pub enum SearchProjectsSort { + Oldest, + Newest, +} + +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ProjectReview { + pub project: Project, + pub project_owner: (), + pub thread: Thread, + /// Why this project was flagged. + pub flag_reason: FlagReason, + /// What files were flagged in this review. + pub files: Vec, +} + +/// Why a project was flagged for technical review. +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + utoipa::ToSchema, +)] +#[serde(rename_all = "snake_case")] +pub enum FlagReason { + /// Delphi anti-malware scanner flagged a file in the project. + Delphi, } -/// Gets all projects which are awaiting technical review. +/// Details of a JAR file which was flagged for technical review, as part of +/// a [`ProjectReview`]. +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct FileReview { + /// Name of the flagged file. + pub file_name: String, + /// Size of the flagged file, in bytes. 
+ pub file_size: u64, + /// What issues appeared in the file. + pub issues: Vec, +} + +/// Issue raised by Delphi in a flagged file. +/// +/// The issue is scoped to the JAR, not any specific class, but issues can be +/// raised because they appeared in a class - see [`FileIssueDetails`]. +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct FileIssue { + /// ID of the issue. + pub issue_id: DelphiReportIssueId, + /// Delphi-determined kind of issue that this is, e.g. `OBFUSCATED_NAMES`. + /// + /// Labrinth does not know the full set of kinds of issues, so this is kept + /// as a string. + pub kind: String, + /// How important is this issue, as flagged by Delphi? + pub severity: DelphiSeverity, + /// Details of why this issue might have been raised, such as what file it + /// was found in. + pub details: Vec, +} + +/// Occurrence of a [`FileIssue`] in a specific class in a scanned JAR file. +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct FileIssueDetails { + /// Name of the Java class in which this issue was found. + pub class_name: String, + /// Decompiled, pretty-printed source of the Java class. + pub decompiled_source: String, +} + +/// Searches all projects which are awaiting technical review. 
#[utoipa::path] -#[get("")] -async fn get_projects( +#[post("/search")] +async fn search_projects( req: HttpRequest, pool: web::Data, redis: web::Data, - request_opts: web::Query, session_queue: web::Data, -) -> Result<(), ApiError> { + search_req: web::Json, +) -> Result>, ApiError> { check_is_moderator_from_headers( &req, &**pool, @@ -32,12 +155,84 @@ async fn get_projects( ) .await?; - sqlx::query!( - " - SELECT id FROM delphi_reports - ORDER BY created + let sort_by = match search_req.sort_by { + SearchProjectsSort::Oldest => 0, + SearchProjectsSort::Newest => 1, + }; + let limit = search_req.limit.max(50); + let offset = limit * search_req.page; + + let limit = + i64::try_from(limit).wrap_request_err("limit cannot fit into `i64`")?; + let offset = i64::try_from(offset) + .wrap_request_err("offset cannot fit into `i64`")?; + + let mut reports = Vec::new(); + let mut project_ids = Vec::new(); - " + let mut rows = sqlx::query!( + r#" + SELECT + dr.id AS report_id, + m.id AS project_id, + dr.created AS report_created, + dri.issue_type AS issue_type, + drid.internal_class_name AS issue_detail_class_name, + drid.decompiled_source AS issue_detail_decompiled_source, + drid.severity AS "issue_detail_severity: DelphiSeverity" + FROM delphi_reports dr + + -- fetch the project this report is for, and its type + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + INNER JOIN mods m ON m.id = v.mod_id + LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id + INNER JOIN categories c ON c.id = mc.joining_category_id + + -- fetch report issues and details + LEFT JOIN delphi_report_issues dri ON dri.report_id = dr.id + LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id + + -- filtering + WHERE + -- project type + (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[])) + + -- sorting + ORDER BY + CASE WHEN $2 = ' + -- when sorting on TIMESTAMPTZ columns, we extract the int value of the time + -- so that we can sort 
by an integer, which we can negate + -- (we can't negate a TIMESTAMPTZ) + + -- oldest + WHEN $2 = 0 THEN EXTRACT(EPOCH FROM created) + -- newest + WHEN $2 = 1 THEN -EXTRACT(EPOCH FROM created) + END + + -- pagination + LIMIT $3 + OFFSET $4 + "#, + &search_req.filter.project_type.iter().map(|ty| ty.0).collect::>(), + sort_by, + limit, + offset, ) - .fetch(&**pool) + .fetch(&**pool); + while let Some(row) = rows + .next() + .await + .transpose() + .wrap_internal_err("failed to fetch reports") + { + project_ids.push(row.project_id); + reports.push(ProjectReview { + project: (), + project_owner: (), + }); + } + + Ok(()) } From 3ce5c23948ef4de86573ce9cec3cd3e7e5425550 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Mon, 10 Nov 2025 23:54:09 +0000 Subject: [PATCH 018/104] wip: report issues return --- Cargo.lock | 1 + apps/labrinth/Cargo.toml | 1 + .../src/database/models/thread_item.rs | 4 +- .../routes/internal/moderation/tech_review.rs | 169 +++++++++++++----- 4 files changed, 131 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4fb3b0b077..68e577e318 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4535,6 +4535,7 @@ dependencies = [ "hyper-util", "iana-time-zone", "image", + "indexmap 2.11.4", "itertools 0.14.0", "jemalloc_pprof", "json-patch 4.1.0", diff --git a/apps/labrinth/Cargo.toml b/apps/labrinth/Cargo.toml index 227a8b23e7..bb7147bfaa 100644 --- a/apps/labrinth/Cargo.toml +++ b/apps/labrinth/Cargo.toml @@ -66,6 +66,7 @@ image = { workspace = true, features = [ "tiff", "webp", ] } +indexmap = { workspace = true } itertools = { workspace = true } json-patch = { workspace = true } lettre = { workspace = true } diff --git a/apps/labrinth/src/database/models/thread_item.rs b/apps/labrinth/src/database/models/thread_item.rs index 04275d62e1..d01a1b3842 100644 --- a/apps/labrinth/src/database/models/thread_item.rs +++ b/apps/labrinth/src/database/models/thread_item.rs @@ -11,7 +11,7 @@ pub struct ThreadBuilder { pub report_id: Option, } 
-#[derive(Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct DBThread { pub id: DBThreadId, @@ -30,7 +30,7 @@ pub struct ThreadMessageBuilder { pub hide_identity: bool, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct DBThreadMessage { pub id: DBThreadMessageId, pub thread_id: DBThreadId, diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 436595dca5..73473dd883 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -1,6 +1,9 @@ -use std::collections::HashMap; +use std::{collections::HashMap, fmt}; -use actix_web::{HttpRequest, get, post, web}; +use actix_web::{HttpRequest, post, web}; +use chrono::{DateTime, Utc}; +use eyre::eyre; +use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use tokio_stream::StreamExt; @@ -8,16 +11,19 @@ use tokio_stream::StreamExt; use crate::{ auth::check_is_moderator_from_headers, database::{ + DBProject, models::{ - DelphiReportId, DelphiReportIssueDetailsId, DelphiReportIssueId, - ProjectTypeId, categories::ProjectType, - delphi_report_item::DelphiSeverity, + DBProjectId, DBThread, DBThreadId, DelphiReportId, + DelphiReportIssueId, ProjectTypeId, + delphi_report_item::{ + DBDelphiReportIssue, DBDelphiReportIssueDetails, DelphiSeverity, + }, }, redis::RedisPool, }, models::{pats::Scopes, projects::Project, threads::Thread}, queue::session::AuthQueue, - routes::{ApiError, internal::moderation::ProjectsRequestOptions}, + routes::ApiError, util::error::Context, }; @@ -42,7 +48,7 @@ fn default_limit() -> u64 { } fn default_sort_by() -> SearchProjectsSort { - SearchProjectsSort::Oldest + SearchProjectsSort::CreatedAsc } #[derive(Debug, Clone, Default, Serialize, Deserialize, utoipa::ToSchema)] @@ -62,15 +68,30 @@ pub struct 
SearchProjectsFilter { utoipa::ToSchema, )] pub enum SearchProjectsSort { - Oldest, - Newest, + CreatedAsc, + CreatedDesc, +} + +impl fmt::Display for SearchProjectsSort { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s = serde_json::to_value(*self).unwrap(); + let s = s.as_str().unwrap(); + write!(f, "{s}") + } } #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] pub struct ProjectReview { pub project: Project, pub project_owner: (), - pub thread: Thread, + pub thread: DBThread, + pub reports: Vec, +} + +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ProjectReport { + /// When this report was created. + pub created_at: DateTime, /// Why this project was flagged. pub flag_reason: FlagReason, /// What files were flagged in this review. @@ -146,6 +167,20 @@ async fn search_projects( session_queue: web::Data, search_req: web::Json, ) -> Result>, ApiError> { + #[derive(Debug)] + struct ProjectRecord { + reports: IndexMap, + } + + #[derive(Debug)] + struct ReportRecord { + created: DateTime, + issues: IndexMap, + } + + #[derive(Debug)] + struct IssueRecord {} + check_is_moderator_from_headers( &req, &**pool, @@ -155,10 +190,7 @@ async fn search_projects( ) .await?; - let sort_by = match search_req.sort_by { - SearchProjectsSort::Oldest => 0, - SearchProjectsSort::Newest => 1, - }; + let sort_by = search_req.sort_by.to_string(); let limit = search_req.limit.max(50); let offset = limit * search_req.page; @@ -167,27 +199,31 @@ async fn search_projects( let offset = i64::try_from(offset) .wrap_request_err("offset cannot fit into `i64`")?; - let mut reports = Vec::new(); - let mut project_ids = Vec::new(); + let mut project_records = IndexMap::::new(); + let mut project_ids = Vec::::new(); + let mut thread_ids = Vec::::new(); let mut rows = sqlx::query!( r#" SELECT - dr.id AS report_id, - m.id AS project_id, - dr.created AS report_created, - dri.issue_type AS issue_type, - drid.internal_class_name AS 
issue_detail_class_name, - drid.decompiled_source AS issue_detail_decompiled_source, - drid.severity AS "issue_detail_severity: DelphiSeverity" + dr.id AS "report_id!: DelphiReportId", + m.id AS "project_id!: DBProjectId", + t.id AS "project_thread_id!: DBThreadId", + dr.created AS "report_created!", + dri.id AS "issue_id", + dri.issue_type AS "issue_type?", + drid.internal_class_name AS "issue_detail_class_name?", + drid.decompiled_source AS "issue_detail_decompiled_source?", + drid.severity AS "issue_detail_severity?: DelphiSeverity" FROM delphi_reports dr - -- fetch the project this report is for, and its type + -- fetch the project this report is for, its type, and thread INNER JOIN files f ON f.id = dr.file_id INNER JOIN versions v ON v.id = f.version_id INNER JOIN mods m ON m.id = v.mod_id LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id INNER JOIN categories c ON c.id = mc.joining_category_id + INNER JOIN threads t ON t.mod_id = m.id -- fetch report issues and details LEFT JOIN delphi_report_issues dri ON dri.report_id = dr.id @@ -200,39 +236,88 @@ async fn search_projects( -- sorting ORDER BY - CASE WHEN $2 = ' - -- when sorting on TIMESTAMPTZ columns, we extract the int value of the time - -- so that we can sort by an integer, which we can negate - -- (we can't negate a TIMESTAMPTZ) - - -- oldest - WHEN $2 = 0 THEN EXTRACT(EPOCH FROM created) - -- newest - WHEN $2 = 1 THEN -EXTRACT(EPOCH FROM created) - END + CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC, + CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC -- pagination LIMIT $3 OFFSET $4 "#, - &search_req.filter.project_type.iter().map(|ty| ty.0).collect::>(), - sort_by, + &search_req + .filter + .project_type + .iter() + .map(|ty| ty.0) + .collect::>(), + &sort_by, limit, offset, ) .fetch(&**pool); + while let Some(row) = rows .next() .await .transpose() - .wrap_internal_err("failed to fetch reports") + .wrap_internal_err("failed to fetch reports")? 
{ project_ids.push(row.project_id); - reports.push(ProjectReview { - project: (), - project_owner: (), - }); + thread_ids.push(row.project_thread_id); + + let project = + project_records.entry(row.project_id).or_insert_with(|| { + ProjectRecord { + reports: IndexMap::new(), + } + }); + let report = + project + .reports + .entry(row.report_id) + .or_insert(|| ReportRecord { + created: row.report_created, + issues: IndexMap::new(), + }); + report.issues.entry(row.issue) + // .push(ReportRecord { + // created: row.report_created, + // issues: + // flag_reason: FlagReason::Delphi, + // files: + // created: row.report_created, + // }); } - Ok(()) + let projects = DBProject::get_many_ids(&project_ids, &**pool, &redis) + .await + .wrap_internal_err("failed to fetch projects")? + .into_iter() + .map(|project| (project.inner.id, Project::from(project))) + .collect::>(); + let threads = DBThread::get_many(&thread_ids, &**pool) + .await + .wrap_internal_err("failed to fetch threads")? + .into_iter() + .map(|thread| (thread.id, thread)) + .collect::>(); + + let projects = project_records.into_iter().map(|(project_id, reports)| { + let project = + projects.get(&project_id).wrap_internal_err_with(|| { + eyre!("no fetched project with ID {project_id:?}") + })?; + let thread = threads + .get(&DBThreadId::from(project.thread_id)) + .wrap_internal_err_with(|| { + eyre!("no fetched thread with ID {:?}", project.thread_id) + })?; + Ok::<_, ApiError>(ProjectReview { + project: project.clone(), + project_owner: (), + thread: thread.clone(), + reports, + }) + }); + + Ok(web::Json(projects)) } From 846f2cf5598f6d42ab64b88ec38de01f9735792d Mon Sep 17 00:00:00 2001 From: aecsocket Date: Wed, 12 Nov 2025 19:17:47 +0000 Subject: [PATCH 019/104] wip --- .../routes/internal/moderation/tech_review.rs | 62 +++++++++++++------ 1 file changed, 43 insertions(+), 19 deletions(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs 
b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 73473dd883..3356ad00c3 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -13,7 +13,7 @@ use crate::{ database::{ DBProject, models::{ - DBProjectId, DBThread, DBThreadId, DelphiReportId, + DBFileId, DBProjectId, DBThread, DBThreadId, DelphiReportId, DelphiReportIssueId, ProjectTypeId, delphi_report_item::{ DBDelphiReportIssue, DBDelphiReportIssueDetails, DelphiSeverity, @@ -179,7 +179,10 @@ async fn search_projects( } #[derive(Debug)] - struct IssueRecord {} + struct IssueRecord { + file_name: String, + file_size: u64, + } check_is_moderator_from_headers( &req, @@ -202,6 +205,7 @@ async fn search_projects( let mut project_records = IndexMap::::new(); let mut project_ids = Vec::::new(); let mut thread_ids = Vec::::new(); + let mut file_ids = Vec::::new(); let mut rows = sqlx::query!( r#" @@ -212,6 +216,8 @@ async fn search_projects( dr.created AS "report_created!", dri.id AS "issue_id", dri.issue_type AS "issue_type?", + f.filename AS "file_name?", + f.size AS "file_size?", drid.internal_class_name AS "issue_detail_class_name?", drid.decompiled_source AS "issue_detail_decompiled_source?", drid.severity AS "issue_detail_severity?: DelphiSeverity" @@ -278,7 +284,7 @@ async fn search_projects( created: row.report_created, issues: IndexMap::new(), }); - report.issues.entry(row.issue) + // report.issues.entry(row.issue).or_inser // .push(ReportRecord { // created: row.report_created, // issues: @@ -301,23 +307,41 @@ async fn search_projects( .map(|thread| (thread.id, thread)) .collect::>(); - let projects = project_records.into_iter().map(|(project_id, reports)| { - let project = - projects.get(&project_id).wrap_internal_err_with(|| { - eyre!("no fetched project with ID {project_id:?}") - })?; - let thread = threads - .get(&DBThreadId::from(project.thread_id)) - .wrap_internal_err_with(|| { - eyre!("no fetched 
thread with ID {:?}", project.thread_id) - })?; - Ok::<_, ApiError>(ProjectReview { - project: project.clone(), - project_owner: (), - thread: thread.clone(), - reports, + let projects = project_records + .into_iter() + .map(|(project_id, project_record)| { + let project = + projects.get(&project_id).wrap_internal_err_with(|| { + eyre!("no fetched project with ID {project_id:?}") + })?; + let thread = threads + .get(&DBThreadId::from(project.thread_id)) + .wrap_internal_err_with(|| { + eyre!("no fetched thread with ID {:?}", project.thread_id) + })?; + Ok::<_, ApiError>(ProjectReview { + project: project.clone(), + project_owner: (), + thread: thread.clone(), + reports: project_record + .reports + .into_iter() + .map(|(_, report_record)| ProjectReport { + created_at: report_record.created, + flag_reason: FlagReason::Delphi, + files: report_record + .issues + .into_iter() + .map(|(_, issue_record)| FileReview { + file_name: issue_record.file_name, + file_size: issue_record.file_size, + }) + .collect(), + }) + .collect(), + }) }) - }); + .collect::, _>>()?; Ok(web::Json(projects)) } From 2aac5ab245aeb2fd4fca287476fac22bc9b795fc Mon Sep 17 00:00:00 2001 From: aecsocket Date: Thu, 13 Nov 2025 14:52:56 +0000 Subject: [PATCH 020/104] wip: returning more data --- .../20250810155316_delphi-reports.sql | 2 +- .../src/database/models/delphi_report_item.rs | 19 ++- .../routes/internal/moderation/tech_review.rs | 123 +++++++++++++++--- 3 files changed, 118 insertions(+), 26 deletions(-) diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql index e2851c18d0..7cbd19afa7 100644 --- a/apps/labrinth/migrations/20250810155316_delphi-reports.sql +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -1,6 +1,6 @@ CREATE TYPE delphi_severity AS ENUM ('low', 'medium', 'high', 'severe'); -CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'approved', 'rejected'); +CREATE TYPE 
delphi_report_issue_status AS ENUM ('pending', 'safe', 'unsafe'); -- A Delphi analysis report for a project version CREATE TABLE delphi_reports ( diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index 1a3eff7fe9..a2b4cfcb33 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -87,20 +87,29 @@ pub struct DBDelphiReportIssue { /// An status a Delphi report issue can have. #[derive( - Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, + Deserialize, + Serialize, + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + sqlx::Type, + utoipa::ToSchema, )] #[serde(rename_all = "snake_case")] #[sqlx(type_name = "delphi_report_issue_status", rename_all = "snake_case")] pub enum DelphiReportIssueStatus { /// The issue is pending review by the moderation team. Pending, - /// The issue has been approved (i.e., reviewed as a valid, true positive). - /// The affected artifact has thus been verified to be potentially malicious. - Approved, /// The issue has been rejected (i.e., reviewed as a false positive). /// The affected artifact has thus been verified to be clean, other issues /// with it notwithstanding. - Rejected, + Safe, + /// The issue has been approved (i.e., reviewed as a valid, true positive). + /// The affected artifact has thus been verified to be potentially malicious. 
+ Unsafe, } impl Display for DelphiReportIssueStatus { diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 3356ad00c3..9a626c02fb 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -14,21 +14,22 @@ use crate::{ DBProject, models::{ DBFileId, DBProjectId, DBThread, DBThreadId, DelphiReportId, - DelphiReportIssueId, ProjectTypeId, + DelphiReportIssueDetailsId, DelphiReportIssueId, ProjectTypeId, delphi_report_item::{ - DBDelphiReportIssue, DBDelphiReportIssueDetails, DelphiSeverity, + DBDelphiReportIssue, DBDelphiReportIssueDetails, + DelphiReportIssueStatus, DelphiSeverity, }, }, redis::RedisPool, }, models::{pats::Scopes, projects::Project, threads::Thread}, queue::session::AuthQueue, - routes::ApiError, + routes::{ApiError, internal::moderation::Ownership}, util::error::Context, }; pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { - cfg.service(search_projects); + cfg.service(search_projects).service(update_issue); } #[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] @@ -83,7 +84,7 @@ impl fmt::Display for SearchProjectsSort { #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] pub struct ProjectReview { pub project: Project, - pub project_owner: (), + pub project_owner: Ownership, pub thread: DBThread, pub reports: Vec, } @@ -123,7 +124,7 @@ pub struct FileReview { /// Name of the flagged file. pub file_name: String, /// Size of the flagged file, in bytes. - pub file_size: u64, + pub file_size: i32, /// What issues appeared in the file. pub issues: Vec, } @@ -143,6 +144,8 @@ pub struct FileIssue { pub kind: String, /// How important is this issue, as flagged by Delphi? pub severity: DelphiSeverity, + /// Is this issue valid (malicious) or a false positive (safe)? 
+ pub status: DelphiReportIssueStatus, /// Details of why this issue might have been raised, such as what file it /// was found in. pub details: Vec, @@ -181,7 +184,9 @@ async fn search_projects( #[derive(Debug)] struct IssueRecord { file_name: String, - file_size: u64, + file_size: i32, + issue_type: String, + details: IndexMap, } check_is_moderator_from_headers( @@ -214,10 +219,11 @@ async fn search_projects( m.id AS "project_id!: DBProjectId", t.id AS "project_thread_id!: DBThreadId", dr.created AS "report_created!", - dri.id AS "issue_id", + dri.id AS "issue_id?: DelphiReportIssueId", dri.issue_type AS "issue_type?", f.filename AS "file_name?", f.size AS "file_size?", + drid.id AS "issue_detail_id?: DelphiReportIssueDetailsId", drid.internal_class_name AS "issue_detail_class_name?", drid.decompiled_source AS "issue_detail_decompiled_source?", drid.severity AS "issue_detail_severity?: DelphiSeverity" @@ -277,21 +283,46 @@ async fn search_projects( } }); let report = - project - .reports - .entry(row.report_id) - .or_insert(|| ReportRecord { + project.reports.entry(row.report_id).or_insert_with(|| { + ReportRecord { created: row.report_created, issues: IndexMap::new(), + } + }); + + let ( + Some(issue_id), + Some(file_name), + Some(file_size), + Some(issue_type), + ) = (row.issue_id, row.file_name, row.file_size, row.issue_type) + else { + continue; + }; + let issue = + report + .issues + .entry(issue_id) + .or_insert_with(|| IssueRecord { + file_name, + file_size, + issue_type, + details: IndexMap::new(), }); - // report.issues.entry(row.issue).or_inser - // .push(ReportRecord { - // created: row.report_created, - // issues: - // flag_reason: FlagReason::Delphi, - // files: - // created: row.report_created, - // }); + + let (Some(issue_detail_id), Some(class_name), Some(decompiled_source)) = ( + row.issue_detail_id, + row.issue_detail_class_name, + row.issue_detail_decompiled_source, + ) else { + continue; + }; + 
issue.details.entry(issue_detail_id).or_insert_with(|| { + FileIssueDetails { + class_name, + decompiled_source, + } + }); } let projects = DBProject::get_many_ids(&project_ids, &**pool, &redis) @@ -335,6 +366,15 @@ async fn search_projects( .map(|(_, issue_record)| FileReview { file_name: issue_record.file_name, file_size: issue_record.file_size, + issues: issue_record + .details + .into_iter() + .map(|(issue_id, detail)| FileIssue { + issue_id, + kind: issue_record, + kind: details, + }) + .collect(), }) .collect(), }) @@ -345,3 +385,46 @@ async fn search_projects( Ok(web::Json(projects)) } + +/// Updates the state of a technical review issue. +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct UpdateIssue { + /// Status to set the issue to. + pub status: DelphiReportIssueStatus, +} + +#[utoipa::path] +#[post("/issue/{id}")] +async fn update_issue( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + update_req: web::Json, + path: web::Path<(DelphiReportIssueId,)>, +) -> Result<(), ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_WRITE, + ) + .await?; + let (issue_id,) = path.into_inner(); + + sqlx::query!( + " + UPDATE delphi_report_issues + SET status = $1 + WHERE id = $2 + ", + update_req.status as DelphiReportIssueStatus, + issue_id as DelphiReportIssueId, + ) + .execute(&**pool) + .await + .wrap_internal_err("failed to update issue")?; + + Ok(()) +} From a48749504fcca4f027d9db1e242a716fdaa9e64b Mon Sep 17 00:00:00 2001 From: aecsocket Date: Thu, 13 Nov 2025 18:20:09 +0000 Subject: [PATCH 021/104] wip --- ...724e9a4d5b9765d52305f99f859f939c2e854.json | 4 +- ...c9e2bd27ac9a87987eafd79b06f1c4ecdb659.json | 26 ++++ ...fc053676964380451b3f461e3276f3a26bbff.json | 8 +- ...956b0bf816248c131c16954793f0eda09c1ec.json | 103 +++++++++++++ apps/labrinth/src/database/models/ids.rs | 2 +- .../src/routes/internal/moderation/mod.rs | 77 ++-------- 
.../routes/internal/moderation/ownership.rs | 143 ++++++++++++++++++ .../routes/internal/moderation/tech_review.rs | 122 +++++++++------ 8 files changed, 369 insertions(+), 116 deletions(-) create mode 100644 apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json create mode 100644 apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json create mode 100644 apps/labrinth/src/routes/internal/moderation/ownership.rs diff --git a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json index 31ff65e350..7e30ece2ec 100644 --- a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json +++ b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json @@ -19,8 +19,8 @@ "kind": { "Enum": [ "pending", - "approved", - "rejected" + "safe", + "unsafe" ] } } diff --git a/apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json b/apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json new file mode 100644 index 0000000000..216435cf2b --- /dev/null +++ b/apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE delphi_report_issues\n SET status = $1\n WHERE id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + }, + "Int8" + ] + }, + "nullable": [] + }, + "hash": "b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659" +} diff --git a/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json 
b/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json index f31751fe42..7e536646ef 100644 --- a/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json +++ b/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json @@ -27,8 +27,8 @@ "kind": { "Enum": [ "pending", - "approved", - "rejected" + "safe", + "unsafe" ] } } @@ -96,8 +96,8 @@ "kind": { "Enum": [ "pending", - "approved", - "rejected" + "safe", + "unsafe" ] } } diff --git a/apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json b/apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json new file mode 100644 index 0000000000..c39018099c --- /dev/null +++ b/apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json @@ -0,0 +1,103 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n dr.id AS \"report_id!: DelphiReportId\",\n m.id AS \"project_id!: DBProjectId\",\n t.id AS \"project_thread_id!: DBThreadId\",\n dr.created AS \"report_created!\",\n dri.id AS \"issue_id?: DelphiReportIssueId\",\n dri.issue_type AS \"issue_type?\",\n f.filename AS \"file_name?\",\n f.size AS \"file_size?\",\n drid.id AS \"issue_detail_id?: DelphiReportIssueDetailsId\",\n drid.internal_class_name AS \"issue_detail_class_name?\",\n drid.decompiled_source AS \"issue_detail_decompiled_source?\",\n drid.severity AS \"issue_detail_severity?: DelphiSeverity\"\n FROM delphi_reports dr\n\n -- fetch the project this report is for, its type, and thread\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n INNER JOIN categories c ON c.id = mc.joining_category_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- fetch report issues and details\n LEFT JOIN delphi_report_issues 
dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "report_id!: DelphiReportId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_id!: DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "project_thread_id!: DBThreadId", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "report_created!", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "issue_id?: DelphiReportIssueId", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "issue_type?", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "file_name?", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "file_size?", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "issue_detail_id?: DelphiReportIssueDetailsId", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "issue_detail_class_name?", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "issue_detail_decompiled_source?", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "issue_detail_severity?: DelphiSeverity", + "type_info": { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec" +} diff --git a/apps/labrinth/src/database/models/ids.rs 
b/apps/labrinth/src/database/models/ids.rs index 266cafa2e0..10f8cc8ae9 100644 --- a/apps/labrinth/src/database/models/ids.rs +++ b/apps/labrinth/src/database/models/ids.rs @@ -94,7 +94,7 @@ macro_rules! generate_bulk_ids { macro_rules! impl_db_id_interface { ($id_struct:ident, $db_id_struct:ident, $(, generator: $generator_function:ident @ $db_table:expr, $(bulk_generator: $bulk_generator_function:ident,)?)?) => { - #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)] + #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash, utoipa::ToSchema)] #[sqlx(transparent)] pub struct $db_id_struct(pub i64); diff --git a/apps/labrinth/src/routes/internal/moderation/mod.rs b/apps/labrinth/src/routes/internal/moderation/mod.rs index e5968d67e6..29b7331253 100644 --- a/apps/labrinth/src/routes/internal/moderation/mod.rs +++ b/apps/labrinth/src/routes/internal/moderation/mod.rs @@ -1,8 +1,7 @@ use super::ApiError; use crate::database; -use crate::database::models::{DBOrganization, DBTeamId, DBTeamMember, DBUser}; use crate::database::redis::RedisPool; -use crate::models::ids::{OrganizationId, TeamId}; +use crate::models::ids::OrganizationId; use crate::models::projects::{Project, ProjectStatus}; use crate::queue::moderation::{ApprovalType, IdentifiedFile, MissingMetadata}; use crate::queue::session::AuthQueue; @@ -10,11 +9,12 @@ use crate::util::error::Context; use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; use actix_web::{HttpRequest, get, post, web}; use ariadne::ids::{UserId, random_base62}; -use eyre::eyre; +use ownership::get_projects_ownership; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use std::collections::HashMap; +mod ownership; mod tech_review; pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { @@ -53,7 +53,7 @@ pub struct FetchedProject { } /// Fetched information on who owns a project. 
-#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema, Clone)] #[serde(tag = "kind", rename_all = "snake_case")] pub enum Ownership { /// Project is owned by a team, and this is the team owner. @@ -135,73 +135,20 @@ pub async fn get_projects_internal( .map(crate::models::projects::Project::from) .collect::>(); - let team_ids = projects - .iter() - .map(|project| project.team_id) - .map(DBTeamId::from) - .collect::>(); - let org_ids = projects - .iter() - .filter_map(|project| project.organization) - .collect::>(); - - let team_members = - DBTeamMember::get_from_team_full_many(&team_ids, &**pool, &redis) - .await - .wrap_internal_err("failed to fetch team members")?; - let users = DBUser::get_many_ids( - &team_members - .iter() - .map(|member| member.user_id) - .collect::>(), - &**pool, - &redis, - ) - .await - .wrap_internal_err("failed to fetch user data of team members")?; - let orgs = DBOrganization::get_many(&org_ids, &**pool, &redis) + let ownerships = get_projects_ownership(&projects, &pool, &redis) .await - .wrap_internal_err("failed to fetch organizations")?; - - let map_project = |project: Project| -> Result { - let project_id = project.id; - let ownership = if let Some(org_id) = project.organization { - let org = orgs - .iter() - .find(|org| OrganizationId::from(org.id) == org_id) - .wrap_internal_err_with(|| { - eyre!( - "project {project_id} is owned by an invalid organization {org_id}" - ) - })?; - - Ownership::Organization { - id: OrganizationId::from(org.id), - name: org.name.clone(), - icon_url: org.icon_url.clone(), - } - } else { - let team_id = project.team_id; - let team_owner = team_members.iter().find(|member| TeamId::from(member.team_id) == team_id && member.is_owner) - .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has no valid owner"))?; - let team_owner_id = team_owner.user_id; - let user = users.iter().find(|user| user.id == 
team_owner_id) - .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has owner {} which does not exist", UserId::from(team_owner_id)))?; - - Ownership::User { - id: UserId::from(user.id), - name: user.username.clone(), - icon_url: user.avatar_url.clone(), - } - }; + .wrap_internal_err("failed to fetch project ownerships")?; - Ok(FetchedProject { ownership, project }) - }; + let map_project = + |(project, ownership): (Project, Ownership)| -> FetchedProject { + FetchedProject { ownership, project } + }; let projects = projects .into_iter() + .zip(ownerships) .map(map_project) - .collect::, _>>()?; + .collect::>(); Ok(web::Json(projects)) } diff --git a/apps/labrinth/src/routes/internal/moderation/ownership.rs b/apps/labrinth/src/routes/internal/moderation/ownership.rs new file mode 100644 index 0000000000..46fba8af1a --- /dev/null +++ b/apps/labrinth/src/routes/internal/moderation/ownership.rs @@ -0,0 +1,143 @@ +use crate::database::models::{DBOrganization, DBTeamId, DBTeamMember, DBUser}; +use crate::database::redis::RedisPool; +use crate::models::ids::OrganizationId; +use crate::routes::internal::moderation::Ownership; +use crate::util::error::Context; +use ariadne::ids::UserId; +use eyre::eyre; +use sqlx::PgPool; + +/// Fetches ownership information for a single project. +pub async fn get_project_ownership( + project_id: crate::models::ids::ProjectId, + team_id: crate::models::ids::TeamId, + organization: Option, + pool: &PgPool, + redis: &RedisPool, +) -> Result { + if let Some(org_id) = organization { + let org = DBOrganization::get(&org_id.to_string(), pool, redis) + .await + .wrap_internal_err("failed to fetch organization")? 
+ .ok_or_else(|| { + crate::routes::ApiError::Internal( + eyre!("project {project_id} is owned by an invalid organization {org_id}") + ) + })?; + + Ok(Ownership::Organization { + id: OrganizationId::from(org.id), + name: org.name.clone(), + icon_url: org.icon_url, + }) + } else { + let team_members = DBTeamMember::get_from_team_full( + DBTeamId::from(team_id), + pool, + redis, + ) + .await + .wrap_internal_err("failed to fetch team members")?; + + let team_owner = team_members + .iter() + .find(|member| member.is_owner) + .ok_or_else(|| { + crate::routes::ApiError::Internal(eyre!( + "project {project_id} is owned by a team {team_id} which has no valid owner" + )) + })?; + + let user = DBUser::get(&team_owner.user_id.0.to_string(), pool, redis) + .await + .wrap_internal_err("failed to fetch user data")? + .ok_or_else(|| { + crate::routes::ApiError::Internal(eyre!( + "project {project_id} is owned by a team {team_id} which has owner {} which does not exist", + ariadne::ids::UserId::from(team_owner.user_id) + )) + })?; + + Ok(Ownership::User { + id: ariadne::ids::UserId::from(user.id), + name: user.username.clone(), + icon_url: user.avatar_url, + }) + } +} + +/// Fetches ownership information for multiple projects efficiently +pub async fn get_projects_ownership( + projects: &[crate::models::projects::Project], + pool: &PgPool, + redis: &RedisPool, +) -> Result, crate::routes::ApiError> { + let team_ids = projects + .iter() + .map(|project| project.team_id) + .map(DBTeamId::from) + .collect::>(); + let org_ids = projects + .iter() + .filter_map(|project| project.organization) + .collect::>(); + + let team_members = + DBTeamMember::get_from_team_full_many(&team_ids, pool, redis) + .await + .wrap_internal_err("failed to fetch team members")?; + let users = DBUser::get_many_ids( + &team_members + .iter() + .map(|member| member.user_id) + .collect::>(), + pool, + redis, + ) + .await + .wrap_internal_err("failed to fetch user data of team members")?; + let orgs = 
DBOrganization::get_many(&org_ids, pool, redis) + .await + .wrap_internal_err("failed to fetch organizations")?; + + let mut ownerships = Vec::with_capacity(projects.len()); + + for project in projects { + let project_id = project.id; + let ownership = if let Some(org_id) = project.organization { + let org = orgs + .iter() + .find(|org| OrganizationId::from(org.id) == org_id) + .wrap_internal_err_with(|| { + eyre!( + "project {project_id} is owned by an invalid organization {org_id}" + ) + })?; + + Ownership::Organization { + id: OrganizationId::from(org.id), + name: org.name.clone(), + icon_url: org.icon_url.clone(), + } + } else { + let team_id = project.team_id; + let team_owner = team_members.iter().find(|member| { + crate::models::ids::TeamId::from(member.team_id) == team_id && member.is_owner + }) + .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has no valid owner"))?; + let team_owner_id = team_owner.user_id; + let user = users.iter().find(|user| user.id == team_owner_id) + .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has owner {} which does not exist", UserId::from(team_owner_id)))?; + + Ownership::User { + id: ariadne::ids::UserId::from(user.id), + name: user.username.clone(), + icon_url: user.avatar_url.clone(), + } + }; + + ownerships.push(ownership); + } + + Ok(ownerships) +} diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 9a626c02fb..691aa33b35 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -8,6 +8,7 @@ use serde::{Deserialize, Serialize}; use sqlx::PgPool; use tokio_stream::StreamExt; +use super::ownership::get_projects_ownership; use crate::{ auth::check_is_moderator_from_headers, database::{ @@ -15,14 +16,11 @@ use crate::{ models::{ DBFileId, DBProjectId, DBThread, DBThreadId, 
DelphiReportId, DelphiReportIssueDetailsId, DelphiReportIssueId, ProjectTypeId, - delphi_report_item::{ - DBDelphiReportIssue, DBDelphiReportIssueDetails, - DelphiReportIssueStatus, DelphiSeverity, - }, + delphi_report_item::{DelphiReportIssueStatus, DelphiSeverity}, }, redis::RedisPool, }, - models::{pats::Scopes, projects::Project, threads::Thread}, + models::{pats::Scopes, projects::Project}, queue::session::AuthQueue, routes::{ApiError, internal::moderation::Ownership}, util::error::Context, @@ -32,6 +30,7 @@ pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { cfg.service(search_projects).service(update_issue); } +/// Arguments for searching project technical reviews. #[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct SearchProjects { #[serde(default = "default_limit")] @@ -148,12 +147,12 @@ pub struct FileIssue { pub status: DelphiReportIssueStatus, /// Details of why this issue might have been raised, such as what file it /// was found in. - pub details: Vec, + pub details: Vec, } /// Occurrence of a [`FileIssue`] in a specific class in a scanned JAR file. #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] -pub struct FileIssueDetails { +pub struct FileIssueDetail { /// Name of the Java class in which this issue was found. pub class_name: String, /// Decompiled, pretty-printed source of the Java class. 
@@ -178,15 +177,21 @@ async fn search_projects( #[derive(Debug)] struct ReportRecord { created: DateTime, - issues: IndexMap, + files: IndexMap, } #[derive(Debug)] - struct IssueRecord { + struct FileRecord { file_name: String, file_size: i32, + issues: IndexMap, + } + + #[derive(Debug)] + struct IssueRecord { issue_type: String, - details: IndexMap, + status: DelphiReportIssueStatus, + details: IndexMap, } check_is_moderator_from_headers( @@ -210,19 +215,22 @@ async fn search_projects( let mut project_records = IndexMap::::new(); let mut project_ids = Vec::::new(); let mut thread_ids = Vec::::new(); - let mut file_ids = Vec::::new(); + let _file_ids = Vec::::new(); let mut rows = sqlx::query!( r#" SELECT dr.id AS "report_id!: DelphiReportId", + f.id AS "file_id!: DBFileId", + f.filename AS "file_name!", + f.size AS "file_size!", m.id AS "project_id!: DBProjectId", t.id AS "project_thread_id!: DBThreadId", dr.created AS "report_created!", - dri.id AS "issue_id?: DelphiReportIssueId", - dri.issue_type AS "issue_type?", - f.filename AS "file_name?", - f.size AS "file_size?", + dri.id AS "issue_id!: DelphiReportIssueId", + dri.issue_type AS "issue_type!", + dri.status AS "issue_status!: DelphiReportIssueStatus", + -- maybe null drid.id AS "issue_detail_id?: DelphiReportIssueDetailsId", drid.internal_class_name AS "issue_detail_class_name?", drid.decompiled_source AS "issue_detail_decompiled_source?", @@ -238,7 +246,7 @@ async fn search_projects( INNER JOIN threads t ON t.mod_id = m.id -- fetch report issues and details - LEFT JOIN delphi_report_issues dri ON dri.report_id = dr.id + INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id -- filtering @@ -286,27 +294,24 @@ async fn search_projects( project.reports.entry(row.report_id).or_insert_with(|| { ReportRecord { created: row.report_created, - issues: IndexMap::new(), + files: IndexMap::new(), } }); - - let ( - Some(issue_id), - 
Some(file_name), - Some(file_size), - Some(issue_type), - ) = (row.issue_id, row.file_name, row.file_size, row.issue_type) - else { - continue; - }; - let issue = + let file = report - .issues - .entry(issue_id) + .files + .entry(row.file_id) + .or_insert_with(|| FileRecord { + file_name: row.file_name, + file_size: row.file_size, + issues: IndexMap::new(), + }); + let issue = + file.issues + .entry(row.issue_id) .or_insert_with(|| IssueRecord { - file_name, - file_size, - issue_type, + issue_type: row.issue_type, + status: row.issue_status, details: IndexMap::new(), }); @@ -318,7 +323,7 @@ async fn search_projects( continue; }; issue.details.entry(issue_detail_id).or_insert_with(|| { - FileIssueDetails { + FileIssueDetail { class_name, decompiled_source, } @@ -338,6 +343,18 @@ async fn search_projects( .map(|thread| (thread.id, thread)) .collect::>(); + let project_list: Vec = projects.values().cloned().collect(); + + let ownerships = get_projects_ownership(&project_list, &pool, &redis) + .await + .wrap_internal_err("failed to fetch project ownerships")?; + + let ownership_map = projects + .keys() + .copied() + .zip(ownerships) + .collect::>(); + let projects = project_records .into_iter() .map(|(project_id, project_record)| { @@ -352,7 +369,12 @@ async fn search_projects( })?; Ok::<_, ApiError>(ProjectReview { project: project.clone(), - project_owner: (), + project_owner: ownership_map + .get(&project_id) + .cloned() + .wrap_internal_err_with(|| { + eyre!("no owner for {project_id:?}") + })?, thread: thread.clone(), reports: project_record .reports @@ -361,18 +383,30 @@ async fn search_projects( created_at: report_record.created, flag_reason: FlagReason::Delphi, files: report_record - .issues + .files .into_iter() - .map(|(_, issue_record)| FileReview { - file_name: issue_record.file_name, - file_size: issue_record.file_size, - issues: issue_record - .details + .map(|(_, file)| FileReview { + file_name: file.file_name, + file_size: file.file_size, + issues: file 
+ .issues .into_iter() - .map(|(issue_id, detail)| FileIssue { + .map(|(issue_id, issue)| FileIssue { issue_id, - kind: issue_record, - kind: details, + kind: issue.issue_type.clone(), + status: issue.status, + details: issue + .details + .into_iter() + .map(|(_, detail)| { + FileIssueDetail { + class_name: detail + .class_name, + decompiled_source: detail + .decompiled_source, + } + }) + .collect(), }) .collect(), }) From 54c053ad71635542737bfa86cfa0f7398d1d15c3 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Thu, 13 Nov 2025 18:39:19 +0000 Subject: [PATCH 022/104] Fix up db query --- ...6bcab9fcba55df818cab6e98721a324659526.json | 144 ++++++++++++++++++ ...956b0bf816248c131c16954793f0eda09c1ec.json | 103 ------------- .../routes/internal/moderation/ownership.rs | 59 ------- .../routes/internal/moderation/tech_review.rs | 23 ++- 4 files changed, 163 insertions(+), 166 deletions(-) create mode 100644 apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json delete mode 100644 apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json diff --git a/apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json b/apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json new file mode 100644 index 0000000000..de08e37ea9 --- /dev/null +++ b/apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json @@ -0,0 +1,144 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n dr.id AS \"report_id!: DelphiReportId\",\n f.id AS \"file_id!: DBFileId\",\n f.filename AS \"file_name!\",\n f.size AS \"file_size!\",\n m.id AS \"project_id!: DBProjectId\",\n t.id AS \"project_thread_id!: DBThreadId\",\n dr.created AS \"report_created!\",\n dr.severity AS \"report_severity!: DelphiSeverity\",\n dri.id AS \"issue_id!: DelphiReportIssueId\",\n dri.issue_type AS \"issue_type!\",\n dri.status AS \"issue_status!: 
DelphiReportIssueStatus\",\n -- maybe null\n drid.id AS \"issue_detail_id?: DelphiReportIssueDetailsId\",\n drid.internal_class_name AS \"issue_detail_class_name?\",\n drid.decompiled_source AS \"issue_detail_decompiled_source?\",\n drid.severity AS \"issue_detail_severity?: DelphiSeverity\"\n FROM delphi_reports dr\n\n -- fetch the project this report is for, its type, and thread\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n INNER JOIN categories c ON c.id = mc.joining_category_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- fetch report issues and details\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "report_id!: DelphiReportId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "file_id!: DBFileId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "file_name!", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "file_size!", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "project_id!: DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "project_thread_id!: DBThreadId", + "type_info": "Int8" + }, + { + "ordinal": 6, + "name": "report_created!", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "report_severity!: DelphiSeverity", + "type_info": { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + }, + { + 
"ordinal": 8, + "name": "issue_id!: DelphiReportIssueId", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "issue_type!", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "issue_status!: DelphiReportIssueStatus", + "type_info": { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + } + }, + { + "ordinal": 11, + "name": "issue_detail_id?: DelphiReportIssueDetailsId", + "type_info": "Int8" + }, + { + "ordinal": 12, + "name": "issue_detail_class_name?", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "issue_detail_decompiled_source?", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "issue_detail_severity?: DelphiSeverity", + "type_info": { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526" +} diff --git a/apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json b/apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json deleted file mode 100644 index c39018099c..0000000000 --- a/apps/labrinth/.sqlx/query-e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n dr.id AS \"report_id!: DelphiReportId\",\n m.id AS \"project_id!: DBProjectId\",\n t.id AS \"project_thread_id!: DBThreadId\",\n dr.created AS \"report_created!\",\n dri.id AS \"issue_id?: DelphiReportIssueId\",\n dri.issue_type AS \"issue_type?\",\n f.filename AS \"file_name?\",\n f.size AS \"file_size?\",\n drid.id AS \"issue_detail_id?: 
DelphiReportIssueDetailsId\",\n drid.internal_class_name AS \"issue_detail_class_name?\",\n drid.decompiled_source AS \"issue_detail_decompiled_source?\",\n drid.severity AS \"issue_detail_severity?: DelphiSeverity\"\n FROM delphi_reports dr\n\n -- fetch the project this report is for, its type, and thread\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n INNER JOIN categories c ON c.id = mc.joining_category_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- fetch report issues and details\n LEFT JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "report_id!: DelphiReportId", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "project_id!: DBProjectId", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "project_thread_id!: DBThreadId", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "report_created!", - "type_info": "Timestamptz" - }, - { - "ordinal": 4, - "name": "issue_id?: DelphiReportIssueId", - "type_info": "Int8" - }, - { - "ordinal": 5, - "name": "issue_type?", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "file_name?", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "file_size?", - "type_info": "Int4" - }, - { - "ordinal": 8, - "name": "issue_detail_id?: DelphiReportIssueDetailsId", - "type_info": "Int8" - }, - { - "ordinal": 9, - "name": "issue_detail_class_name?", - "type_info": "Text" - }, - { - "ordinal": 10, - 
"name": "issue_detail_decompiled_source?", - "type_info": "Text" - }, - { - "ordinal": 11, - "name": "issue_detail_severity?: DelphiSeverity", - "type_info": { - "Custom": { - "name": "delphi_severity", - "kind": { - "Enum": [ - "low", - "medium", - "high", - "severe" - ] - } - } - } - } - ], - "parameters": { - "Left": [ - "Int4Array", - "Text", - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false, - true, - true, - true, - true - ] - }, - "hash": "e48e36c7535052093fa4f0d1759956b0bf816248c131c16954793f0eda09c1ec" -} diff --git a/apps/labrinth/src/routes/internal/moderation/ownership.rs b/apps/labrinth/src/routes/internal/moderation/ownership.rs index 46fba8af1a..5979811cee 100644 --- a/apps/labrinth/src/routes/internal/moderation/ownership.rs +++ b/apps/labrinth/src/routes/internal/moderation/ownership.rs @@ -7,65 +7,6 @@ use ariadne::ids::UserId; use eyre::eyre; use sqlx::PgPool; -/// Fetches ownership information for a single project. -pub async fn get_project_ownership( - project_id: crate::models::ids::ProjectId, - team_id: crate::models::ids::TeamId, - organization: Option, - pool: &PgPool, - redis: &RedisPool, -) -> Result { - if let Some(org_id) = organization { - let org = DBOrganization::get(&org_id.to_string(), pool, redis) - .await - .wrap_internal_err("failed to fetch organization")? 
- .ok_or_else(|| { - crate::routes::ApiError::Internal( - eyre!("project {project_id} is owned by an invalid organization {org_id}") - ) - })?; - - Ok(Ownership::Organization { - id: OrganizationId::from(org.id), - name: org.name.clone(), - icon_url: org.icon_url, - }) - } else { - let team_members = DBTeamMember::get_from_team_full( - DBTeamId::from(team_id), - pool, - redis, - ) - .await - .wrap_internal_err("failed to fetch team members")?; - - let team_owner = team_members - .iter() - .find(|member| member.is_owner) - .ok_or_else(|| { - crate::routes::ApiError::Internal(eyre!( - "project {project_id} is owned by a team {team_id} which has no valid owner" - )) - })?; - - let user = DBUser::get(&team_owner.user_id.0.to_string(), pool, redis) - .await - .wrap_internal_err("failed to fetch user data")? - .ok_or_else(|| { - crate::routes::ApiError::Internal(eyre!( - "project {project_id} is owned by a team {team_id} which has owner {} which does not exist", - ariadne::ids::UserId::from(team_owner.user_id) - )) - })?; - - Ok(Ownership::User { - id: ariadne::ids::UserId::from(user.id), - name: user.username.clone(), - icon_url: user.avatar_url, - }) - } -} - /// Fetches ownership information for multiple projects efficiently pub async fn get_projects_ownership( projects: &[crate::models::projects::Project], diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 691aa33b35..286379d3c3 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -94,6 +94,8 @@ pub struct ProjectReport { pub created_at: DateTime, /// Why this project was flagged. pub flag_reason: FlagReason, + /// According to this report, how likely is the project malicious? + pub severity: DelphiSeverity, /// What files were flagged in this review. 
pub files: Vec, } @@ -141,8 +143,6 @@ pub struct FileIssue { /// Labrinth does not know the full set of kinds of issues, so this is kept /// as a string. pub kind: String, - /// How important is this issue, as flagged by Delphi? - pub severity: DelphiSeverity, /// Is this issue valid (malicious) or a false positive (safe)? pub status: DelphiReportIssueStatus, /// Details of why this issue might have been raised, such as what file it @@ -157,6 +157,8 @@ pub struct FileIssueDetail { pub class_name: String, /// Decompiled, pretty-printed source of the Java class. pub decompiled_source: String, + /// How important is this issue, as flagged by Delphi? + pub severity: DelphiSeverity, } /// Searches all projects which are awaiting technical review. @@ -177,6 +179,7 @@ async fn search_projects( #[derive(Debug)] struct ReportRecord { created: DateTime, + severity: DelphiSeverity, files: IndexMap, } @@ -227,6 +230,7 @@ async fn search_projects( m.id AS "project_id!: DBProjectId", t.id AS "project_thread_id!: DBThreadId", dr.created AS "report_created!", + dr.severity AS "report_severity!: DelphiSeverity", dri.id AS "issue_id!: DelphiReportIssueId", dri.issue_type AS "issue_type!", dri.status AS "issue_status!: DelphiReportIssueStatus", @@ -294,6 +298,7 @@ async fn search_projects( project.reports.entry(row.report_id).or_insert_with(|| { ReportRecord { created: row.report_created, + severity: row.report_severity, files: IndexMap::new(), } }); @@ -315,17 +320,25 @@ async fn search_projects( details: IndexMap::new(), }); - let (Some(issue_detail_id), Some(class_name), Some(decompiled_source)) = ( + let ( + Some(issue_detail_id), + Some(class_name), + Some(decompiled_source), + Some(severity), + ) = ( row.issue_detail_id, row.issue_detail_class_name, row.issue_detail_decompiled_source, - ) else { + row.issue_detail_severity, + ) + else { continue; }; issue.details.entry(issue_detail_id).or_insert_with(|| { FileIssueDetail { class_name, decompiled_source, + severity, } }); } @@ 
-382,6 +395,7 @@ async fn search_projects( .map(|(_, report_record)| ProjectReport { created_at: report_record.created, flag_reason: FlagReason::Delphi, + severity: report_record.severity, files: report_record .files .into_iter() @@ -404,6 +418,7 @@ async fn search_projects( .class_name, decompiled_source: detail .decompiled_source, + severity: detail.severity, } }) .collect(), From 3464333bc6171d0e9c100ec0237b48fcac08e5a6 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Thu, 13 Nov 2025 21:12:48 +0000 Subject: [PATCH 023/104] Delphi configuration to talk to Labrinth --- apps/labrinth/src/routes/internal/delphi.rs | 2 ++ docker-compose.yml | 9 +++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 6960dfbcd3..db9e8a034e 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -133,6 +133,8 @@ async fn ingest_report( redis: web::Data, web::Json(report): web::Json, ) -> Result { + tracing::error!("!! 
INGEST !!"); + if report.issues.is_empty() { info!("No issues found for file {}", report.url); return Ok(HttpResponse::NoContent().finish()); diff --git a/docker-compose.yml b/docker-compose.yml index 01074d4aa5..b7d44ee7b9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -122,13 +122,18 @@ services: ports: - '59999:59999' environment: - LABRINTH_ENDPOINT: http://labrinth:8000/_internal/delphi/ingest + LABRINTH_ENDPOINT: http://host.docker.internal:8000/_internal/delphi/ingest LABRINTH_ADMIN_KEY: feedbeef healthcheck: - test: ['CMD', 'wget', '-q', '-O/dev/null', 'http://localhost:59999/health'] + test: + ['CMD', 'wget', '-q', '-O/dev/null', 'http://localhost:59999/health'] interval: 3s timeout: 5s retries: 3 + extra_hosts: + # Delphi must send a message on a webhook to our backend, + # so it must have access to our local network + - 'host.docker.internal:host-gateway' volumes: meilisearch-data: db-data: From 3c792fc9df7afac98ce08557ead5a3edded0bd67 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sat, 15 Nov 2025 23:09:25 +0000 Subject: [PATCH 024/104] Get Delphi working with Labrinth --- .../20250810155316_delphi-reports.sql | 3 +- .../src/database/models/delphi_report_item.rs | 10 ++-- apps/labrinth/src/routes/internal/delphi.rs | 47 ++++++++++++++----- .../routes/internal/moderation/tech_review.rs | 16 +++---- docker-compose.yml | 6 ++- 5 files changed, 56 insertions(+), 26 deletions(-) diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql index 7cbd19afa7..2aa7a75913 100644 --- a/apps/labrinth/migrations/20250810155316_delphi-reports.sql +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -39,7 +39,8 @@ CREATE TABLE delphi_report_issue_details ( issue_id BIGINT NOT NULL REFERENCES delphi_report_issues (id) ON DELETE CASCADE ON UPDATE CASCADE, - internal_class_name TEXT NOT NULL, + key TEXT NOT NULL, + file_path TEXT NOT NULL, decompiled_source TEXT, data 
JSONB NOT NULL, severity DELPHI_SEVERITY NOT NULL diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index a2b4cfcb33..be8b698ad9 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -249,8 +249,9 @@ impl DBDelphiReportIssue { #[derive(Debug, Deserialize, Serialize)] pub struct DBDelphiReportIssueDetails { pub id: DelphiReportIssueDetailsId, + pub key: String, pub issue_id: DelphiReportIssueId, - pub internal_class_name: InternalJavaClassName, + pub file_path: String, pub decompiled_source: Option, pub data: Json>, pub severity: DelphiSeverity, @@ -263,12 +264,13 @@ impl DBDelphiReportIssueDetails { ) -> Result { Ok(DelphiReportIssueDetailsId(sqlx::query_scalar!( " - INSERT INTO delphi_report_issue_details (issue_id, internal_class_name, decompiled_source, data, severity) - VALUES ($1, $2, $3, $4, $5) + INSERT INTO delphi_report_issue_details (issue_id, key, file_path, decompiled_source, data, severity) + VALUES ($1, $2, $3, $4, $5, $6) RETURNING id ", self.issue_id as DelphiReportIssueId, - self.internal_class_name.0, + self.key, + self.file_path, self.decompiled_source.as_ref().map(|decompiled_source| &decompiled_source.0), &self.data as &Json>, self.severity as DelphiSeverity, diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index db9e8a034e..44268c4486 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -6,7 +6,7 @@ use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT}; use serde::Deserialize; use sqlx::PgPool; use tokio::sync::Mutex; -use tracing::info; +use tracing::{info, warn}; use crate::{ auth::check_is_moderator_from_headers, @@ -18,7 +18,6 @@ use crate::{ DBDelphiReport, DBDelphiReportIssue, DBDelphiReportIssueDetails, DecompiledJavaClassSource, DelphiReportIssueStatus, 
DelphiReportListOrder, DelphiSeverity, - InternalJavaClassName, }, }, redis::RedisPool, @@ -61,7 +60,8 @@ static DELPHI_CLIENT: LazyLock = LazyLock::new(|| { #[derive(Deserialize)] struct DelphiReportIssueDetails { - pub internal_class_name: InternalJavaClassName, + pub file: String, + pub key: String, pub decompiled_source: Option, pub data: HashMap, pub severity: DelphiSeverity, @@ -94,14 +94,14 @@ impl DelphiReport { for (issue, trace) in &self.issues { for DelphiReportIssueDetails { - internal_class_name, + file, decompiled_source, .. } in trace { write!( &mut message_header, - "\n issue {issue} found at class `{internal_class_name}`:\n```\n{}\n```", + "\n issue {issue} found at class `{file}`:\n```\n{}\n```", decompiled_source.as_ref().map_or( "No decompiled source available", |decompiled_source| &**decompiled_source @@ -131,13 +131,21 @@ pub struct DelphiRunParameters { async fn ingest_report( pool: web::Data, redis: web::Data, - web::Json(report): web::Json, -) -> Result { - tracing::error!("!! 
INGEST !!"); + report: web::Bytes, + // web::Json(report): web::Json, +) -> Result<(), ApiError> { + info!( + "Json: {}", + serde_json::to_string_pretty( + &serde_json::from_slice::(&report).unwrap() + ) + .unwrap() + ); + let report = serde_json::from_slice::(&report).unwrap(); if report.issues.is_empty() { info!("No issues found for file {}", report.url); - return Ok(HttpResponse::NoContent().finish()); + return Ok(()); } report.send_to_slack(&pool, &redis).await.ok(); @@ -155,6 +163,12 @@ async fn ingest_report( .upsert(&mut transaction) .await?; + warn!( + "Delphi found {} issues in file {}", + report.issues.len(), + report.url + ); + for (issue_type, issue_details) in report.issues { let issue_id = DBDelphiReportIssue { id: DelphiReportIssueId(0), // This will be set by the database @@ -176,7 +190,8 @@ async fn ingest_report( DBDelphiReportIssueDetails { id: DelphiReportIssueDetailsId(0), // This will be set by the database issue_id, - internal_class_name: issue_detail.internal_class_name, + key: issue_detail.key, + file_path: issue_detail.file, decompiled_source: issue_detail.decompiled_source, data: issue_detail.data.into(), severity: issue_detail.severity, @@ -188,7 +203,7 @@ async fn ingest_report( transaction.commit().await?; - Ok(HttpResponse::NoContent().finish()) + Ok(()) } pub async fn run( @@ -216,10 +231,18 @@ pub async fn run( run_parameters.file_id.0 ); + // fix for local file paths + // TODO: should we fix this upstream in whatever inserts the files row? + let url = if file_data.url.starts_with("/") { + format!("file://{}", file_data.url) + } else { + file_data.url + }; + DELPHI_CLIENT .post(dotenvy::var("DELPHI_URL")?) 
.json(&serde_json::json!({ - "url": file_data.url, + "url": url, "project_id": ProjectId(file_data.project_id.0 as u64), "version_id": VersionId(file_data.version_id.0 as u64), "file_id": run_parameters.file_id, diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 286379d3c3..b87e925cb2 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -67,6 +67,7 @@ pub struct SearchProjectsFilter { Deserialize, utoipa::ToSchema, )] +#[serde(rename_all = "snake_case")] pub enum SearchProjectsSort { CreatedAsc, CreatedDesc, @@ -153,8 +154,8 @@ pub struct FileIssue { /// Occurrence of a [`FileIssue`] in a specific class in a scanned JAR file. #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] pub struct FileIssueDetail { - /// Name of the Java class in which this issue was found. - pub class_name: String, + /// Name of the Java class path in which this issue was found. + pub file_path: String, /// Decompiled, pretty-printed source of the Java class. pub decompiled_source: String, /// How important is this issue, as flagged by Delphi? 
@@ -236,7 +237,7 @@ async fn search_projects( dri.status AS "issue_status!: DelphiReportIssueStatus", -- maybe null drid.id AS "issue_detail_id?: DelphiReportIssueDetailsId", - drid.internal_class_name AS "issue_detail_class_name?", + drid.file_path AS "issue_detail_file_path?", drid.decompiled_source AS "issue_detail_decompiled_source?", drid.severity AS "issue_detail_severity?: DelphiSeverity" FROM delphi_reports dr @@ -322,12 +323,12 @@ async fn search_projects( let ( Some(issue_detail_id), - Some(class_name), + Some(file_path), Some(decompiled_source), Some(severity), ) = ( row.issue_detail_id, - row.issue_detail_class_name, + row.issue_detail_file_path, row.issue_detail_decompiled_source, row.issue_detail_severity, ) @@ -336,7 +337,7 @@ async fn search_projects( }; issue.details.entry(issue_detail_id).or_insert_with(|| { FileIssueDetail { - class_name, + file_path, decompiled_source, severity, } @@ -414,8 +415,7 @@ async fn search_projects( .into_iter() .map(|(_, detail)| { FileIssueDetail { - class_name: detail - .class_name, + file_path: detail.file_path, decompiled_source: detail .decompiled_source, severity: detail.severity, diff --git a/docker-compose.yml b/docker-compose.yml index b7d44ee7b9..569278e56a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -117,7 +117,7 @@ services: delphi: profiles: - with-delphi - image: ghcr.io/modrinth/delphi:main + image: ghcr.io/modrinth/delphi:feature-schema-rework container_name: labrinth-delphi ports: - '59999:59999' @@ -130,6 +130,10 @@ services: interval: 3s timeout: 5s retries: 3 + volumes: + # Labrinth deposits version files here; + # Delphi reads them from here + - /tmp/modrinth:/tmp/modrinth:ro extra_hosts: # Delphi must send a message on a webhook to our backend, # so it must have access to our local network From 8986a1fd586781412dd24092afecb2e095c6b210 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sat, 15 Nov 2025 23:18:48 +0000 Subject: [PATCH 025/104] Add Delphi dummy fixture --- 
.../fixtures/delphi-report-2025-11-15.sql | 90 +++++++++++++++++++ .../src/database/models/delphi_report_item.rs | 9 +- 2 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 apps/labrinth/fixtures/delphi-report-2025-11-15.sql diff --git a/apps/labrinth/fixtures/delphi-report-2025-11-15.sql b/apps/labrinth/fixtures/delphi-report-2025-11-15.sql new file mode 100644 index 0000000000..389159672a --- /dev/null +++ b/apps/labrinth/fixtures/delphi-report-2025-11-15.sql @@ -0,0 +1,90 @@ +-- +-- PostgreSQL database dump +-- + +\restrict RGysBmMc8KFBQ9AssusGyNPozUiB43hdmIPxlv5KSWbX7tdW7XVMPpMginvod9K + +-- Dumped from database version 17.6 +-- Dumped by pg_dump version 17.6 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET transaction_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Data for Name: delphi_reports; Type: TABLE DATA; Schema: public; Owner: labrinth +-- + +COPY public.delphi_reports (id, file_id, delphi_version, artifact_url, created, severity) FROM stdin; +1 157529403422109 3 file:///tmp/modrinth/data/CaG4Mr66/versions/fMzLDsVA/sodium.jar 2025-11-15 23:01:30.012526+00 high +\. + + +-- +-- Data for Name: delphi_report_issues; Type: TABLE DATA; Schema: public; Owner: labrinth +-- + +COPY public.delphi_report_issues (id, report_id, issue_type, status) FROM stdin; +1 1 runtime_exec_usage pending +2 1 hardcoded_url pending +3 1 classloader_usage pending +4 1 obfuscated_names pending +5 1 main_method pending +\. 
+ + +-- +-- Data for Name: delphi_report_issue_details; Type: TABLE DATA; Schema: public; Owner: labrinth +-- + +COPY public.delphi_report_issue_details (id, issue_id, key, file_path, decompiled_source, data, severity) FROM stdin; +1 1 d670186a0e5210fc2b9332a2163849740f19bec59a99d890bef0ae9e6608f83d net/caffeinemc/mods/sodium/desktop/utils/browse/XDGImpl package net.caffeinemc.mods.sodium.desktop.utils.browse;\n\nimport java.io.IOException;\nimport java.util.Locale;\n\nclass XDGImpl implements BrowseUrlHandler {\n public static boolean isSupported() {\n String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);\n return os.equals("linux");\n }\n\n @Override\n public void browseTo(String url) throws IOException {\n Process process = Runtime.getRuntime().exec(new String[]{"xdg-open", url});\n\n try {\n int result = process.waitFor();\n if (result != 0) {\n throw new IOException("xdg-open exited with code: %d".formatted(result));\n }\n } catch (InterruptedException var4) {\n throw new RuntimeException(var4);\n }\n }\n}\n {} medium +2 1 317dd815f60f04f1cef5d855e30f6a2719570c583ef49ae94ca2b563179fc1fa net/caffeinemc/mods/sodium/client/compatibility/environment/probe/GraphicsAdapterProbe package net.caffeinemc.mods.sodium.client.compatibility.environment.probe;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.stream.Stream;\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.OsUtils;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.d3dkmt.D3DKMT;\nimport org.jetbrains.annotations.Nullable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GraphicsAdapterProbe {\n private static final Logger LOGGER = 
LoggerFactory.getLogger("Sodium-GraphicsAdapterProbe");\n private static final Set LINUX_PCI_CLASSES = Set.of("0x030000", "0x030001", "0x030200", "0x038000");\n private static List ADAPTERS = List.of();\n\n public static void findAdapters() {\n LOGGER.info("Searching for graphics cards...");\n\n List adapters;\n try {\n adapters = switch (OsUtils.getOs()) {\n case WIN -> findAdapters$Windows();\n case LINUX -> findAdapters$Linux();\n default -> null;\n };\n } catch (Exception var3) {\n LOGGER.error("Failed to find graphics adapters!", var3);\n return;\n }\n\n if (adapters != null) {\n if (adapters.isEmpty()) {\n LOGGER.warn(\n "Could not find any graphics adapters! Probably the device is not on a bus we can probe, or there are no devices supporting 3D acceleration."\n );\n } else {\n for (GraphicsAdapterInfo adapter : adapters) {\n LOGGER.info("Found graphics adapter: {}", adapter);\n }\n }\n\n ADAPTERS = adapters;\n }\n }\n\n private static List findAdapters$Windows() {\n return D3DKMT.findGraphicsAdapters();\n }\n\n private static List findAdapters$Linux() {\n ArrayList results = new ArrayList();\n\n try {\n Stream devices = Files.list(Path.of("/sys/bus/pci/devices/"));\n\n try {\n for (Path devicePath : devices::iterator) {\n String deviceClass = Files.readString(devicePath.resolve("class")).trim();\n if (LINUX_PCI_CLASSES.contains(deviceClass)) {\n String pciVendorId = Files.readString(devicePath.resolve("vendor")).trim();\n String pciDeviceId = Files.readString(devicePath.resolve("device")).trim();\n GraphicsAdapterVendor adapterVendor = GraphicsAdapterVendor.fromPciVendorId(pciVendorId);\n String adapterName = getPciDeviceName$Linux(pciVendorId, pciDeviceId);\n if (adapterName == null) {\n adapterName = "";\n }\n\n GraphicsAdapterInfo.LinuxPciAdapterInfo info = new GraphicsAdapterInfo.LinuxPciAdapterInfo(\n adapterVendor, adapterName, pciVendorId, pciDeviceId\n );\n results.add(info);\n }\n }\n } catch (Throwable var12) {\n if (devices != null) {\n try {\n 
devices.close();\n } catch (Throwable var11) {\n var12.addSuppressed(var11);\n }\n }\n\n throw var12;\n }\n\n if (devices != null) {\n devices.close();\n }\n } catch (IOException var13) {\n }\n\n return results;\n }\n\n @Nullable\n private static String getPciDeviceName$Linux(String vendorId, String deviceId) {\n String deviceFilter = vendorId.substring(2) + ":" + deviceId.substring(2);\n\n try {\n Process process = Runtime.getRuntime().exec(new String[]{"lspci", "-vmm", "-d", deviceFilter});\n int result = process.waitFor();\n if (result != 0) {\n throw new IOException("lspci exited with error code: %s".formatted(result));\n } else {\n BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));\n\n String var7;\n label40: {\n String line;\n try {\n while ((line = reader.readLine()) != null) {\n if (line.startsWith("Device:")) {\n var7 = line.substring("Device:".length()).trim();\n break label40;\n }\n }\n } catch (Throwable var9) {\n try {\n reader.close();\n } catch (Throwable var8) {\n var9.addSuppressed(var8);\n }\n\n throw var9;\n }\n\n reader.close();\n throw new IOException("lspci did not return a device name");\n }\n\n reader.close();\n return var7;\n }\n } catch (Throwable var10) {\n LOGGER.warn("Failed to query PCI device name for %s:%s".formatted(vendorId, deviceId), var10);\n return null;\n }\n }\n\n public static Collection getAdapters() {\n if (ADAPTERS == null) {\n LOGGER.error("Graphics adapters not probed yet; returning an empty list.");\n return Collections.emptyList();\n } else {\n return ADAPTERS;\n }\n }\n}\n {} medium +3 2 5ba58b7f9dcc59f14c8a0fd9b78c23a19723791bd9006ed408d43557ea24abb4 net/caffeinemc/mods/sodium/desktop/LaunchWarn package net.caffeinemc.mods.sodium.desktop;\n\nimport java.awt.GraphicsEnvironment;\nimport java.io.IOException;\nimport javax.swing.JDialog;\nimport javax.swing.JOptionPane;\nimport javax.swing.UIManager;\nimport javax.swing.UnsupportedLookAndFeelException;\nimport 
net.caffeinemc.mods.sodium.desktop.utils.browse.BrowseUrlHandler;\n\npublic class LaunchWarn {\n private static final String HELP_URL = "https://link.caffeinemc.net/guides/sodium/installation";\n private static final String RICH_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

";\n private static final String FALLBACK_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

";\n private static final String FAILED_TO_BROWSE_MESSAGE = "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

";\n public static final String WINDOW_TITLE = "Sodium";\n\n public static void main(String[] args) {\n if (GraphicsEnvironment.isHeadless()) {\n showHeadlessError();\n } else {\n showGraphicalError();\n }\n }\n\n private static void showHeadlessError() {\n System.err\n .println(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

"\n );\n }\n\n private static void showGraphicalError() {\n trySetSystemLookAndFeel();\n trySetSystemFontPreferences();\n BrowseUrlHandler browseUrlHandler = BrowseUrlHandler.createImplementation();\n if (browseUrlHandler != null) {\n showRichGraphicalDialog(browseUrlHandler);\n } else {\n showFallbackGraphicalDialog();\n }\n\n System.exit(0);\n }\n\n private static void showRichGraphicalDialog(BrowseUrlHandler browseUrlHandler) {\n int selectedOption = showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

",\n "Sodium",\n 0,\n 1,\n new String[]{"Help", "Close"},\n 0\n );\n if (selectedOption == 0) {\n log("Opening URL: https://link.caffeinemc.net/guides/sodium/installation");\n\n try {\n browseUrlHandler.browseTo("https://link.caffeinemc.net/guides/sodium/installation");\n } catch (IOException var3) {\n log("Failed to open default web browser!", var3);\n showDialogBox(\n "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

",\n "Sodium",\n -1,\n 2,\n null,\n -1\n );\n }\n }\n }\n\n private static void showFallbackGraphicalDialog() {\n showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

",\n "Sodium",\n -1,\n 1,\n null,\n null\n );\n }\n\n private static int showDialogBox(String message, String title, int optionType, int messageType, String[] options, Object initialValue) {\n JOptionPane pane = new JOptionPane(message, messageType, optionType, null, options, initialValue);\n JDialog dialog = pane.createDialog(title);\n dialog.setVisible(true);\n Object selectedValue = pane.getValue();\n if (selectedValue == null) {\n return -1;\n } else if (options == null) {\n return selectedValue instanceof Integer ? (Integer)selectedValue : -1;\n } else {\n for (int counter = 0; counter < options.length; counter++) {\n String option = options[counter];\n if (option.equals(selectedValue)) {\n return counter;\n }\n }\n\n return -1;\n }\n }\n\n private static void trySetSystemLookAndFeel() {\n try {\n UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());\n } catch (UnsupportedLookAndFeelException | ReflectiveOperationException var1) {\n }\n }\n\n private static void trySetSystemFontPreferences() {\n System.setProperty("awt.useSystemAAFontSettings", "on");\n }\n\n private static void log(String message) {\n System.err.println(message);\n }\n\n private static void log(String message, Throwable exception) {\n System.err.println(message);\n exception.printStackTrace(System.err);\n }\n}\n {"url": "https://link.caffeinemc.net/guides/sodium/installation"} low +4 2 34a4ceb119311f669d4b3b036dfef9f93c1e86f765582ebf556b92486766f861 net/caffeinemc/mods/sodium/client/gui/SodiumOptionsGUI package net.caffeinemc.mods.sodium.client.gui;\n\nimport com.google.common.collect.UnmodifiableIterator;\nimport java.io.IOException;\nimport java.time.Instant;\nimport java.time.temporal.ChronoUnit;\nimport java.util.ArrayList;\nimport java.util.EnumSet;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.stream.Stream;\nimport net.caffeinemc.mods.sodium.client.SodiumClientMod;\nimport net.caffeinemc.mods.sodium.client.console.Console;\nimport 
net.caffeinemc.mods.sodium.client.console.message.MessageLevel;\nimport net.caffeinemc.mods.sodium.client.data.fingerprint.HashedFingerprint;\nimport net.caffeinemc.mods.sodium.client.gui.options.Option;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionFlag;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionGroup;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionImpact;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionPage;\nimport net.caffeinemc.mods.sodium.client.gui.options.control.Control;\nimport net.caffeinemc.mods.sodium.client.gui.options.control.ControlElement;\nimport net.caffeinemc.mods.sodium.client.gui.options.storage.OptionStorage;\nimport net.caffeinemc.mods.sodium.client.gui.prompt.ScreenPrompt;\nimport net.caffeinemc.mods.sodium.client.gui.prompt.ScreenPromptable;\nimport net.caffeinemc.mods.sodium.client.gui.screen.ConfigCorruptedScreen;\nimport net.caffeinemc.mods.sodium.client.gui.widgets.AbstractWidget;\nimport net.caffeinemc.mods.sodium.client.gui.widgets.FlatButtonWidget;\nimport net.caffeinemc.mods.sodium.client.services.PlatformRuntimeInformation;\nimport net.caffeinemc.mods.sodium.client.util.Dim2i;\nimport net.minecraft.class_124;\nimport net.minecraft.class_156;\nimport net.minecraft.class_2561;\nimport net.minecraft.class_310;\nimport net.minecraft.class_332;\nimport net.minecraft.class_364;\nimport net.minecraft.class_437;\nimport net.minecraft.class_446;\nimport net.minecraft.class_5250;\nimport net.minecraft.class_5348;\nimport net.minecraft.class_5481;\nimport org.jetbrains.annotations.Nullable;\n\npublic class SodiumOptionsGUI extends class_437 implements ScreenPromptable {\n private final List pages = new ArrayList();\n private final List> controls = new ArrayList();\n private final class_437 prevScreen;\n private OptionPage currentPage;\n private FlatButtonWidget applyButton;\n private FlatButtonWidget closeButton;\n private FlatButtonWidget undoButton;\n private FlatButtonWidget 
donateButton;\n private FlatButtonWidget hideDonateButton;\n private boolean hasPendingChanges;\n private ControlElement hoveredElement;\n @Nullable\n private ScreenPrompt prompt;\n private static final List DONATION_PROMPT_MESSAGE = List.of(\n class_5348.method_29433(new class_5348[]{class_2561.method_43470("Hello!")}),\n class_5348.method_29433(\n new class_5348[]{\n class_2561.method_43470("It seems that you've been enjoying "),\n class_2561.method_43470("Sodium").method_54663(2616210),\n class_2561.method_43470(", the powerful and open rendering optimization mod for Minecraft.")\n }\n ),\n class_5348.method_29433(\n new class_5348[]{\n class_2561.method_43470("Mods like these are complex. They require "),\n class_2561.method_43470("thousands of hours").method_54663(16739840),\n class_2561.method_43470(" of development, debugging, and tuning to create the experience that players have come to expect.")\n }\n ),\n class_5348.method_29433(\n new class_5348[]{\n class_2561.method_43470("If you'd like to show your token of appreciation, and support the development of our mod in the process, then consider "),\n class_2561.method_43470("buying us a coffee").method_54663(15550926),\n class_2561.method_43470(".")\n }\n ),\n class_5348.method_29433(new class_5348[]{class_2561.method_43470("And thanks again for using our mod! 
We hope it helps you (and your computer.)")})\n );\n\n private SodiumOptionsGUI(class_437 prevScreen) {\n super(class_2561.method_43470("Sodium Renderer Settings"));\n this.prevScreen = prevScreen;\n this.pages.add(SodiumGameOptionPages.general());\n this.pages.add(SodiumGameOptionPages.quality());\n this.pages.add(SodiumGameOptionPages.performance());\n this.pages.add(SodiumGameOptionPages.advanced());\n this.checkPromptTimers();\n }\n\n private void checkPromptTimers() {\n if (!PlatformRuntimeInformation.getInstance().isDevelopmentEnvironment()) {\n SodiumGameOptions options = SodiumClientMod.options();\n if (!options.notifications.hasSeenDonationPrompt) {\n HashedFingerprint fingerprint = null;\n\n try {\n fingerprint = HashedFingerprint.loadFromDisk();\n } catch (Throwable var5) {\n SodiumClientMod.logger().error("Failed to read the fingerprint from disk", var5);\n }\n\n if (fingerprint != null) {\n Instant now = Instant.now();\n Instant threshold = Instant.ofEpochSecond(fingerprint.timestamp()).plus(3L, ChronoUnit.DAYS);\n if (now.isAfter(threshold)) {\n this.openDonationPrompt(options);\n }\n }\n }\n }\n }\n\n private void openDonationPrompt(SodiumGameOptions options) {\n ScreenPrompt prompt = new ScreenPrompt(\n this, DONATION_PROMPT_MESSAGE, 320, 190, new ScreenPrompt.Action(class_2561.method_43470("Buy us a coffee"), this::openDonationPage)\n );\n prompt.method_25365(true);\n options.notifications.hasSeenDonationPrompt = true;\n\n try {\n SodiumGameOptions.writeToDisk(options);\n } catch (IOException var4) {\n SodiumClientMod.logger().error("Failed to update config file", var4);\n }\n }\n\n public static class_437 createScreen(class_437 currentScreen) {\n return (class_437)(SodiumClientMod.options().isReadOnly()\n ? 
new ConfigCorruptedScreen(currentScreen, SodiumOptionsGUI::new)\n : new SodiumOptionsGUI(currentScreen));\n }\n\n public void setPage(OptionPage page) {\n this.currentPage = page;\n this.rebuildGUI();\n }\n\n protected void method_25426() {\n super.method_25426();\n this.rebuildGUI();\n if (this.prompt != null) {\n this.prompt.init();\n }\n }\n\n private void rebuildGUI() {\n this.controls.clear();\n this.method_37067();\n if (this.currentPage == null) {\n if (this.pages.isEmpty()) {\n throw new IllegalStateException("No pages are available?!");\n }\n\n this.currentPage = (OptionPage)this.pages.get(0);\n }\n\n this.rebuildGUIPages();\n this.rebuildGUIOptions();\n this.undoButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 211, this.field_22790 - 30, 65, 20), class_2561.method_43471("sodium.options.buttons.undo"), this::undoChanges\n );\n this.applyButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 142, this.field_22790 - 30, 65, 20), class_2561.method_43471("sodium.options.buttons.apply"), this::applyChanges\n );\n this.closeButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 73, this.field_22790 - 30, 65, 20), class_2561.method_43471("gui.done"), this::method_25419\n );\n this.donateButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 128, 6, 100, 20), class_2561.method_43471("sodium.options.buttons.donate"), this::openDonationPage\n );\n this.hideDonateButton = new FlatButtonWidget(new Dim2i(this.field_22789 - 26, 6, 20, 20), class_2561.method_43470("x"), this::hideDonationButton);\n if (SodiumClientMod.options().notifications.hasClearedDonationButton) {\n this.setDonationButtonVisibility(false);\n }\n\n this.method_37063(this.undoButton);\n this.method_37063(this.applyButton);\n this.method_37063(this.closeButton);\n this.method_37063(this.donateButton);\n this.method_37063(this.hideDonateButton);\n }\n\n private void setDonationButtonVisibility(boolean value) {\n this.donateButton.setVisible(value);\n 
this.hideDonateButton.setVisible(value);\n }\n\n private void hideDonationButton() {\n SodiumGameOptions options = SodiumClientMod.options();\n options.notifications.hasClearedDonationButton = true;\n\n try {\n SodiumGameOptions.writeToDisk(options);\n } catch (IOException var3) {\n throw new RuntimeException("Failed to save configuration", var3);\n }\n\n this.setDonationButtonVisibility(false);\n }\n\n private void rebuildGUIPages() {\n int x = 6;\n int y = 6;\n\n for (OptionPage page : this.pages) {\n int width = 12 + this.field_22793.method_27525(page.getName());\n FlatButtonWidget button = new FlatButtonWidget(new Dim2i(x, y, width, 18), page.getName(), () -> this.setPage(page));\n button.setSelected(this.currentPage == page);\n x += width + 6;\n this.method_37063(button);\n }\n }\n\n private void rebuildGUIOptions() {\n int x = 6;\n int y = 28;\n\n for (UnmodifiableIterator var3 = this.currentPage.getGroups().iterator(); var3.hasNext(); y += 4) {\n OptionGroup group = (OptionGroup)var3.next();\n\n for (UnmodifiableIterator var5 = group.getOptions().iterator(); var5.hasNext(); y += 18) {\n Option option = (Option)var5.next();\n Control control = option.getControl();\n ControlElement element = control.createElement(new Dim2i(x, y, 240, 18));\n this.method_37063(element);\n this.controls.add(element);\n }\n }\n }\n\n public void method_25394(class_332 graphics, int mouseX, int mouseY, float delta) {\n this.updateControls();\n super.method_25394(graphics, this.prompt != null ? -1 : mouseX, this.prompt != null ? 
-1 : mouseY, delta);\n if (this.hoveredElement != null) {\n this.renderOptionTooltip(graphics, this.hoveredElement);\n }\n\n if (this.prompt != null) {\n this.prompt.method_25394(graphics, mouseX, mouseY, delta);\n }\n }\n\n private void updateControls() {\n ControlElement hovered = (ControlElement)this.getActiveControls()\n .filter(AbstractWidget::isHovered)\n .findFirst()\n .orElse((ControlElement)this.getActiveControls().filter(AbstractWidget::method_25370).findFirst().orElse(null));\n boolean hasChanges = this.getAllOptions().anyMatch(Option::hasChanged);\n\n for (OptionPage page : this.pages) {\n UnmodifiableIterator var5 = page.getOptions().iterator();\n\n while (var5.hasNext()) {\n Option option = (Option)var5.next();\n if (option.hasChanged()) {\n hasChanges = true;\n }\n }\n }\n\n this.applyButton.setEnabled(hasChanges);\n this.undoButton.setVisible(hasChanges);\n this.closeButton.setEnabled(!hasChanges);\n this.hasPendingChanges = hasChanges;\n this.hoveredElement = hovered;\n }\n\n private Stream> getAllOptions() {\n return this.pages.stream().flatMap(s -> s.getOptions().stream());\n }\n\n private Stream> getActiveControls() {\n return this.controls.stream();\n }\n\n private void renderOptionTooltip(class_332 graphics, ControlElement element) {\n Dim2i dim = element.getDimensions();\n int textPadding = 3;\n int boxPadding = 3;\n int boxY = dim.y();\n int boxX = dim.getLimitX() + boxPadding;\n int boxWidth = Math.min(200, this.field_22789 - boxX - boxPadding);\n Option option = element.getOption();\n int splitWidth = boxWidth - textPadding * 2;\n List tooltip = new ArrayList(this.field_22793.method_1728(option.getTooltip(), splitWidth));\n OptionImpact impact = option.getImpact();\n if (impact != null) {\n class_5250 impactText = class_2561.method_43469("sodium.options.performance_impact_string", new Object[]{impact.getLocalizedName()});\n tooltip.addAll(this.field_22793.method_1728(impactText.method_27692(class_124.field_1080), splitWidth));\n }\n\n int 
boxHeight = tooltip.size() * 12 + boxPadding;\n int boxYLimit = boxY + boxHeight;\n int boxYCutoff = this.field_22790 - 40;\n if (boxYLimit > boxYCutoff) {\n boxY -= boxYLimit - boxYCutoff;\n }\n\n graphics.method_25296(boxX, boxY, boxX + boxWidth, boxY + boxHeight, -536870912, -536870912);\n\n for (int i = 0; i < tooltip.size(); i++) {\n graphics.method_35720(this.field_22793, (class_5481)tooltip.get(i), boxX + textPadding, boxY + textPadding + i * 12, -1);\n }\n }\n\n private void applyChanges() {\n HashSet> dirtyStorages = new HashSet();\n EnumSet flags = EnumSet.noneOf(OptionFlag.class);\n this.getAllOptions().forEach(option -> {\n if (option.hasChanged()) {\n option.applyChanges();\n flags.addAll(option.getFlags());\n dirtyStorages.add(option.getStorage());\n }\n });\n class_310 client = class_310.method_1551();\n if (client.field_1687 != null) {\n if (flags.contains(OptionFlag.REQUIRES_RENDERER_RELOAD)) {\n client.field_1769.method_3279();\n } else if (flags.contains(OptionFlag.REQUIRES_RENDERER_UPDATE)) {\n client.field_1769.method_3292();\n }\n }\n\n if (flags.contains(OptionFlag.REQUIRES_ASSET_RELOAD)) {\n client.method_24041((Integer)client.field_1690.method_42563().method_41753());\n client.method_1513();\n }\n\n if (flags.contains(OptionFlag.REQUIRES_VIDEOMODE_RELOAD)) {\n client.method_22683().method_4475();\n }\n\n if (flags.contains(OptionFlag.REQUIRES_GAME_RESTART)) {\n Console.instance().logMessage(MessageLevel.WARN, "sodium.console.game_restart", true, 10.0);\n }\n\n for (OptionStorage storage : dirtyStorages) {\n storage.save();\n }\n }\n\n private void undoChanges() {\n this.getAllOptions().forEach(Option::reset);\n }\n\n private void openDonationPage() {\n class_156.method_668().method_670("https://caffeinemc.net/donate");\n }\n\n public boolean method_25404(int keyCode, int scanCode, int modifiers) {\n if (this.prompt != null && this.prompt.method_25404(keyCode, scanCode, modifiers)) {\n return true;\n } else if (this.prompt == null && keyCode 
== 80 && (modifiers & 1) != 0) {\n class_310.method_1551().method_1507(new class_446(this.prevScreen, class_310.method_1551(), class_310.method_1551().field_1690));\n return true;\n } else {\n return super.method_25404(keyCode, scanCode, modifiers);\n }\n }\n\n public boolean method_25402(double mouseX, double mouseY, int button) {\n if (this.prompt != null) {\n return this.prompt.method_25402(mouseX, mouseY, button);\n } else {\n boolean clicked = super.method_25402(mouseX, mouseY, button);\n if (!clicked) {\n this.method_25395(null);\n return true;\n } else {\n return clicked;\n }\n }\n }\n\n public boolean method_25422() {\n return !this.hasPendingChanges;\n }\n\n public void method_25419() {\n this.field_22787.method_1507(this.prevScreen);\n }\n\n public List method_25396() {\n return this.prompt == null ? super.method_25396() : this.prompt.getWidgets();\n }\n\n @Override\n public void setPrompt(@Nullable ScreenPrompt prompt) {\n this.prompt = prompt;\n }\n\n @Nullable\n @Override\n public ScreenPrompt getPrompt() {\n return this.prompt;\n }\n\n @Override\n public Dim2i getDimensions() {\n return new Dim2i(0, 0, this.field_22789, this.field_22790);\n }\n}\n {"url": "https://caffeinemc.net/donate"} low +5 2 2048cee1aed753e10480183673ffb5e685de2ce414e99e93f7d1dd11a87a19af net/caffeinemc/mods/sodium/client/compatibility/checks/PreLaunchChecks package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport net.caffeinemc.mods.sodium.client.platform.PlatformHelper;\nimport org.lwjgl.Version;\n\npublic class PreLaunchChecks {\n private static final String REQUIRED_LWJGL_VERSION = "3.3.3";\n\n public static void checkEnvironment() {\n if (BugChecks.ISSUE_2561) {\n checkLwjglRuntimeVersion();\n }\n }\n\n private static void checkLwjglRuntimeVersion() {\n if (!isUsingKnownCompatibleLwjglVersion()) {\n String advice;\n if (isUsingPrismLauncher()) {\n advice = "It appears you are using Prism Launcher to start the game. 
You can likely fix this problem by opening your instance settings and navigating to the Versionsection in the sidebar.";\n } else {\n advice = "You must change the LWJGL version in your launcher to continue. This is usually controlled by the settings for a profile or instance in your launcher.";\n }\n\n String message = "The game failed to start because the currently active LWJGL version is not compatible.\\n\\nInstalled version: ###CURRENT_VERSION###\\nRequired version: ###REQUIRED_VERSION###\\n\\n###ADVICE_STRING###"\n .replace("###CURRENT_VERSION###", Version.getVersion())\n .replace("###REQUIRED_VERSION###", "3.3.3")\n .replace("###ADVICE_STRING###", advice);\n PlatformHelper.showCriticalErrorAndClose(\n null, "Sodium Renderer - Unsupported LWJGL", message, "https://link.caffeinemc.net/help/sodium/runtime-issue/lwjgl3/gh-2561"\n );\n }\n }\n\n private static boolean isUsingKnownCompatibleLwjglVersion() {\n return Version.getVersion().startsWith("3.3.3");\n }\n\n private static boolean isUsingPrismLauncher() {\n return getLauncherBrand().equalsIgnoreCase("PrismLauncher");\n }\n\n private static String getLauncherBrand() {\n return System.getProperty("minecraft.launcher.brand", "unknown");\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/runtime-issue/lwjgl3/gh-2561"} low +6 2 0b9d53bc482f11c0d8c71a9689645132f0a50249838091b3e6f95fefbc279075 net/caffeinemc/mods/sodium/client/compatibility/checks/ModuleScanner package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport com.sun.jna.Platform;\nimport com.sun.jna.platform.win32.Kernel32;\nimport com.sun.jna.platform.win32.Kernel32Util;\nimport com.sun.jna.platform.win32.Tlhelp32.MODULEENTRY32W;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport net.caffeinemc.mods.sodium.client.platform.MessageBox;\nimport 
net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.Version;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionFixedFileInfoStruct;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionInfo;\nimport org.jetbrains.annotations.Nullable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ModuleScanner {\n private static final Logger LOGGER = LoggerFactory.getLogger("Sodium-Win32ModuleChecks");\n private static final String[] RTSS_HOOKS_MODULE_NAMES = new String[]{"RTSSHooks64.dll", "RTSSHooks.dll"};\n private static final String[] ASUS_GPU_TWEAK_MODULE_NAMES = new String[]{\n "GTIII-OSD64-GL.dll", "GTIII-OSD-GL.dll", "GTIII-OSD64-VK.dll", "GTIII-OSD-VK.dll", "GTIII-OSD64.dll", "GTIII-OSD.dll"\n };\n\n public static void checkModules(NativeWindowHandle window) {\n List modules;\n try {\n modules = listModules();\n } catch (Throwable var3) {\n LOGGER.warn("Failed to scan the currently loaded modules", var3);\n return;\n }\n\n if (!modules.isEmpty()) {\n if (BugChecks.ISSUE_2048 && isModuleLoaded(modules, RTSS_HOOKS_MODULE_NAMES)) {\n checkRTSSModules(window);\n }\n\n if (BugChecks.ISSUE_2637 && isModuleLoaded(modules, ASUS_GPU_TWEAK_MODULE_NAMES)) {\n checkASUSGpuTweakIII(window);\n }\n }\n }\n\n private static List listModules() {\n if (!Platform.isWindows()) {\n return List.of();\n } else {\n int pid = Kernel32.INSTANCE.GetCurrentProcessId();\n ArrayList modules = new ArrayList();\n\n for (MODULEENTRY32W module : Kernel32Util.getModules(pid)) {\n modules.add(module.szModule());\n }\n\n return Collections.unmodifiableList(modules);\n }\n }\n\n private static void checkRTSSModules(NativeWindowHandle window) {\n LOGGER.warn("RivaTuner Statistics Server (RTSS) has injected into the process! 
Attempting to apply workarounds for compatibility...");\n WindowsFileVersion version = null;\n\n try {\n version = findRTSSModuleVersion();\n } catch (Throwable var3) {\n LOGGER.warn("Exception thrown while reading file version", var3);\n }\n\n if (version == null) {\n LOGGER.warn("Could not determine version of RivaTuner Statistics Server");\n } else {\n LOGGER.info("Detected RivaTuner Statistics Server version: {}", version);\n }\n\n if (version == null || !isRTSSCompatible(version)) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "You appear to be using an older version of RivaTuner Statistics Server (RTSS) which is not compatible with Sodium.\\n\\nYou must either update to a newer version (7.3.4 and later) or close the RivaTuner Statistics Server application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n throw new RuntimeException(\n "The installed version of RivaTuner Statistics Server (RTSS) is not compatible with Sodium, see here for more details: https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n }\n }\n\n private static boolean isRTSSCompatible(WindowsFileVersion version) {\n int x = version.x();\n int y = version.y();\n int z = version.z();\n return x > 7 || x == 7 && y > 3 || x == 7 && y == 3 && z >= 4;\n }\n\n private static void checkASUSGpuTweakIII(NativeWindowHandle window) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "ASUS GPU Tweak III is not compatible with Minecraft, and causes extreme performance issues and severe graphical corruption when used with Minecraft.\\n\\nYou *must* do one of the following things to continue:\\n\\na) Open the settings of ASUS GPU Tweak III, enable the Blacklist option, click \\"Browse from file...\\", and select the Java runtime 
(javaw.exe) which is used by Minecraft.\\n\\nb) Completely uninstall the ASUS GPU Tweak III application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n throw new RuntimeException(\n "ASUS GPU Tweak III is not compatible with Minecraft, see here for more details: https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n }\n\n @Nullable\n private static WindowsFileVersion findRTSSModuleVersion() {\n long module;\n try {\n module = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleHandleByNames(RTSS_HOOKS_MODULE_NAMES);\n } catch (Throwable var9) {\n LOGGER.warn("Failed to locate module", var9);\n return null;\n }\n\n String moduleFileName;\n try {\n moduleFileName = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleFileName(module);\n } catch (Throwable var8) {\n LOGGER.warn("Failed to get path of module", var8);\n return null;\n }\n\n Path modulePath = Path.of(moduleFileName);\n Path moduleDirectory = modulePath.getParent();\n LOGGER.info("Searching directory: {}", moduleDirectory);\n Path executablePath = moduleDirectory.resolve("RTSS.exe");\n if (!Files.exists(executablePath, new LinkOption[0])) {\n LOGGER.warn("Could not find executable: {}", executablePath);\n return null;\n } else {\n LOGGER.info("Parsing file: {}", executablePath);\n VersionInfo version = Version.getModuleFileVersion(executablePath.toAbsolutePath().toString());\n if (version == null) {\n LOGGER.warn("Couldn't find version structure");\n return null;\n } else {\n VersionFixedFileInfoStruct fileVersion = version.queryFixedFileInfo();\n if (fileVersion == null) {\n LOGGER.warn("Couldn't query file version");\n return null;\n } else {\n return WindowsFileVersion.fromFileVersion(fileVersion);\n }\n }\n }\n }\n\n private static boolean isModuleLoaded(List modules, String[] names) {\n for (String 
name : names) {\n for (String module : modules) {\n if (module.equalsIgnoreCase(name)) {\n return true;\n }\n }\n }\n\n return false;\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"} low +7 2 c7ef03b142e9371c5b10fd54ee7a22060872bd3addb96c6fa21ab10ccdc4b481 net/caffeinemc/mods/sodium/client/compatibility/checks/ModuleScanner package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport com.sun.jna.Platform;\nimport com.sun.jna.platform.win32.Kernel32;\nimport com.sun.jna.platform.win32.Kernel32Util;\nimport com.sun.jna.platform.win32.Tlhelp32.MODULEENTRY32W;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport net.caffeinemc.mods.sodium.client.platform.MessageBox;\nimport net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.Version;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionFixedFileInfoStruct;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionInfo;\nimport org.jetbrains.annotations.Nullable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ModuleScanner {\n private static final Logger LOGGER = LoggerFactory.getLogger("Sodium-Win32ModuleChecks");\n private static final String[] RTSS_HOOKS_MODULE_NAMES = new String[]{"RTSSHooks64.dll", "RTSSHooks.dll"};\n private static final String[] ASUS_GPU_TWEAK_MODULE_NAMES = new String[]{\n "GTIII-OSD64-GL.dll", "GTIII-OSD-GL.dll", "GTIII-OSD64-VK.dll", "GTIII-OSD-VK.dll", "GTIII-OSD64.dll", "GTIII-OSD.dll"\n };\n\n public static void checkModules(NativeWindowHandle window) {\n List modules;\n try {\n modules = listModules();\n } catch (Throwable var3) {\n LOGGER.warn("Failed to scan the 
currently loaded modules", var3);\n return;\n }\n\n if (!modules.isEmpty()) {\n if (BugChecks.ISSUE_2048 && isModuleLoaded(modules, RTSS_HOOKS_MODULE_NAMES)) {\n checkRTSSModules(window);\n }\n\n if (BugChecks.ISSUE_2637 && isModuleLoaded(modules, ASUS_GPU_TWEAK_MODULE_NAMES)) {\n checkASUSGpuTweakIII(window);\n }\n }\n }\n\n private static List listModules() {\n if (!Platform.isWindows()) {\n return List.of();\n } else {\n int pid = Kernel32.INSTANCE.GetCurrentProcessId();\n ArrayList modules = new ArrayList();\n\n for (MODULEENTRY32W module : Kernel32Util.getModules(pid)) {\n modules.add(module.szModule());\n }\n\n return Collections.unmodifiableList(modules);\n }\n }\n\n private static void checkRTSSModules(NativeWindowHandle window) {\n LOGGER.warn("RivaTuner Statistics Server (RTSS) has injected into the process! Attempting to apply workarounds for compatibility...");\n WindowsFileVersion version = null;\n\n try {\n version = findRTSSModuleVersion();\n } catch (Throwable var3) {\n LOGGER.warn("Exception thrown while reading file version", var3);\n }\n\n if (version == null) {\n LOGGER.warn("Could not determine version of RivaTuner Statistics Server");\n } else {\n LOGGER.info("Detected RivaTuner Statistics Server version: {}", version);\n }\n\n if (version == null || !isRTSSCompatible(version)) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "You appear to be using an older version of RivaTuner Statistics Server (RTSS) which is not compatible with Sodium.\\n\\nYou must either update to a newer version (7.3.4 and later) or close the RivaTuner Statistics Server application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n throw new RuntimeException(\n "The installed version of RivaTuner Statistics Server (RTSS) is not compatible with Sodium, see here for more details: 
https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n }\n }\n\n private static boolean isRTSSCompatible(WindowsFileVersion version) {\n int x = version.x();\n int y = version.y();\n int z = version.z();\n return x > 7 || x == 7 && y > 3 || x == 7 && y == 3 && z >= 4;\n }\n\n private static void checkASUSGpuTweakIII(NativeWindowHandle window) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "ASUS GPU Tweak III is not compatible with Minecraft, and causes extreme performance issues and severe graphical corruption when used with Minecraft.\\n\\nYou *must* do one of the following things to continue:\\n\\na) Open the settings of ASUS GPU Tweak III, enable the Blacklist option, click \\"Browse from file...\\", and select the Java runtime (javaw.exe) which is used by Minecraft.\\n\\nb) Completely uninstall the ASUS GPU Tweak III application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n throw new RuntimeException(\n "ASUS GPU Tweak III is not compatible with Minecraft, see here for more details: https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n }\n\n @Nullable\n private static WindowsFileVersion findRTSSModuleVersion() {\n long module;\n try {\n module = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleHandleByNames(RTSS_HOOKS_MODULE_NAMES);\n } catch (Throwable var9) {\n LOGGER.warn("Failed to locate module", var9);\n return null;\n }\n\n String moduleFileName;\n try {\n moduleFileName = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleFileName(module);\n } catch (Throwable var8) {\n LOGGER.warn("Failed to get path of module", var8);\n return null;\n }\n\n Path modulePath = Path.of(moduleFileName);\n Path moduleDirectory = modulePath.getParent();\n 
LOGGER.info("Searching directory: {}", moduleDirectory);\n Path executablePath = moduleDirectory.resolve("RTSS.exe");\n if (!Files.exists(executablePath, new LinkOption[0])) {\n LOGGER.warn("Could not find executable: {}", executablePath);\n return null;\n } else {\n LOGGER.info("Parsing file: {}", executablePath);\n VersionInfo version = Version.getModuleFileVersion(executablePath.toAbsolutePath().toString());\n if (version == null) {\n LOGGER.warn("Couldn't find version structure");\n return null;\n } else {\n VersionFixedFileInfoStruct fileVersion = version.queryFixedFileInfo();\n if (fileVersion == null) {\n LOGGER.warn("Couldn't query file version");\n return null;\n } else {\n return WindowsFileVersion.fromFileVersion(fileVersion);\n }\n }\n }\n }\n\n private static boolean isModuleLoaded(List modules, String[] names) {\n for (String name : names) {\n for (String module : modules) {\n if (module.equalsIgnoreCase(name)) {\n return true;\n }\n }\n }\n\n return false;\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"} low +8 2 54aa6d079497c9fc459f84c660a303496d18fa17c35c0e22cbe2160924de212e net/caffeinemc/mods/sodium/client/compatibility/checks/GraphicsDriverChecks package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.GlContextInfo;\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.probe.GraphicsAdapterVendor;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.intel.IntelWorkarounds;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaDriverVersion;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaWorkarounds;\nimport net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.PlatformHelper;\nimport 
net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\n\nclass GraphicsDriverChecks {\n static void postContextInit(NativeWindowHandle window, GlContextInfo context) {\n GraphicsAdapterVendor vendor = GraphicsAdapterVendor.fromContext(context);\n if (vendor != GraphicsAdapterVendor.UNKNOWN) {\n if (vendor == GraphicsAdapterVendor.INTEL && BugChecks.ISSUE_899) {\n WindowsFileVersion installedVersion = IntelWorkarounds.findIntelDriverMatchingBug899();\n if (installedVersion != null) {\n String installedVersionString = installedVersion.toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed Intel Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 10.18.10.5161 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/intel/gh-899"\n );\n }\n }\n\n if (vendor == GraphicsAdapterVendor.NVIDIA && BugChecks.ISSUE_1486) {\n WindowsFileVersion installedVersion = NvidiaWorkarounds.findNvidiaDriverMatchingBug1486();\n if (installedVersion != null) {\n String installedVersionString = NvidiaDriverVersion.parse(installedVersion).toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed NVIDIA Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 536.23 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/nvidia/gh-1486"\n );\n }\n }\n }\n }\n}\n {"url": 
"https://link.caffeinemc.net/help/sodium/graphics-driver/windows/nvidia/gh-1486"} low +9 2 5ff865ff6c2e250096fb15b9a943c645deb8558eddc72fa1b492748eb2c78b32 net/caffeinemc/mods/sodium/client/compatibility/checks/GraphicsDriverChecks package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.GlContextInfo;\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.probe.GraphicsAdapterVendor;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.intel.IntelWorkarounds;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaDriverVersion;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaWorkarounds;\nimport net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.PlatformHelper;\nimport net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\n\nclass GraphicsDriverChecks {\n static void postContextInit(NativeWindowHandle window, GlContextInfo context) {\n GraphicsAdapterVendor vendor = GraphicsAdapterVendor.fromContext(context);\n if (vendor != GraphicsAdapterVendor.UNKNOWN) {\n if (vendor == GraphicsAdapterVendor.INTEL && BugChecks.ISSUE_899) {\n WindowsFileVersion installedVersion = IntelWorkarounds.findIntelDriverMatchingBug899();\n if (installedVersion != null) {\n String installedVersionString = installedVersion.toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed Intel Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 10.18.10.5161 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/intel/gh-899"\n );\n }\n }\n\n 
if (vendor == GraphicsAdapterVendor.NVIDIA && BugChecks.ISSUE_1486) {\n WindowsFileVersion installedVersion = NvidiaWorkarounds.findNvidiaDriverMatchingBug1486();\n if (installedVersion != null) {\n String installedVersionString = NvidiaDriverVersion.parse(installedVersion).toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed NVIDIA Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 536.23 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/nvidia/gh-1486"\n );\n }\n }\n }\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/intel/gh-899"} low +10 2 dcf24bb91e7861b7a382958053c3efc201684e93df7ce058e849effa3f947fb0 net/caffeinemc/mods/sodium/client/checks/ResourcePackScanner package net.caffeinemc.mods.sodium.client.checks;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Set;\nimport net.caffeinemc.mods.sodium.client.console.Console;\nimport net.caffeinemc.mods.sodium.client.console.message.MessageLevel;\nimport net.minecraft.class_3258;\nimport net.minecraft.class_3259;\nimport net.minecraft.class_3262;\nimport net.minecraft.class_3264;\nimport net.minecraft.class_3300;\nimport org.jetbrains.annotations.NotNull;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ResourcePackScanner {\n private static final Logger LOGGER = LoggerFactory.getLogger("Sodium-ResourcePackScanner");\n private static final Set SHADER_PROGRAM_BLACKLIST = Set.of(\n "rendertype_solid.vsh",\n "rendertype_solid.fsh",\n "rendertype_solid.json",\n "rendertype_cutout_mipped.vsh",\n "rendertype_cutout_mipped.fsh",\n 
"rendertype_cutout_mipped.json",\n "rendertype_cutout.vsh",\n "rendertype_cutout.fsh",\n "rendertype_cutout.json",\n "rendertype_translucent.vsh",\n "rendertype_translucent.fsh",\n "rendertype_translucent.json",\n "rendertype_tripwire.vsh",\n "rendertype_tripwire.fsh",\n "rendertype_tripwire.json",\n "rendertype_clouds.vsh",\n "rendertype_clouds.fsh",\n "rendertype_clouds.json"\n );\n private static final Set SHADER_INCLUDE_BLACKLIST = Set.of("light.glsl", "fog.glsl");\n\n public static void checkIfCoreShaderLoaded(class_3300 manager) {\n List outputs = manager.method_29213()\n .filter(ResourcePackScanner::isExternalResourcePack)\n .map(ResourcePackScanner::scanResources)\n .toList();\n printToasts(outputs);\n printCompatibilityReport(outputs);\n }\n\n private static void printToasts(Collection resourcePacks) {\n List incompatibleResourcePacks = resourcePacks.stream().filter(pack -> !pack.shaderPrograms.isEmpty()).toList();\n List likelyIncompatibleResourcePacks = resourcePacks.stream()\n .filter(pack -> !pack.shaderIncludes.isEmpty())\n .filter(pack -> !incompatibleResourcePacks.contains(pack))\n .toList();\n boolean shown = false;\n if (!incompatibleResourcePacks.isEmpty()) {\n showConsoleMessage("sodium.console.core_shaders_error", true, MessageLevel.SEVERE);\n\n for (ResourcePackScanner.ScannedResourcePack entry : incompatibleResourcePacks) {\n showConsoleMessage(getResourcePackName(entry.resourcePack), false, MessageLevel.SEVERE);\n }\n\n shown = true;\n }\n\n if (!likelyIncompatibleResourcePacks.isEmpty()) {\n showConsoleMessage("sodium.console.core_shaders_warn", true, MessageLevel.WARN);\n\n for (ResourcePackScanner.ScannedResourcePack entry : likelyIncompatibleResourcePacks) {\n showConsoleMessage(getResourcePackName(entry.resourcePack), false, MessageLevel.WARN);\n }\n\n shown = true;\n }\n\n if (shown) {\n showConsoleMessage("sodium.console.core_shaders_info", true, MessageLevel.INFO);\n }\n }\n\n private static void printCompatibilityReport(Collection 
scanResults) {\n StringBuilder builder = new StringBuilder();\n\n for (ResourcePackScanner.ScannedResourcePack entry : scanResults) {\n if (!entry.shaderPrograms.isEmpty() || !entry.shaderIncludes.isEmpty()) {\n builder.append("- Resource pack: ").append(getResourcePackName(entry.resourcePack)).append("\\n");\n if (!entry.shaderPrograms.isEmpty()) {\n emitProblem(\n builder,\n "The resource pack replaces terrain shaders, which are not supported",\n "https://github.com/CaffeineMC/sodium/wiki/Resource-Packs",\n entry.shaderPrograms\n );\n }\n\n if (!entry.shaderIncludes.isEmpty()) {\n emitProblem(\n builder,\n "The resource pack modifies shader include files, which are not fully supported",\n "https://github.com/CaffeineMC/sodium/wiki/Resource-Packs",\n entry.shaderIncludes\n );\n }\n }\n }\n\n if (!builder.isEmpty()) {\n LOGGER.error("The following compatibility issues were found with installed resource packs:\\n{}", builder);\n }\n }\n\n private static void emitProblem(StringBuilder builder, String description, String url, List resources) {\n builder.append("\\t- Problem found: ").append("\\n");\n builder.append("\\t\\t- Description:\\n\\t\\t\\t").append(description).append("\\n");\n builder.append("\\t\\t- More information: ").append(url).append("\\n");\n builder.append("\\t\\t- Files: ").append("\\n");\n\n for (String resource : resources) {\n builder.append("\\t\\t\\t- ").append(resource).append("\\n");\n }\n }\n\n @NotNull\n private static ResourcePackScanner.ScannedResourcePack scanResources(class_3262 resourcePack) {\n List ignoredShaders = determineIgnoredShaders(resourcePack);\n if (!ignoredShaders.isEmpty()) {\n LOGGER.warn(\n "Resource pack '{}' indicates the following shaders should be ignored: {}", getResourcePackName(resourcePack), String.join(", ", ignoredShaders)\n );\n }\n\n ArrayList unsupportedShaderPrograms = new ArrayList();\n ArrayList unsupportedShaderIncludes = new ArrayList();\n resourcePack.method_14408(class_3264.field_14188, "minecraft", 
"shaders", (identifier, supplier) -> {\n String path = identifier.method_12832();\n String name = path.substring(path.lastIndexOf(47) + 1);\n if (!ignoredShaders.contains(name)) {\n if (SHADER_PROGRAM_BLACKLIST.contains(name)) {\n unsupportedShaderPrograms.add(path);\n } else if (SHADER_INCLUDE_BLACKLIST.contains(name)) {\n unsupportedShaderIncludes.add(path);\n }\n }\n });\n return new ResourcePackScanner.ScannedResourcePack(resourcePack, unsupportedShaderPrograms, unsupportedShaderIncludes);\n }\n\n private static boolean isExternalResourcePack(class_3262 pack) {\n return pack instanceof class_3259 || pack instanceof class_3258;\n }\n\n private static String getResourcePackName(class_3262 pack) {\n String path = pack.method_14409();\n return path.startsWith("file/") ? path.substring(5) : path;\n }\n\n private static List determineIgnoredShaders(class_3262 resourcePack) {\n ArrayList ignoredShaders = new ArrayList();\n\n try {\n SodiumResourcePackMetadata meta = (SodiumResourcePackMetadata)resourcePack.method_14407(SodiumResourcePackMetadata.SERIALIZER);\n if (meta != null) {\n ignoredShaders.addAll(meta.ignoredShaders());\n }\n } catch (IOException var3) {\n LOGGER.error("Failed to load pack.mcmeta file for resource pack '{}'", resourcePack.method_14409());\n }\n\n return ignoredShaders;\n }\n\n private static void showConsoleMessage(String message, boolean translatable, MessageLevel messageLevel) {\n Console.instance().logMessage(messageLevel, message, translatable, 12.5);\n }\n\n private record ScannedResourcePack(class_3262 resourcePack, ArrayList shaderPrograms, ArrayList shaderIncludes) {\n }\n}\n {"url": "https://github.com/CaffeineMC/sodium/wiki/Resource-Packs"} low +11 3 fc7e089f517eab447befde28ce1b5b2438bc5a08131eb338adeaaacbdef7d6cf net/fabricmc/fabric/impl/base/event/EventFactoryImpl \N {} low +12 4 967302d02a45f4cfa29af6604a50d12097295caa1aabff33b1a3d8e7638f9962 net/caffeinemc/mods/sodium/client/platform/windows/WindowsFileVersion package 
net.caffeinemc.mods.sodium.client.platform.windows;\n\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionFixedFileInfoStruct;\nimport org.jetbrains.annotations.NotNull;\n\npublic record WindowsFileVersion(int x, int y, int z, int w) {\n @NotNull\n public static WindowsFileVersion fromFileVersion(VersionFixedFileInfoStruct fileVersion) {\n int x = fileVersion.getFileVersionMostSignificantBits() >>> 16 & 65535;\n int y = fileVersion.getFileVersionMostSignificantBits() >>> 0 & 65535;\n int z = fileVersion.getFileVersionLeastSignificantBits() >>> 16 & 65535;\n int w = fileVersion.getFileVersionLeastSignificantBits() >>> 0 & 65535;\n return new WindowsFileVersion(x, y, z, w);\n }\n\n public String toString() {\n return "%s.%s.%s.%s".formatted(this.x, this.y, this.z, this.w);\n }\n}\n {} high +13 5 7005a5b5d443c84d758eceb963351a357d93b5c15eeb19caf20aae99d65a623b net/caffeinemc/mods/sodium/desktop/LaunchWarn package net.caffeinemc.mods.sodium.desktop;\n\nimport java.awt.GraphicsEnvironment;\nimport java.io.IOException;\nimport javax.swing.JDialog;\nimport javax.swing.JOptionPane;\nimport javax.swing.UIManager;\nimport javax.swing.UnsupportedLookAndFeelException;\nimport net.caffeinemc.mods.sodium.desktop.utils.browse.BrowseUrlHandler;\n\npublic class LaunchWarn {\n private static final String HELP_URL = "https://link.caffeinemc.net/guides/sodium/installation";\n private static final String RICH_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

";\n private static final String FALLBACK_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

";\n private static final String FAILED_TO_BROWSE_MESSAGE = "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

";\n public static final String WINDOW_TITLE = "Sodium";\n\n public static void main(String[] args) {\n if (GraphicsEnvironment.isHeadless()) {\n showHeadlessError();\n } else {\n showGraphicalError();\n }\n }\n\n private static void showHeadlessError() {\n System.err\n .println(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

"\n );\n }\n\n private static void showGraphicalError() {\n trySetSystemLookAndFeel();\n trySetSystemFontPreferences();\n BrowseUrlHandler browseUrlHandler = BrowseUrlHandler.createImplementation();\n if (browseUrlHandler != null) {\n showRichGraphicalDialog(browseUrlHandler);\n } else {\n showFallbackGraphicalDialog();\n }\n\n System.exit(0);\n }\n\n private static void showRichGraphicalDialog(BrowseUrlHandler browseUrlHandler) {\n int selectedOption = showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

",\n "Sodium",\n 0,\n 1,\n new String[]{"Help", "Close"},\n 0\n );\n if (selectedOption == 0) {\n log("Opening URL: https://link.caffeinemc.net/guides/sodium/installation");\n\n try {\n browseUrlHandler.browseTo("https://link.caffeinemc.net/guides/sodium/installation");\n } catch (IOException var3) {\n log("Failed to open default web browser!", var3);\n showDialogBox(\n "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

",\n "Sodium",\n -1,\n 2,\n null,\n -1\n );\n }\n }\n }\n\n private static void showFallbackGraphicalDialog() {\n showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

",\n "Sodium",\n -1,\n 1,\n null,\n null\n );\n }\n\n private static int showDialogBox(String message, String title, int optionType, int messageType, String[] options, Object initialValue) {\n JOptionPane pane = new JOptionPane(message, messageType, optionType, null, options, initialValue);\n JDialog dialog = pane.createDialog(title);\n dialog.setVisible(true);\n Object selectedValue = pane.getValue();\n if (selectedValue == null) {\n return -1;\n } else if (options == null) {\n return selectedValue instanceof Integer ? (Integer)selectedValue : -1;\n } else {\n for (int counter = 0; counter < options.length; counter++) {\n String option = options[counter];\n if (option.equals(selectedValue)) {\n return counter;\n }\n }\n\n return -1;\n }\n }\n\n private static void trySetSystemLookAndFeel() {\n try {\n UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());\n } catch (UnsupportedLookAndFeelException | ReflectiveOperationException var1) {\n }\n }\n\n private static void trySetSystemFontPreferences() {\n System.setProperty("awt.useSystemAAFontSettings", "on");\n }\n\n private static void log(String message) {\n System.err.println(message);\n }\n\n private static void log(String message, Throwable exception) {\n System.err.println(message);\n exception.printStackTrace(System.err);\n }\n}\n {} low +\. 
+ + +-- +-- Name: delphi_report_issue_details_id_seq; Type: SEQUENCE SET; Schema: public; Owner: labrinth +-- + +SELECT pg_catalog.setval('public.delphi_report_issue_details_id_seq', 13, true); + + +-- +-- Name: delphi_report_issues_id_seq; Type: SEQUENCE SET; Schema: public; Owner: labrinth +-- + +SELECT pg_catalog.setval('public.delphi_report_issues_id_seq', 5, true); + + +-- +-- Name: delphi_reports_id_seq; Type: SEQUENCE SET; Schema: public; Owner: labrinth +-- + +SELECT pg_catalog.setval('public.delphi_reports_id_seq', 1, true); + + +-- +-- PostgreSQL database dump complete +-- + +\unrestrict RGysBmMc8KFBQ9AssusGyNPozUiB43hdmIPxlv5KSWbX7tdW7XVMPpMginvod9K diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index be8b698ad9..0f402cfb8f 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -66,12 +66,19 @@ impl DBDelphiReport { sqlx::Type, utoipa::ToSchema, )] -#[serde(rename_all = "UPPERCASE")] +// The canonical serialized form of this enum is the snake_case representation. +// We add `alias`es so we can deserialize it from how Delphi sends it, +// which follows the Java conventions of `SCREAMING_SNAKE_CASE`. 
+#[serde(rename_all = "snake_case")] #[sqlx(type_name = "delphi_severity", rename_all = "snake_case")] pub enum DelphiSeverity { + #[serde(alias = "LOW")] Low, + #[serde(alias = "MEDIUM")] Medium, + #[serde(alias = "HIGH")] High, + #[serde(alias = "SEVERE")] Severe, } From da866a4d7f000ecbbe26f01f9df43c2fb56ba3ab Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sat, 15 Nov 2025 23:42:09 +0000 Subject: [PATCH 026/104] Better Delphi logging --- ...b2249314019825c43f2b391847c782ce97fa.json} | 6 ++-- ...efd9e422151b683d9897a071ee0c4bac1cd4.json} | 5 +-- apps/labrinth/src/routes/internal/delphi.rs | 35 +++++++++---------- 3 files changed, 23 insertions(+), 23 deletions(-) rename apps/labrinth/.sqlx/{query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json => query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json} (71%) rename apps/labrinth/.sqlx/{query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json => query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json} (72%) diff --git a/apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json b/apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json similarity index 71% rename from apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json rename to apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json index de08e37ea9..176c40ec3c 100644 --- a/apps/labrinth/.sqlx/query-a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526.json +++ b/apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n dr.id AS \"report_id!: DelphiReportId\",\n f.id AS \"file_id!: DBFileId\",\n f.filename AS \"file_name!\",\n f.size AS \"file_size!\",\n m.id AS \"project_id!: DBProjectId\",\n t.id AS 
\"project_thread_id!: DBThreadId\",\n dr.created AS \"report_created!\",\n dr.severity AS \"report_severity!: DelphiSeverity\",\n dri.id AS \"issue_id!: DelphiReportIssueId\",\n dri.issue_type AS \"issue_type!\",\n dri.status AS \"issue_status!: DelphiReportIssueStatus\",\n -- maybe null\n drid.id AS \"issue_detail_id?: DelphiReportIssueDetailsId\",\n drid.internal_class_name AS \"issue_detail_class_name?\",\n drid.decompiled_source AS \"issue_detail_decompiled_source?\",\n drid.severity AS \"issue_detail_severity?: DelphiSeverity\"\n FROM delphi_reports dr\n\n -- fetch the project this report is for, its type, and thread\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n INNER JOIN categories c ON c.id = mc.joining_category_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- fetch report issues and details\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "query": "\n SELECT\n dr.id AS \"report_id!: DelphiReportId\",\n f.id AS \"file_id!: DBFileId\",\n f.filename AS \"file_name!\",\n f.size AS \"file_size!\",\n m.id AS \"project_id!: DBProjectId\",\n t.id AS \"project_thread_id!: DBThreadId\",\n dr.created AS \"report_created!\",\n dr.severity AS \"report_severity!: DelphiSeverity\",\n dri.id AS \"issue_id!: DelphiReportIssueId\",\n dri.issue_type AS \"issue_type!\",\n dri.status AS \"issue_status!: DelphiReportIssueStatus\",\n -- maybe null\n drid.id AS \"issue_detail_id?: DelphiReportIssueDetailsId\",\n drid.file_path 
AS \"issue_detail_file_path?\",\n drid.decompiled_source AS \"issue_detail_decompiled_source?\",\n drid.severity AS \"issue_detail_severity?: DelphiSeverity\"\n FROM delphi_reports dr\n\n -- fetch the project this report is for, its type, and thread\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n INNER JOIN categories c ON c.id = mc.joining_category_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- fetch report issues and details\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", "describe": { "columns": [ { @@ -88,7 +88,7 @@ }, { "ordinal": 12, - "name": "issue_detail_class_name?", + "name": "issue_detail_file_path?", "type_info": "Text" }, { @@ -140,5 +140,5 @@ true ] }, - "hash": "a06d0f682b1e576e634b02102ab6bcab9fcba55df818cab6e98721a324659526" + "hash": "900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa" } diff --git a/apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json b/apps/labrinth/.sqlx/query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json similarity index 72% rename from apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json rename to apps/labrinth/.sqlx/query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json index 8806320d11..a0ea4442ee 100644 --- a/apps/labrinth/.sqlx/query-26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8.json +++ 
b/apps/labrinth/.sqlx/query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO delphi_report_issue_details (issue_id, internal_class_name, decompiled_source, data, severity)\n VALUES ($1, $2, $3, $4, $5)\n RETURNING id\n ", + "query": "\n INSERT INTO delphi_report_issue_details (issue_id, key, file_path, decompiled_source, data, severity)\n VALUES ($1, $2, $3, $4, $5, $6)\n RETURNING id\n ", "describe": { "columns": [ { @@ -14,6 +14,7 @@ "Int8", "Text", "Text", + "Text", "Jsonb", { "Custom": { @@ -34,5 +35,5 @@ false ] }, - "hash": "26a43c77d3c1e875889141d209f01763de4c51bd16c100549261b11c5a4142b8" + "hash": "b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4" } diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 44268c4486..062fa3ecbe 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -6,7 +6,7 @@ use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT}; use serde::Deserialize; use sqlx::PgPool; use tokio::sync::Mutex; -use tracing::{info, warn}; +use tracing::info; use crate::{ auth::check_is_moderator_from_headers, @@ -72,7 +72,7 @@ struct DelphiReport { pub url: String, pub project_id: crate::models::ids::ProjectId, #[serde(rename = "version_id")] - pub _version_id: crate::models::ids::VersionId, + pub version_id: crate::models::ids::VersionId, pub file_id: crate::models::ids::FileId, /// A sequential, monotonically increasing version number for the /// Delphi version that generated this report. 
@@ -128,23 +128,23 @@ pub struct DelphiRunParameters { } #[post("ingest", guard = "admin_key_guard")] +#[tracing::instrument( + level = "info", + skip_all, + fields( + %report.url, + %report.file_id, + %report.project_id, + %report.version_id, + ) +)] async fn ingest_report( pool: web::Data, redis: web::Data, - report: web::Bytes, - // web::Json(report): web::Json, + web::Json(report): web::Json, ) -> Result<(), ApiError> { - info!( - "Json: {}", - serde_json::to_string_pretty( - &serde_json::from_slice::(&report).unwrap() - ) - .unwrap() - ); - let report = serde_json::from_slice::(&report).unwrap(); - if report.issues.is_empty() { - info!("No issues found for file {}", report.url); + info!("No issues found for file"); return Ok(()); } @@ -163,10 +163,9 @@ async fn ingest_report( .upsert(&mut transaction) .await?; - warn!( - "Delphi found {} issues in file {}", - report.issues.len(), - report.url + info!( + num_issues = %report.issues.len(), + "Delphi found issues in file", ); for (issue_type, issue_details) in report.issues { From b2fceb603d2ff3e4f9c1f424ff021fd987c92e6b Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sun, 16 Nov 2025 00:11:34 +0000 Subject: [PATCH 027/104] Improve utoipa for tech review routes --- apps/labrinth/src/main.rs | 17 ++++++++++++++++- .../routes/internal/moderation/tech_review.rs | 17 +++++++++++++---- 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/apps/labrinth/src/main.rs b/apps/labrinth/src/main.rs index f3cab342f0..9484d62224 100644 --- a/apps/labrinth/src/main.rs +++ b/apps/labrinth/src/main.rs @@ -21,6 +21,7 @@ use std::sync::Arc; use tracing::{Instrument, error, info, info_span}; use tracing_actix_web::TracingLogger; use utoipa::OpenApi; +use utoipa::openapi::security::{ApiKey, ApiKeyValue, SecurityScheme}; use utoipa_actix_web::AppExt; use utoipa_swagger_ui::SwaggerUi; @@ -262,9 +263,23 @@ async fn main() -> std::io::Result<()> { } #[derive(utoipa::OpenApi)] -#[openapi(info(title = "Labrinth"))] 
+#[openapi(info(title = "Labrinth"), modifiers(&SecurityAddon))] struct ApiDoc; +struct SecurityAddon; + +impl utoipa::Modify for SecurityAddon { + fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) { + let components = openapi.components.as_mut().unwrap(); + components.add_security_scheme( + "bearer_auth", + SecurityScheme::ApiKey(ApiKey::Header(ApiKeyValue::new( + "authorization", + ))), + ); + } +} + fn log_error(err: &actix_web::Error) { if err.as_response_error().status_code().is_client_error() { tracing::debug!( diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index b87e925cb2..4f5cd71021 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -34,8 +34,10 @@ pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { #[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct SearchProjects { #[serde(default = "default_limit")] + #[schema(default = 20)] pub limit: u64, #[serde(default)] + #[schema(default = 0)] pub page: u64, #[serde(default)] pub filter: SearchProjectsFilter, @@ -163,7 +165,10 @@ pub struct FileIssueDetail { } /// Searches all projects which are awaiting technical review. -#[utoipa::path] +#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = OK, body = inline(Vec))) +)] #[post("/search")] async fn search_projects( req: HttpRequest, @@ -209,7 +214,7 @@ async fn search_projects( let sort_by = search_req.sort_by.to_string(); let limit = search_req.limit.max(50); - let offset = limit * search_req.page; + let offset = limit.saturating_mul(search_req.page); let limit = i64::try_from(limit).wrap_request_err("limit cannot fit into `i64`")?; @@ -435,14 +440,18 @@ async fn search_projects( Ok(web::Json(projects)) } -/// Updates the state of a technical review issue. +/// See [`update_issue`]. 
#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct UpdateIssue { /// Status to set the issue to. pub status: DelphiReportIssueStatus, } -#[utoipa::path] +/// Updates the state of a technical review issue. +#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = NO_CONTENT)) +)] #[post("/issue/{id}")] async fn update_issue( req: HttpRequest, From 3ec0a50517a0e962f065c21d985cddac757d2f5b Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sun, 16 Nov 2025 11:27:57 +0000 Subject: [PATCH 028/104] Add more sorting options for tech review queue --- .../labrinth/src/routes/internal/moderation/tech_review.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 4f5cd71021..3683fc109d 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -73,6 +73,8 @@ pub struct SearchProjectsFilter { pub enum SearchProjectsSort { CreatedAsc, CreatedDesc, + SeverityAsc, + SeverityDesc, } impl fmt::Display for SearchProjectsSort { @@ -267,7 +269,10 @@ async fn search_projects( -- sorting ORDER BY CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC, - CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC + CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC, + CASE WHEN $2 = 'pending_status_first' THEN dri.status ELSE 'pending'::delphi_report_issue_status END ASC, + CASE WHEN $2 = 'severity_asc' THEN dr.severity ELSE 'low'::delphi_severity END ASC, + CASE WHEN $2 = 'severity_desc' THEN dr.severity ELSE 'low'::delphi_severity END DESC -- pagination LIMIT $3 From 49c4d37afca115fda429a1d03455e16204eced87 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sun, 16 Nov 2025 11:39:39 +0000 Subject: [PATCH 029/104] Oops join --- 
apps/labrinth/src/routes/internal/moderation/tech_review.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 3683fc109d..e6f5873296 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -254,9 +254,8 @@ async fn search_projects( INNER JOIN versions v ON v.id = f.version_id INNER JOIN mods m ON m.id = v.mod_id LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id - INNER JOIN categories c ON c.id = mc.joining_category_id + LEFT JOIN categories c ON c.id = mc.joining_category_id INNER JOIN threads t ON t.mod_id = m.id - -- fetch report issues and details INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id @@ -270,7 +269,6 @@ async fn search_projects( ORDER BY CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC, CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC, - CASE WHEN $2 = 'pending_status_first' THEN dri.status ELSE 'pending'::delphi_report_issue_status END ASC, CASE WHEN $2 = 'severity_asc' THEN dr.severity ELSE 'low'::delphi_severity END ASC, CASE WHEN $2 = 'severity_desc' THEN dr.severity ELSE 'low'::delphi_severity END DESC From 945d4bebdca9f24b88b0312f9e1d8b096b5f9b78 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Tue, 18 Nov 2025 18:43:48 +0000 Subject: [PATCH 030/104] New routes for fetching issues and reports --- ...7023c1becd6d30e74a6110c14c0049d156118.json | 22 + ...ad9db6ed1b17ae7002172b4172b959bd7710.json} | 6 +- ...2b2249314019825c43f2b391847c782ce97fa.json | 144 ------ ...8fe0dbf285367b8051070bedbb075c4006c8d.json | 37 ++ ...d00483f055231f07ad5f39ce716a25ec2c6ad.json | 22 + .../src/database/models/delphi_report_item.rs | 87 +--- apps/labrinth/src/routes/internal/delphi.rs | 61 ++- 
.../routes/internal/moderation/tech_review.rs | 452 +++++++++--------- apps/labrinth/src/util/error.rs | 7 + 9 files changed, 377 insertions(+), 461 deletions(-) create mode 100644 apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json rename apps/labrinth/.sqlx/{query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json => query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json} (66%) delete mode 100644 apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json create mode 100644 apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json create mode 100644 apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json diff --git a/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json b/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json new file mode 100644 index 0000000000..0b88c91314 --- /dev/null +++ b/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', f.id,\n 'version_id', v.id,\n 'project_id', v.mod_id,\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n WHERE 
dr.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118" +} diff --git a/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json b/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json similarity index 66% rename from apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json rename to apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json index 7e536646ef..8c1c8e1f3a 100644 --- a/apps/labrinth/.sqlx/query-c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff.json +++ b/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_details)\n FROM delphi_report_issue_details\n WHERE issue_id = delphi_report_issues.id\n ) AS \"details: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 
'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END DESC\n OFFSET $5\n LIMIT $4\n ", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_details)\n FROM delphi_report_issue_details\n WHERE issue_id = delphi_report_issues.id\n ) AS \"details: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END DESC\n OFFSET $5\n LIMIT $4\n ", "describe": { "columns": [ { @@ -73,7 +73,7 @@ }, { "ordinal": 9, - "name": "details: sqlx::types::Json>", + "name": "details: sqlx::types::Json>", "type_info": "Jsonb" }, { @@ -122,5 
+122,5 @@ true ] }, - "hash": "c0ef7e1f2ddc02604c14a94235afc053676964380451b3f461e3276f3a26bbff" + "hash": "749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710" } diff --git a/apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json b/apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json deleted file mode 100644 index 176c40ec3c..0000000000 --- a/apps/labrinth/.sqlx/query-900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa.json +++ /dev/null @@ -1,144 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n dr.id AS \"report_id!: DelphiReportId\",\n f.id AS \"file_id!: DBFileId\",\n f.filename AS \"file_name!\",\n f.size AS \"file_size!\",\n m.id AS \"project_id!: DBProjectId\",\n t.id AS \"project_thread_id!: DBThreadId\",\n dr.created AS \"report_created!\",\n dr.severity AS \"report_severity!: DelphiSeverity\",\n dri.id AS \"issue_id!: DelphiReportIssueId\",\n dri.issue_type AS \"issue_type!\",\n dri.status AS \"issue_status!: DelphiReportIssueStatus\",\n -- maybe null\n drid.id AS \"issue_detail_id?: DelphiReportIssueDetailsId\",\n drid.file_path AS \"issue_detail_file_path?\",\n drid.decompiled_source AS \"issue_detail_decompiled_source?\",\n drid.severity AS \"issue_detail_severity?: DelphiSeverity\"\n FROM delphi_reports dr\n\n -- fetch the project this report is for, its type, and thread\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n INNER JOIN categories c ON c.id = mc.joining_category_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- fetch report issues and details\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = 
ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "report_id!: DelphiReportId", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "file_id!: DBFileId", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "file_name!", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "file_size!", - "type_info": "Int4" - }, - { - "ordinal": 4, - "name": "project_id!: DBProjectId", - "type_info": "Int8" - }, - { - "ordinal": 5, - "name": "project_thread_id!: DBThreadId", - "type_info": "Int8" - }, - { - "ordinal": 6, - "name": "report_created!", - "type_info": "Timestamptz" - }, - { - "ordinal": 7, - "name": "report_severity!: DelphiSeverity", - "type_info": { - "Custom": { - "name": "delphi_severity", - "kind": { - "Enum": [ - "low", - "medium", - "high", - "severe" - ] - } - } - } - }, - { - "ordinal": 8, - "name": "issue_id!: DelphiReportIssueId", - "type_info": "Int8" - }, - { - "ordinal": 9, - "name": "issue_type!", - "type_info": "Text" - }, - { - "ordinal": 10, - "name": "issue_status!: DelphiReportIssueStatus", - "type_info": { - "Custom": { - "name": "delphi_report_issue_status", - "kind": { - "Enum": [ - "pending", - "safe", - "unsafe" - ] - } - } - } - }, - { - "ordinal": 11, - "name": "issue_detail_id?: DelphiReportIssueDetailsId", - "type_info": "Int8" - }, - { - "ordinal": 12, - "name": "issue_detail_file_path?", - "type_info": "Text" - }, - { - "ordinal": 13, - "name": "issue_detail_decompiled_source?", - "type_info": "Text" - }, - { - "ordinal": 14, - "name": "issue_detail_severity?: DelphiSeverity", - "type_info": { - "Custom": { - "name": "delphi_severity", - "kind": { - "Enum": [ - "low", - "medium", - "high", - "severe" - ] - } - } - } - } - ], - "parameters": { - "Left": [ - "Int4Array", - "Text", 
- "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - true, - true, - true, - true - ] - }, - "hash": "900f4266e8926fc0458709081eb2b2249314019825c43f2b391847c782ce97fa" -} diff --git a/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json b/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json new file mode 100644 index 0000000000..e27ec02523 --- /dev/null +++ b/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n m.id AS \"project_id: DBProjectId\",\n t.id AS \"project_thread_id: DBThreadId\",\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', f.id,\n 'version_id', v.id,\n 'project_id', v.mod_id,\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"report!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR 
c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN dr.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN dr.severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "project_id: DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_thread_id: DBThreadId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "report!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + null + ] + }, + "hash": "d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d" +} diff --git a/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json b/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json new file mode 100644 index 0000000000..b36f5b1197 --- /dev/null +++ b/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_report_issues dri\n LEFT JOIN delphi_report_issue_details drid ON dri.id = drid.issue_id\n WHERE dri.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad" +} diff 
--git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index 0f402cfb8f..8bcaf4d938 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -1,7 +1,6 @@ use std::{ collections::HashMap, fmt::{self, Display, Formatter}, - ops::Deref, }; use chrono::{DateTime, Utc}; @@ -149,7 +148,7 @@ impl Display for DelphiReportListOrder { pub struct DelphiReportIssueResult { pub issue: DBDelphiReportIssue, pub report: DBDelphiReport, - pub details: Vec, + pub details: Vec, pub project_id: Option, pub project_published: Option>, } @@ -195,7 +194,7 @@ impl DBDelphiReportIssue { json_array(SELECT to_jsonb(delphi_report_issue_details) FROM delphi_report_issue_details WHERE issue_id = delphi_report_issues.id - ) AS "details: sqlx::types::Json>", + ) AS "details: sqlx::types::Json>", versions.mod_id AS "project_id?", mods.published AS "project_published?" FROM delphi_report_issues INNER JOIN delphi_reports ON delphi_reports.id = report_id @@ -253,18 +252,32 @@ impl DBDelphiReportIssue { /// belongs to a specific issue, and an issue can have zero, one, or /// more details attached to it. (Some issues may be artifact-wide, /// or otherwise not really specific to any particular class.) -#[derive(Debug, Deserialize, Serialize)] -pub struct DBDelphiReportIssueDetails { +#[derive( + Debug, Clone, Deserialize, Serialize, utoipa::ToSchema, sqlx::FromRow, +)] +pub struct ReportIssueDetail { + /// ID of this issue detail. pub id: DelphiReportIssueDetailsId, - pub key: String, + /// ID of the issue this detail belongs to. pub issue_id: DelphiReportIssueId, + /// Opaque identifier for where this issue detail is located, relative to + /// the file scanned. + /// + /// This acts as a stable identifier for an issue detail, even across + /// different versions of the same file. 
+ pub key: String, + /// Name of the Java class path in which this issue was found. pub file_path: String, - pub decompiled_source: Option, - pub data: Json>, + /// Decompiled, pretty-printed source of the Java class. + pub decompiled_source: Option, + /// Extra detail-specific info for this detail. + #[sqlx(json)] + pub data: HashMap, + /// How important is this issue, as flagged by Delphi? pub severity: DelphiSeverity, } -impl DBDelphiReportIssueDetails { +impl ReportIssueDetail { pub async fn insert( &self, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, @@ -278,8 +291,8 @@ impl DBDelphiReportIssueDetails { self.issue_id as DelphiReportIssueId, self.key, self.file_path, - self.decompiled_source.as_ref().map(|decompiled_source| &decompiled_source.0), - &self.data as &Json>, + self.decompiled_source, + sqlx::types::Json(&self.data) as Json<&HashMap>, self.severity as DelphiSeverity, ) .fetch_one(&mut **transaction) @@ -299,55 +312,3 @@ impl DBDelphiReportIssueDetails { .rows_affected()) } } - -/// A [Java class name] with dots replaced by forward slashes (/). -/// -/// Because class names are usually the [binary names] passed to a classloader, top level interfaces and classes -/// have a binary name that matches its canonical, fully qualified name, such canonical names are prefixed by the -/// package path the class is in, and packages usually match the directory structure within a JAR for typical -/// classloaders, this usually (but not necessarily) corresponds to the path to the class file within its JAR. 
-/// -/// [Java class name]: https://docs.oracle.com/en/java/javase/21/docs/api/java.base/java/lang/Class.html#getName() -/// [binary names]: https://docs.oracle.com/javase/specs/jls/se21/html/jls-13.html#jls-13.1 -#[derive( - Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, sqlx::Type, -)] -#[serde(transparent)] -#[sqlx(transparent)] -pub struct InternalJavaClassName(String); - -impl Deref for InternalJavaClassName { - type Target = String; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl Display for InternalJavaClassName { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -/// The decompiled source code of a Java class. -#[derive( - Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, sqlx::Type, -)] -#[serde(transparent)] -#[sqlx(transparent)] -pub struct DecompiledJavaClassSource(String); - -impl Deref for DecompiledJavaClassSource { - type Target = String; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl Display for DecompiledJavaClassSource { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs index 062fa3ecbe..35cd1c61ae 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -2,6 +2,7 @@ use std::{collections::HashMap, fmt::Write, sync::LazyLock, time::Instant}; use actix_web::{HttpRequest, HttpResponse, get, post, put, web}; use chrono::{DateTime, Utc}; +use eyre::eyre; use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT}; use serde::Deserialize; use sqlx::PgPool; @@ -15,9 +16,8 @@ use crate::{ DBFileId, DelphiReportId, DelphiReportIssueDetailsId, DelphiReportIssueId, delphi_report_item::{ - DBDelphiReport, DBDelphiReportIssue, - DBDelphiReportIssueDetails, DecompiledJavaClassSource, - DelphiReportIssueStatus, DelphiReportListOrder, DelphiSeverity, + DBDelphiReport, 
DBDelphiReportIssue, DelphiReportIssueStatus, + DelphiReportListOrder, DelphiSeverity, ReportIssueDetail, }, }, redis::RedisPool, @@ -28,7 +28,7 @@ use crate::{ }, queue::session::AuthQueue, routes::ApiError, - util::guards::admin_key_guard, + util::{error::Context, guards::admin_key_guard}, }; pub fn config(cfg: &mut web::ServiceConfig) { @@ -62,7 +62,6 @@ static DELPHI_CLIENT: LazyLock = LazyLock::new(|| { struct DelphiReportIssueDetails { pub file: String, pub key: String, - pub decompiled_source: Option, pub data: HashMap, pub severity: DelphiSeverity, } @@ -79,6 +78,9 @@ struct DelphiReport { pub delphi_version: i32, pub issues: HashMap>, pub severity: DelphiSeverity, + /// Map of [`DelphiReportIssueDetails::file`] to the decompiled Java source + /// code. + pub decompiled_sources: HashMap>, } impl DelphiReport { @@ -93,12 +95,10 @@ impl DelphiReport { format!("⚠️ Suspicious traces found at {}", self.url); for (issue, trace) in &self.issues { - for DelphiReportIssueDetails { - file, - decompiled_source, - .. - } in trace - { + for DelphiReportIssueDetails { file, .. 
} in trace { + let decompiled_source = + self.decompiled_sources.get(file).and_then(|o| o.as_ref()); + write!( &mut message_header, "\n issue {issue} found at class `{file}`:\n```\n{}\n```", @@ -128,6 +128,25 @@ pub struct DelphiRunParameters { } #[post("ingest", guard = "admin_key_guard")] +async fn ingest_report( + pool: web::Data, + redis: web::Data, + web::Json(report): web::Json, +) -> Result<(), ApiError> { + // treat this as an internal error, since it's not a bad request from the + // client's side - it's *our* fault for handling the Delphi schema wrong + // this could happen if Delphi updates and Labrinth doesn't + let report = serde_json::from_value::(report.clone()) + .wrap_internal_err_with(|| { + eyre!( + "Delphi sent a response which does not match our schema\n\n{}", + serde_json::to_string_pretty(&report).unwrap() + ) + })?; + + ingest_report_deserialized(pool, redis, report).await +} + #[tracing::instrument( level = "info", skip_all, @@ -138,10 +157,10 @@ pub struct DelphiRunParameters { %report.version_id, ) )] -async fn ingest_report( +async fn ingest_report_deserialized( pool: web::Data, redis: web::Data, - web::Json(report): web::Json, + report: DelphiReport, ) -> Result<(), ApiError> { if report.issues.is_empty() { info!("No issues found for file"); @@ -179,20 +198,20 @@ async fn ingest_report( .await?; // This is required to handle the case where the same Delphi version is re-run on the same file - DBDelphiReportIssueDetails::remove_all_by_issue_id( - issue_id, - &mut transaction, - ) - .await?; + ReportIssueDetail::remove_all_by_issue_id(issue_id, &mut transaction) + .await?; for issue_detail in issue_details { - DBDelphiReportIssueDetails { + let decompiled_source = + report.decompiled_sources.get(&issue_detail.file); + + ReportIssueDetail { id: DelphiReportIssueDetailsId(0), // This will be set by the database issue_id, key: issue_detail.key, file_path: issue_detail.file, - decompiled_source: issue_detail.decompiled_source, - data: 
issue_detail.data.into(), + decompiled_source: decompiled_source.cloned().flatten(), + data: issue_detail.data, severity: issue_detail.severity, } .insert(&mut transaction) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index e6f5873296..fde0920dea 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -1,9 +1,7 @@ use std::{collections::HashMap, fmt}; -use actix_web::{HttpRequest, post, web}; +use actix_web::{HttpRequest, get, post, web}; use chrono::{DateTime, Utc}; -use eyre::eyre; -use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use tokio_stream::StreamExt; @@ -14,20 +12,29 @@ use crate::{ database::{ DBProject, models::{ - DBFileId, DBProjectId, DBThread, DBThreadId, DelphiReportId, - DelphiReportIssueDetailsId, DelphiReportIssueId, ProjectTypeId, - delphi_report_item::{DelphiReportIssueStatus, DelphiSeverity}, + DBFileId, DBProjectId, DBThread, DBThreadId, DBVersionId, + DelphiReportId, DelphiReportIssueId, ProjectTypeId, + delphi_report_item::{ + DelphiReportIssueStatus, DelphiSeverity, ReportIssueDetail, + }, }, redis::RedisPool, }, - models::{pats::Scopes, projects::Project}, + models::{ + ids::{ProjectId, ThreadId}, + pats::Scopes, + projects::Project, + }, queue::session::AuthQueue, routes::{ApiError, internal::moderation::Ownership}, util::error::Context, }; pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { - cfg.service(search_projects).service(update_issue); + cfg.service(search_projects) + .service(get_report) + .service(get_issue) + .service(update_issue); } /// Arguments for searching project technical reviews. 
@@ -86,47 +93,21 @@ impl fmt::Display for SearchProjectsSort { } #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] -pub struct ProjectReview { - pub project: Project, - pub project_owner: Ownership, - pub thread: DBThread, - pub reports: Vec, -} - -#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] -pub struct ProjectReport { - /// When this report was created. - pub created_at: DateTime, +pub struct FileReport { + /// ID of this report. + pub id: DelphiReportId, + /// ID of the file that was scanned. + pub file_id: DBFileId, + /// ID of the project version this report is for. + pub version_id: DBVersionId, + /// ID of the project this report is for. + pub project_id: DBProjectId, + /// When the report for this file was created. + pub created: DateTime, /// Why this project was flagged. pub flag_reason: FlagReason, /// According to this report, how likely is the project malicious? pub severity: DelphiSeverity, - /// What files were flagged in this review. - pub files: Vec, -} - -/// Why a project was flagged for technical review. -#[derive( - Debug, - Clone, - Copy, - PartialEq, - Eq, - Hash, - Serialize, - Deserialize, - utoipa::ToSchema, -)] -#[serde(rename_all = "snake_case")] -pub enum FlagReason { - /// Delphi anti-malware scanner flagged a file in the project. - Delphi, -} - -/// Details of a JAR file which was flagged for technical review, as part of -/// a [`ProjectReview`]. -#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] -pub struct FileReview { /// Name of the flagged file. pub file_name: String, /// Size of the flagged file, in bytes. @@ -142,34 +123,170 @@ pub struct FileReview { #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] pub struct FileIssue { /// ID of the issue. - pub issue_id: DelphiReportIssueId, + pub id: DelphiReportIssueId, + /// ID of the report this issue is a part of. + pub report_id: DelphiReportId, /// Delphi-determined kind of issue that this is, e.g. `OBFUSCATED_NAMES`. 
/// /// Labrinth does not know the full set of kinds of issues, so this is kept /// as a string. - pub kind: String, + pub issue_type: String, /// Is this issue valid (malicious) or a false positive (safe)? pub status: DelphiReportIssueStatus, /// Details of why this issue might have been raised, such as what file it /// was found in. - pub details: Vec, + pub details: Vec, } -/// Occurrence of a [`FileIssue`] in a specific class in a scanned JAR file. +/// Why a project was flagged for technical review. +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + utoipa::ToSchema, +)] +#[serde(rename_all = "snake_case")] +pub enum FlagReason { + /// Delphi anti-malware scanner flagged a file in the project. + Delphi, +} + +/// Get info on an issue in a Delphi report. +#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = OK, body = inline(FileIssue))) +)] +#[get("/issue/{issue_id}")] +async fn get_issue( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + path: web::Path<(DelphiReportIssueId,)>, +) -> Result, ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let (issue_id,) = path.into_inner(); + let row = sqlx::query!( + r#" + SELECT + to_jsonb(dri) + || jsonb_build_object( + 'details', json_array( + SELECT to_jsonb(drid) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id + ) + ) AS "data!: sqlx::types::Json" + FROM delphi_report_issues dri + LEFT JOIN delphi_report_issue_details drid ON dri.id = drid.issue_id + WHERE dri.id = $1 + "#, + issue_id as DelphiReportIssueId, + ) + .fetch_optional(&**pool) + .await + .wrap_internal_err("failed to fetch issue from database")? + .ok_or(ApiError::NotFound)?; + + Ok(web::Json(row.data.0)) +} + +/// Get info on a specific report for a project. 
+#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = OK, body = inline(FileReport))) +)] +#[get("/report/{id}")] +async fn get_report( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + path: web::Path<(DelphiReportId,)>, +) -> Result, ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let (report_id,) = path.into_inner(); + + let row = sqlx::query!( + r#" + SELECT + to_jsonb(dr) + || jsonb_build_object( + 'file_id', f.id, + 'version_id', v.id, + 'project_id', v.mod_id, + 'file_name', f.filename, + 'file_size', f.size, + 'flag_reason', 'delphi', + 'issues', json_array( + SELECT + to_jsonb(dri) + || jsonb_build_object( + 'details', json_array( + SELECT to_jsonb(drid) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id + ) + ) + FROM delphi_report_issues dri + WHERE dri.report_id = dr.id + ) + ) AS "data!: sqlx::types::Json" + FROM delphi_reports dr + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id + LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id + WHERE dr.id = $1 + "#, + report_id as DelphiReportId, + ) + .fetch_optional(&**pool) + .await + .wrap_internal_err("failed to fetch report from database")? + .ok_or(ApiError::NotFound)?; + + Ok(web::Json(row.data.0)) +} + +/// See [`search_projects`]. #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] -pub struct FileIssueDetail { - /// Name of the Java class path in which this issue was found. - pub file_path: String, - /// Decompiled, pretty-printed source of the Java class. - pub decompiled_source: String, - /// How important is this issue, as flagged by Delphi? - pub severity: DelphiSeverity, +pub struct SearchResponse { + /// List of reports returned. 
+ pub reports: Vec, + /// Fetched project information for projects in the returned reports. + pub projects: HashMap, + /// Fetched moderation threads for projects in the returned reports. + pub threads: HashMap, + /// Fetched owner information for projects. + pub ownership: HashMap, } /// Searches all projects which are awaiting technical review. #[utoipa::path( security(("bearer_auth" = [])), - responses((status = OK, body = inline(Vec))) + responses((status = OK, body = inline(Vec))) )] #[post("/search")] async fn search_projects( @@ -178,33 +295,7 @@ async fn search_projects( redis: web::Data, session_queue: web::Data, search_req: web::Json, -) -> Result>, ApiError> { - #[derive(Debug)] - struct ProjectRecord { - reports: IndexMap, - } - - #[derive(Debug)] - struct ReportRecord { - created: DateTime, - severity: DelphiSeverity, - files: IndexMap, - } - - #[derive(Debug)] - struct FileRecord { - file_name: String, - file_size: i32, - issues: IndexMap, - } - - #[derive(Debug)] - struct IssueRecord { - issue_type: String, - status: DelphiReportIssueStatus, - details: IndexMap, - } - +) -> Result, ApiError> { check_is_moderator_from_headers( &req, &**pool, @@ -223,44 +314,55 @@ async fn search_projects( let offset = i64::try_from(offset) .wrap_request_err("offset cannot fit into `i64`")?; - let mut project_records = IndexMap::::new(); + let mut reports = Vec::::new(); let mut project_ids = Vec::::new(); let mut thread_ids = Vec::::new(); - let _file_ids = Vec::::new(); - let mut rows = sqlx::query!( r#" SELECT - dr.id AS "report_id!: DelphiReportId", - f.id AS "file_id!: DBFileId", - f.filename AS "file_name!", - f.size AS "file_size!", - m.id AS "project_id!: DBProjectId", - t.id AS "project_thread_id!: DBThreadId", - dr.created AS "report_created!", - dr.severity AS "report_severity!: DelphiSeverity", - dri.id AS "issue_id!: DelphiReportIssueId", - dri.issue_type AS "issue_type!", - dri.status AS "issue_status!: DelphiReportIssueStatus", - -- maybe null - drid.id 
AS "issue_detail_id?: DelphiReportIssueDetailsId", - drid.file_path AS "issue_detail_file_path?", - drid.decompiled_source AS "issue_detail_decompiled_source?", - drid.severity AS "issue_detail_severity?: DelphiSeverity" + m.id AS "project_id: DBProjectId", + t.id AS "project_thread_id: DBThreadId", + to_jsonb(dr) + || jsonb_build_object( + 'file_id', f.id, + 'version_id', v.id, + 'project_id', v.mod_id, + 'file_name', f.filename, + 'file_size', f.size, + 'flag_reason', 'delphi', + 'issues', json_array( + SELECT + to_jsonb(dri) + || jsonb_build_object( + 'details', json_array( + SELECT jsonb_build_object( + 'id', drid.id, + 'issue_id', drid.issue_id, + 'key', drid.key, + 'file_path', drid.file_path, + -- ignore `decompiled_source` + 'data', drid.data, + 'severity', drid.severity + ) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id + ) + ) + FROM delphi_report_issues dri + WHERE dri.report_id = dr.id + ) + ) AS "report!: sqlx::types::Json" FROM delphi_reports dr - - -- fetch the project this report is for, its type, and thread INNER JOIN files f ON f.id = dr.file_id INNER JOIN versions v ON v.id = f.version_id INNER JOIN mods m ON m.id = v.mod_id - LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id - LEFT JOIN categories c ON c.id = mc.joining_category_id INNER JOIN threads t ON t.mod_id = m.id - -- fetch report issues and details INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id -- filtering + LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id + LEFT JOIN categories c ON c.id = mc.joining_category_id WHERE -- project type (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[])) @@ -294,75 +396,24 @@ async fn search_projects( .transpose() .wrap_internal_err("failed to fetch reports")? 
{ + reports.push(row.report.0); project_ids.push(row.project_id); thread_ids.push(row.project_thread_id); - - let project = - project_records.entry(row.project_id).or_insert_with(|| { - ProjectRecord { - reports: IndexMap::new(), - } - }); - let report = - project.reports.entry(row.report_id).or_insert_with(|| { - ReportRecord { - created: row.report_created, - severity: row.report_severity, - files: IndexMap::new(), - } - }); - let file = - report - .files - .entry(row.file_id) - .or_insert_with(|| FileRecord { - file_name: row.file_name, - file_size: row.file_size, - issues: IndexMap::new(), - }); - let issue = - file.issues - .entry(row.issue_id) - .or_insert_with(|| IssueRecord { - issue_type: row.issue_type, - status: row.issue_status, - details: IndexMap::new(), - }); - - let ( - Some(issue_detail_id), - Some(file_path), - Some(decompiled_source), - Some(severity), - ) = ( - row.issue_detail_id, - row.issue_detail_file_path, - row.issue_detail_decompiled_source, - row.issue_detail_severity, - ) - else { - continue; - }; - issue.details.entry(issue_detail_id).or_insert_with(|| { - FileIssueDetail { - file_path, - decompiled_source, - severity, - } - }); } let projects = DBProject::get_many_ids(&project_ids, &**pool, &redis) .await .wrap_internal_err("failed to fetch projects")? .into_iter() - .map(|project| (project.inner.id, Project::from(project))) + .map(|project| { + (ProjectId::from(project.inner.id), Project::from(project)) + }) .collect::>(); let threads = DBThread::get_many(&thread_ids, &**pool) .await .wrap_internal_err("failed to fetch threads")? 
.into_iter() - .map(|thread| (thread.id, thread)) + .map(|thread| (ThreadId::from(thread.id), thread)) .collect::>(); let project_list: Vec = projects.values().cloned().collect(); @@ -370,77 +421,18 @@ async fn search_projects( let ownerships = get_projects_ownership(&project_list, &pool, &redis) .await .wrap_internal_err("failed to fetch project ownerships")?; - - let ownership_map = projects + let ownership = projects .keys() .copied() .zip(ownerships) .collect::>(); - let projects = project_records - .into_iter() - .map(|(project_id, project_record)| { - let project = - projects.get(&project_id).wrap_internal_err_with(|| { - eyre!("no fetched project with ID {project_id:?}") - })?; - let thread = threads - .get(&DBThreadId::from(project.thread_id)) - .wrap_internal_err_with(|| { - eyre!("no fetched thread with ID {:?}", project.thread_id) - })?; - Ok::<_, ApiError>(ProjectReview { - project: project.clone(), - project_owner: ownership_map - .get(&project_id) - .cloned() - .wrap_internal_err_with(|| { - eyre!("no owner for {project_id:?}") - })?, - thread: thread.clone(), - reports: project_record - .reports - .into_iter() - .map(|(_, report_record)| ProjectReport { - created_at: report_record.created, - flag_reason: FlagReason::Delphi, - severity: report_record.severity, - files: report_record - .files - .into_iter() - .map(|(_, file)| FileReview { - file_name: file.file_name, - file_size: file.file_size, - issues: file - .issues - .into_iter() - .map(|(issue_id, issue)| FileIssue { - issue_id, - kind: issue.issue_type.clone(), - status: issue.status, - details: issue - .details - .into_iter() - .map(|(_, detail)| { - FileIssueDetail { - file_path: detail.file_path, - decompiled_source: detail - .decompiled_source, - severity: detail.severity, - } - }) - .collect(), - }) - .collect(), - }) - .collect(), - }) - .collect(), - }) - }) - .collect::, _>>()?; - - Ok(web::Json(projects)) + Ok(web::Json(SearchResponse { + reports, + projects, + threads, + ownership, + 
})) } /// See [`update_issue`]. diff --git a/apps/labrinth/src/util/error.rs b/apps/labrinth/src/util/error.rs index 5f9ff343c2..cba9c21971 100644 --- a/apps/labrinth/src/util/error.rs +++ b/apps/labrinth/src/util/error.rs @@ -19,6 +19,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`eyre::Report`] with the given message. #[inline] + #[track_caller] fn wrap_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -28,6 +29,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Internal`] using the closure to create the message. #[inline] + #[track_caller] fn wrap_internal_err_with( self, f: impl FnOnce() -> D, @@ -40,6 +42,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Internal`] with the given message. #[inline] + #[track_caller] fn wrap_internal_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -49,6 +52,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Request`] using the closure to create the message. #[inline] + #[track_caller] fn wrap_request_err_with( self, f: impl FnOnce() -> D, @@ -61,6 +65,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Request`] with the given message. #[inline] + #[track_caller] fn wrap_request_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -70,6 +75,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Auth`] using the closure to create the message. #[inline] + #[track_caller] fn wrap_auth_err_with(self, f: impl FnOnce() -> D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -79,6 +85,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Auth`] with the given message. 
#[inline] + #[track_caller] fn wrap_auth_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, From 18aca82de2e59107fa662a98df203b8976426c5e Mon Sep 17 00:00:00 2001 From: aecsocket Date: Wed, 19 Nov 2025 12:12:09 +0000 Subject: [PATCH 031/104] Fix which kind of ID is returned in tech review endpoints --- .../routes/internal/moderation/tech_review.rs | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index fde0920dea..8b85f4d795 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -12,8 +12,8 @@ use crate::{ database::{ DBProject, models::{ - DBFileId, DBProjectId, DBThread, DBThreadId, DBVersionId, - DelphiReportId, DelphiReportIssueId, ProjectTypeId, + DBProjectId, DBThread, DBThreadId, DelphiReportId, + DelphiReportIssueId, ProjectTypeId, delphi_report_item::{ DelphiReportIssueStatus, DelphiSeverity, ReportIssueDetail, }, @@ -21,7 +21,7 @@ use crate::{ redis::RedisPool, }, models::{ - ids::{ProjectId, ThreadId}, + ids::{FileId, ProjectId, ThreadId, VersionId}, pats::Scopes, projects::Project, }, @@ -97,11 +97,11 @@ pub struct FileReport { /// ID of this report. pub id: DelphiReportId, /// ID of the file that was scanned. - pub file_id: DBFileId, + pub file_id: FileId, /// ID of the project version this report is for. - pub version_id: DBVersionId, + pub version_id: VersionId, /// ID of the project this report is for. - pub project_id: DBProjectId, + pub project_id: ProjectId, /// When the report for this file was created. pub created: DateTime, /// Why this project was flagged. 
@@ -233,9 +233,9 @@ async fn get_report( SELECT to_jsonb(dr) || jsonb_build_object( - 'file_id', f.id, - 'version_id', v.id, - 'project_id', v.mod_id, + 'file_id', to_base62(f.id), + 'version_id', to_base62(v.id), + 'project_id', to_base62(v.mod_id), 'file_name', f.filename, 'file_size', f.size, 'flag_reason', 'delphi', @@ -324,9 +324,9 @@ async fn search_projects( t.id AS "project_thread_id: DBThreadId", to_jsonb(dr) || jsonb_build_object( - 'file_id', f.id, - 'version_id', v.id, - 'project_id', v.mod_id, + 'file_id', to_base62(f.id), + 'version_id', to_base62(v.id), + 'project_id', to_base62(v.mod_id), 'file_name', f.filename, 'file_size', f.size, 'flag_reason', 'delphi', From 4c66b9f21f79d6568e70e8fb960a7b64ec0f57aa Mon Sep 17 00:00:00 2001 From: aecsocket Date: Wed, 19 Nov 2025 21:04:13 +0000 Subject: [PATCH 032/104] Deduplicate tech review report rows --- .../routes/internal/moderation/tech_review.rs | 109 +++++++++--------- 1 file changed, 57 insertions(+), 52 deletions(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 8b85f4d795..3b40e5fca6 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -191,7 +191,6 @@ async fn get_issue( ) ) AS "data!: sqlx::types::Json" FROM delphi_report_issues dri - LEFT JOIN delphi_report_issue_details drid ON dri.id = drid.issue_id WHERE dri.id = $1 "#, issue_id as DelphiReportIssueId, @@ -230,7 +229,7 @@ async fn get_report( let row = sqlx::query!( r#" - SELECT + SELECT DISTINCT ON (dr.id) to_jsonb(dr) || jsonb_build_object( 'file_id', to_base62(f.id), @@ -256,8 +255,6 @@ async fn get_report( FROM delphi_reports dr INNER JOIN files f ON f.id = dr.file_id INNER JOIN versions v ON v.id = f.version_id - INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id - LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id WHERE dr.id = $1 
"#, report_id as DelphiReportId, @@ -320,59 +317,67 @@ async fn search_projects( let mut rows = sqlx::query!( r#" SELECT - m.id AS "project_id: DBProjectId", - t.id AS "project_thread_id: DBThreadId", - to_jsonb(dr) - || jsonb_build_object( - 'file_id', to_base62(f.id), - 'version_id', to_base62(v.id), - 'project_id', to_base62(v.mod_id), - 'file_name', f.filename, - 'file_size', f.size, - 'flag_reason', 'delphi', - 'issues', json_array( - SELECT - to_jsonb(dri) - || jsonb_build_object( - 'details', json_array( - SELECT jsonb_build_object( - 'id', drid.id, - 'issue_id', drid.issue_id, - 'key', drid.key, - 'file_path', drid.file_path, - -- ignore `decompiled_source` - 'data', drid.data, - 'severity', drid.severity + project_id AS "project_id: DBProjectId", + project_thread_id AS "project_thread_id: DBThreadId", + report AS "report!: sqlx::types::Json" + FROM ( + SELECT DISTINCT ON (dr.id) + dr.id AS report_id, + dr.created AS report_created, + dr.severity AS report_severity, + m.id AS project_id, + t.id AS project_thread_id, + + to_jsonb(dr) + || jsonb_build_object( + 'file_id', to_base62(f.id), + 'version_id', to_base62(v.id), + 'project_id', to_base62(v.mod_id), + 'file_name', f.filename, + 'file_size', f.size, + 'flag_reason', 'delphi', + 'issues', json_array( + SELECT + to_jsonb(dri) + || jsonb_build_object( + 'details', json_array( + SELECT jsonb_build_object( + 'id', drid.id, + 'issue_id', drid.issue_id, + 'key', drid.key, + 'file_path', drid.file_path, + -- ignore `decompiled_source` + 'data', drid.data, + 'severity', drid.severity + ) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id ) - FROM delphi_report_issue_details drid - WHERE drid.issue_id = dri.id ) - ) - FROM delphi_report_issues dri - WHERE dri.report_id = dr.id - ) - ) AS "report!: sqlx::types::Json" - FROM delphi_reports dr - INNER JOIN files f ON f.id = dr.file_id - INNER JOIN versions v ON v.id = f.version_id - INNER JOIN mods m ON m.id = v.mod_id - INNER JOIN threads t ON 
t.mod_id = m.id - INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id - LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id - - -- filtering - LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id - LEFT JOIN categories c ON c.id = mc.joining_category_id - WHERE - -- project type - (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[])) + FROM delphi_report_issues dri + WHERE dri.report_id = dr.id + ) + ) AS report + FROM delphi_reports dr + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + INNER JOIN mods m ON m.id = v.mod_id + INNER JOIN threads t ON t.mod_id = m.id + + -- filtering + LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id + LEFT JOIN categories c ON c.id = mc.joining_category_id + WHERE + -- project type + (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[])) + ) t -- sorting ORDER BY - CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC, - CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC, - CASE WHEN $2 = 'severity_asc' THEN dr.severity ELSE 'low'::delphi_severity END ASC, - CASE WHEN $2 = 'severity_desc' THEN dr.severity ELSE 'low'::delphi_severity END DESC + CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC, + CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC, + CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC, + CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC -- pagination LIMIT $3 From a59b18c6e2fffa4ecf3a178d9a2ebc630cbc0ec1 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Wed, 19 Nov 2025 22:20:56 +0000 Subject: [PATCH 033/104] Reduce info sent for projects --- .../routes/internal/moderation/tech_review.rs | 30 +++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs 
b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 3b40e5fca6..c1e171044d 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -273,13 +273,26 @@ pub struct SearchResponse { /// List of reports returned. pub reports: Vec, /// Fetched project information for projects in the returned reports. - pub projects: HashMap, + pub projects: HashMap, /// Fetched moderation threads for projects in the returned reports. pub threads: HashMap, /// Fetched owner information for projects. pub ownership: HashMap, } +/// Limited set of project information returned by [`search_projects`]. +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ProjectModerationInfo { + /// Project ID. + pub id: ProjectId, + /// Project name. + pub name: String, + /// The aggregated project types of the versions of this project + pub project_types: Vec, + /// The URL of the icon of the project + pub icon_url: Option, +} + /// Searches all projects which are awaiting technical review. 
#[utoipa::path( security(("bearer_auth" = [])), @@ -434,7 +447,20 @@ async fn search_projects( Ok(web::Json(SearchResponse { reports, - projects, + projects: projects + .into_iter() + .map(|(id, project)| { + ( + id, + ProjectModerationInfo { + id, + name: project.name, + project_types: project.project_types, + icon_url: project.icon_url, + }, + ) + }) + .collect(), threads, ownership, })) From c807076bfb70cd8f1bfbfe09dbe3e0643a36fbec Mon Sep 17 00:00:00 2001 From: aecsocket Date: Fri, 21 Nov 2025 16:28:50 +0000 Subject: [PATCH 034/104] Fetch more thread info --- .../routes/internal/moderation/tech_review.rs | 31 +++++++++++++++---- docker-compose.yml | 2 +- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index c1e171044d..270cceee0d 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -12,7 +12,7 @@ use crate::{ database::{ DBProject, models::{ - DBProjectId, DBThread, DBThreadId, DelphiReportId, + DBProjectId, DBThread, DBThreadId, DBUser, DelphiReportId, DelphiReportIssueId, ProjectTypeId, delphi_report_item::{ DelphiReportIssueStatus, DelphiSeverity, ReportIssueDetail, @@ -24,6 +24,7 @@ use crate::{ ids::{FileId, ProjectId, ThreadId, VersionId}, pats::Scopes, projects::Project, + threads::Thread, }, queue::session::AuthQueue, routes::{ApiError, internal::moderation::Ownership}, @@ -275,7 +276,7 @@ pub struct SearchResponse { /// Fetched project information for projects in the returned reports. pub projects: HashMap, /// Fetched moderation threads for projects in the returned reports. - pub threads: HashMap, + pub threads: HashMap, /// Fetched owner information for projects. pub ownership: HashMap, } @@ -285,6 +286,8 @@ pub struct ProjectModerationInfo { /// Project ID. 
pub id: ProjectId, + /// Project moderation thread ID. + pub thread_id: ThreadId, /// Project name. pub name: String, /// The aggregated project typos of the versions of this project @@ -306,7 +309,7 @@ async fn search_projects( session_queue: web::Data, search_req: web::Json, ) -> Result, ApiError> { - check_is_moderator_from_headers( + let user = check_is_moderator_from_headers( &req, &**pool, &redis, @@ -427,11 +430,26 @@ async fn search_projects( (ProjectId::from(project.inner.id), Project::from(project)) }) .collect::>(); - let threads = DBThread::get_many(&thread_ids, &**pool) + let db_threads = DBThread::get_many(&thread_ids, &**pool) .await - .wrap_internal_err("failed to fetch threads")? + .wrap_internal_err("failed to fetch threads")?; + let thread_author_ids = db_threads + .iter() + .flat_map(|thread| thread.members.clone()) + .collect::>(); + let thread_authors = + DBUser::get_many_ids(&thread_author_ids, &**pool, &redis) + .await + .wrap_internal_err("failed to fetch thread authors")? 
+ .into_iter() + .map(From::from) + .collect::>(); + let threads = db_threads .into_iter() - .map(|thread| (ThreadId::from(thread.id), thread)) + .map(|thread| { + let thread = Thread::from(thread, thread_authors.clone(), &user); + (thread.id, thread) + }) .collect::>(); let project_list: Vec = projects.values().cloned().collect(); @@ -454,6 +472,7 @@ async fn search_projects( id, ProjectModerationInfo { id, + thread_id: project.thread_id, name: project.name, project_types: project.project_types, icon_url: project.icon_url, diff --git a/docker-compose.yml b/docker-compose.yml index 569278e56a..777eeef4d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,7 +12,7 @@ services: POSTGRES_PASSWORD: labrinth POSTGRES_HOST_AUTH_METHOD: trust healthcheck: - test: ['CMD', 'pg_isready'] + test: ['CMD', 'pg_isready', '-U', 'labrinth'] interval: 3s timeout: 5s retries: 3 From 560a6e0d2b0cc939c028a32ea6717b7acf8f4cd2 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sun, 23 Nov 2025 19:39:44 +0000 Subject: [PATCH 035/104] Address PR comments --- apps/labrinth/src/file_hosting/mock.rs | 4 +++- apps/labrinth/src/routes/internal/delphi.rs | 10 +--------- .../src/routes/internal/moderation/tech_review.rs | 4 ++++ 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/apps/labrinth/src/file_hosting/mock.rs b/apps/labrinth/src/file_hosting/mock.rs index c04f92420f..2565bd287c 100644 --- a/apps/labrinth/src/file_hosting/mock.rs +++ b/apps/labrinth/src/file_hosting/mock.rs @@ -27,7 +27,9 @@ impl FileHost for MockHost { file_publicity: FileHostPublicity, file_bytes: Bytes, ) -> Result { - let path = get_file_path(file_name, file_publicity); + let file_name = urlencoding::decode(file_name) + .map_err(|_| FileHostingError::InvalidFilename)?; + let path = get_file_path(&file_name, file_publicity); std::fs::create_dir_all( path.parent().ok_or(FileHostingError::InvalidFilename)?, )?; diff --git a/apps/labrinth/src/routes/internal/delphi.rs 
b/apps/labrinth/src/routes/internal/delphi.rs index 35cd1c61ae..0c690634ac 100644 --- a/apps/labrinth/src/routes/internal/delphi.rs +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -249,18 +249,10 @@ pub async fn run( run_parameters.file_id.0 ); - // fix for local file paths - // TODO: should we fix this upstream in whatever inserts the files row? - let url = if file_data.url.starts_with("/") { - format!("file://{}", file_data.url) - } else { - file_data.url - }; - DELPHI_CLIENT .post(dotenvy::var("DELPHI_URL")?) .json(&serde_json::json!({ - "url": url, + "url": file_data.url, "project_id": ProjectId(file_data.project_id.0 as u64), "version_id": VersionId(file_data.version_id.0 as u64), "file_id": run_parameters.file_id, diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 270cceee0d..eb2d82a854 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -113,6 +113,8 @@ pub struct FileReport { pub file_name: String, /// Size of the flagged file, in bytes. pub file_size: i32, + /// URL to download the flagged file. + pub download_url: String, /// What issues appeared in the file. 
pub issues: Vec, } @@ -239,6 +241,7 @@ async fn get_report( 'file_name', f.filename, 'file_size', f.size, 'flag_reason', 'delphi', + 'download_url', f.url, 'issues', json_array( SELECT to_jsonb(dri) @@ -352,6 +355,7 @@ async fn search_projects( 'file_name', f.filename, 'file_size', f.size, 'flag_reason', 'delphi', + 'download_url', f.url, 'issues', json_array( SELECT to_jsonb(dri) From 081f094ae76493c49be36bb40a8312ea3d6f79d0 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Tue, 25 Nov 2025 12:17:43 +0000 Subject: [PATCH 036/104] fix ci --- ...bde760fc9d827acd0a9bc203ce8aa0ad2157c.json | 22 +++++++++++ ...b4561283c2c0a8491fc41ebbd7dd42584d443.json | 37 +++++++++++++++++++ ...7023c1becd6d30e74a6110c14c0049d156118.json | 22 ----------- ...8fe0dbf285367b8051070bedbb075c4006c8d.json | 37 ------------------- ...a07a9bedfa885b171e8141930225c6adaab1.json} | 4 +- apps/labrinth/Cargo.toml | 3 +- 6 files changed, 62 insertions(+), 63 deletions(-) create mode 100644 apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json create mode 100644 apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json delete mode 100644 apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json delete mode 100644 apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json rename apps/labrinth/.sqlx/{query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json => query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json} (76%) diff --git a/apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json b/apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json new file mode 100644 index 0000000000..0fcaf0889e --- /dev/null +++ b/apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json @@ -0,0 +1,22 @@ +{ + 
"db_name": "PostgreSQL", + "query": "\n SELECT DISTINCT ON (dr.id)\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'download_url', f.url,\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n WHERE dr.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c" +} diff --git a/apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json b/apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json new file mode 100644 index 0000000000..7740063567 --- /dev/null +++ b/apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n project_id AS \"project_id: DBProjectId\",\n project_thread_id AS \"project_thread_id: DBThreadId\",\n report AS \"report!: sqlx::types::Json\"\n FROM (\n SELECT DISTINCT ON (dr.id)\n dr.id AS report_id,\n dr.created AS report_created,\n dr.severity AS report_severity,\n m.id AS project_id,\n t.id AS project_thread_id,\n\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 
'flag_reason', 'delphi',\n 'download_url', f.url,\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS report\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n ) t\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "project_id: DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_thread_id: DBThreadId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "report!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + null + ] + }, + "hash": "39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443" +} diff --git a/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json 
b/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json deleted file mode 100644 index 0b88c91314..0000000000 --- a/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', f.id,\n 'version_id', v.id,\n 'project_id', v.mod_id,\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n WHERE dr.id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "data!: sqlx::types::Json", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": "7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118" -} diff --git a/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json b/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json deleted file mode 100644 index e27ec02523..0000000000 --- a/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n m.id AS \"project_id: DBProjectId\",\n t.id AS \"project_thread_id: DBThreadId\",\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', f.id,\n 'version_id', v.id,\n 
'project_id', v.mod_id,\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"report!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN dr.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN dr.severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "project_id: DBProjectId", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "project_thread_id: DBThreadId", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "report!: sqlx::types::Json", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int4Array", - "Text", - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - null - ] - }, - "hash": 
"d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d" -} diff --git a/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json b/apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json similarity index 76% rename from apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json rename to apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json index b36f5b1197..46f684c829 100644 --- a/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json +++ b/apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_report_issues dri\n LEFT JOIN delphi_report_issue_details drid ON dri.id = drid.issue_id\n WHERE dri.id = $1\n ", + "query": "\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_report_issues dri\n WHERE dri.id = $1\n ", "describe": { "columns": [ { @@ -18,5 +18,5 @@ null ] }, - "hash": "e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad" + "hash": "ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1" } diff --git a/apps/labrinth/Cargo.toml b/apps/labrinth/Cargo.toml index bb7147bfaa..6f5f88352d 100644 --- a/apps/labrinth/Cargo.toml +++ b/apps/labrinth/Cargo.toml @@ -66,14 +66,13 @@ image = { workspace = true, features = [ "tiff", "webp", ] } -indexmap = { workspace = true } itertools = { workspace = true } json-patch = { workspace = true } lettre = { 
workspace = true } meilisearch-sdk = { workspace = true, features = ["reqwest"] } modrinth-maxmind = { workspace = true } modrinth-util = { workspace = true } -muralpay = { workspace = true, features = ["utoipa", "mock"] } +muralpay = { workspace = true, features = ["mock", "utoipa"] } murmur2 = { workspace = true } paste = { workspace = true } path-util = { workspace = true } From ed50f34af9ebe3fa2830a9c74b37176484ff0f31 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Tue, 25 Nov 2025 12:17:50 +0000 Subject: [PATCH 037/104] fix ci --- Cargo.lock | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 68e577e318..4fb3b0b077 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4535,7 +4535,6 @@ dependencies = [ "hyper-util", "iana-time-zone", "image", - "indexmap 2.11.4", "itertools 0.14.0", "jemalloc_pprof", "json-patch 4.1.0", From 17e3c12dd6506d556eee3c09b82e3ed6cf318b38 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Wed, 26 Nov 2025 14:10:58 +0000 Subject: [PATCH 038/104] fix postgres version mismatch --- ...bde760fc9d827acd0a9bc203ce8aa0ad2157c.json | 22 ---------- ...79de6c2e84f442b481f42c2f9ab40b547ac7f.json | 37 ++++++++++++++++ ...b4561283c2c0a8491fc41ebbd7dd42584d443.json | 37 ---------------- ...52530ffe2a76b7bfbbd2053e87455be8772a1.json | 22 ++++++++++ ...66b6696052ac6d5ebe131b9e7242104f700af.json | 22 ++++++++++ ...ac8460088b70cd115c3d5ebf9474aa4d54fa.json} | 6 +-- ...aa07a9bedfa885b171e8141930225c6adaab1.json | 22 ---------- .../src/database/models/delphi_report_item.rs | 5 ++- .../routes/internal/moderation/tech_review.rs | 43 +++++++++++-------- docker-compose.yml | 3 +- 10 files changed, 116 insertions(+), 103 deletions(-) delete mode 100644 apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json create mode 100644 apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json delete mode 100644 
apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json create mode 100644 apps/labrinth/.sqlx/query-47b4d61218e7d38d63a7f96efe152530ffe2a76b7bfbbd2053e87455be8772a1.json create mode 100644 apps/labrinth/.sqlx/query-7d1f49699e242f3e002afee9bf466b6696052ac6d5ebe131b9e7242104f700af.json rename apps/labrinth/.sqlx/{query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json => query-d9b6a1b7fb133f7aec7599e06e21ac8460088b70cd115c3d5ebf9474aa4d54fa.json} (61%) delete mode 100644 apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json diff --git a/apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json b/apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json deleted file mode 100644 index 0fcaf0889e..0000000000 --- a/apps/labrinth/.sqlx/query-2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT DISTINCT ON (dr.id)\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'download_url', f.url,\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n WHERE dr.id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "data!: sqlx::types::Json", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": 
"2782a1158fe434819eda94e63b3bde760fc9d827acd0a9bc203ce8aa0ad2157c" -} diff --git a/apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json b/apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json new file mode 100644 index 0000000000..52796b1e9a --- /dev/null +++ b/apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n project_id AS \"project_id: DBProjectId\",\n project_thread_id AS \"project_thread_id: DBThreadId\",\n report AS \"report!: sqlx::types::Json\"\n FROM (\n SELECT DISTINCT ON (dr.id)\n dr.id AS report_id,\n dr.created AS report_created,\n dr.severity AS report_severity,\n m.id AS project_id,\n t.id AS project_thread_id,\n\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'download_url', f.url,\n -- TODO: replace with `json_array` in Postgres 16\n 'issues', (\n SELECT json_agg(\n to_jsonb(dri)\n || jsonb_build_object(\n -- TODO: replace with `json_array` in Postgres 16\n 'details', (\n SELECT json_agg(\n jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS report\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n 
-- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n ) t\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "project_id: DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_thread_id: DBThreadId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "report!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + null + ] + }, + "hash": "365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f" +} diff --git a/apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json b/apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json deleted file mode 100644 index 7740063567..0000000000 --- a/apps/labrinth/.sqlx/query-39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n project_id AS \"project_id: DBProjectId\",\n project_thread_id AS \"project_thread_id: DBThreadId\",\n report AS \"report!: sqlx::types::Json\"\n FROM (\n SELECT DISTINCT ON (dr.id)\n dr.id AS report_id,\n dr.created AS report_created,\n dr.severity AS report_severity,\n m.id AS project_id,\n t.id AS project_thread_id,\n\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 
'flag_reason', 'delphi',\n 'download_url', f.url,\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS report\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n ) t\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "project_id: DBProjectId", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "project_thread_id: DBThreadId", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "report!: sqlx::types::Json", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int4Array", - "Text", - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - null - ] - }, - "hash": "39d32bb5a6c60600d4c9536bf9db4561283c2c0a8491fc41ebbd7dd42584d443" -} diff --git a/apps/labrinth/.sqlx/query-47b4d61218e7d38d63a7f96efe152530ffe2a76b7bfbbd2053e87455be8772a1.json 
b/apps/labrinth/.sqlx/query-47b4d61218e7d38d63a7f96efe152530ffe2a76b7bfbbd2053e87455be8772a1.json new file mode 100644 index 0000000000..4bbdebb887 --- /dev/null +++ b/apps/labrinth/.sqlx/query-47b4d61218e7d38d63a7f96efe152530ffe2a76b7bfbbd2053e87455be8772a1.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT DISTINCT ON (dr.id)\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'download_url', f.url,\n -- TODO: replace with `json_array` in Postgres 16\n 'issues', (\n SELECT json_agg(\n to_jsonb(dri)\n || jsonb_build_object(\n -- TODO: replace with `json_array` in Postgres 16\n 'details', (\n SELECT json_agg(to_jsonb(drid))\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n WHERE dr.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "47b4d61218e7d38d63a7f96efe152530ffe2a76b7bfbbd2053e87455be8772a1" +} diff --git a/apps/labrinth/.sqlx/query-7d1f49699e242f3e002afee9bf466b6696052ac6d5ebe131b9e7242104f700af.json b/apps/labrinth/.sqlx/query-7d1f49699e242f3e002afee9bf466b6696052ac6d5ebe131b9e7242104f700af.json new file mode 100644 index 0000000000..bfbd66d754 --- /dev/null +++ b/apps/labrinth/.sqlx/query-7d1f49699e242f3e002afee9bf466b6696052ac6d5ebe131b9e7242104f700af.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n -- TODO: replace with `json_array` in Postgres 16\n 'details', (\n SELECT 
json_agg(to_jsonb(drid))\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_report_issues dri\n WHERE dri.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "7d1f49699e242f3e002afee9bf466b6696052ac6d5ebe131b9e7242104f700af" +} diff --git a/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json b/apps/labrinth/.sqlx/query-d9b6a1b7fb133f7aec7599e06e21ac8460088b70cd115c3d5ebf9474aa4d54fa.json similarity index 61% rename from apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json rename to apps/labrinth/.sqlx/query-d9b6a1b7fb133f7aec7599e06e21ac8460088b70cd115c3d5ebf9474aa4d54fa.json index 8c1c8e1f3a..c7719601d7 100644 --- a/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json +++ b/apps/labrinth/.sqlx/query-d9b6a1b7fb133f7aec7599e06e21ac8460088b70cd115c3d5ebf9474aa4d54fa.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_details)\n FROM delphi_report_issue_details\n WHERE issue_id = delphi_report_issues.id\n ) AS \"details: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND 
(delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END DESC\n OFFSET $5\n LIMIT $4\n ", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiSeverity\",\n\n -- TODO: replace with `json_array` in Postgres 16\n (\n SELECT json_agg(to_jsonb(delphi_report_issue_details))\n FROM delphi_report_issue_details\n WHERE issue_id = delphi_report_issues.id\n ) AS \"details: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity 
END DESC\n OFFSET $5\n LIMIT $4\n ", "describe": { "columns": [ { @@ -74,7 +74,7 @@ { "ordinal": 9, "name": "details: sqlx::types::Json>", - "type_info": "Jsonb" + "type_info": "Json" }, { "ordinal": 10, @@ -122,5 +122,5 @@ true ] }, - "hash": "749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710" + "hash": "d9b6a1b7fb133f7aec7599e06e21ac8460088b70cd115c3d5ebf9474aa4d54fa" } diff --git a/apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json b/apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json deleted file mode 100644 index 46f684c829..0000000000 --- a/apps/labrinth/.sqlx/query-ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_report_issues dri\n WHERE dri.id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "data!: sqlx::types::Json", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": "ff526151c628646e735a2241c20aa07a9bedfa885b171e8141930225c6adaab1" -} diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs index 8bcaf4d938..0234dff190 100644 --- a/apps/labrinth/src/database/models/delphi_report_item.rs +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -191,7 +191,10 @@ impl DBDelphiReportIssue { delphi_report_issues.status AS "status: DelphiReportIssueStatus", file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiSeverity", - json_array(SELECT to_jsonb(delphi_report_issue_details) + + -- TODO: replace with `json_array` in Postgres 16 + ( + SELECT 
json_agg(to_jsonb(delphi_report_issue_details)) FROM delphi_report_issue_details WHERE issue_id = delphi_report_issues.id ) AS "details: sqlx::types::Json>", diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index eb2d82a854..1615222c8c 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -187,8 +187,9 @@ async fn get_issue( SELECT to_jsonb(dri) || jsonb_build_object( - 'details', json_array( - SELECT to_jsonb(drid) + -- TODO: replace with `json_array` in Postgres 16 + 'details', ( + SELECT json_agg(to_jsonb(drid)) FROM delphi_report_issue_details drid WHERE drid.issue_id = dri.id ) @@ -242,16 +243,19 @@ async fn get_report( 'file_size', f.size, 'flag_reason', 'delphi', 'download_url', f.url, - 'issues', json_array( - SELECT + -- TODO: replace with `json_array` in Postgres 16 + 'issues', ( + SELECT json_agg( to_jsonb(dri) || jsonb_build_object( - 'details', json_array( - SELECT to_jsonb(drid) + -- TODO: replace with `json_array` in Postgres 16 + 'details', ( + SELECT json_agg(to_jsonb(drid)) FROM delphi_report_issue_details drid WHERE drid.issue_id = dri.id ) ) + ) FROM delphi_report_issues dri WHERE dri.report_id = dr.id ) @@ -356,24 +360,29 @@ async fn search_projects( 'file_size', f.size, 'flag_reason', 'delphi', 'download_url', f.url, - 'issues', json_array( - SELECT + -- TODO: replace with `json_array` in Postgres 16 + 'issues', ( + SELECT json_agg( to_jsonb(dri) || jsonb_build_object( - 'details', json_array( - SELECT jsonb_build_object( - 'id', drid.id, - 'issue_id', drid.issue_id, - 'key', drid.key, - 'file_path', drid.file_path, - -- ignore `decompiled_source` - 'data', drid.data, - 'severity', drid.severity + -- TODO: replace with `json_array` in Postgres 16 + 'details', ( + SELECT json_agg( + jsonb_build_object( + 'id', drid.id, + 'issue_id', drid.issue_id, + 'key', drid.key, + 
'file_path', drid.file_path, + -- ignore `decompiled_source` + 'data', drid.data, + 'severity', drid.severity + ) ) FROM delphi_report_issue_details drid WHERE drid.issue_id = dri.id ) ) + ) FROM delphi_report_issues dri WHERE dri.report_id = dr.id ) diff --git a/docker-compose.yml b/docker-compose.yml index 777eeef4d1..e75960bef7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,8 @@ name: labrinth services: postgres_db: - image: postgres:alpine + # staging/prod Labrinth are currently using this version of Postgres + image: postgres:15-alpine container_name: labrinth-postgres volumes: - db-data:/var/lib/postgresql/data From 6457674907b8b8fda4cefec2535252ac1890e4a5 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Wed, 26 Nov 2025 21:26:41 +0000 Subject: [PATCH 039/104] fix version creation --- apps/labrinth/src/routes/v3/version_creation.rs | 8 +++++--- docker-compose.yml | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/apps/labrinth/src/routes/v3/version_creation.rs b/apps/labrinth/src/routes/v3/version_creation.rs index 03ebb81283..d3726df36c 100644 --- a/apps/labrinth/src/routes/v3/version_creation.rs +++ b/apps/labrinth/src/routes/v3/version_creation.rs @@ -935,17 +935,19 @@ pub async fn upload_file( || force_primary || total_files_len == 1; - let file_path = format!( + let file_path_encode = format!( "data/{project_id}/versions/{version_id}/{}", urlencoding::encode(file_name) ); + let file_path = + format!("data/{project_id}/versions/{version_id}/{file_name}"); let upload_data = file_host .upload_file(content_type, &file_path, FileHostPublicity::Public, data) .await?; uploaded_files.push(UploadedFile { - name: file_path.clone(), + name: file_path, publicity: FileHostPublicity::Public, }); @@ -971,7 +973,7 @@ pub async fn upload_file( version_files.push(VersionFileBuilder { filename: file_name.to_string(), - url: format!("{}/{file_path}", dotenvy::var("CDN_URL")?), + url: format!("{}/{file_path_encode}", 
dotenvy::var("CDN_URL")?), hashes: vec![ models::version_item::HashBuilder { algorithm: "sha1".to_string(), diff --git a/docker-compose.yml b/docker-compose.yml index e75960bef7..dd1890a4cc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -121,7 +121,7 @@ services: image: ghcr.io/modrinth/delphi:feature-schema-rework container_name: labrinth-delphi ports: - - '59999:59999' + - '127.0.0.1:59999:59999' environment: LABRINTH_ENDPOINT: http://host.docker.internal:8000/_internal/delphi/ingest LABRINTH_ADMIN_KEY: feedbeef From 8dd600a4d33b270bb5340efc71d22df2f297ecb0 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sun, 30 Nov 2025 17:56:34 +0000 Subject: [PATCH 040/104] Implement routes --- ...c71674941487c15be1e8ce0ebc78e7c26b34d.json | 15 ++ ...3aaeb30a8da4721959fdee99cf649a8b29e3.json} | 22 ++- ...9698a350098ea5396ce3c327064bb9d7eeb01.json | 26 ++++ ...d07b7d42c37e089403961ee16be0f99958ea0.json | 15 ++ .../20251130173416_delphi_report_verdicts.sql | 2 + .../routes/internal/moderation/tech_review.rs | 141 ++++++++++++++++-- 6 files changed, 201 insertions(+), 20 deletions(-) create mode 100644 apps/labrinth/.sqlx/query-3240e4b5abc9850b5d3c09fafcac71674941487c15be1e8ce0ebc78e7c26b34d.json rename apps/labrinth/.sqlx/{query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json => query-3961aa17ce3219c057c398dca0ed3aaeb30a8da4721959fdee99cf649a8b29e3.json} (77%) create mode 100644 apps/labrinth/.sqlx/query-67f52d745c4b53a9ca70d37aa8c9698a350098ea5396ce3c327064bb9d7eeb01.json create mode 100644 apps/labrinth/.sqlx/query-6cf1862b3c197d42f9183dcbbd3d07b7d42c37e089403961ee16be0f99958ea0.json create mode 100644 apps/labrinth/migrations/20251130173416_delphi_report_verdicts.sql diff --git a/apps/labrinth/.sqlx/query-3240e4b5abc9850b5d3c09fafcac71674941487c15be1e8ce0ebc78e7c26b34d.json b/apps/labrinth/.sqlx/query-3240e4b5abc9850b5d3c09fafcac71674941487c15be1e8ce0ebc78e7c26b34d.json new file mode 100644 index 0000000000..736375ee54 --- /dev/null +++ 
b/apps/labrinth/.sqlx/query-3240e4b5abc9850b5d3c09fafcac71674941487c15be1e8ce0ebc78e7c26b34d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE mods\n SET status = $1\n FROM delphi_report_issues dri\n INNER JOIN delphi_reports dr ON dr.id = dri.report_id\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON v.mod_id = m.id\n WHERE dri.id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "3240e4b5abc9850b5d3c09fafcac71674941487c15be1e8ce0ebc78e7c26b34d" +} diff --git a/apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json b/apps/labrinth/.sqlx/query-3961aa17ce3219c057c398dca0ed3aaeb30a8da4721959fdee99cf649a8b29e3.json similarity index 77% rename from apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json rename to apps/labrinth/.sqlx/query-3961aa17ce3219c057c398dca0ed3aaeb30a8da4721959fdee99cf649a8b29e3.json index 52796b1e9a..134a39e34d 100644 --- a/apps/labrinth/.sqlx/query-365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f.json +++ b/apps/labrinth/.sqlx/query-3961aa17ce3219c057c398dca0ed3aaeb30a8da4721959fdee99cf649a8b29e3.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n project_id AS \"project_id: DBProjectId\",\n project_thread_id AS \"project_thread_id: DBThreadId\",\n report AS \"report!: sqlx::types::Json\"\n FROM (\n SELECT DISTINCT ON (dr.id)\n dr.id AS report_id,\n dr.created AS report_created,\n dr.severity AS report_severity,\n m.id AS project_id,\n t.id AS project_thread_id,\n\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'download_url', f.url,\n -- TODO: replace with `json_array` in Postgres 16\n 
'issues', (\n SELECT json_agg(\n to_jsonb(dri)\n || jsonb_build_object(\n -- TODO: replace with `json_array` in Postgres 16\n 'details', (\n SELECT json_agg(\n jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS report\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[]))\n ) t\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "query": "\n SELECT\n project_id AS \"project_id: DBProjectId\",\n project_thread_id AS \"project_thread_id: DBThreadId\",\n report AS \"report!: sqlx::types::Json\"\n FROM (\n SELECT DISTINCT ON (dr.id)\n dr.id AS report_id,\n dr.created AS report_created,\n dr.severity AS report_severity,\n m.id AS project_id,\n t.id AS project_thread_id,\n\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', to_base62(f.id),\n 'version_id', to_base62(v.id),\n 'project_id', to_base62(v.mod_id),\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'download_url', f.url,\n -- TODO: replace with 
`json_array` in Postgres 16\n 'issues', (\n SELECT json_agg(\n to_jsonb(dri)\n || jsonb_build_object(\n -- TODO: replace with `json_array` in Postgres 16\n 'details', (\n SELECT json_agg(\n jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS report\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($4::int[]) = 0 OR c.project_type = ANY($4::int[]))\n AND dr.status = $5\n ) t\n\n -- sorting\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $1\n OFFSET $2\n ", "describe": { "columns": [ { @@ -21,10 +21,22 @@ ], "parameters": { "Left": [ - "Int4Array", - "Text", "Int8", - "Int8" + "Int8", + "Text", + "Int4Array", + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + } ] }, "nullable": [ @@ -33,5 +45,5 @@ null ] }, - "hash": "365407c545cc4a55236f56e65a579de6c2e84f442b481f42c2f9ab40b547ac7f" + "hash": "3961aa17ce3219c057c398dca0ed3aaeb30a8da4721959fdee99cf649a8b29e3" } diff --git 
a/apps/labrinth/.sqlx/query-67f52d745c4b53a9ca70d37aa8c9698a350098ea5396ce3c327064bb9d7eeb01.json b/apps/labrinth/.sqlx/query-67f52d745c4b53a9ca70d37aa8c9698a350098ea5396ce3c327064bb9d7eeb01.json new file mode 100644 index 0000000000..419d82de6c --- /dev/null +++ b/apps/labrinth/.sqlx/query-67f52d745c4b53a9ca70d37aa8c9698a350098ea5396ce3c327064bb9d7eeb01.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE delphi_reports dr\n SET status = $1\n WHERE dr.id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + }, + "Int8" + ] + }, + "nullable": [] + }, + "hash": "67f52d745c4b53a9ca70d37aa8c9698a350098ea5396ce3c327064bb9d7eeb01" +} diff --git a/apps/labrinth/.sqlx/query-6cf1862b3c197d42f9183dcbbd3d07b7d42c37e089403961ee16be0f99958ea0.json b/apps/labrinth/.sqlx/query-6cf1862b3c197d42f9183dcbbd3d07b7d42c37e089403961ee16be0f99958ea0.json new file mode 100644 index 0000000000..f2f509efc7 --- /dev/null +++ b/apps/labrinth/.sqlx/query-6cf1862b3c197d42f9183dcbbd3d07b7d42c37e089403961ee16be0f99958ea0.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE mods\n SET status = $1\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON v.mod_id = m.id\n WHERE dr.id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "6cf1862b3c197d42f9183dcbbd3d07b7d42c37e089403961ee16be0f99958ea0" +} diff --git a/apps/labrinth/migrations/20251130173416_delphi_report_verdicts.sql b/apps/labrinth/migrations/20251130173416_delphi_report_verdicts.sql new file mode 100644 index 0000000000..daa66ce975 --- /dev/null +++ b/apps/labrinth/migrations/20251130173416_delphi_report_verdicts.sql @@ -0,0 +1,2 @@ +ALTER TABLE delphi_reports +ADD COLUMN status 
delphi_report_issue_status NOT NULL DEFAULT 'pending'; diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs index 1615222c8c..0ca374c567 100644 --- a/apps/labrinth/src/routes/internal/moderation/tech_review.rs +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -2,6 +2,7 @@ use std::{collections::HashMap, fmt}; use actix_web::{HttpRequest, get, post, web}; use chrono::{DateTime, Utc}; +use eyre::eyre; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use tokio_stream::StreamExt; @@ -23,7 +24,7 @@ use crate::{ models::{ ids::{FileId, ProjectId, ThreadId, VersionId}, pats::Scopes, - projects::Project, + projects::{Project, ProjectStatus}, threads::Thread, }, queue::session::AuthQueue, @@ -35,6 +36,7 @@ pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { cfg.service(search_projects) .service(get_report) .service(get_issue) + .service(update_report) .service(update_issue); } @@ -398,29 +400,31 @@ async fn search_projects( LEFT JOIN categories c ON c.id = mc.joining_category_id WHERE -- project type - (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[])) + (cardinality($4::int[]) = 0 OR c.project_type = ANY($4::int[])) + AND dr.status = $5 ) t -- sorting ORDER BY - CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC, - CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC, - CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC, - CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC + CASE WHEN $3 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC, + CASE WHEN $3 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC, + CASE WHEN $3 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC, + CASE WHEN $3 = 'severity_desc' THEN t.report_severity ELSE 
'low'::delphi_severity END DESC -- pagination - LIMIT $3 - OFFSET $4 + LIMIT $1 + OFFSET $2 "#, + limit, + offset, + &sort_by, &search_req .filter .project_type .iter() .map(|ty| ty.0) .collect::>(), - &sort_by, - limit, - offset, + DelphiReportIssueStatus::Pending as _, ) .fetch(&**pool); @@ -498,13 +502,86 @@ async fn search_projects( })) } -/// See [`update_issue`]. +/// See [`update_report`] and [`update_issue`]. #[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] -pub struct UpdateIssue { +pub struct UpdateStatus { /// Status to set the issue to. pub status: DelphiReportIssueStatus, } +/// Updates the state of a project based on a technical review report. +#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = NO_CONTENT)) +)] +#[post("/report/{id}")] +async fn update_report( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + update_req: web::Json, + path: web::Path<(DelphiReportId,)>, +) -> Result<(), ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_WRITE, + ) + .await?; + let (report_id,) = path.into_inner(); + + let mut txn = pool + .begin() + .await + .wrap_internal_err("failed to begin transaction")?; + + sqlx::query!( + " + UPDATE delphi_reports dr + SET status = $1 + WHERE dr.id = $2 + ", + update_req.status as _, + report_id as DelphiReportId, + ) + .execute(&mut *txn) + .await + .wrap_internal_err("failed to update report")?; + + if update_req.status == DelphiReportIssueStatus::Unsafe { + let result = sqlx::query!( + " + UPDATE mods + SET status = $1 + FROM delphi_reports dr + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + INNER JOIN mods m ON v.mod_id = m.id + WHERE dr.id = $2 + ", + ProjectStatus::Rejected.as_str(), + report_id as DelphiReportId, + ) + .execute(&mut *txn) + .await + .wrap_internal_err("failed to mark project as rejected")?; + if result.rows_affected() == 0 { + 
return Err(ApiError::Internal(eyre!( + "no project was marked as rejected" + ))); + } + } + + txn.commit() + .await + .wrap_internal_err("failed to commit transaction")?; + + Ok(()) +} + /// Updates the state of a technical review issue. #[utoipa::path( security(("bearer_auth" = [])), @@ -516,7 +593,7 @@ async fn update_issue( pool: web::Data, redis: web::Data, session_queue: web::Data, - update_req: web::Json, + update_req: web::Json, path: web::Path<(DelphiReportIssueId,)>, ) -> Result<(), ApiError> { check_is_moderator_from_headers( @@ -529,6 +606,11 @@ async fn update_issue( .await?; let (issue_id,) = path.into_inner(); + let mut txn = pool + .begin() + .await + .wrap_internal_err("failed to start transaction")?; + sqlx::query!( " UPDATE delphi_report_issues @@ -538,9 +620,38 @@ async fn update_issue( update_req.status as DelphiReportIssueStatus, issue_id as DelphiReportIssueId, ) - .execute(&**pool) + .execute(&mut *txn) .await .wrap_internal_err("failed to update issue")?; + if update_req.status == DelphiReportIssueStatus::Unsafe { + let result = sqlx::query!( + " + UPDATE mods + SET status = $1 + FROM delphi_report_issues dri + INNER JOIN delphi_reports dr ON dr.id = dri.report_id + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + INNER JOIN mods m ON v.mod_id = m.id + WHERE dri.id = $2 + ", + ProjectStatus::Rejected.as_str(), + issue_id as DelphiReportIssueId, + ) + .execute(&mut *txn) + .await + .wrap_internal_err("failed to mark project as rejected")?; + if result.rows_affected() == 0 { + return Err(ApiError::Internal(eyre!( + "no project was marked as rejected" + ))); + } + } + + txn.commit() + .await + .wrap_internal_err("failed to commit transaction")?; + Ok(()) } From 3a955d7ec5354e2add62eaf7df97df5b996eeacc Mon Sep 17 00:00:00 2001 From: IMB11 Date: Sat, 13 Sep 2025 18:34:11 +0100 Subject: [PATCH 041/104] feat: batch scan alert --- .../delphi/BatchScanProgressAlert.vue | 39 ++ 
.../moderation/technical-review-mockup.vue | 387 ------------------ .../src/pages/moderation/technical-review.vue | 19 +- 3 files changed, 57 insertions(+), 388 deletions(-) create mode 100644 apps/frontend/src/components/ui/moderation/delphi/BatchScanProgressAlert.vue delete mode 100644 apps/frontend/src/pages/moderation/technical-review-mockup.vue diff --git a/apps/frontend/src/components/ui/moderation/delphi/BatchScanProgressAlert.vue b/apps/frontend/src/components/ui/moderation/delphi/BatchScanProgressAlert.vue new file mode 100644 index 0000000000..42c8b80de0 --- /dev/null +++ b/apps/frontend/src/components/ui/moderation/delphi/BatchScanProgressAlert.vue @@ -0,0 +1,39 @@ + + + diff --git a/apps/frontend/src/pages/moderation/technical-review-mockup.vue b/apps/frontend/src/pages/moderation/technical-review-mockup.vue deleted file mode 100644 index 95e4c1fbb3..0000000000 --- a/apps/frontend/src/pages/moderation/technical-review-mockup.vue +++ /dev/null @@ -1,387 +0,0 @@ - - - diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index 3a5ae57552..27360f2437 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -1,3 +1,20 @@ + + From 557b1f8621ca268dc0eec0128100d5aba6f7f8fd Mon Sep 17 00:00:00 2001 From: IMB11 Date: Tue, 16 Sep 2025 18:55:00 +0100 Subject: [PATCH 042/104] feat: layout --- .../frontend/src/helpers/tech-review.dummy.ts | 105 +++++++ apps/frontend/src/helpers/tech-review.ts | 47 +++ .../src/pages/moderation/technical-review.vue | 279 +++++++++++++++++- 3 files changed, 427 insertions(+), 4 deletions(-) create mode 100644 apps/frontend/src/helpers/tech-review.dummy.ts create mode 100644 apps/frontend/src/helpers/tech-review.ts diff --git a/apps/frontend/src/helpers/tech-review.dummy.ts b/apps/frontend/src/helpers/tech-review.dummy.ts new file mode 100644 index 0000000000..852e1f719b --- /dev/null +++ 
b/apps/frontend/src/helpers/tech-review.dummy.ts @@ -0,0 +1,105 @@ +// Dummy data for the technical review queue, used when backend is unavailable + +export type DelphiReportSeverity = 'LOW' | 'MEDIUM' | 'HIGH' | 'SEVERE' +export type DelphiReportIssueStatus = 'pending' | 'approved' | 'rejected' + +export interface DelphiIssueJavaClass { + id: number + issue_id: number + internal_class_name: string + decompiled_source?: string | null +} + +export interface DelphiReportSummary { + id: number + file_id?: number | null + delphi_version: number + artifact_url: string + created: string // ISO date + severity: DelphiReportSeverity +} + +export interface DelphiIssueSummary { + id: number + report_id: number + issue_type: string + status: DelphiReportIssueStatus +} + +export interface DelphiIssueResult { + issue: DelphiIssueSummary + report: DelphiReportSummary + java_classes: DelphiIssueJavaClass[] + project_id?: number | null + project_published?: string | null +} + +export const DUMMY_ISSUE_TYPES: string[] = [ + 'reflection_indirection', + 'xor_obfuscation', + 'included_libraries', + 'suspicious_binaries', + 'corrupt_classes', + 'suspicious_classes', + 'url_usage', + 'classloader_usage', + 'processbuilder_usage', + 'runtime_exec_usage', + 'jni_usage', + 'main_method', + 'native_loading', + 'malformed_jar', + 'nested_jar_too_deep', + 'failed_decompilation', + 'analysis_failure', + 'malware_easyforme', + 'malware_simplyloader', +] + +export const DUMMY_ISSUES: DelphiIssueResult[] = [ + { + issue: { + id: 1001, + report_id: 501, + issue_type: 'suspicious_classes', + status: 'pending', + }, + report: { + id: 501, + file_id: 90001, + delphi_version: 47, + artifact_url: 'https://cdn.modrinth.com/data/abc/versions/1.0.0.jar', + created: new Date(Date.now() - 3 * 24 * 3600 * 1000).toISOString(), + severity: 'SEVERE', + }, + java_classes: [ + { + id: 7001, + issue_id: 1001, + internal_class_name: 'com/example/Suspect', + decompiled_source: 'public class Suspect { /* ... 
*/ }', + }, + ], + project_id: 123456, + project_published: new Date(Date.now() - 30 * 24 * 3600 * 1000).toISOString(), + }, + { + issue: { + id: 1002, + report_id: 502, + issue_type: 'url_usage', + status: 'pending', + }, + report: { + id: 502, + file_id: 90002, + delphi_version: 47, + artifact_url: 'https://cdn.modrinth.com/data/def/versions/2.3.4.jar', + created: new Date(Date.now() - 1 * 24 * 3600 * 1000).toISOString(), + severity: 'HIGH', + }, + java_classes: [], + project_id: 789012, + project_published: new Date(Date.now() - 45 * 24 * 3600 * 1000).toISOString(), + }, +] diff --git a/apps/frontend/src/helpers/tech-review.ts b/apps/frontend/src/helpers/tech-review.ts new file mode 100644 index 0000000000..5fe651f322 --- /dev/null +++ b/apps/frontend/src/helpers/tech-review.ts @@ -0,0 +1,47 @@ +import { DUMMY_ISSUE_TYPES, DUMMY_ISSUES, type DelphiIssueResult } from './tech-review.dummy' + +// TODO: @modrinth/api-client package + +export type OrderBy = + | 'created_asc' + | 'created_desc' + | 'pending_status_first' + | 'severity_asc' + | 'severity_desc' + +export interface FetchIssuesParams { + type?: string | null + status?: 'pending' | 'approved' | 'rejected' | null + order_by?: OrderBy | null + count?: number + offset?: number +} + +export async function fetchIssueTypeSchema(): Promise { + try { + const schema = await useBaseFetch('internal/delphi/issue_type/schema', { internal: true }) + // Expecting a JSON object map of type -> metadata; return its keys + if (schema && typeof schema === 'object') { + return Object.keys(schema as Record) + } + return DUMMY_ISSUE_TYPES + } catch { + return DUMMY_ISSUE_TYPES + } +} + +export async function fetchDelphiIssues(params: FetchIssuesParams): Promise { + const query = new URLSearchParams() + if (params.type) query.set('type', params.type) + if (params.status) query.set('status', params.status) + if (params.order_by) query.set('order_by', params.order_by) + if (params.count != null) query.set('count', 
String(params.count)) + if (params.offset != null) query.set('offset', String(params.offset)) + + try { + const res = await useBaseFetch(`internal/delphi/issues?${query.toString()}`, { internal: true }) + return (res as any[]) || [] + } catch { + return DUMMY_ISSUES + } +} diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index 27360f2437..cb3b4f4574 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -2,6 +2,200 @@ import BatchScanProgressAlert, { type BatchScanProgress, } from '@/components/ui/moderation/delphi/BatchScanProgressAlert.vue' +import { FilterIcon, SearchIcon, SortAscIcon, SortDescIcon, XIcon } from '@modrinth/assets' +import { Button, DropdownSelect, Pagination } from '@modrinth/ui' +import { defineMessages, useVIntl } from '@vintl/vintl' +import Fuse from 'fuse.js' +import { fetchDelphiIssues, fetchIssueTypeSchema, type OrderBy } from '~/helpers/tech-review' + +// Data from backend helper (with dummy fallback) +type TechReviewItem = Awaited>[number] +const reviewItems = ref([]) + +// Basic pagination state (mirrors moderation pages) +const currentPage = ref(1) +const itemsPerPage = 15 +// Search/filter/sort UI state +const { formatMessage } = useVIntl() +const route = useRoute() +const router = useRouter() + +const messages = defineMessages({ + searchPlaceholder: { + id: 'moderation.search.placeholder', + defaultMessage: 'Search...', + }, + filterBy: { + id: 'moderation.filter.by', + defaultMessage: 'Filter by', + }, + sortBy: { + id: 'moderation.sort.by', + defaultMessage: 'Sort by', + }, +}) + +const query = ref(route.query.q?.toString() || '') + +watch( + query, + (newQuery) => { + const currentQuery = { ...route.query } + if (newQuery) { + currentQuery.q = newQuery + } else { + delete currentQuery.q + } + + router.replace({ + path: route.path, + query: currentQuery, + }) + goToPage(1) + }, 
+ { immediate: false }, +) + +watch( + () => route.query.q, + (newQueryParam) => { + const newValue = newQueryParam?.toString() || '' + if (query.value !== newValue) { + query.value = newValue + } + }, +) + +const currentFilterType = ref('All issues') +const rawIssueTypes = ref(null) +const filterTypes = computed(() => { + const base: string[] = ['All issues'] + if (rawIssueTypes.value && rawIssueTypes.value.length) base.push(...rawIssueTypes.value) + return base +}) + +const currentSortType = ref('Oldest') +const sortTypes: readonly string[] = readonly([ + 'Oldest', + 'Newest', + 'Pending first', + 'Severity ↑', + 'Severity ↓', +]) + +const fuse = computed(() => { + if (!reviewItems.value || reviewItems.value.length === 0) return null + return new Fuse(reviewItems.value, { + keys: [ + { name: 'issue.issue_type', weight: 3 }, + { name: 'report.artifact_url', weight: 2 }, + { name: 'java_classes.internal_class_name', weight: 2 }, + ], + includeScore: true, + threshold: 0.4, + }) +}) + +const searchResults = computed(() => { + if (!query.value || !fuse.value) return null + return fuse.value.search(query.value).map((result) => result.item as TechReviewItem) +}) + +const baseFiltered = computed(() => { + if (!reviewItems.value) return [] + return query.value && searchResults.value ? 
searchResults.value : [...reviewItems.value] +}) + +const typeFiltered = computed(() => { + if (currentFilterType.value === 'All issues') return baseFiltered.value + const type = currentFilterType.value + return baseFiltered.value.filter((it) => it.issue.issue_type === type) +}) + +const filteredItems = computed(() => { + const filtered = [...typeFiltered.value] + + switch (currentSortType.value) { + case 'Oldest': + filtered.sort( + (a, b) => new Date(a.report.created).getTime() - new Date(b.report.created).getTime(), + ) + break + case 'Newest': + filtered.sort( + (a, b) => new Date(b.report.created).getTime() - new Date(a.report.created).getTime(), + ) + break + case 'Pending first': { + const p = (s: string) => (s === 'pending' ? 0 : 1) + filtered.sort((a, b) => p(a.issue.status) - p(b.issue.status)) + break + } + case 'Severity ↑': { + const order = { LOW: 0, MEDIUM: 1, HIGH: 2, SEVERE: 3 } as Record + filtered.sort((a, b) => (order[a.report.severity] ?? 0) - (order[b.report.severity] ?? 0)) + break + } + case 'Severity ↓': { + const order = { LOW: 0, MEDIUM: 1, HIGH: 2, SEVERE: 3 } as Record + filtered.sort((a, b) => (order[b.report.severity] ?? 0) - (order[a.report.severity] ?? 
0)) + break + } + } + + return filtered +}) + +const totalPages = computed(() => Math.ceil((filteredItems.value?.length || 0) / itemsPerPage)) +const paginatedItems = computed(() => { + if (!filteredItems.value) return [] + const start = (currentPage.value - 1) * itemsPerPage + const end = start + itemsPerPage + return filteredItems.value.slice(start, end) +}) +function goToPage(page: number) { + currentPage.value = page +} + +// Map sort label to backend order_by param +function toOrderBy(label: string): OrderBy | null { + switch (label) { + case 'Oldest': + return 'created_asc' + case 'Newest': + return 'created_desc' + case 'Pending first': + return 'pending_status_first' + case 'Severity ↑': + return 'severity_asc' + case 'Severity ↓': + return 'severity_desc' + default: + return null + } +} + +// Initial fetch and reactive refetch on filter/sort changes +onMounted(async () => { + rawIssueTypes.value = await fetchIssueTypeSchema() + const order_by = toOrderBy(currentSortType.value) + reviewItems.value = await fetchDelphiIssues({ count: 350, offset: 0, order_by }) +}) + +watch(currentFilterType, async (val) => { + const type = val === 'All issues' ? null : val + const order_by = toOrderBy(currentSortType.value) + reviewItems.value = await fetchDelphiIssues({ type, count: 350, offset: 0, order_by }) + goToPage(1) +}) + +watch(currentSortType, async (val) => { + const type = currentFilterType.value === 'All issues' ? null : currentFilterType.value + const order_by = toOrderBy(val) + // If you prefer server-side sorting only, keep this; otherwise client-side above already reorders + reviewItems.value = await fetchDelphiIssues({ type, count: 350, offset: 0, order_by }) + goToPage(1) +}) // TODO: Live way to update this via the backend, polling? 
const batchScanProgressInformation = computed(() => { @@ -13,8 +207,85 @@ const batchScanProgressInformation = computed(() From d39feda3485b13c4032d676b85a3ec26cc066617 Mon Sep 17 00:00:00 2001 From: IMB11 Date: Thu, 18 Sep 2025 12:56:09 +0100 Subject: [PATCH 043/104] feat: introduce surface variables --- .../ui/moderation/ModerationReportCard.vue | 241 ++++++++---------- .../src/components/ui/thread/ReportThread.vue | 71 ++++-- packages/assets/styles/variables.scss | 6 + 3 files changed, 168 insertions(+), 150 deletions(-) diff --git a/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue b/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue index 895099a780..b813f87385 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue @@ -1,126 +1,123 @@ - - diff --git a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue new file mode 100644 index 0000000000..38601e69c4 --- /dev/null +++ b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue @@ -0,0 +1,163 @@ + + + diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index cb3b4f4574..7dea729b4e 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -1,11 +1,12 @@ @@ -44,7 +33,7 @@ const createdAt = computed(() => new Date(props.item.report.created).toLocaleDat
-
+
new Date(props.item.report.created).toLocaleDat
-
+
- -
- +
-
-
-

TBD

-
-
- -
+

TBD

diff --git a/apps/frontend/src/helpers/tech-review.ts b/apps/frontend/src/helpers/tech-review.ts index 5fe651f322..7a6e58fc6c 100644 --- a/apps/frontend/src/helpers/tech-review.ts +++ b/apps/frontend/src/helpers/tech-review.ts @@ -38,10 +38,12 @@ export async function fetchDelphiIssues(params: FetchIssuesParams): Promise Date: Sun, 21 Sep 2025 14:29:57 +0100 Subject: [PATCH 047/104] feat: batch scan btn --- .../src/pages/moderation/technical-review.vue | 31 +++++++++---------- packages/assets/generated-icons.ts | 2 ++ packages/assets/icons/shield-alert.svg | 18 +++++++++++ 3 files changed, 35 insertions(+), 16 deletions(-) create mode 100644 packages/assets/icons/shield-alert.svg diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index 7dea729b4e..4f1b739e5d 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -1,6 +1,13 @@ diff --git a/packages/utils/highlightjs/index.ts b/packages/utils/highlightjs/index.ts index c8e7ba5c32..dbfbae0892 100644 --- a/packages/utils/highlightjs/index.ts +++ b/packages/utils/highlightjs/index.ts @@ -55,6 +55,8 @@ hljs.registerAliases(['toml'], { languageName: 'ini' }) hljs.registerAliases(['yml'], { languageName: 'yaml' }) hljs.registerAliases(['html', 'htm', 'xhtml', 'mcui', 'fxml'], { languageName: 'xml' }) +export { hljs } + export const renderHighlightedString = (string) => configuredXss.process( md({ @@ -71,3 +73,34 @@ export const renderHighlightedString = (string) => }, }).render(string), ) + +export const highlightCodeLines = (code: string, language: string): string[] => { + if (!code) return [] + + if (!hljs.getLanguage(language)) { + return code.split('\n') + } + + try { + const highlighted = hljs.highlight(code, { language }).value + const openTags: string[] = [] + + const processedHtml = highlighted.replace(/(]+>)|(<\/span>)|(\n)/g, (match) => { + if (match === '\n') 
{ + return ''.repeat(openTags.length) + '\n' + openTags.join('') + } + + if (match === '') { + openTags.pop() + } else { + openTags.push(match) + } + + return match + }) + + return processedHtml.split('\n') + } catch { + return code.split('\n') + } +} From c80262f1260b4d42cbb44555d5a62b6a125d51ab Mon Sep 17 00:00:00 2001 From: "Calum H. (IMB11)" Date: Sun, 16 Nov 2025 10:12:44 +0000 Subject: [PATCH 053/104] feat: temp remove mock data --- .../src/pages/moderation/technical-review.vue | 29 +++++++------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index 1fd71edf55..a4000b3aef 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -13,10 +13,10 @@ import { ButtonStyled, Combobox, type ComboboxOption, - // injectModrinthClient, // TEMPORARY: Commented out while using mock data + injectModrinthClient, Pagination, } from '@modrinth/ui' -// import { useQuery } from '@tanstack/vue-query' // TEMPORARY: Commented out while using mock data +import { useQuery } from '@tanstack/vue-query' import { defineMessages, useVIntl } from '@vintl/vintl' import Fuse from 'fuse.js' @@ -24,9 +24,9 @@ import { type BatchScanProgress } from '~/components/ui/moderation/BatchScanProg import ModerationTechRevCard from '~/components/ui/moderation/ModerationTechRevCard.vue' // TEMPORARY: Mock data for development -import { generateMockProjectReviews } from '~/utils/mockTechReviewData' +// import { generateMockProjectReviews } from '~/utils/mockTechReviewData' -// const client = injectModrinthClient() // TEMPORARY: Commented out while using mock data +const client = injectModrinthClient() const currentPage = ref(1) const itemsPerPage = 15 @@ -223,8 +223,6 @@ function goToPage(page: number, top = false) { } } -// TEMPORARY: Commented out while using mock data -/* function 
toApiSort(label: string): Labrinth.TechReview.Internal.SearchProjectsSort { switch (label) { case 'Oldest': @@ -234,11 +232,7 @@ function toApiSort(label: string): Labrinth.TechReview.Internal.SearchProjectsSo return 'CreatedDesc' } } -*/ -// TEMPORARY: Using mock data instead of API -// Uncomment below to use real API data -/* const { data: reviewItems, isLoading, @@ -254,16 +248,15 @@ const { }, initialData: [] as Labrinth.TechReview.Internal.ProjectReview[], }) -*/ // TEMPORARY: Mock data for development (58 items to match batch scan progress) -const reviewItems = ref( - generateMockProjectReviews(58), -) -const isLoading = ref(false) -const refetch = () => { - reviewItems.value = generateMockProjectReviews(58) -} +// const reviewItems = ref( +// generateMockProjectReviews(58), +// ) +// const isLoading = ref(false) +// const refetch = () => { +// reviewItems.value = generateMockProjectReviews(58) +// } watch(currentSortType, () => { goToPage(1) From 3a8a951ccefbcfae9fef010e83f34f451b0707b4 Mon Sep 17 00:00:00 2001 From: "Calum H. 
(IMB11)" Date: Sun, 16 Nov 2025 12:18:41 +0000 Subject: [PATCH 054/104] fix: search sort types --- apps/frontend/src/pages/moderation/technical-review.vue | 4 ++-- .../api-client/src/modules/labrinth/tech-review/internal.ts | 2 +- packages/api-client/src/modules/labrinth/types.ts | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index a4000b3aef..2a5d3d01df 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -226,10 +226,10 @@ function goToPage(page: number, top = false) { function toApiSort(label: string): Labrinth.TechReview.Internal.SearchProjectsSort { switch (label) { case 'Oldest': - return 'CreatedAsc' + return 'created_asc' case 'Newest': default: - return 'CreatedDesc' + return 'created_desc' } } diff --git a/packages/api-client/src/modules/labrinth/tech-review/internal.ts b/packages/api-client/src/modules/labrinth/tech-review/internal.ts index 86be55f48c..6cd985f244 100644 --- a/packages/api-client/src/modules/labrinth/tech-review/internal.ts +++ b/packages/api-client/src/modules/labrinth/tech-review/internal.ts @@ -20,7 +20,7 @@ export class LabrinthTechReviewInternalModule extends AbstractModule { * const reviews = await client.labrinth.tech_review_internal.searchProjects({ * limit: 20, * page: 0, - * sort_by: 'CreatedAsc', + * sort_by: 'created_asc', * filter: { * project_type: ['mod', 'modpack'] * } diff --git a/packages/api-client/src/modules/labrinth/types.ts b/packages/api-client/src/modules/labrinth/types.ts index 03cdbb296f..4cca1b818a 100644 --- a/packages/api-client/src/modules/labrinth/types.ts +++ b/packages/api-client/src/modules/labrinth/types.ts @@ -468,7 +468,7 @@ export namespace Labrinth { project_type?: string[] } - export type SearchProjectsSort = 'CreatedAsc' | 'CreatedDesc' + export type SearchProjectsSort = 'created_asc' 
| 'created_desc' export type UpdateIssueRequest = { status: DelphiReportIssueStatus From 42e496a77f8920ad812120346744b92882c28bb3 Mon Sep 17 00:00:00 2001 From: "Calum H. (IMB11)" Date: Sun, 16 Nov 2025 12:32:47 +0000 Subject: [PATCH 055/104] fix: intl & lint --- .../withdraw-stages/MuralpayDetailsStage.vue | 10 +- .../ui/moderation/ModerationReportCard.vue | 21 +- .../ui/moderation/ModerationTechRevCard.vue | 5 +- .../src/components/ui/servers/FileItem.vue | 5 +- apps/frontend/src/layouts/default.vue | 17 +- apps/frontend/src/locales/en-US/index.json | 10 +- .../src/pages/moderation/technical-review.vue | 13 +- .../src/providers/creator-withdraw.ts | 7 +- apps/frontend/src/utils/mockTechReviewData.ts | 1304 ++++++++--------- .../ui/src/components/base/Admonition.vue | 5 +- .../components/project/ProjectStatusBadge.vue | 2 +- packages/ui/src/utils/auto-icons.ts | 27 +- 12 files changed, 720 insertions(+), 706 deletions(-) diff --git a/apps/frontend/src/components/ui/dashboard/withdraw-stages/MuralpayDetailsStage.vue b/apps/frontend/src/components/ui/dashboard/withdraw-stages/MuralpayDetailsStage.vue index 4ba2a770ce..c4225bd36f 100644 --- a/apps/frontend/src/components/ui/dashboard/withdraw-stages/MuralpayDetailsStage.vue +++ b/apps/frontend/src/components/ui/dashboard/withdraw-stages/MuralpayDetailsStage.vue @@ -207,6 +207,10 @@ import { financialMessages, formFieldLabels, formFieldPlaceholders, + getBlockchainColor, + getBlockchainIcon, + getCurrencyColor, + getCurrencyIcon, } from '@modrinth/ui' import { defineMessages, useVIntl } from '@vintl/vintl' import { IntlFormatted } from '@vintl/vintl/components' @@ -217,12 +221,6 @@ import RevenueInputField from '@/components/ui/dashboard/RevenueInputField.vue' import WithdrawFeeBreakdown from '@/components/ui/dashboard/WithdrawFeeBreakdown.vue' import { useGeneratedState } from '@/composables/generated' import { useWithdrawContext } from '@/providers/creator-withdraw.ts' -import { - getBlockchainColor, - 
getBlockchainIcon, - getCurrencyColor, - getCurrencyIcon, -} from '@modrinth/ui' import { getRailConfig } from '@/utils/muralpay-rails' import { normalizeChildren } from '@/utils/vue-children.ts' diff --git a/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue b/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue index 08cb58aed6..8cd424defd 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue @@ -138,14 +138,13 @@ import { LinkIcon, OrganizationIcon, } from '@modrinth/assets' -import type { ExtendedReport, ReportQuickReply } from '@modrinth/moderation' +import type { ExtendedReport } from '@modrinth/moderation' +import type { OverflowMenuOption } from '@modrinth/ui' import { Avatar, ButtonStyled, - CollapsibleRegion, injectNotificationManager, OverflowMenu, - type OverflowMenuOption, useRelativeTime, } from '@modrinth/ui' import { computed } from 'vue' @@ -159,7 +158,7 @@ const props = defineProps<{ }>() const reportThread = ref | null>(null) -const collapsibleRegion = ref | null>(null) +// const collapsibleRegion = ref | null>(null) const formatRelativeTime = useRelativeTime() @@ -198,14 +197,14 @@ const quickActions: OverflowMenuOption[] = [ }, ] -async function handleQuickReply(reply: ReportQuickReply) { - const message = - typeof reply.message === 'function' ? await reply.message(props.report) : reply.message +// async function handleQuickReply(reply: ReportQuickReply) { +// const message = +// typeof reply.message === 'function' ? 
await reply.message(props.report) : reply.message - collapsibleRegion.value?.setCollapsed(false) - await nextTick() - reportThread.value?.setReplyContent(message) -} +// collapsibleRegion.value?.setCollapsed(false) +// await nextTick() +// reportThread.value?.setReplyContent(message) +// } const reportItemAvatarUrl = computed(() => { switch (props.report.item_type) { diff --git a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue index 8e7db3d062..58a61d0c16 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue @@ -168,8 +168,9 @@ function getSeverityBreakdown(file: Labrinth.TechReview.Internal.FileReview) { class="h-4 w-4" /> {{ formatProjectType(project_type, true) }}
@@ -377,9 +378,9 @@ function getSeverityBreakdown(file: Labrinth.TechReview.Internal.FileReview) { >
- - diff --git a/packages/moderation/package.json b/packages/moderation/package.json index 7b784d475d..7f1d4dfa07 100644 --- a/packages/moderation/package.json +++ b/packages/moderation/package.json @@ -11,6 +11,7 @@ "dependencies": { "@modrinth/assets": "workspace:*", "@modrinth/utils": "workspace:*", + "@modrinth/api-client": "workspace:*", "vue": "^3.5.13" }, "devDependencies": { diff --git a/packages/moderation/src/data/report-quick-replies.ts b/packages/moderation/src/data/report-quick-replies.ts index d8847f4cd9..4a29ea8432 100644 --- a/packages/moderation/src/data/report-quick-replies.ts +++ b/packages/moderation/src/data/report-quick-replies.ts @@ -1,4 +1,5 @@ -import type { ReportQuickReply } from '../types/reports' +import type { ExtendedReport } from '../types/reports' +import type { QuickReply } from '../types/quick-reply' export default [ { @@ -67,4 +68,4 @@ export default [ message: async () => (await import('./messages/reports/stale.md?raw')).default, private: false, }, -] as ReadonlyArray +] as ReadonlyArray> diff --git a/packages/moderation/src/data/tech-review-quick-replies.ts b/packages/moderation/src/data/tech-review-quick-replies.ts new file mode 100644 index 0000000000..e5aab6326b --- /dev/null +++ b/packages/moderation/src/data/tech-review-quick-replies.ts @@ -0,0 +1,10 @@ +import type { Labrinth } from '@modrinth/api-client' +import type { QuickReply } from '../types/quick-reply' + +export interface TechReviewContext { + project: Labrinth.Projects.v3.Project + project_owner: Labrinth.TechReview.Internal.Ownership + reports: Labrinth.TechReview.Internal.FileReport[] +} + +export default [] as ReadonlyArray> diff --git a/packages/moderation/src/index.ts b/packages/moderation/src/index.ts index f10b4d0294..5f07054e2c 100644 --- a/packages/moderation/src/index.ts +++ b/packages/moderation/src/index.ts @@ -4,10 +4,12 @@ export { finalPermissionMessages } from './data/modpack-permissions-stage' export { default as nags } from './data/nags' export 
* from './data/nags/index' export { default as reportQuickReplies } from './data/report-quick-replies' +export { default as techReviewQuickReplies, type TechReviewContext } from './data/tech-review-quick-replies' export * from './types/actions' export * from './types/keybinds' export * from './types/messages' export * from './types/nags' export * from './types/reports' export * from './types/stage' +export * from './types/quick-reply' export * from './utils' diff --git a/packages/moderation/src/types/quick-reply.ts b/packages/moderation/src/types/quick-reply.ts new file mode 100644 index 0000000000..e27ec70f02 --- /dev/null +++ b/packages/moderation/src/types/quick-reply.ts @@ -0,0 +1,6 @@ +export interface QuickReply { + label: string + message: string | ((context: T) => Promise | string) + shouldShow?: (context: T) => boolean + private?: boolean +} diff --git a/packages/moderation/src/types/reports.ts b/packages/moderation/src/types/reports.ts index 16122b6778..cb02f8c354 100644 --- a/packages/moderation/src/types/reports.ts +++ b/packages/moderation/src/types/reports.ts @@ -15,10 +15,3 @@ export interface ExtendedReport extends Report { version?: Version target?: OwnershipTarget } - -export interface ReportQuickReply { - label: string - message: string | ((report: ExtendedReport) => Promise | string) - shouldShow?: (report: ExtendedReport) => boolean - private?: boolean -} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 78c13fa818..06ca1aeca3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -467,6 +467,9 @@ importers: packages/moderation: dependencies: + '@modrinth/api-client': + specifier: workspace:* + version: link:../api-client '@modrinth/assets': specifier: workspace:* version: link:../assets From 1530b49082e25052328233a8a124157a16dca14b Mon Sep 17 00:00:00 2001 From: "Calum H. 
(IMB11)" Date: Fri, 5 Dec 2025 12:28:00 +0000 Subject: [PATCH 073/104] feat: more qa --- .../ui/moderation/ModerationReportCard.vue | 45 ++++----- .../ui/moderation/ModerationTechRevCard.vue | 38 +++++-- .../src/components/ui/thread/ThreadView.vue | 6 +- .../src/components/base/CollapsibleRegion.vue | 98 +++++++------------ 4 files changed, 91 insertions(+), 96 deletions(-) diff --git a/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue b/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue index 0df5a068a7..3b81942c17 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationReportCard.vue @@ -1,11 +1,6 @@ diff --git a/packages/ui/src/components/base/CollapsibleRegion.vue b/packages/ui/src/components/base/CollapsibleRegion.vue index 0d9f27f04b..a8919f240c 100644 --- a/packages/ui/src/components/base/CollapsibleRegion.vue +++ b/packages/ui/src/components/base/CollapsibleRegion.vue @@ -1,29 +1,27 @@ - From 429c44dc73a728dbbeb3b44742d001adbc11e07c Mon Sep 17 00:00:00 2001 From: "Calum H. (IMB11)" Date: Fri, 5 Dec 2025 16:24:44 +0000 Subject: [PATCH 074/104] feat: fix collapse --- .../components/ui/moderation/ModerationTechRevCard.vue | 4 ++-- packages/ui/src/components/base/CollapsibleRegion.vue | 8 +++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue index 092846da90..a9e1f4c3ef 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue @@ -504,12 +504,12 @@ const techReviewContext = computed(() => ({
diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index f16b44df21..93026db8bb 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -24,6 +24,7 @@ const route = useRoute() const router = useRouter() const CACHE_TTL = 24 * 60 * 60 * 1000 +const CACHE_KEY_PREFIX = 'tech_review_source_' type CachedSource = { source: string @@ -32,14 +33,14 @@ type CachedSource = { function getCachedSource(detailId: string): string | null { try { - const cached = localStorage.getItem(`tech_review_source_${detailId}`) + const cached = localStorage.getItem(`${CACHE_KEY_PREFIX}${detailId}`) if (!cached) return null const data: CachedSource = JSON.parse(cached) const now = Date.now() if (now - data.timestamp > CACHE_TTL) { - localStorage.removeItem(`tech_review_source_${detailId}`) + localStorage.removeItem(`${CACHE_KEY_PREFIX}${detailId}`) return null } @@ -55,7 +56,7 @@ function setCachedSource(detailId: string, source: string): void { source, timestamp: Date.now(), } - localStorage.setItem(`tech_review_source_${detailId}`, JSON.stringify(data)) + localStorage.setItem(`${CACHE_KEY_PREFIX}${detailId}`, JSON.stringify(data)) } catch (error) { console.error('Failed to cache source:', error) } @@ -67,7 +68,7 @@ function clearExpiredCache(): void { const keys = Object.keys(localStorage) for (const key of keys) { - if (key.startsWith('tech_review_source_')) { + if (key.startsWith(CACHE_KEY_PREFIX)) { const cached = localStorage.getItem(key) if (cached) { const data: CachedSource = JSON.parse(cached) diff --git a/packages/ui/src/components/base/index.ts b/packages/ui/src/components/base/index.ts index fa9a75e0e3..b31cc318c6 100644 --- a/packages/ui/src/components/base/index.ts +++ b/packages/ui/src/components/base/index.ts @@ -13,8 +13,8 @@ export { default as Checkbox } from './Checkbox.vue' export { default as Chips } from './Chips.vue' 
export { default as Collapsible } from './Collapsible.vue' export { default as CollapsibleRegion } from './CollapsibleRegion.vue' -export { default as Combobox } from './Combobox.vue' export type { ComboboxOption } from './Combobox.vue' +export { default as Combobox } from './Combobox.vue' export { default as ContentPageHeader } from './ContentPageHeader.vue' export { default as CopyCode } from './CopyCode.vue' export { default as DoubleIcon } from './DoubleIcon.vue' @@ -23,19 +23,19 @@ export { default as DropdownSelect } from './DropdownSelect.vue' export { default as EnvironmentIndicator } from './EnvironmentIndicator.vue' export { default as ErrorInformationCard } from './ErrorInformationCard.vue' export { default as FileInput } from './FileInput.vue' -export { default as FilterBar } from './FilterBar.vue' export type { FilterBarOption } from './FilterBar.vue' +export { default as FilterBar } from './FilterBar.vue' export { default as HeadingLink } from './HeadingLink.vue' export { default as HorizontalRule } from './HorizontalRule.vue' export { default as IconSelect } from './IconSelect.vue' -export { default as JoinedButtons } from './JoinedButtons.vue' export type { JoinedButtonAction } from './JoinedButtons.vue' +export { default as JoinedButtons } from './JoinedButtons.vue' export { default as LoadingIndicator } from './LoadingIndicator.vue' export { default as ManySelect } from './ManySelect.vue' export { default as MarkdownEditor } from './MarkdownEditor.vue' export { default as OptionGroup } from './OptionGroup.vue' -export { default as OverflowMenu } from './OverflowMenu.vue' export type { Option as OverflowMenuOption } from './OverflowMenu.vue' +export { default as OverflowMenu } from './OverflowMenu.vue' export { default as Page } from './Page.vue' export { default as Pagination } from './Pagination.vue' export { default as PopoutMenu } from './PopoutMenu.vue' From 8c3a3c6f22d59db0510a0900c152a10d96207295 Mon Sep 17 00:00:00 2001 From: "Calum H. 
(IMB11)" Date: Thu, 18 Dec 2025 15:52:28 +0000 Subject: [PATCH 100/104] fix: pagination --- .../ui/moderation/ModerationTechRevCard.vue | 44 +++-- .../src/pages/moderation/technical-review.vue | 185 +++++++++--------- .../api-client/src/modules/labrinth/types.ts | 22 ++- 3 files changed, 143 insertions(+), 108 deletions(-) diff --git a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue index 4a4bfb370c..cec5f7794f 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue @@ -35,9 +35,7 @@ import { import dayjs from 'dayjs' import { computed, ref, watch } from 'vue' -import MaliciousSummaryModal, { - type UnsafeFile, -} from '~/components/ui/moderation/MaliciousSummaryModal.vue' +import type { UnsafeFile } from '~/components/ui/moderation/MaliciousSummaryModal.vue' import NavTabs from '~/components/ui/NavTabs.vue' import ThreadView from '~/components/ui/thread/ThreadView.vue' @@ -78,6 +76,7 @@ const emit = defineEmits<{ refetch: [] loadFileSources: [reportId: string] markComplete: [projectId: string] + showMaliciousSummary: [unsafeFiles: UnsafeFile[]] }>() const quickActions = computed(() => { @@ -157,12 +156,25 @@ watch(selectedFile, (newFile) => { const client = injectModrinthClient() +const severityOrder = { severe: 3, high: 2, medium: 1, low: 0 } as Record + +function getFileHighestSeverity(file: FlattenedFileReport): Labrinth.TechReview.Internal.DelphiSeverity { + const severities = file.issues + .flatMap((i) => i.details ?? []) + .map((d) => d.severity) + .filter((s): s is Labrinth.TechReview.Internal.DelphiSeverity => !!s) + + return severities.sort((a, b) => (severityOrder[b] ?? 0) - (severityOrder[a] ?? 
0))[0] || 'low' +} + const allFiles = computed(() => { - return props.item.reports + return [...props.item.reports].sort((a, b) => { + const aSeverity = getFileHighestSeverity(a) + const bSeverity = getFileHighestSeverity(b) + return (severityOrder[bSeverity] ?? 0) - (severityOrder[aSeverity] ?? 0) + }) }) -const severityOrder = { severe: 3, high: 2, medium: 1, low: 0 } as Record - const highestSeverity = computed(() => { const severities = props.item.reports .flatMap((r) => r.issues ?? []) @@ -484,8 +496,6 @@ const threadViewRef = ref<{ getReplyContent: () => string } | null>(null) -const maliciousSummaryModalRef = ref>() - const unsafeFiles = computed(() => { return props.item.reports .filter((report) => @@ -657,7 +667,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { }) if (verdict === 'unsafe') { - maliciousSummaryModalRef.value?.show() + emit('showMaliciousSummary', unsafeFiles.value) } } catch (error: unknown) { const err = error as { response?: { data?: { issues?: string[] } } } @@ -820,7 +830,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { - + @@ -851,6 +861,15 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { formatFileSize(file.file_size) }}
+
+ {{ + capitalizeString(getFileHighestSeverity(file)) + }} +
{{ getFileMarkedCount(file) }}/{{ getFileDetailCount(file) }} flags
+
Manual review @@ -1075,8 +1095,6 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') {
- -
diff --git a/apps/frontend/src/pages/moderation/technical-review.vue b/apps/frontend/src/pages/moderation/technical-review.vue index 93026db8bb..b1ff7150b2 100644 --- a/apps/frontend/src/pages/moderation/technical-review.vue +++ b/apps/frontend/src/pages/moderation/technical-review.vue @@ -8,17 +8,21 @@ import { injectModrinthClient, Pagination, } from '@modrinth/ui' -import { useQuery, useQueryClient } from '@tanstack/vue-query' +import { useInfiniteQuery, useQueryClient } from '@tanstack/vue-query' import { defineMessages, useVIntl } from '@vintl/vintl' import Fuse from 'fuse.js' +import MaliciousSummaryModal, { + type UnsafeFile, +} from '~/components/ui/moderation/MaliciousSummaryModal.vue' import ModerationTechRevCard from '~/components/ui/moderation/ModerationTechRevCard.vue' const client = injectModrinthClient() const queryClient = useQueryClient() const currentPage = ref(1) -const itemsPerPage = 15 +const API_PAGE_SIZE = 50 +const UI_PAGE_SIZE = 4 const { formatMessage } = useVIntl() const route = useRoute() const router = useRouter() @@ -208,13 +212,12 @@ const filterTypes = computed[]>(() => { return [...base, ...sortedTypes.map((type) => ({ value: type, label: type }))] }) -const currentSortType = ref('Oldest') +const currentSortType = ref('Severe first') const sortTypes: ComboboxOption[] = [ { value: 'Oldest', label: 'Oldest' }, { value: 'Newest', label: 'Newest' }, - { value: 'Pending', label: 'Pending' }, - { value: 'Severity ↑', label: 'Severity ↑' }, - { value: 'Severity ↓', label: 'Severity ↓' }, + { value: 'Severe first', label: 'Severe first' }, + { value: 'Severe last', label: 'Severe last' }, ] const fuse = computed(() => { @@ -255,64 +258,7 @@ const typeFiltered = computed(() => { }) }) -function getHighestSeverity(review: { - reports: Labrinth.TechReview.Internal.FileReport[] -}): string { - const severities = review.reports - .flatMap((r) => r.issues ?? []) - .flatMap((i) => i.details ?? 
[]) - .map((d) => d.severity) - .filter((s): s is Labrinth.TechReview.Internal.DelphiSeverity => !!s) - - const order = { severe: 3, high: 2, medium: 1, low: 0 } as Record - return severities.sort((a, b) => (order[b] ?? 0) - (order[a] ?? 0))[0] || 'low' -} - -function hasPendingIssues(review: { reports: Labrinth.TechReview.Internal.FileReport[] }): boolean { - return review.reports.some((report) => report.issues.some((issue) => issue.status === 'pending')) -} - -function getEarliestDate(review: { reports: Labrinth.TechReview.Internal.FileReport[] }): number { - const dates = review.reports.map((r) => new Date(r.created).getTime()) - return Math.min(...dates) -} - -const filteredItems = computed(() => { - const filtered = [...typeFiltered.value] - - switch (currentSortType.value) { - case 'Oldest': - filtered.sort((a, b) => getEarliestDate(a) - getEarliestDate(b)) - break - case 'Newest': - filtered.sort((a, b) => getEarliestDate(b) - getEarliestDate(a)) - break - case 'Pending': { - filtered.sort((a, b) => { - const aPending = hasPendingIssues(a) ? 0 : 1 - const bPending = hasPendingIssues(b) ? 0 : 1 - return aPending - bPending - }) - break - } - case 'Severity ↑': { - const order = { low: 0, medium: 1, high: 2, severe: 3 } as Record - filtered.sort( - (a, b) => (order[getHighestSeverity(a)] ?? 0) - (order[getHighestSeverity(b)] ?? 0), - ) - break - } - case 'Severity ↓': { - const order = { low: 0, medium: 1, high: 2, severe: 3 } as Record - filtered.sort( - (a, b) => (order[getHighestSeverity(b)] ?? 0) - (order[getHighestSeverity(a)] ?? 
0), - ) - break - } - } - - return filtered -}) +const filteredItems = computed(() => typeFiltered.value) const filteredIssuesCount = computed(() => { return filteredItems.value.reduce((total, review) => { @@ -332,11 +278,11 @@ const filteredIssuesCount = computed(() => { }, 0) }) -const totalPages = computed(() => Math.ceil((filteredItems.value?.length || 0) / itemsPerPage)) +const totalPages = computed(() => Math.ceil((filteredItems.value?.length || 0) / UI_PAGE_SIZE)) const paginatedItems = computed(() => { if (!filteredItems.value) return [] - const start = (currentPage.value - 1) * itemsPerPage - const end = start + itemsPerPage + const start = (currentPage.value - 1) * UI_PAGE_SIZE + const end = start + UI_PAGE_SIZE return filteredItems.value.slice(start, end) }) function goToPage(page: number, top = false) { @@ -356,24 +302,66 @@ function toApiSort(label: string): Labrinth.TechReview.Internal.SearchProjectsSo case 'Oldest': return 'created_asc' case 'Newest': - default: return 'created_desc' + case 'Severe first': + return 'severity_desc' + case 'Severe last': + return 'severity_asc' + default: + return 'severity_desc' } } const { - data: searchResponse, + data: infiniteData, isLoading, + isFetchingNextPage, + fetchNextPage, + hasNextPage, refetch, -} = useQuery({ +} = useInfiniteQuery({ queryKey: ['tech-reviews', currentSortType], - queryFn: async () => { + queryFn: async ({ pageParam = 0 }) => { return await client.labrinth.tech_review_internal.searchProjects({ - limit: 350, - page: 0, + limit: API_PAGE_SIZE, + page: pageParam, sort_by: toApiSort(currentSortType.value), }) }, + getNextPageParam: (lastPage, allPages) => { + // If we got a full page, there's probably more + return lastPage.project_reports.length >= API_PAGE_SIZE ? 
allPages.length : undefined + }, + initialPageParam: 0, +}) + +watch( + [() => infiniteData.value, hasNextPage], + () => { + if (hasNextPage.value && !isFetchingNextPage.value) { + fetchNextPage() + } + }, + { immediate: true }, +) + +const mergedSearchResponse = computed(() => { + if (!infiniteData.value?.pages?.length) return null + + return infiniteData.value.pages.reduce( + (merged, page) => ({ + project_reports: [...merged.project_reports, ...page.project_reports], + projects: { ...merged.projects, ...page.projects }, + threads: { ...merged.threads, ...page.threads }, + ownership: { ...merged.ownership, ...page.ownership }, + }), + { + project_reports: [] as Labrinth.TechReview.Internal.ProjectReport[], + projects: {} as Record, + threads: {} as Record, + ownership: {} as Record, + }, + ) }) type FlattenedFileReport = Labrinth.TechReview.Internal.FileReport & { @@ -382,11 +370,11 @@ type FlattenedFileReport = Labrinth.TechReview.Internal.FileReport & { } const reviewItems = computed(() => { - if (!searchResponse.value?.project_reports?.length) { + if (!mergedSearchResponse.value?.project_reports?.length) { return [] } - const response = searchResponse.value + const response = mergedSearchResponse.value return response.project_reports .map((projectReport) => { @@ -414,7 +402,7 @@ const reviewItems = computed(() => { ( item, ): item is { - project: Labrinth.Projects.v3.Project + project: Labrinth.TechReview.Internal.ProjectModerationInfo project_owner: Labrinth.TechReview.Internal.Ownership thread: Labrinth.TechReview.Internal.Thread reports: FlattenedFileReport[] @@ -425,29 +413,43 @@ const reviewItems = computed(() => { function handleMarkComplete(projectId: string) { queryClient.setQueryData( ['tech-reviews', currentSortType], - (oldData: Labrinth.TechReview.Internal.SearchResponse | undefined) => { + ( + oldData: + | { + pages: Labrinth.TechReview.Internal.SearchResponse[] + pageParams: number[] + } + | undefined, + ) => { if (!oldData) return oldData - 
const remainingProjectReports = oldData.project_reports.filter( - (pr) => pr.project_id !== projectId, - ) - - const { [projectId]: _removedProject, ...remainingProjects } = oldData.projects - const { [projectId]: _removedOwnership, ...remainingOwnership } = oldData.ownership - return { ...oldData, - project_reports: remainingProjectReports, - projects: remainingProjects, - ownership: remainingOwnership, + pages: oldData.pages.map((page) => ({ + ...page, + project_reports: page.project_reports.filter((pr) => pr.project_id !== projectId), + projects: Object.fromEntries( + Object.entries(page.projects).filter(([id]) => id !== projectId), + ), + ownership: Object.fromEntries( + Object.entries(page.ownership).filter(([id]) => id !== projectId), + ), + })), } }, ) } +const maliciousSummaryModalRef = ref>() +const currentUnsafeFiles = ref([]) + +function handleShowMaliciousSummary(unsafeFiles: UnsafeFile[]) { + currentUnsafeFiles.value = unsafeFiles + maliciousSummaryModalRef.value?.show() +} + watch(currentSortType, () => { goToPage(1) - refetch() }) // TODO: Reimpl when backend is available @@ -509,7 +511,7 @@ watch(currentSortType, () => { {
-
+
{ @refetch="refetch" @load-file-sources="handleLoadFileSources" @mark-complete="handleMarkComplete" + @show-malicious-summary="handleShowMaliciousSummary" />
@@ -563,5 +566,7 @@ watch(currentSortType, () => { @switch-page="(num) => goToPage(num, true)" /> + + diff --git a/packages/api-client/src/modules/labrinth/types.ts b/packages/api-client/src/modules/labrinth/types.ts index b8c6674f3c..8a15574ace 100644 --- a/packages/api-client/src/modules/labrinth/types.ts +++ b/packages/api-client/src/modules/labrinth/types.ts @@ -679,7 +679,11 @@ export namespace Labrinth { project_type?: string[] } - export type SearchProjectsSort = 'created_asc' | 'created_desc' + export type SearchProjectsSort = + | 'created_asc' + | 'created_desc' + | 'severity_asc' + | 'severity_desc' export type UpdateIssueRequest = { verdict: 'safe' | 'unsafe' @@ -692,14 +696,22 @@ export namespace Labrinth { export type SearchResponse = { project_reports: ProjectReport[] - projects: Record + projects: Record threads: Record ownership: Record } + export type ProjectModerationInfo = { + id: string + thread_id: string + name: string + project_types: string[] + icon_url: string | null + } & Projects.v3.Project + export type ProjectReport = { project_id: string - max_severity: DelphiSeverity + max_severity: DelphiSeverity | null versions: VersionReport[] } @@ -724,7 +736,6 @@ export namespace Labrinth { id: string report_id: string issue_type: string - status: DelphiReportIssueStatus details: ReportIssueDetail[] } @@ -736,6 +747,7 @@ export namespace Labrinth { decompiled_source: string | null data: Record severity: DelphiSeverity + status: DelphiReportIssueStatus } export type Ownership = @@ -823,7 +835,7 @@ export namespace Labrinth { members: User[] } - export type FlagReason = 'delphi' | 'manual' + export type FlagReason = 'delphi' export type DelphiSeverity = 'low' | 'medium' | 'high' | 'severe' From bea8b609323faca55548694f8700367ca50cadd6 Mon Sep 17 00:00:00 2001 From: "Calum H. 
(IMB11)" Date: Thu, 18 Dec 2025 15:52:49 +0000 Subject: [PATCH 101/104] fix: lint --- .../src/components/ui/moderation/ModerationTechRevCard.vue | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue index cec5f7794f..b7fda2d5f4 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue @@ -158,7 +158,9 @@ const client = injectModrinthClient() const severityOrder = { severe: 3, high: 2, medium: 1, low: 0 } as Record -function getFileHighestSeverity(file: FlattenedFileReport): Labrinth.TechReview.Internal.DelphiSeverity { +function getFileHighestSeverity( + file: FlattenedFileReport, +): Labrinth.TechReview.Internal.DelphiSeverity { const severities = file.issues .flatMap((i) => i.details ?? []) .map((d) => d.severity) From cdee0936848ede7dfc1cefc10f514d1483c66f07 Mon Sep 17 00:00:00 2001 From: "Calum H. 
(IMB11)" Date: Fri, 19 Dec 2025 19:46:58 +0000 Subject: [PATCH 102/104] fix: qa --- .../ui/moderation/MaliciousSummaryModal.vue | 16 +++--- .../ui/moderation/ModerationTechRevCard.vue | 53 ++++++++++++------- packages/ui/src/components/base/Badge.vue | 4 +- 3 files changed, 46 insertions(+), 27 deletions(-) diff --git a/apps/frontend/src/components/ui/moderation/MaliciousSummaryModal.vue b/apps/frontend/src/components/ui/moderation/MaliciousSummaryModal.vue index a35810b4e0..f5b1fea8a8 100644 --- a/apps/frontend/src/components/ui/moderation/MaliciousSummaryModal.vue +++ b/apps/frontend/src/components/ui/moderation/MaliciousSummaryModal.vue @@ -9,6 +9,7 @@ export type UnsafeFile = { projectName: string projectId: string userId: string + username: string } const props = defineProps<{ @@ -36,11 +37,16 @@ async function fetchVersionHashes(versionIds: string[]) { try { // TODO: switch to api-client once truman's vers stuff is merged const version = (await useBaseFetch(`version/${versionId}`)) as { - files: Array<{ filename: string; hashes: { sha512: string; sha1: string } }> + files: Array<{ + filename: string + file_name?: string + hashes: { sha512: string; sha1: string } + }> } const filesMap = new Map() for (const file of version.files) { - filesMap.set(file.filename, file.hashes.sha512) + const name = file.file_name ?? file.filename + filesMap.set(name, file.hashes.sha512) } versionDataCache.value.set(versionId, { files: filesMap, loading: false }) } catch (error) { @@ -92,13 +98,11 @@ defineExpose({ show, hide })
Project: -
-
- Project ID:
- User ID: + User: +
diff --git a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue index b7fda2d5f4..4d9b12f2d3 100644 --- a/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue +++ b/apps/frontend/src/components/ui/moderation/ModerationTechRevCard.vue @@ -169,13 +169,22 @@ function getFileHighestSeverity( return severities.sort((a, b) => (severityOrder[b] ?? 0) - (severityOrder[a] ?? 0))[0] || 'low' } -const allFiles = computed(() => { - return [...props.item.reports].sort((a, b) => { - const aSeverity = getFileHighestSeverity(a) - const bSeverity = getFileHighestSeverity(b) - return (severityOrder[bSeverity] ?? 0) - (severityOrder[aSeverity] ?? 0) - }) -}) +const allFiles = ref([]) + +watch( + () => props.item.reports, + (reports) => { + allFiles.value = [...reports].sort((a, b) => { + const aComplete = getFileMarkedCount(a) === getFileDetailCount(a) + const bComplete = getFileMarkedCount(b) === getFileDetailCount(b) + if (aComplete !== bComplete) return aComplete ? 1 : -1 + const aSeverity = getFileHighestSeverity(a) + const bSeverity = getFileHighestSeverity(b) + return (severityOrder[bSeverity] ?? 0) - (severityOrder[aSeverity] ?? 
0) + }) + }, + { immediate: true }, +) const highestSeverity = computed(() => { const severities = props.item.reports @@ -385,13 +394,13 @@ async function updateDetailStatus(detailId: string, verdict: 'safe' | 'unsafe') if (verdict === 'safe') { addNotification({ type: 'success', - title: 'Issue marked as safe', + title: 'Issue marked as pass', text: 'This issue has been marked as a false positive.', }) } else { addNotification({ type: 'success', - title: 'Issue marked as unsafe', + title: 'Issue marked as fail', text: 'This issue has been flagged as malicious.', }) } @@ -483,6 +492,14 @@ function toggleClass(filePath: string) { } } +function getClassDecompiledSource(classItem: ClassGroup): string | undefined { + for (const flag of classItem.flags) { + const source = props.decompiledSources.get(flag.detail.id) + if (source) return source + } + return undefined +} + function handleThreadUpdate() { emit('refetch') } @@ -516,6 +533,7 @@ const unsafeFiles = computed(() => { projectName: props.item.project.name, projectId: props.item.project.id, userId: props.item.project_owner.id, + username: props.item.project_owner.name, })) }) @@ -819,7 +837,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { :disabled="!canSubmitReview" @click="handleSubmitReview('safe')" > - Safe + Pass @@ -828,7 +846,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { :disabled="!canSubmitReview" @click="handleSubmitReview('unsafe')" > - Unsafe + Fail @@ -1023,7 +1041,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { :disabled="updatingDetails.has(flag.detail.id)" @click="updateDetailStatus(flag.detail.id, 'safe')" > - Safe + Pass @@ -1040,14 +1058,14 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { :disabled="updatingDetails.has(flag.detail.id)" @click="updateDetailStatus(flag.detail.id, 'unsafe')" > - Unsafe + Fail
@@ -1055,10 +1073,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') { v-tooltip="`Copy code`" class="absolute right-2 top-2 border-[1px]" @click=" - copyToClipboard( - props.decompiledSources.get(classItem.flags[0].detail.id)!, - classItem.filePath, - ) + copyToClipboard(getClassDecompiledSource(classItem)!, classItem.filePath) " > @@ -1069,7 +1084,7 @@ async function handleSubmitReview(verdict: 'safe' | 'unsafe') {
Date: Sat, 20 Dec 2025 09:29:07 +0000 Subject: [PATCH 103/104] intl extract --- packages/ui/src/locales/en-US/index.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ui/src/locales/en-US/index.json b/packages/ui/src/locales/en-US/index.json index 857c4ac7fb..5e75d51289 100644 --- a/packages/ui/src/locales/en-US/index.json +++ b/packages/ui/src/locales/en-US/index.json @@ -417,7 +417,7 @@ "defaultMessage": "Returned" }, "omorphia.component.badge.label.safe": { - "defaultMessage": "Safe" + "defaultMessage": "Pass" }, "omorphia.component.badge.label.scheduled": { "defaultMessage": "Scheduled" @@ -429,7 +429,7 @@ "defaultMessage": "Unlisted" }, "omorphia.component.badge.label.unsafe": { - "defaultMessage": "Unsafe" + "defaultMessage": "Fail" }, "omorphia.component.badge.label.withheld": { "defaultMessage": "Withheld" From 5c6105bb85bd8c07acc214062b7fce5e465217e8 Mon Sep 17 00:00:00 2001 From: aecsocket Date: Sat, 20 Dec 2025 10:34:38 +0000 Subject: [PATCH 104/104] fix ci --- packages/api-client/src/modules/index.ts | 88 +++++++++++------------- 1 file changed, 41 insertions(+), 47 deletions(-) diff --git a/packages/api-client/src/modules/index.ts b/packages/api-client/src/modules/index.ts index c06924aeb6..610a0b9910 100644 --- a/packages/api-client/src/modules/index.ts +++ b/packages/api-client/src/modules/index.ts @@ -26,19 +26,19 @@ type ModuleConstructor = new (client: AbstractModrinthClient) => AbstractModule * TODO: Better way? 
Probably not */ export const MODULE_REGISTRY = { - archon_backups_v0: ArchonBackupsV0Module, - archon_backups_v1: ArchonBackupsV1Module, - archon_servers_v0: ArchonServersV0Module, - archon_servers_v1: ArchonServersV1Module, - iso3166_data: ISO3166Module, - kyros_files_v0: KyrosFilesV0Module, - labrinth_billing_internal: LabrinthBillingInternalModule, - labrinth_collections: LabrinthCollectionsModule, - labrinth_projects_v2: LabrinthProjectsV2Module, - labrinth_projects_v3: LabrinthProjectsV3Module, - labrinth_state: LabrinthStateModule, - labrinth_tech_review_internal: LabrinthTechReviewInternalModule, - labrinth_versions_v3: LabrinthVersionsV3Module, + archon_backups_v0: ArchonBackupsV0Module, + archon_backups_v1: ArchonBackupsV1Module, + archon_servers_v0: ArchonServersV0Module, + archon_servers_v1: ArchonServersV1Module, + iso3166_data: ISO3166Module, + kyros_files_v0: KyrosFilesV0Module, + labrinth_billing_internal: LabrinthBillingInternalModule, + labrinth_collections: LabrinthCollectionsModule, + labrinth_projects_v2: LabrinthProjectsV2Module, + labrinth_projects_v3: LabrinthProjectsV3Module, + labrinth_state: LabrinthStateModule, + labrinth_tech_review_internal: LabrinthTechReviewInternalModule, + labrinth_versions_v3: LabrinthVersionsV3Module, } as const satisfies Record export type ModuleID = keyof typeof MODULE_REGISTRY @@ -51,15 +51,15 @@ export type ModuleID = keyof typeof MODULE_REGISTRY * @throws Error if module ID doesn't match expected format */ export function parseModuleID(id: string): [string, string] { - const parts = id.split('_') - if (parts.length < 2) { - throw new Error( - `Invalid module ID "${id}". Expected format: _ (e.g., "labrinth_projects_v2")`, - ) - } - const api = parts[0] - const moduleName = parts.slice(1).join('_') - return [api, moduleName] + const parts = id.split('_') + if (parts.length < 2) { + throw new Error( + `Invalid module ID "${id}". 
Expected format: _ (e.g., "labrinth_projects_v2")`, + ) + } + const api = parts[0] + const moduleName = parts.slice(1).join('_') + return [api, moduleName] } /** @@ -76,41 +76,35 @@ export function parseModuleID(id: string): [string, string] { * * @returns Nested structure organized by API namespace */ -export function buildModuleStructure(): Record< - string, - Record -> { - const structure: Record> = {} +export function buildModuleStructure(): Record> { + const structure: Record> = {} - for (const [id, constructor] of Object.entries(MODULE_REGISTRY)) { - const [api, moduleName] = parseModuleID(id) + for (const [id, constructor] of Object.entries(MODULE_REGISTRY)) { + const [api, moduleName] = parseModuleID(id) - if (!structure[api]) { - structure[api] = {} - } + if (!structure[api]) { + structure[api] = {} + } - structure[api][moduleName] = constructor - } + structure[api][moduleName] = constructor + } - return structure + return structure } /** * Extract API name from module ID * @example ParseAPI<'labrinth_projects_v2'> = 'labrinth' */ -type ParseAPI = T extends `${infer API}_${string}` - ? API - : never +type ParseAPI = T extends `${infer API}_${string}` ? API : never /** * Extract module name for a given API * @example ParseModule<'labrinth_projects_v2', 'labrinth'> = 'projects_v2' */ -type ParseModule< - T extends string, - API extends string, -> = T extends `${API}_${infer Module}` ? Module : never +type ParseModule = T extends `${API}_${infer Module}` + ? Module + : never /** * Group registry modules by API namespace @@ -121,11 +115,11 @@ type ParseModule< * ``` */ type GroupByAPI> = { - [API in ParseAPI]: { - [Module in ParseModule]: InstanceType< - Registry[`${API}_${Module}`] - > - } + [API in ParseAPI]: { + [Module in ParseModule]: InstanceType< + Registry[`${API}_${Module}`] + > + } } /**