-
- Failed: {session.failed_events} events
-
- {#if session.error_message}
-
-
- Error: {session.error_message}
-
-
- {/if}
- {#if session.failed_event_errors && session.failed_event_errors.length > 0}
-
- {#each session.failed_event_errors as error}
-
-
{error.event_id}
-
{error.error}
-
- {/each}
+ {#if session.errors?.length}
+
+ {#each session.errors as err}
+
+ {#if err.event_id}
{err.event_id}
{/if}
+
{err.error}
- {/if}
+ {/each}
{/if}
- {#if session.execution_results && session.execution_results.length > 0}
+ {#if session.execution_results?.length}
Execution Results:
@@ -82,25 +65,15 @@
}">
{result.status}
- {#if result.execution_time}
-
- {result.execution_time.toFixed(2)}s
-
+ {#if result.resource_usage?.execution_time_wall_seconds}
+ {result.resource_usage.execution_time_wall_seconds.toFixed(2)}s
{/if}
- {#if result.output || result.errors}
+ {#if result.stdout || result.stderr}
- {#if result.output}
-
- Output: {result.output}
-
- {/if}
- {#if result.errors}
-
- Error: {result.errors}
-
- {/if}
+ {#if result.stdout}
{result.stdout}
{/if}
+ {#if result.stderr}
{result.stderr}
{/if}
{/if}
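
A minimal sketch of formatting the renamed execution-result fields used in the template above. Assumptions: the SvelteKit `$lib/api` alias points at the generated client in this diff, each entry in `execution_results` carries the `ResourceUsage` shape changed later in this patch, and the 100 ticks/s fallback follows the "usually 100" doc comment removed from `clk_tck_hertz`.

```typescript
// Sketch only; '$lib/api' alias and the 100 Hz fallback are assumptions.
import type { ResourceUsage } from '$lib/api';

// Wall-clock label, matching the toFixed(2) formatting used in the template above.
export function wallTimeLabel(usage?: ResourceUsage | null): string {
  const wall = usage?.execution_time_wall_seconds;
  return wall !== undefined ? `${wall.toFixed(2)}s` : 'n/a';
}

// CPU seconds derived from jiffies and the reported clock-tick rate.
export function cpuSeconds(usage?: ResourceUsage | null): number | undefined {
  if (!usage || usage.cpu_time_jiffies === undefined) return undefined;
  return usage.cpu_time_jiffies / (usage.clk_tck_hertz ?? 100); // assumed default per removed comment
}
```
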
diff --git a/frontend/src/lib/api/index.ts b/frontend/src/lib/api/index.ts
index bbd8ae88..2788bc9b 100644
--- a/frontend/src/lib/api/index.ts
+++ b/frontend/src/lib/api/index.ts
@@ -1,4 +1,4 @@
// This file is auto-generated by @hey-api/openapi-ts
export { aggregateEventsApiV1EventsAggregatePost, browseEventsApiV1AdminEventsBrowsePost, cancelExecutionApiV1ExecutionIdCancelPost, cancelReplaySessionApiV1ReplaySessionsSessionIdCancelPost, cancelSagaApiV1SagasSagaIdCancelPost, cleanupOldSessionsApiV1ReplayCleanupPost, createExecutionApiV1ExecutePost, createReplaySessionApiV1ReplaySessionsPost, createSavedScriptApiV1ScriptsPost, createUserApiV1AdminUsersPost, deleteEventApiV1AdminEventsEventIdDelete, deleteEventApiV1EventsEventIdDelete, deleteExecutionApiV1ExecutionIdDelete, deleteNotificationApiV1NotificationsNotificationIdDelete, deleteSavedScriptApiV1ScriptsScriptIdDelete, deleteUserApiV1AdminUsersUserIdDelete, discardDlqMessageApiV1DlqMessagesEventIdDelete, executionEventsApiV1EventsExecutionsExecutionIdGet, exportEventsCsvApiV1AdminEventsExportCsvGet, exportEventsJsonApiV1AdminEventsExportJsonGet, getCurrentRequestEventsApiV1EventsCurrentRequestGet, getCurrentUserProfileApiV1AuthMeGet, getDlqMessageApiV1DlqMessagesEventIdGet, getDlqMessagesApiV1DlqMessagesGet, getDlqStatisticsApiV1DlqStatsGet, getDlqTopicsApiV1DlqTopicsGet, getEventApiV1EventsEventIdGet, getEventDetailApiV1AdminEventsEventIdGet, getEventsByCorrelationApiV1EventsCorrelationCorrelationIdGet, getEventStatisticsApiV1EventsStatisticsGet, getEventStatsApiV1AdminEventsStatsGet, getExampleScriptsApiV1ExampleScriptsGet, getExecutionEventsApiV1EventsExecutionsExecutionIdEventsGet, getExecutionEventsApiV1ExecutionsExecutionIdEventsGet, getExecutionSagasApiV1SagasExecutionExecutionIdGet, getK8sResourceLimitsApiV1K8sLimitsGet, getNotificationsApiV1NotificationsGet, getReplaySessionApiV1ReplaySessionsSessionIdGet, getReplayStatusApiV1AdminEventsReplaySessionIdStatusGet, getResultApiV1ResultExecutionIdGet, getSagaStatusApiV1SagasSagaIdGet, getSavedScriptApiV1ScriptsScriptIdGet, getSettingsHistoryApiV1UserSettingsHistoryGet, getSubscriptionsApiV1NotificationsSubscriptionsGet, getSystemSettingsApiV1AdminSettingsGet, getUnreadCountApiV1NotificationsUnreadCountGet, getUserApiV1AdminUsersUserIdGet, getUserEventsApiV1EventsUserGet, getUserExecutionsApiV1UserExecutionsGet, getUserOverviewApiV1AdminUsersUserIdOverviewGet, getUserRateLimitsApiV1AdminUsersUserIdRateLimitsGet, getUserSettingsApiV1UserSettingsGet, listEventTypesApiV1EventsTypesListGet, listReplaySessionsApiV1ReplaySessionsGet, listSagasApiV1SagasGet, listSavedScriptsApiV1ScriptsGet, listUsersApiV1AdminUsersGet, livenessApiV1HealthLiveGet, loginApiV1AuthLoginPost, logoutApiV1AuthLogoutPost, markAllReadApiV1NotificationsMarkAllReadPost, markNotificationReadApiV1NotificationsNotificationIdReadPut, notificationStreamApiV1EventsNotificationsStreamGet, type Options, pauseReplaySessionApiV1ReplaySessionsSessionIdPausePost, publishCustomEventApiV1EventsPublishPost, queryEventsApiV1EventsQueryPost, readinessApiV1HealthReadyGet, receiveGrafanaAlertsApiV1AlertsGrafanaPost, registerApiV1AuthRegisterPost, replayAggregateEventsApiV1EventsReplayAggregateIdPost, replayEventsApiV1AdminEventsReplayPost, resetSystemSettingsApiV1AdminSettingsResetPost, resetUserPasswordApiV1AdminUsersUserIdResetPasswordPost, resetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPost, restoreSettingsApiV1UserSettingsRestorePost, resumeReplaySessionApiV1ReplaySessionsSessionIdResumePost, retryDlqMessagesApiV1DlqRetryPost, retryExecutionApiV1ExecutionIdRetryPost, setRetryPolicyApiV1DlqRetryPolicyPost, sseHealthApiV1EventsHealthGet, startReplaySessionApiV1ReplaySessionsSessionIdStartPost, testGrafanaAlertEndpointApiV1AlertsGrafanaTestGet, 
updateCustomSettingApiV1UserSettingsCustomKeyPut, updateEditorSettingsApiV1UserSettingsEditorPut, updateNotificationSettingsApiV1UserSettingsNotificationsPut, updateSavedScriptApiV1ScriptsScriptIdPut, updateSubscriptionApiV1NotificationsSubscriptionsChannelPut, updateSystemSettingsApiV1AdminSettingsPut, updateThemeApiV1UserSettingsThemePut, updateUserApiV1AdminUsersUserIdPut, updateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPut, updateUserSettingsApiV1UserSettingsPut, verifyTokenApiV1AuthVerifyTokenGet } from './sdk.gen';
-export type { AdminUserOverview, AggregateEventsApiV1EventsAggregatePostData, AggregateEventsApiV1EventsAggregatePostError, AggregateEventsApiV1EventsAggregatePostErrors, AggregateEventsApiV1EventsAggregatePostResponse, AggregateEventsApiV1EventsAggregatePostResponses, AlertResponse, BodyLoginApiV1AuthLoginPost, BrowseEventsApiV1AdminEventsBrowsePostData, BrowseEventsApiV1AdminEventsBrowsePostError, BrowseEventsApiV1AdminEventsBrowsePostErrors, BrowseEventsApiV1AdminEventsBrowsePostResponse, BrowseEventsApiV1AdminEventsBrowsePostResponses, CancelExecutionApiV1ExecutionIdCancelPostData, CancelExecutionApiV1ExecutionIdCancelPostError, CancelExecutionApiV1ExecutionIdCancelPostErrors, CancelExecutionApiV1ExecutionIdCancelPostResponse, CancelExecutionApiV1ExecutionIdCancelPostResponses, CancelExecutionRequest, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostData, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostError, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostErrors, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponse, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponses, CancelResponse, CancelSagaApiV1SagasSagaIdCancelPostData, CancelSagaApiV1SagasSagaIdCancelPostError, CancelSagaApiV1SagasSagaIdCancelPostErrors, CancelSagaApiV1SagasSagaIdCancelPostResponse, CancelSagaApiV1SagasSagaIdCancelPostResponses, CleanupOldSessionsApiV1ReplayCleanupPostData, CleanupOldSessionsApiV1ReplayCleanupPostError, CleanupOldSessionsApiV1ReplayCleanupPostErrors, CleanupOldSessionsApiV1ReplayCleanupPostResponse, CleanupOldSessionsApiV1ReplayCleanupPostResponses, CleanupResponse, ClientOptions, CreateExecutionApiV1ExecutePostData, CreateExecutionApiV1ExecutePostError, CreateExecutionApiV1ExecutePostErrors, CreateExecutionApiV1ExecutePostResponse, CreateExecutionApiV1ExecutePostResponses, CreateReplaySessionApiV1ReplaySessionsPostData, CreateReplaySessionApiV1ReplaySessionsPostError, CreateReplaySessionApiV1ReplaySessionsPostErrors, CreateReplaySessionApiV1ReplaySessionsPostResponse, CreateReplaySessionApiV1ReplaySessionsPostResponses, CreateSavedScriptApiV1ScriptsPostData, CreateSavedScriptApiV1ScriptsPostError, CreateSavedScriptApiV1ScriptsPostErrors, CreateSavedScriptApiV1ScriptsPostResponse, CreateSavedScriptApiV1ScriptsPostResponses, CreateUserApiV1AdminUsersPostData, CreateUserApiV1AdminUsersPostError, CreateUserApiV1AdminUsersPostErrors, CreateUserApiV1AdminUsersPostResponse, CreateUserApiV1AdminUsersPostResponses, DeleteEventApiV1AdminEventsEventIdDeleteData, DeleteEventApiV1AdminEventsEventIdDeleteError, DeleteEventApiV1AdminEventsEventIdDeleteErrors, DeleteEventApiV1AdminEventsEventIdDeleteResponse, DeleteEventApiV1AdminEventsEventIdDeleteResponses, DeleteEventApiV1EventsEventIdDeleteData, DeleteEventApiV1EventsEventIdDeleteError, DeleteEventApiV1EventsEventIdDeleteErrors, DeleteEventApiV1EventsEventIdDeleteResponse, DeleteEventApiV1EventsEventIdDeleteResponses, DeleteEventResponse, DeleteExecutionApiV1ExecutionIdDeleteData, DeleteExecutionApiV1ExecutionIdDeleteError, DeleteExecutionApiV1ExecutionIdDeleteErrors, DeleteExecutionApiV1ExecutionIdDeleteResponse, DeleteExecutionApiV1ExecutionIdDeleteResponses, DeleteNotificationApiV1NotificationsNotificationIdDeleteData, DeleteNotificationApiV1NotificationsNotificationIdDeleteError, DeleteNotificationApiV1NotificationsNotificationIdDeleteErrors, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponse, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponses, 
DeleteNotificationResponse, DeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteData, DeleteSavedScriptApiV1ScriptsScriptIdDeleteError, DeleteSavedScriptApiV1ScriptsScriptIdDeleteErrors, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponses, DeleteUserApiV1AdminUsersUserIdDeleteData, DeleteUserApiV1AdminUsersUserIdDeleteError, DeleteUserApiV1AdminUsersUserIdDeleteErrors, DeleteUserApiV1AdminUsersUserIdDeleteResponse, DeleteUserApiV1AdminUsersUserIdDeleteResponses, DeleteUserResponse, DerivedCounts, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteData, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteError, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteErrors, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponse, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponses, DlqBatchRetryResponse, DlqMessageDetail, DlqMessageResponse, DlqMessagesResponse, DlqMessageStatus, DlqStats, DlqTopicSummaryResponse, EditorSettings, EndpointGroup, EventAggregationRequest, EventBrowseRequest, EventBrowseResponse, EventDeleteResponse, EventDetailResponse, EventFilter, EventFilterRequest, EventListResponse, EventReplayRequest, EventReplayResponse, EventReplayStatusResponse, EventResponse, EventStatistics, EventStatsResponse, EventType, ExampleScripts, ExecutionErrorType, ExecutionEventResponse, ExecutionEventsApiV1EventsExecutionsExecutionIdGetData, ExecutionEventsApiV1EventsExecutionsExecutionIdGetError, ExecutionEventsApiV1EventsExecutionsExecutionIdGetErrors, ExecutionEventsApiV1EventsExecutionsExecutionIdGetResponses, ExecutionLimitsSchema, ExecutionListResponse, ExecutionRequest, ExecutionResponse, ExecutionResult, ExecutionStatus, ExportEventsCsvApiV1AdminEventsExportCsvGetData, ExportEventsCsvApiV1AdminEventsExportCsvGetError, ExportEventsCsvApiV1AdminEventsExportCsvGetErrors, ExportEventsCsvApiV1AdminEventsExportCsvGetResponses, ExportEventsJsonApiV1AdminEventsExportJsonGetData, ExportEventsJsonApiV1AdminEventsExportJsonGetError, ExportEventsJsonApiV1AdminEventsExportJsonGetErrors, ExportEventsJsonApiV1AdminEventsExportJsonGetResponses, GetCurrentRequestEventsApiV1EventsCurrentRequestGetData, GetCurrentRequestEventsApiV1EventsCurrentRequestGetError, GetCurrentRequestEventsApiV1EventsCurrentRequestGetErrors, GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponse, GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponses, GetCurrentUserProfileApiV1AuthMeGetData, GetCurrentUserProfileApiV1AuthMeGetResponse, GetCurrentUserProfileApiV1AuthMeGetResponses, GetDlqMessageApiV1DlqMessagesEventIdGetData, GetDlqMessageApiV1DlqMessagesEventIdGetError, GetDlqMessageApiV1DlqMessagesEventIdGetErrors, GetDlqMessageApiV1DlqMessagesEventIdGetResponse, GetDlqMessageApiV1DlqMessagesEventIdGetResponses, GetDlqMessagesApiV1DlqMessagesGetData, GetDlqMessagesApiV1DlqMessagesGetError, GetDlqMessagesApiV1DlqMessagesGetErrors, GetDlqMessagesApiV1DlqMessagesGetResponse, GetDlqMessagesApiV1DlqMessagesGetResponses, GetDlqStatisticsApiV1DlqStatsGetData, GetDlqStatisticsApiV1DlqStatsGetResponse, GetDlqStatisticsApiV1DlqStatsGetResponses, GetDlqTopicsApiV1DlqTopicsGetData, GetDlqTopicsApiV1DlqTopicsGetResponse, GetDlqTopicsApiV1DlqTopicsGetResponses, GetEventApiV1EventsEventIdGetData, GetEventApiV1EventsEventIdGetError, GetEventApiV1EventsEventIdGetErrors, GetEventApiV1EventsEventIdGetResponse, GetEventApiV1EventsEventIdGetResponses, GetEventDetailApiV1AdminEventsEventIdGetData, GetEventDetailApiV1AdminEventsEventIdGetError, GetEventDetailApiV1AdminEventsEventIdGetErrors, 
GetEventDetailApiV1AdminEventsEventIdGetResponse, GetEventDetailApiV1AdminEventsEventIdGetResponses, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetData, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetError, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetErrors, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponse, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponses, GetEventStatisticsApiV1EventsStatisticsGetData, GetEventStatisticsApiV1EventsStatisticsGetError, GetEventStatisticsApiV1EventsStatisticsGetErrors, GetEventStatisticsApiV1EventsStatisticsGetResponse, GetEventStatisticsApiV1EventsStatisticsGetResponses, GetEventStatsApiV1AdminEventsStatsGetData, GetEventStatsApiV1AdminEventsStatsGetError, GetEventStatsApiV1AdminEventsStatsGetErrors, GetEventStatsApiV1AdminEventsStatsGetResponse, GetEventStatsApiV1AdminEventsStatsGetResponses, GetExampleScriptsApiV1ExampleScriptsGetData, GetExampleScriptsApiV1ExampleScriptsGetResponse, GetExampleScriptsApiV1ExampleScriptsGetResponses, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponses, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponses, GetExecutionSagasApiV1SagasExecutionExecutionIdGetData, GetExecutionSagasApiV1SagasExecutionExecutionIdGetError, GetExecutionSagasApiV1SagasExecutionExecutionIdGetErrors, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponse, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponses, GetK8sResourceLimitsApiV1K8sLimitsGetData, GetK8sResourceLimitsApiV1K8sLimitsGetResponse, GetK8sResourceLimitsApiV1K8sLimitsGetResponses, GetNotificationsApiV1NotificationsGetData, GetNotificationsApiV1NotificationsGetError, GetNotificationsApiV1NotificationsGetErrors, GetNotificationsApiV1NotificationsGetResponse, GetNotificationsApiV1NotificationsGetResponses, GetReplaySessionApiV1ReplaySessionsSessionIdGetData, GetReplaySessionApiV1ReplaySessionsSessionIdGetError, GetReplaySessionApiV1ReplaySessionsSessionIdGetErrors, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponse, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponses, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetData, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetError, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetErrors, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponse, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponses, GetResultApiV1ResultExecutionIdGetData, GetResultApiV1ResultExecutionIdGetError, GetResultApiV1ResultExecutionIdGetErrors, GetResultApiV1ResultExecutionIdGetResponse, GetResultApiV1ResultExecutionIdGetResponses, GetSagaStatusApiV1SagasSagaIdGetData, GetSagaStatusApiV1SagasSagaIdGetError, GetSagaStatusApiV1SagasSagaIdGetErrors, GetSagaStatusApiV1SagasSagaIdGetResponse, GetSagaStatusApiV1SagasSagaIdGetResponses, GetSavedScriptApiV1ScriptsScriptIdGetData, GetSavedScriptApiV1ScriptsScriptIdGetError, GetSavedScriptApiV1ScriptsScriptIdGetErrors, GetSavedScriptApiV1ScriptsScriptIdGetResponse, 
GetSavedScriptApiV1ScriptsScriptIdGetResponses, GetSettingsHistoryApiV1UserSettingsHistoryGetData, GetSettingsHistoryApiV1UserSettingsHistoryGetError, GetSettingsHistoryApiV1UserSettingsHistoryGetErrors, GetSettingsHistoryApiV1UserSettingsHistoryGetResponse, GetSettingsHistoryApiV1UserSettingsHistoryGetResponses, GetSubscriptionsApiV1NotificationsSubscriptionsGetData, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponse, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponses, GetSystemSettingsApiV1AdminSettingsGetData, GetSystemSettingsApiV1AdminSettingsGetResponse, GetSystemSettingsApiV1AdminSettingsGetResponses, GetUnreadCountApiV1NotificationsUnreadCountGetData, GetUnreadCountApiV1NotificationsUnreadCountGetResponse, GetUnreadCountApiV1NotificationsUnreadCountGetResponses, GetUserApiV1AdminUsersUserIdGetData, GetUserApiV1AdminUsersUserIdGetError, GetUserApiV1AdminUsersUserIdGetErrors, GetUserApiV1AdminUsersUserIdGetResponse, GetUserApiV1AdminUsersUserIdGetResponses, GetUserEventsApiV1EventsUserGetData, GetUserEventsApiV1EventsUserGetError, GetUserEventsApiV1EventsUserGetErrors, GetUserEventsApiV1EventsUserGetResponse, GetUserEventsApiV1EventsUserGetResponses, GetUserExecutionsApiV1UserExecutionsGetData, GetUserExecutionsApiV1UserExecutionsGetError, GetUserExecutionsApiV1UserExecutionsGetErrors, GetUserExecutionsApiV1UserExecutionsGetResponse, GetUserExecutionsApiV1UserExecutionsGetResponses, GetUserOverviewApiV1AdminUsersUserIdOverviewGetData, GetUserOverviewApiV1AdminUsersUserIdOverviewGetError, GetUserOverviewApiV1AdminUsersUserIdOverviewGetErrors, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponse, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponses, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetData, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetError, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetErrors, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponse, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponses, GetUserSettingsApiV1UserSettingsGetData, GetUserSettingsApiV1UserSettingsGetResponse, GetUserSettingsApiV1UserSettingsGetResponses, GrafanaAlertItem, GrafanaWebhook, HttpValidationError, LanguageInfo, ListEventTypesApiV1EventsTypesListGetData, ListEventTypesApiV1EventsTypesListGetResponse, ListEventTypesApiV1EventsTypesListGetResponses, ListReplaySessionsApiV1ReplaySessionsGetData, ListReplaySessionsApiV1ReplaySessionsGetError, ListReplaySessionsApiV1ReplaySessionsGetErrors, ListReplaySessionsApiV1ReplaySessionsGetResponse, ListReplaySessionsApiV1ReplaySessionsGetResponses, ListSagasApiV1SagasGetData, ListSagasApiV1SagasGetError, ListSagasApiV1SagasGetErrors, ListSagasApiV1SagasGetResponse, ListSagasApiV1SagasGetResponses, ListSavedScriptsApiV1ScriptsGetData, ListSavedScriptsApiV1ScriptsGetResponse, ListSavedScriptsApiV1ScriptsGetResponses, ListUsersApiV1AdminUsersGetData, ListUsersApiV1AdminUsersGetError, ListUsersApiV1AdminUsersGetErrors, ListUsersApiV1AdminUsersGetResponse, ListUsersApiV1AdminUsersGetResponses, LivenessApiV1HealthLiveGetData, LivenessApiV1HealthLiveGetResponse, LivenessApiV1HealthLiveGetResponses, LivenessResponse, LoginApiV1AuthLoginPostData, LoginApiV1AuthLoginPostError, LoginApiV1AuthLoginPostErrors, LoginApiV1AuthLoginPostResponse, LoginApiV1AuthLoginPostResponses, LoginResponse, LogoutApiV1AuthLogoutPostData, LogoutApiV1AuthLogoutPostResponse, LogoutApiV1AuthLogoutPostResponses, ManualRetryRequest, MarkAllReadApiV1NotificationsMarkAllReadPostData, MarkAllReadApiV1NotificationsMarkAllReadPostResponse, 
MarkAllReadApiV1NotificationsMarkAllReadPostResponses, MarkNotificationReadApiV1NotificationsNotificationIdReadPutData, MarkNotificationReadApiV1NotificationsNotificationIdReadPutError, MarkNotificationReadApiV1NotificationsNotificationIdReadPutErrors, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponse, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponses, MessageResponse, MonitoringSettingsSchema, NotificationChannel, NotificationListResponse, NotificationResponse, NotificationSettings, NotificationSeverity, NotificationStatus, NotificationStreamApiV1EventsNotificationsStreamGetData, NotificationStreamApiV1EventsNotificationsStreamGetResponses, NotificationSubscription, PasswordResetRequest, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostData, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostError, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostErrors, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponse, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponses, PublishCustomEventApiV1EventsPublishPostData, PublishCustomEventApiV1EventsPublishPostError, PublishCustomEventApiV1EventsPublishPostErrors, PublishCustomEventApiV1EventsPublishPostResponse, PublishCustomEventApiV1EventsPublishPostResponses, PublishEventRequest, PublishEventResponse, QueryEventsApiV1EventsQueryPostData, QueryEventsApiV1EventsQueryPostError, QueryEventsApiV1EventsQueryPostErrors, QueryEventsApiV1EventsQueryPostResponse, QueryEventsApiV1EventsQueryPostResponses, RateLimitAlgorithm, RateLimitRule, RateLimitRuleResponse, RateLimitSummary, RateLimitUpdateResponse, ReadinessApiV1HealthReadyGetData, ReadinessApiV1HealthReadyGetResponse, ReadinessApiV1HealthReadyGetResponses, ReadinessResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostData, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostError, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostErrors, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponses, RegisterApiV1AuthRegisterPostData, RegisterApiV1AuthRegisterPostError, RegisterApiV1AuthRegisterPostErrors, RegisterApiV1AuthRegisterPostResponse, RegisterApiV1AuthRegisterPostResponses, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostData, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostError, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostErrors, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponse, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponses, ReplayAggregateResponse, ReplayConfigSchema, ReplayEventsApiV1AdminEventsReplayPostData, ReplayEventsApiV1AdminEventsReplayPostError, ReplayEventsApiV1AdminEventsReplayPostErrors, ReplayEventsApiV1AdminEventsReplayPostResponse, ReplayEventsApiV1AdminEventsReplayPostResponses, ReplayFilterSchema, ReplayRequest, ReplayResponse, ReplaySession, ReplayStatus, ReplayTarget, ReplayType, ResetSystemSettingsApiV1AdminSettingsResetPostData, ResetSystemSettingsApiV1AdminSettingsResetPostResponse, ResetSystemSettingsApiV1AdminSettingsResetPostResponses, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostData, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostError, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostErrors, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponse, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponses, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostData, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostError, 
ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostErrors, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponse, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponses, ResourceLimits, ResourceUsage, RestoreSettingsApiV1UserSettingsRestorePostData, RestoreSettingsApiV1UserSettingsRestorePostError, RestoreSettingsApiV1UserSettingsRestorePostErrors, RestoreSettingsApiV1UserSettingsRestorePostResponse, RestoreSettingsApiV1UserSettingsRestorePostResponses, RestoreSettingsRequest, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostData, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostError, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostErrors, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponse, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponses, RetryDlqMessagesApiV1DlqRetryPostData, RetryDlqMessagesApiV1DlqRetryPostError, RetryDlqMessagesApiV1DlqRetryPostErrors, RetryDlqMessagesApiV1DlqRetryPostResponse, RetryDlqMessagesApiV1DlqRetryPostResponses, RetryExecutionApiV1ExecutionIdRetryPostData, RetryExecutionApiV1ExecutionIdRetryPostError, RetryExecutionApiV1ExecutionIdRetryPostErrors, RetryExecutionApiV1ExecutionIdRetryPostResponse, RetryExecutionApiV1ExecutionIdRetryPostResponses, RetryExecutionRequest, RetryPolicyRequest, RetryStrategy, SagaCancellationResponse, SagaListResponse, SagaState, SagaStatusResponse, SavedScriptCreateRequest, SavedScriptResponse, SecuritySettingsSchema, SessionSummary, SetRetryPolicyApiV1DlqRetryPolicyPostData, SetRetryPolicyApiV1DlqRetryPolicyPostError, SetRetryPolicyApiV1DlqRetryPolicyPostErrors, SetRetryPolicyApiV1DlqRetryPolicyPostResponse, SetRetryPolicyApiV1DlqRetryPolicyPostResponses, SettingsHistoryEntry, SettingsHistoryResponse, ShutdownStatusResponse, SortOrder, SseHealthApiV1EventsHealthGetData, SseHealthApiV1EventsHealthGetResponse, SseHealthApiV1EventsHealthGetResponses, SseHealthResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostData, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostError, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostErrors, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponses, SubscriptionsResponse, SubscriptionUpdate, SystemSettings, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetData, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponse, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponses, Theme, ThemeUpdateRequest, TokenValidationResponse, UnreadCountResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutData, UpdateCustomSettingApiV1UserSettingsCustomKeyPutError, UpdateCustomSettingApiV1UserSettingsCustomKeyPutErrors, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponses, UpdateEditorSettingsApiV1UserSettingsEditorPutData, UpdateEditorSettingsApiV1UserSettingsEditorPutError, UpdateEditorSettingsApiV1UserSettingsEditorPutErrors, UpdateEditorSettingsApiV1UserSettingsEditorPutResponse, UpdateEditorSettingsApiV1UserSettingsEditorPutResponses, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutData, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutError, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutErrors, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponse, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponses, UpdateSavedScriptApiV1ScriptsScriptIdPutData, 
UpdateSavedScriptApiV1ScriptsScriptIdPutError, UpdateSavedScriptApiV1ScriptsScriptIdPutErrors, UpdateSavedScriptApiV1ScriptsScriptIdPutResponse, UpdateSavedScriptApiV1ScriptsScriptIdPutResponses, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutData, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutError, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutErrors, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponse, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponses, UpdateSystemSettingsApiV1AdminSettingsPutData, UpdateSystemSettingsApiV1AdminSettingsPutError, UpdateSystemSettingsApiV1AdminSettingsPutErrors, UpdateSystemSettingsApiV1AdminSettingsPutResponse, UpdateSystemSettingsApiV1AdminSettingsPutResponses, UpdateThemeApiV1UserSettingsThemePutData, UpdateThemeApiV1UserSettingsThemePutError, UpdateThemeApiV1UserSettingsThemePutErrors, UpdateThemeApiV1UserSettingsThemePutResponse, UpdateThemeApiV1UserSettingsThemePutResponses, UpdateUserApiV1AdminUsersUserIdPutData, UpdateUserApiV1AdminUsersUserIdPutError, UpdateUserApiV1AdminUsersUserIdPutErrors, UpdateUserApiV1AdminUsersUserIdPutResponse, UpdateUserApiV1AdminUsersUserIdPutResponses, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutData, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutError, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutErrors, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponse, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponses, UpdateUserSettingsApiV1UserSettingsPutData, UpdateUserSettingsApiV1UserSettingsPutError, UpdateUserSettingsApiV1UserSettingsPutErrors, UpdateUserSettingsApiV1UserSettingsPutResponse, UpdateUserSettingsApiV1UserSettingsPutResponses, UserCreate, UserListResponse, UserRateLimit, UserRateLimitConfigResponse, UserRateLimitsResponse, UserResponse, UserRole, UserSettings, UserSettingsUpdate, UserUpdate, ValidationError, VerifyTokenApiV1AuthVerifyTokenGetData, VerifyTokenApiV1AuthVerifyTokenGetResponse, VerifyTokenApiV1AuthVerifyTokenGetResponses } from './types.gen';
+export type { AdminUserOverview, AggregateEventsApiV1EventsAggregatePostData, AggregateEventsApiV1EventsAggregatePostError, AggregateEventsApiV1EventsAggregatePostErrors, AggregateEventsApiV1EventsAggregatePostResponse, AggregateEventsApiV1EventsAggregatePostResponses, AlertResponse, BodyLoginApiV1AuthLoginPost, BrowseEventsApiV1AdminEventsBrowsePostData, BrowseEventsApiV1AdminEventsBrowsePostError, BrowseEventsApiV1AdminEventsBrowsePostErrors, BrowseEventsApiV1AdminEventsBrowsePostResponse, BrowseEventsApiV1AdminEventsBrowsePostResponses, CancelExecutionApiV1ExecutionIdCancelPostData, CancelExecutionApiV1ExecutionIdCancelPostError, CancelExecutionApiV1ExecutionIdCancelPostErrors, CancelExecutionApiV1ExecutionIdCancelPostResponse, CancelExecutionApiV1ExecutionIdCancelPostResponses, CancelExecutionRequest, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostData, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostError, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostErrors, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponse, CancelReplaySessionApiV1ReplaySessionsSessionIdCancelPostResponses, CancelResponse, CancelSagaApiV1SagasSagaIdCancelPostData, CancelSagaApiV1SagasSagaIdCancelPostError, CancelSagaApiV1SagasSagaIdCancelPostErrors, CancelSagaApiV1SagasSagaIdCancelPostResponse, CancelSagaApiV1SagasSagaIdCancelPostResponses, CleanupOldSessionsApiV1ReplayCleanupPostData, CleanupOldSessionsApiV1ReplayCleanupPostError, CleanupOldSessionsApiV1ReplayCleanupPostErrors, CleanupOldSessionsApiV1ReplayCleanupPostResponse, CleanupOldSessionsApiV1ReplayCleanupPostResponses, CleanupResponse, ClientOptions, CreateExecutionApiV1ExecutePostData, CreateExecutionApiV1ExecutePostError, CreateExecutionApiV1ExecutePostErrors, CreateExecutionApiV1ExecutePostResponse, CreateExecutionApiV1ExecutePostResponses, CreateReplaySessionApiV1ReplaySessionsPostData, CreateReplaySessionApiV1ReplaySessionsPostError, CreateReplaySessionApiV1ReplaySessionsPostErrors, CreateReplaySessionApiV1ReplaySessionsPostResponse, CreateReplaySessionApiV1ReplaySessionsPostResponses, CreateSavedScriptApiV1ScriptsPostData, CreateSavedScriptApiV1ScriptsPostError, CreateSavedScriptApiV1ScriptsPostErrors, CreateSavedScriptApiV1ScriptsPostResponse, CreateSavedScriptApiV1ScriptsPostResponses, CreateUserApiV1AdminUsersPostData, CreateUserApiV1AdminUsersPostError, CreateUserApiV1AdminUsersPostErrors, CreateUserApiV1AdminUsersPostResponse, CreateUserApiV1AdminUsersPostResponses, DeleteEventApiV1AdminEventsEventIdDeleteData, DeleteEventApiV1AdminEventsEventIdDeleteError, DeleteEventApiV1AdminEventsEventIdDeleteErrors, DeleteEventApiV1AdminEventsEventIdDeleteResponse, DeleteEventApiV1AdminEventsEventIdDeleteResponses, DeleteEventApiV1EventsEventIdDeleteData, DeleteEventApiV1EventsEventIdDeleteError, DeleteEventApiV1EventsEventIdDeleteErrors, DeleteEventApiV1EventsEventIdDeleteResponse, DeleteEventApiV1EventsEventIdDeleteResponses, DeleteEventResponse, DeleteExecutionApiV1ExecutionIdDeleteData, DeleteExecutionApiV1ExecutionIdDeleteError, DeleteExecutionApiV1ExecutionIdDeleteErrors, DeleteExecutionApiV1ExecutionIdDeleteResponse, DeleteExecutionApiV1ExecutionIdDeleteResponses, DeleteNotificationApiV1NotificationsNotificationIdDeleteData, DeleteNotificationApiV1NotificationsNotificationIdDeleteError, DeleteNotificationApiV1NotificationsNotificationIdDeleteErrors, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponse, DeleteNotificationApiV1NotificationsNotificationIdDeleteResponses, 
DeleteNotificationResponse, DeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteData, DeleteSavedScriptApiV1ScriptsScriptIdDeleteError, DeleteSavedScriptApiV1ScriptsScriptIdDeleteErrors, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponse, DeleteSavedScriptApiV1ScriptsScriptIdDeleteResponses, DeleteUserApiV1AdminUsersUserIdDeleteData, DeleteUserApiV1AdminUsersUserIdDeleteError, DeleteUserApiV1AdminUsersUserIdDeleteErrors, DeleteUserApiV1AdminUsersUserIdDeleteResponse, DeleteUserApiV1AdminUsersUserIdDeleteResponses, DeleteUserResponse, DerivedCounts, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteData, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteError, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteErrors, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponse, DiscardDlqMessageApiV1DlqMessagesEventIdDeleteResponses, DlqBatchRetryResponse, DlqMessageDetail, DlqMessageResponse, DlqMessagesResponse, DlqMessageStatus, DlqStats, DlqTopicSummaryResponse, EditorSettings, EndpointGroup, EventAggregationRequest, EventBrowseRequest, EventBrowseResponse, EventDeleteResponse, EventDetailResponse, EventFilter, EventFilterRequest, EventListResponse, EventMetadataResponse, EventReplayRequest, EventReplayResponse, EventReplayStatusResponse, EventReplayStatusResponseWritable, EventResponse, EventStatistics, EventStatsResponse, EventType, ExampleScripts, ExecutionErrorType, ExecutionEventResponse, ExecutionEventsApiV1EventsExecutionsExecutionIdGetData, ExecutionEventsApiV1EventsExecutionsExecutionIdGetError, ExecutionEventsApiV1EventsExecutionsExecutionIdGetErrors, ExecutionEventsApiV1EventsExecutionsExecutionIdGetResponses, ExecutionLimitsSchema, ExecutionListResponse, ExecutionRequest, ExecutionResponse, ExecutionResult, ExecutionStatus, ExportEventsCsvApiV1AdminEventsExportCsvGetData, ExportEventsCsvApiV1AdminEventsExportCsvGetError, ExportEventsCsvApiV1AdminEventsExportCsvGetErrors, ExportEventsCsvApiV1AdminEventsExportCsvGetResponses, ExportEventsJsonApiV1AdminEventsExportJsonGetData, ExportEventsJsonApiV1AdminEventsExportJsonGetError, ExportEventsJsonApiV1AdminEventsExportJsonGetErrors, ExportEventsJsonApiV1AdminEventsExportJsonGetResponses, GetCurrentRequestEventsApiV1EventsCurrentRequestGetData, GetCurrentRequestEventsApiV1EventsCurrentRequestGetError, GetCurrentRequestEventsApiV1EventsCurrentRequestGetErrors, GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponse, GetCurrentRequestEventsApiV1EventsCurrentRequestGetResponses, GetCurrentUserProfileApiV1AuthMeGetData, GetCurrentUserProfileApiV1AuthMeGetResponse, GetCurrentUserProfileApiV1AuthMeGetResponses, GetDlqMessageApiV1DlqMessagesEventIdGetData, GetDlqMessageApiV1DlqMessagesEventIdGetError, GetDlqMessageApiV1DlqMessagesEventIdGetErrors, GetDlqMessageApiV1DlqMessagesEventIdGetResponse, GetDlqMessageApiV1DlqMessagesEventIdGetResponses, GetDlqMessagesApiV1DlqMessagesGetData, GetDlqMessagesApiV1DlqMessagesGetError, GetDlqMessagesApiV1DlqMessagesGetErrors, GetDlqMessagesApiV1DlqMessagesGetResponse, GetDlqMessagesApiV1DlqMessagesGetResponses, GetDlqStatisticsApiV1DlqStatsGetData, GetDlqStatisticsApiV1DlqStatsGetResponse, GetDlqStatisticsApiV1DlqStatsGetResponses, GetDlqTopicsApiV1DlqTopicsGetData, GetDlqTopicsApiV1DlqTopicsGetResponse, GetDlqTopicsApiV1DlqTopicsGetResponses, GetEventApiV1EventsEventIdGetData, GetEventApiV1EventsEventIdGetError, GetEventApiV1EventsEventIdGetErrors, GetEventApiV1EventsEventIdGetResponse, GetEventApiV1EventsEventIdGetResponses, GetEventDetailApiV1AdminEventsEventIdGetData, 
GetEventDetailApiV1AdminEventsEventIdGetError, GetEventDetailApiV1AdminEventsEventIdGetErrors, GetEventDetailApiV1AdminEventsEventIdGetResponse, GetEventDetailApiV1AdminEventsEventIdGetResponses, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetData, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetError, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetErrors, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponse, GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetResponses, GetEventStatisticsApiV1EventsStatisticsGetData, GetEventStatisticsApiV1EventsStatisticsGetError, GetEventStatisticsApiV1EventsStatisticsGetErrors, GetEventStatisticsApiV1EventsStatisticsGetResponse, GetEventStatisticsApiV1EventsStatisticsGetResponses, GetEventStatsApiV1AdminEventsStatsGetData, GetEventStatsApiV1AdminEventsStatsGetError, GetEventStatsApiV1AdminEventsStatsGetErrors, GetEventStatsApiV1AdminEventsStatsGetResponse, GetEventStatsApiV1AdminEventsStatsGetResponses, GetExampleScriptsApiV1ExampleScriptsGetData, GetExampleScriptsApiV1ExampleScriptsGetResponse, GetExampleScriptsApiV1ExampleScriptsGetResponses, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetResponses, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetData, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetError, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetErrors, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponse, GetExecutionEventsApiV1ExecutionsExecutionIdEventsGetResponses, GetExecutionSagasApiV1SagasExecutionExecutionIdGetData, GetExecutionSagasApiV1SagasExecutionExecutionIdGetError, GetExecutionSagasApiV1SagasExecutionExecutionIdGetErrors, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponse, GetExecutionSagasApiV1SagasExecutionExecutionIdGetResponses, GetK8sResourceLimitsApiV1K8sLimitsGetData, GetK8sResourceLimitsApiV1K8sLimitsGetResponse, GetK8sResourceLimitsApiV1K8sLimitsGetResponses, GetNotificationsApiV1NotificationsGetData, GetNotificationsApiV1NotificationsGetError, GetNotificationsApiV1NotificationsGetErrors, GetNotificationsApiV1NotificationsGetResponse, GetNotificationsApiV1NotificationsGetResponses, GetReplaySessionApiV1ReplaySessionsSessionIdGetData, GetReplaySessionApiV1ReplaySessionsSessionIdGetError, GetReplaySessionApiV1ReplaySessionsSessionIdGetErrors, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponse, GetReplaySessionApiV1ReplaySessionsSessionIdGetResponses, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetData, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetError, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetErrors, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponse, GetReplayStatusApiV1AdminEventsReplaySessionIdStatusGetResponses, GetResultApiV1ResultExecutionIdGetData, GetResultApiV1ResultExecutionIdGetError, GetResultApiV1ResultExecutionIdGetErrors, GetResultApiV1ResultExecutionIdGetResponse, GetResultApiV1ResultExecutionIdGetResponses, GetSagaStatusApiV1SagasSagaIdGetData, GetSagaStatusApiV1SagasSagaIdGetError, GetSagaStatusApiV1SagasSagaIdGetErrors, GetSagaStatusApiV1SagasSagaIdGetResponse, GetSagaStatusApiV1SagasSagaIdGetResponses, GetSavedScriptApiV1ScriptsScriptIdGetData, 
GetSavedScriptApiV1ScriptsScriptIdGetError, GetSavedScriptApiV1ScriptsScriptIdGetErrors, GetSavedScriptApiV1ScriptsScriptIdGetResponse, GetSavedScriptApiV1ScriptsScriptIdGetResponses, GetSettingsHistoryApiV1UserSettingsHistoryGetData, GetSettingsHistoryApiV1UserSettingsHistoryGetError, GetSettingsHistoryApiV1UserSettingsHistoryGetErrors, GetSettingsHistoryApiV1UserSettingsHistoryGetResponse, GetSettingsHistoryApiV1UserSettingsHistoryGetResponses, GetSubscriptionsApiV1NotificationsSubscriptionsGetData, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponse, GetSubscriptionsApiV1NotificationsSubscriptionsGetResponses, GetSystemSettingsApiV1AdminSettingsGetData, GetSystemSettingsApiV1AdminSettingsGetResponse, GetSystemSettingsApiV1AdminSettingsGetResponses, GetUnreadCountApiV1NotificationsUnreadCountGetData, GetUnreadCountApiV1NotificationsUnreadCountGetResponse, GetUnreadCountApiV1NotificationsUnreadCountGetResponses, GetUserApiV1AdminUsersUserIdGetData, GetUserApiV1AdminUsersUserIdGetError, GetUserApiV1AdminUsersUserIdGetErrors, GetUserApiV1AdminUsersUserIdGetResponse, GetUserApiV1AdminUsersUserIdGetResponses, GetUserEventsApiV1EventsUserGetData, GetUserEventsApiV1EventsUserGetError, GetUserEventsApiV1EventsUserGetErrors, GetUserEventsApiV1EventsUserGetResponse, GetUserEventsApiV1EventsUserGetResponses, GetUserExecutionsApiV1UserExecutionsGetData, GetUserExecutionsApiV1UserExecutionsGetError, GetUserExecutionsApiV1UserExecutionsGetErrors, GetUserExecutionsApiV1UserExecutionsGetResponse, GetUserExecutionsApiV1UserExecutionsGetResponses, GetUserOverviewApiV1AdminUsersUserIdOverviewGetData, GetUserOverviewApiV1AdminUsersUserIdOverviewGetError, GetUserOverviewApiV1AdminUsersUserIdOverviewGetErrors, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponse, GetUserOverviewApiV1AdminUsersUserIdOverviewGetResponses, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetData, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetError, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetErrors, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponse, GetUserRateLimitsApiV1AdminUsersUserIdRateLimitsGetResponses, GetUserSettingsApiV1UserSettingsGetData, GetUserSettingsApiV1UserSettingsGetResponse, GetUserSettingsApiV1UserSettingsGetResponses, GrafanaAlertItem, GrafanaWebhook, HourlyEventCountSchema, HttpValidationError, LanguageInfo, ListEventTypesApiV1EventsTypesListGetData, ListEventTypesApiV1EventsTypesListGetResponse, ListEventTypesApiV1EventsTypesListGetResponses, ListReplaySessionsApiV1ReplaySessionsGetData, ListReplaySessionsApiV1ReplaySessionsGetError, ListReplaySessionsApiV1ReplaySessionsGetErrors, ListReplaySessionsApiV1ReplaySessionsGetResponse, ListReplaySessionsApiV1ReplaySessionsGetResponses, ListSagasApiV1SagasGetData, ListSagasApiV1SagasGetError, ListSagasApiV1SagasGetErrors, ListSagasApiV1SagasGetResponse, ListSagasApiV1SagasGetResponses, ListSavedScriptsApiV1ScriptsGetData, ListSavedScriptsApiV1ScriptsGetResponse, ListSavedScriptsApiV1ScriptsGetResponses, ListUsersApiV1AdminUsersGetData, ListUsersApiV1AdminUsersGetError, ListUsersApiV1AdminUsersGetErrors, ListUsersApiV1AdminUsersGetResponse, ListUsersApiV1AdminUsersGetResponses, LivenessApiV1HealthLiveGetData, LivenessApiV1HealthLiveGetResponse, LivenessApiV1HealthLiveGetResponses, LivenessResponse, LoginApiV1AuthLoginPostData, LoginApiV1AuthLoginPostError, LoginApiV1AuthLoginPostErrors, LoginApiV1AuthLoginPostResponse, LoginApiV1AuthLoginPostResponses, LoginResponse, LogoutApiV1AuthLogoutPostData, LogoutApiV1AuthLogoutPostResponse, 
LogoutApiV1AuthLogoutPostResponses, ManualRetryRequest, MarkAllReadApiV1NotificationsMarkAllReadPostData, MarkAllReadApiV1NotificationsMarkAllReadPostResponse, MarkAllReadApiV1NotificationsMarkAllReadPostResponses, MarkNotificationReadApiV1NotificationsNotificationIdReadPutData, MarkNotificationReadApiV1NotificationsNotificationIdReadPutError, MarkNotificationReadApiV1NotificationsNotificationIdReadPutErrors, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponse, MarkNotificationReadApiV1NotificationsNotificationIdReadPutResponses, MessageResponse, MonitoringSettingsSchema, NotificationChannel, NotificationListResponse, NotificationResponse, NotificationSettings, NotificationSeverity, NotificationStatus, NotificationStreamApiV1EventsNotificationsStreamGetData, NotificationStreamApiV1EventsNotificationsStreamGetResponses, NotificationSubscription, PasswordResetRequest, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostData, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostError, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostErrors, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponse, PauseReplaySessionApiV1ReplaySessionsSessionIdPausePostResponses, PublishCustomEventApiV1EventsPublishPostData, PublishCustomEventApiV1EventsPublishPostError, PublishCustomEventApiV1EventsPublishPostErrors, PublishCustomEventApiV1EventsPublishPostResponse, PublishCustomEventApiV1EventsPublishPostResponses, PublishEventRequest, PublishEventResponse, QueryEventsApiV1EventsQueryPostData, QueryEventsApiV1EventsQueryPostError, QueryEventsApiV1EventsQueryPostErrors, QueryEventsApiV1EventsQueryPostResponse, QueryEventsApiV1EventsQueryPostResponses, RateLimitAlgorithm, RateLimitRule, RateLimitRuleResponse, RateLimitSummary, RateLimitUpdateResponse, ReadinessApiV1HealthReadyGetData, ReadinessApiV1HealthReadyGetResponse, ReadinessApiV1HealthReadyGetResponses, ReadinessResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostData, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostError, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostErrors, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponse, ReceiveGrafanaAlertsApiV1AlertsGrafanaPostResponses, RegisterApiV1AuthRegisterPostData, RegisterApiV1AuthRegisterPostError, RegisterApiV1AuthRegisterPostErrors, RegisterApiV1AuthRegisterPostResponse, RegisterApiV1AuthRegisterPostResponses, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostData, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostError, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostErrors, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponse, ReplayAggregateEventsApiV1EventsReplayAggregateIdPostResponses, ReplayAggregateResponse, ReplayConfigSchema, ReplayErrorInfo, ReplayEventsApiV1AdminEventsReplayPostData, ReplayEventsApiV1AdminEventsReplayPostError, ReplayEventsApiV1AdminEventsReplayPostErrors, ReplayEventsApiV1AdminEventsReplayPostResponse, ReplayEventsApiV1AdminEventsReplayPostResponses, ReplayFilter, ReplayFilterSchema, ReplayRequest, ReplayResponse, ReplaySession, ReplayStatus, ReplayTarget, ReplayType, ResetSystemSettingsApiV1AdminSettingsResetPostData, ResetSystemSettingsApiV1AdminSettingsResetPostResponse, ResetSystemSettingsApiV1AdminSettingsResetPostResponses, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostData, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostError, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostErrors, ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponse, 
ResetUserPasswordApiV1AdminUsersUserIdResetPasswordPostResponses, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostData, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostError, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostErrors, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponse, ResetUserRateLimitsApiV1AdminUsersUserIdRateLimitsResetPostResponses, ResourceLimits, ResourceUsage, RestoreSettingsApiV1UserSettingsRestorePostData, RestoreSettingsApiV1UserSettingsRestorePostError, RestoreSettingsApiV1UserSettingsRestorePostErrors, RestoreSettingsApiV1UserSettingsRestorePostResponse, RestoreSettingsApiV1UserSettingsRestorePostResponses, RestoreSettingsRequest, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostData, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostError, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostErrors, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponse, ResumeReplaySessionApiV1ReplaySessionsSessionIdResumePostResponses, RetryDlqMessagesApiV1DlqRetryPostData, RetryDlqMessagesApiV1DlqRetryPostError, RetryDlqMessagesApiV1DlqRetryPostErrors, RetryDlqMessagesApiV1DlqRetryPostResponse, RetryDlqMessagesApiV1DlqRetryPostResponses, RetryExecutionApiV1ExecutionIdRetryPostData, RetryExecutionApiV1ExecutionIdRetryPostError, RetryExecutionApiV1ExecutionIdRetryPostErrors, RetryExecutionApiV1ExecutionIdRetryPostResponse, RetryExecutionApiV1ExecutionIdRetryPostResponses, RetryExecutionRequest, RetryPolicyRequest, RetryStrategy, SagaCancellationResponse, SagaListResponse, SagaState, SagaStatusResponse, SavedScriptCreateRequest, SavedScriptResponse, SavedScriptUpdate, SecuritySettingsSchema, SessionSummary, SessionSummaryWritable, SetRetryPolicyApiV1DlqRetryPolicyPostData, SetRetryPolicyApiV1DlqRetryPolicyPostError, SetRetryPolicyApiV1DlqRetryPolicyPostErrors, SetRetryPolicyApiV1DlqRetryPolicyPostResponse, SetRetryPolicyApiV1DlqRetryPolicyPostResponses, SettingsHistoryEntry, SettingsHistoryResponse, ShutdownStatusResponse, SortOrder, SseHealthApiV1EventsHealthGetData, SseHealthApiV1EventsHealthGetResponse, SseHealthApiV1EventsHealthGetResponses, SseHealthResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostData, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostError, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostErrors, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponse, StartReplaySessionApiV1ReplaySessionsSessionIdStartPostResponses, SubscriptionsResponse, SubscriptionUpdate, SystemSettings, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetData, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponse, TestGrafanaAlertEndpointApiV1AlertsGrafanaTestGetResponses, Theme, ThemeUpdateRequest, TokenValidationResponse, UnreadCountResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutData, UpdateCustomSettingApiV1UserSettingsCustomKeyPutError, UpdateCustomSettingApiV1UserSettingsCustomKeyPutErrors, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponse, UpdateCustomSettingApiV1UserSettingsCustomKeyPutResponses, UpdateEditorSettingsApiV1UserSettingsEditorPutData, UpdateEditorSettingsApiV1UserSettingsEditorPutError, UpdateEditorSettingsApiV1UserSettingsEditorPutErrors, UpdateEditorSettingsApiV1UserSettingsEditorPutResponse, UpdateEditorSettingsApiV1UserSettingsEditorPutResponses, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutData, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutError, 
UpdateNotificationSettingsApiV1UserSettingsNotificationsPutErrors, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponse, UpdateNotificationSettingsApiV1UserSettingsNotificationsPutResponses, UpdateSavedScriptApiV1ScriptsScriptIdPutData, UpdateSavedScriptApiV1ScriptsScriptIdPutError, UpdateSavedScriptApiV1ScriptsScriptIdPutErrors, UpdateSavedScriptApiV1ScriptsScriptIdPutResponse, UpdateSavedScriptApiV1ScriptsScriptIdPutResponses, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutData, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutError, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutErrors, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponse, UpdateSubscriptionApiV1NotificationsSubscriptionsChannelPutResponses, UpdateSystemSettingsApiV1AdminSettingsPutData, UpdateSystemSettingsApiV1AdminSettingsPutError, UpdateSystemSettingsApiV1AdminSettingsPutErrors, UpdateSystemSettingsApiV1AdminSettingsPutResponse, UpdateSystemSettingsApiV1AdminSettingsPutResponses, UpdateThemeApiV1UserSettingsThemePutData, UpdateThemeApiV1UserSettingsThemePutError, UpdateThemeApiV1UserSettingsThemePutErrors, UpdateThemeApiV1UserSettingsThemePutResponse, UpdateThemeApiV1UserSettingsThemePutResponses, UpdateUserApiV1AdminUsersUserIdPutData, UpdateUserApiV1AdminUsersUserIdPutError, UpdateUserApiV1AdminUsersUserIdPutErrors, UpdateUserApiV1AdminUsersUserIdPutResponse, UpdateUserApiV1AdminUsersUserIdPutResponses, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutData, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutError, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutErrors, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponse, UpdateUserRateLimitsApiV1AdminUsersUserIdRateLimitsPutResponses, UpdateUserSettingsApiV1UserSettingsPutData, UpdateUserSettingsApiV1UserSettingsPutError, UpdateUserSettingsApiV1UserSettingsPutErrors, UpdateUserSettingsApiV1UserSettingsPutResponse, UpdateUserSettingsApiV1UserSettingsPutResponses, UserCreate, UserEventCountSchema, UserListResponse, UserRateLimit, UserRateLimitConfigResponse, UserRateLimitsResponse, UserResponse, UserRole, UserSettings, UserSettingsUpdate, UserUpdate, ValidationError, VerifyTokenApiV1AuthVerifyTokenGetData, VerifyTokenApiV1AuthVerifyTokenGetResponse, VerifyTokenApiV1AuthVerifyTokenGetResponses } from './types.gen';
diff --git a/frontend/src/lib/api/sdk.gen.ts b/frontend/src/lib/api/sdk.gen.ts
index 503b996f..5b5b33fe 100644
--- a/frontend/src/lib/api/sdk.gen.ts
+++ b/frontend/src/lib/api/sdk.gen.ts
@@ -699,9 +699,11 @@ export const getSagaStatusApiV1SagasSagaIdGet =
| null;
/**
* Estimated Completion
*/
@@ -892,9 +938,11 @@ export type EventReplayStatusResponse = {
/**
* Execution Results
*/
- execution_results?: Array<{
- [key: string]: unknown;
- }> | null;
+ execution_results?: Array | null;
+ /**
+ * Progress Percentage
+ */
+ readonly progress_percentage: number;
};
/**
@@ -926,12 +974,7 @@ export type EventResponse = {
* Causation Id
*/
causation_id?: string | null;
- /**
- * Metadata
- */
- metadata: {
- [key: string]: unknown;
- };
+ metadata: EventMetadataResponse;
/**
* Payload
*/
@@ -969,9 +1012,7 @@ export type EventStatistics = {
/**
* Events By Hour
*/
- events_by_hour: Array<{
- [key: string]: unknown;
- }>;
+ events_by_hour: Array<HourlyEventCountSchema>;
/**
* Start Time
*/
@@ -1001,15 +1042,11 @@ export type EventStatsResponse = {
/**
* Events By Hour
*/
- events_by_hour: Array<{
- [key: string]: unknown;
- }>;
+ events_by_hour: Array<HourlyEventCountSchema>;
/**
* Top Users
*/
- top_users: Array<{
- [key: string]: unknown;
- }>;
+ top_users: Array<UserEventCountSchema>;
/**
* Error Rate
*/
@@ -1284,6 +1321,22 @@ export type HttpValidationError = {
detail?: Array<ValidationError>;
};
+/**
+ * HourlyEventCountSchema
+ *
+ * Hourly event count for statistics.
+ */
+export type HourlyEventCountSchema = {
+ /**
+ * Hour
+ */
+ hour: string;
+ /**
+ * Count
+ */
+ count: number;
+};
+
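
`HourlyEventCountSchema` appears to be the element type that `events_by_hour` is narrowed to in this change (the `Array<...>` parameters are stripped in this copy of the diff). A hedged sketch of consuming it, assuming that element type and the `$lib/api` alias:

```typescript
// Sketch only; assumes events_by_hour is Array<HourlyEventCountSchema>.
import type { HourlyEventCountSchema } from '$lib/api';

// Total events across the reported window.
export function totalEvents(buckets: HourlyEventCountSchema[]): number {
  return buckets.reduce((sum, b) => sum + b.count, 0);
}

// Busiest hour, or null when the window is empty.
export function busiestHour(buckets: HourlyEventCountSchema[]): HourlyEventCountSchema | null {
  return buckets.reduce<HourlyEventCountSchema | null>(
    (best, b) => (best === null || b.count > best.count ? b : best),
    null
  );
}
```
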
/**
* LanguageInfo
*
@@ -1841,7 +1894,7 @@ export type ReplayAggregateResponse = {
export type ReplayConfigSchema = {
replay_type: ReplayType;
target?: ReplayTarget;
- filter: ReplayFilterSchema;
+ filter?: ReplayFilterSchema;
/**
* Speed Multiplier
*/
@@ -1887,9 +1940,33 @@ export type ReplayConfigSchema = {
};
/**
- * ReplayFilterSchema
+ * ReplayErrorInfo
+ *
+ * Error info for replay operations.
*/
-export type ReplayFilterSchema = {
+export type ReplayErrorInfo = {
+ /**
+ * Timestamp
+ */
+ timestamp: string;
+ /**
+ * Error
+ */
+ error: string;
+ /**
+ * Event Id
+ */
+ event_id?: string | null;
+ /**
+ * Error Type
+ */
+ error_type?: string | null;
+};
+
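
`ReplayErrorInfo` matches the fields rendered from `session.errors` in the Svelte hunk above (`err.event_id`, `err.error`). A hedged helper for summarizing those errors, assuming `session.errors` is `Array<ReplayErrorInfo>` and the `$lib/api` alias:

```typescript
// Sketch only; the errors array element type and import path are assumptions.
import type { ReplayErrorInfo } from '$lib/api';

// Group replay errors by error_type for a summarized error panel.
export function groupByErrorType(errors: ReplayErrorInfo[]): Map<string, ReplayErrorInfo[]> {
  const groups = new Map<string, ReplayErrorInfo[]>();
  for (const err of errors) {
    const key = err.error_type ?? 'unknown';
    const bucket = groups.get(key) ?? [];
    bucket.push(err);
    groups.set(key, bucket);
  }
  return groups;
}
```
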
+/**
+ * ReplayFilter
+ */
+export type ReplayFilter = {
/**
* Execution Id
*/
@@ -1897,7 +1974,7 @@ export type ReplayFilterSchema = {
/**
* Event Types
*/
- event_types?: Array | null;
+ event_types?: Array | null;
/**
* Start Time
*/
@@ -1923,17 +2000,13 @@ export type ReplayFilterSchema = {
/**
* Exclude Event Types
*/
- exclude_event_types?: Array | null;
+ exclude_event_types?: Array | null;
};
/**
- * ReplayRequest
- *
- * Request schema for creating replay sessions
+ * ReplayFilterSchema
*/
-export type ReplayRequest = {
- replay_type: ReplayType;
- target?: ReplayTarget;
+export type ReplayFilterSchema = {
/**
* Execution Id
*/
@@ -1958,6 +2031,27 @@ export type ReplayRequest = {
* Service Name
*/
service_name?: string | null;
+ /**
+ * Custom Query
+ */
+ custom_query?: {
+ [key: string]: unknown;
+ } | null;
+ /**
+ * Exclude Event Types
+ */
+ exclude_event_types?: Array | null;
+};
+
+/**
+ * ReplayRequest
+ *
+ * Request schema for creating replay sessions
+ */
+export type ReplayRequest = {
+ replay_type: ReplayType;
+ target?: ReplayTarget;
+ filter?: ReplayFilter;
/**
* Speed Multiplier
*/
@@ -1982,6 +2076,24 @@ export type ReplayRequest = {
* Target File Path
*/
target_file_path?: string | null;
+ /**
+ * Target Topics
+ */
+ target_topics?: {
+ [key: string]: string;
+ } | null;
+ /**
+ * Retry Failed
+ */
+ retry_failed?: boolean;
+ /**
+ * Retry Attempts
+ */
+ retry_attempts?: number;
+ /**
+ * Enable Progress Tracking
+ */
+ enable_progress_tracking?: boolean;
};
/**
@@ -2108,28 +2220,20 @@ export type ResourceLimits = {
export type ResourceUsage = {
/**
* Execution Time Wall Seconds
- *
- * Wall clock execution time in seconds
*/
- execution_time_wall_seconds?: number | null;
+ execution_time_wall_seconds?: number;
/**
* Cpu Time Jiffies
- *
- * CPU time in jiffies (multiply by 10 for milliseconds)
*/
- cpu_time_jiffies?: number | null;
+ cpu_time_jiffies?: number;
/**
* Clk Tck Hertz
- *
- * Clock ticks per second (usually 100)
*/
- clk_tck_hertz?: number | null;
+ clk_tck_hertz?: number;
/**
* Peak Memory Kb
- *
- * Peak memory usage in KB
*/
- peak_memory_kb?: number | null;
+ peak_memory_kb?: number;
};
/**
@@ -2292,6 +2396,18 @@ export type SagaListResponse = {
* Total
*/
total: number;
+ /**
+ * Skip
+ */
+ skip: number;
+ /**
+ * Limit
+ */
+ limit: number;
+ /**
+ * Has More
+ */
+ has_more: boolean;
};
/**
@@ -2418,6 +2534,36 @@ export type SavedScriptResponse = {
updated_at: string;
};
+/**
+ * SavedScriptUpdate
+ */
+export type SavedScriptUpdate = {
+ /**
+ * Name
+ */
+ name?: string | null;
+ /**
+ * Script
+ */
+ script?: string | null;
+ /**
+ * Lang
+ */
+ lang?: string | null;
+ /**
+ * Lang Version
+ */
+ lang_version?: string | null;
+ /**
+ * Description
+ */
+ description?: string | null;
+ /**
+ * Updated At
+ */
+ updated_at?: string;
+};
+
/**
* SecuritySettingsSchema
*
@@ -2494,11 +2640,11 @@ export type SessionSummary = {
/**
* Duration Seconds
*/
- duration_seconds?: number | null;
+ readonly duration_seconds: number | null;
/**
* Throughput Events Per Second
*/
- throughput_events_per_second?: number | null;
+ readonly throughput_events_per_second: number | null;
};
/**
@@ -2537,7 +2683,7 @@ export type SettingsHistoryEntry = {
/**
* SettingsHistoryResponse
*
- * Response model for settings history
+ * Response model for settings history (limited snapshot of recent changes)
*/
export type SettingsHistoryResponse = {
/**
@@ -2545,9 +2691,9 @@ export type SettingsHistoryResponse = {
*/
history: Array<SettingsHistoryEntry>;
/**
- * Total
+ * Limit
*/
- total: number;
+ limit: number;
};
/**
@@ -2753,6 +2899,22 @@ export type UserCreate = {
password: string;
};
+/**
+ * UserEventCountSchema
+ *
+ * User event count schema
+ */
+export type UserEventCountSchema = {
+ /**
+ * User Id
+ */
+ user_id: string;
+ /**
+ * Event Count
+ */
+ event_count: number;
+};
+
/**
* UserListResponse
*
@@ -3041,6 +3203,109 @@ export type ValidationError = {
type: string;
};
+/**
+ * EventReplayStatusResponse
+ *
+ * Response model for replay status
+ */
+export type EventReplayStatusResponseWritable = {
+ /**
+ * Session Id
+ */
+ session_id: string;
+ /**
+ * Status
+ */
+ status: string;
+ /**
+ * Total Events
+ */
+ total_events: number;
+ /**
+ * Replayed Events
+ */
+ replayed_events: number;
+ /**
+ * Failed Events
+ */
+ failed_events: number;
+ /**
+ * Skipped Events
+ */
+ skipped_events: number;
+ /**
+ * Correlation Id
+ */
+ correlation_id: string;
+ /**
+ * Created At
+ */
+ created_at: string;
+ /**
+ * Started At
+ */
+ started_at?: string | null;
+ /**
+ * Completed At
+ */
+ completed_at?: string | null;
+ /**
+ * Errors
+ */
+ errors?: Array<ReplayErrorInfo> | null;
+ /**
+ * Estimated Completion
+ */
+ estimated_completion?: string | null;
+ /**
+ * Execution Results
+ */
+ execution_results?: Array | null;
+};
+
+/**
+ * SessionSummary
+ *
+ * Summary information for replay sessions
+ */
+export type SessionSummaryWritable = {
+ /**
+ * Session Id
+ */
+ session_id: string;
+ replay_type: ReplayType;
+ target: ReplayTarget;
+ status: ReplayStatus;
+ /**
+ * Total Events
+ */
+ total_events: number;
+ /**
+ * Replayed Events
+ */
+ replayed_events: number;
+ /**
+ * Failed Events
+ */
+ failed_events: number;
+ /**
+ * Skipped Events
+ */
+ skipped_events: number;
+ /**
+ * Created At
+ */
+ created_at: string;
+ /**
+ * Started At
+ */
+ started_at: string | null;
+ /**
+ * Completed At
+ */
+ completed_at: string | null;
+};
+
export type LoginApiV1AuthLoginPostData = {
body: BodyLoginApiV1AuthLoginPost;
path?: never;
@@ -3519,7 +3784,7 @@ export type GetSavedScriptApiV1ScriptsScriptIdGetResponses = {
export type GetSavedScriptApiV1ScriptsScriptIdGetResponse = GetSavedScriptApiV1ScriptsScriptIdGetResponses[keyof GetSavedScriptApiV1ScriptsScriptIdGetResponses];
export type UpdateSavedScriptApiV1ScriptsScriptIdPutData = {
- body: SavedScriptCreateRequest;
+ body: SavedScriptUpdate;
path: {
/**
* Script Id
@@ -3852,7 +4117,7 @@ export type GetDlqMessagesApiV1DlqMessagesGetData = {
/**
* Event Type
*/
- event_type?: string | null;
+ event_type?: EventType | null;
/**
* Limit
*/
@@ -4091,6 +4356,14 @@ export type GetExecutionEventsApiV1EventsExecutionsExecutionIdEventsGetData = {
* Include system-generated events
*/
include_system_events?: boolean;
+ /**
+ * Limit
+ */
+ limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/events/executions/{execution_id}/events';
};
@@ -4204,6 +4477,10 @@ export type GetEventsByCorrelationApiV1EventsCorrelationCorrelationIdGetData = {
* Limit
*/
limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/events/correlation/{correlation_id}';
};
@@ -4234,6 +4511,10 @@ export type GetCurrentRequestEventsApiV1EventsCurrentRequestGetData = {
* Limit
*/
limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/events/current-request';
};
@@ -5574,6 +5855,14 @@ export type GetExecutionSagasApiV1SagasExecutionExecutionIdGetData = {
* Filter by saga state
*/
state?: SagaState | null;
+ /**
+ * Limit
+ */
+ limit?: number;
+ /**
+ * Skip
+ */
+ skip?: number;
};
url: '/api/v1/sagas/execution/{execution_id}';
};
@@ -5611,9 +5900,9 @@ export type ListSagasApiV1SagasGetData = {
*/
limit?: number;
/**
- * Offset
+ * Skip
*/
- offset?: number;
+ skip?: number;
};
url: '/api/v1/sagas/';
};
From dcda01d8c193cb5c5efd5ebb497d43e485b67300 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Tue, 30 Dec 2025 23:29:33 +0100
Subject: [PATCH 36/48] ruff + unit tests fixes
---
.../admin/admin_events_repository.py | 4 +-
.../tests/unit/events/test_metadata_model.py | 14 ---
.../schemas_pydantic/test_events_schemas.py | 98 +----------------
.../test_execution_schemas.py | 1 -
.../test_health_dashboard_schemas.py | 100 ------------------
.../test_notification_schemas.py | 42 +-------
.../test_replay_models_schemas.py | 44 --------
.../schemas_pydantic/test_saga_schemas.py | 26 -----
8 files changed, 7 insertions(+), 322 deletions(-)
delete mode 100644 backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py
delete mode 100644 backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py
delete mode 100644 backend/tests/unit/schemas_pydantic/test_saga_schemas.py
diff --git a/backend/app/db/repositories/admin/admin_events_repository.py b/backend/app/db/repositories/admin/admin_events_repository.py
index 70630ccf..1190752e 100644
--- a/backend/app/db/repositories/admin/admin_events_repository.py
+++ b/backend/app/db/repositories/admin/admin_events_repository.py
@@ -83,7 +83,9 @@ async def get_event_detail(self, event_id: str) -> EventDetail | None:
return None
doc_fields = set(EventStoreDocument.model_fields.keys()) - {"id", "revision_id"}
- event = Event(**{**doc.model_dump(include=doc_fields), "metadata": DomainEventMetadata(**doc.metadata.model_dump())})
+ event = Event(
+ **{**doc.model_dump(include=doc_fields), "metadata": DomainEventMetadata(**doc.metadata.model_dump())}
+ )
related_query = {"metadata.correlation_id": doc.metadata.correlation_id, "event_id": {"$ne": event_id}}
related_docs = await (
diff --git a/backend/tests/unit/events/test_metadata_model.py b/backend/tests/unit/events/test_metadata_model.py
index 94afa349..71440ce7 100644
--- a/backend/tests/unit/events/test_metadata_model.py
+++ b/backend/tests/unit/events/test_metadata_model.py
@@ -1,20 +1,6 @@
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-def test_to_dict() -> None:
- m = AvroEventMetadata(service_name="svc", service_version="1.0")
- d = m.to_dict()
- assert d["service_name"] == "svc"
- assert d["service_version"] == "1.0"
-
-
-def test_from_dict() -> None:
- m = AvroEventMetadata.from_dict({"service_name": "a", "service_version": "2", "user_id": "u"})
- assert m.service_name == "a"
- assert m.service_version == "2"
- assert m.user_id == "u"
-
-
def test_with_correlation() -> None:
m = AvroEventMetadata(service_name="svc", service_version="1")
m2 = m.with_correlation("cid")
diff --git a/backend/tests/unit/schemas_pydantic/test_events_schemas.py b/backend/tests/unit/schemas_pydantic/test_events_schemas.py
index 13b99bd8..30ef50c2 100644
--- a/backend/tests/unit/schemas_pydantic/test_events_schemas.py
+++ b/backend/tests/unit/schemas_pydantic/test_events_schemas.py
@@ -1,25 +1,7 @@
-import math
-from datetime import datetime, timezone, timedelta
-
import pytest
+from app.schemas_pydantic.events import EventFilterRequest
from app.domain.enums.common import SortOrder
-from app.domain.enums.events import EventType
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-from app.schemas_pydantic.events import (
- EventAggregationRequest,
- EventBase,
- EventFilterRequest,
- EventInDB,
- EventListResponse,
- EventProjection,
- EventQuery,
- EventResponse,
- EventStatistics,
- PublishEventRequest,
- PublishEventResponse,
- ResourceUsage,
-)
def test_event_filter_request_sort_validator_accepts_allowed_fields():
@@ -34,81 +16,3 @@ def test_event_filter_request_sort_validator_accepts_allowed_fields():
def test_event_filter_request_sort_validator_rejects_invalid():
with pytest.raises(ValueError):
EventFilterRequest(sort_by="not-a-field")
-
-
-def test_event_base_and_in_db_defaults_and_metadata():
- meta = AvroEventMetadata(service_name="tests", service_version="1.0", user_id="u1")
- ev = EventBase(
- event_type=EventType.EXECUTION_REQUESTED,
- metadata=meta,
- payload={"execution_id": "e1"},
- )
- assert ev.event_id and ev.timestamp.tzinfo is not None
- edb = EventInDB(**ev.model_dump())
- assert isinstance(edb.stored_at, datetime)
- assert isinstance(edb.ttl_expires_at, datetime)
- # ttl should be after stored_at by ~30 days
- assert edb.ttl_expires_at > edb.stored_at
-
-
-def test_publish_event_request_and_response():
- req = PublishEventRequest(
- event_type=EventType.EXECUTION_REQUESTED,
- payload={"x": 1},
- aggregate_id="agg",
- )
- assert req.event_type is EventType.EXECUTION_REQUESTED
- resp = PublishEventResponse(event_id="e", status="queued", timestamp=datetime.now(timezone.utc))
- assert resp.status == "queued"
-
-
-def test_event_query_schema_and_list_response():
- q = EventQuery(
- event_types=[EventType.EXECUTION_REQUESTED, EventType.POD_CREATED],
- user_id="u1",
- start_time=datetime.now(timezone.utc) - timedelta(hours=1),
- end_time=datetime.now(timezone.utc),
- limit=50,
- skip=0,
- )
- assert len(q.event_types or []) == 2 and q.limit == 50
-
- # Minimal list response compose/decompose
- from app.schemas_pydantic.events import EventMetadataResponse
- er = EventResponse(
- event_id="id",
- event_type=EventType.POD_CREATED,
- event_version="1.0",
- timestamp=datetime.now(timezone.utc),
- metadata=EventMetadataResponse(
- service_name="test", service_version="1.0", correlation_id="cid-1"
- ),
- payload={},
- )
- lst = EventListResponse(events=[er], total=1, limit=1, skip=0, has_more=False)
- assert lst.total == 1 and not lst.has_more
-
-
-def test_event_projection_and_statistics_examples():
- proj = EventProjection(
- name="exec_summary",
- source_events=[EventType.EXECUTION_REQUESTED, EventType.EXECUTION_COMPLETED],
- aggregation_pipeline=[{"$match": {"event_type": str(EventType.EXECUTION_REQUESTED)}}],
- output_collection="summary",
- )
- assert proj.refresh_interval_seconds == 300
-
- stats = EventStatistics(
- total_events=2,
- events_by_type={str(EventType.EXECUTION_REQUESTED): 1},
- events_by_service={"svc": 2},
- events_by_hour=[{"hour": "2025-01-01 00:00", "count": 2}],
- )
- assert stats.total_events == 2
-
-
-def test_resource_usage_schema():
- ru = ResourceUsage(cpu_seconds=1.5, memory_mb_seconds=256.0, disk_io_mb=10.0, network_io_mb=5.0)
- dumped = ru.model_dump()
- assert math.isclose(dumped["cpu_seconds"], 1.5)
-
diff --git a/backend/tests/unit/schemas_pydantic/test_execution_schemas.py b/backend/tests/unit/schemas_pydantic/test_execution_schemas.py
index 2ff863f4..38e59401 100644
--- a/backend/tests/unit/schemas_pydantic/test_execution_schemas.py
+++ b/backend/tests/unit/schemas_pydantic/test_execution_schemas.py
@@ -20,4 +20,3 @@ def test_execution_request_unsupported_version_raises():
with pytest.raises(ValueError) as e:
ExecutionRequest(script="print(1)", lang="python", lang_version="9.9")
assert "Version '9.9' not supported for python" in str(e.value)
-
diff --git a/backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py b/backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py
deleted file mode 100644
index fb1f0d02..00000000
--- a/backend/tests/unit/schemas_pydantic/test_health_dashboard_schemas.py
+++ /dev/null
@@ -1,100 +0,0 @@
-from datetime import datetime, timezone
-
-from app.schemas_pydantic.health_dashboard import (
- CategoryHealthResponse,
- CategoryHealthStatistics,
- CategoryServices,
- DependencyEdge,
- DependencyGraph,
- DependencyNode,
- DetailedHealthStatus,
- HealthAlert,
- HealthCheckConfig,
- HealthCheckState,
- HealthDashboardResponse,
- HealthMetricsSummary,
- HealthStatistics,
- HealthTrend,
- ServiceHealth,
- ServiceHealthDetails,
- ServiceHistoryDataPoint,
- ServiceHistoryResponse,
- ServiceHistorySummary,
- ServiceRealtimeStatus,
- ServiceDependenciesResponse,
-)
-from app.domain.enums.health import AlertSeverity
-
-
-def _now() -> datetime:
- return datetime.now(timezone.utc)
-
-
-def test_alert_and_metrics_and_trend_models():
- alert = HealthAlert(
- id="a1", severity=AlertSeverity.CRITICAL, service="backend", status="unhealthy", message="down",
- timestamp=_now(), duration_ms=12.3
- )
- assert alert.severity is AlertSeverity.CRITICAL
-
- metrics = HealthMetricsSummary(
- total_checks=10, healthy_checks=7, failed_checks=3, avg_check_duration_ms=5.5, total_failures_24h=3, uptime_percentage_24h=99.1
- )
- assert metrics.total_checks == 10
-
- trend = HealthTrend(timestamp=_now(), status="ok", healthy_count=10, unhealthy_count=0, degraded_count=0)
- assert trend.healthy_count == 10
-
-
-def test_service_health_and_dashboard_models():
- svc = ServiceHealth(name="backend", status="healthy", uptime_percentage=99.9, last_check=_now(), message="ok", critical=False)
- dash = HealthDashboardResponse(
- overall_status="healthy", last_updated=_now(), services=[svc], statistics={"total": 1}, alerts=[], trends=[]
- )
- assert dash.overall_status == "healthy"
-
-
-def test_category_services_and_detailed_status():
- cat = CategoryServices(status="healthy", message="ok", duration_ms=1.0, details={"k": "v"})
- stats = HealthStatistics(total_checks=10, healthy=9, degraded=1, unhealthy=0, unknown=0)
- detailed = DetailedHealthStatus(
- timestamp=_now().isoformat(), overall_status="healthy", categories={"core": {"db": cat}}, statistics=stats
- )
- assert detailed.categories["core"]["db"].status == "healthy"
-
-
-def test_dependency_graph_and_service_dependencies():
- nodes = [DependencyNode(id="svcA", label="Service A", status="healthy", critical=False, message="ok")]
- edges = [DependencyEdge(**{"from": "svcA", "to": "svcB", "critical": True})]
- graph = DependencyGraph(nodes=nodes, edges=edges)
- assert graph.edges[0].from_service == "svcA" and graph.edges[0].to_service == "svcB"
-
- from app.schemas_pydantic.health_dashboard import ServiceImpactAnalysis
- impact = {"svcA": ServiceImpactAnalysis(status="ok", affected_services=[], is_critical=False)}
- dep = ServiceDependenciesResponse(
- dependency_graph=graph,
- impact_analysis=impact,
- total_services=1,
- healthy_services=1,
- critical_services_down=0,
- )
- assert dep.total_services == 1
-
-
-def test_service_health_details_and_history():
- cfg = HealthCheckConfig(type="http", critical=True, interval_seconds=10.0, timeout_seconds=2.0, failure_threshold=3)
- state = HealthCheckState(consecutive_failures=0, consecutive_successes=5)
- details = ServiceHealthDetails(
- name="backend", status="healthy", message="ok", duration_ms=1.2, timestamp=_now(), check_config=cfg, state=state
- )
- assert details.state.consecutive_successes == 5
-
- dp = ServiceHistoryDataPoint(timestamp=_now(), status="ok", duration_ms=1.0, healthy=True)
- summary = ServiceHistorySummary(uptime_percentage=99.9, total_checks=10, healthy_checks=9, failure_count=1)
- hist = ServiceHistoryResponse(service_name="backend", time_range_hours=24, data_points=[dp], summary=summary)
- assert hist.time_range_hours == 24
-
-
-def test_realtime_status_model():
- rt = ServiceRealtimeStatus(status="ok", message="fine", duration_ms=2.0, last_check=_now(), details={})
- assert rt.status == "ok"
diff --git a/backend/tests/unit/schemas_pydantic/test_notification_schemas.py b/backend/tests/unit/schemas_pydantic/test_notification_schemas.py
index 00a0c7d4..14b304bc 100644
--- a/backend/tests/unit/schemas_pydantic/test_notification_schemas.py
+++ b/backend/tests/unit/schemas_pydantic/test_notification_schemas.py
@@ -1,19 +1,12 @@
from datetime import UTC, datetime, timedelta
import pytest
+
from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus
-from app.schemas_pydantic.notification import (
- Notification,
- NotificationBatch,
- NotificationListResponse,
- NotificationResponse,
- NotificationStats,
- NotificationSubscription,
- SubscriptionUpdate,
-)
+from app.schemas_pydantic.notification import Notification, NotificationBatch
-def test_notification_scheduled_for_future_validation():
+def test_notification_scheduled_for_must_be_future():
n = Notification(
user_id="u1",
channel=NotificationChannel.IN_APP,
@@ -43,35 +36,6 @@ def test_notification_batch_validation_limits():
with pytest.raises(ValueError):
NotificationBatch(notifications=[])
- # Upper bound: >1000 should fail
many = [n1.model_copy() for _ in range(1001)]
with pytest.raises(ValueError):
NotificationBatch(notifications=many)
-
-
-def test_notification_response_and_list():
- n = Notification(user_id="u1", channel=NotificationChannel.IN_APP, subject="s", body="b")
- resp = NotificationResponse(
- notification_id=n.notification_id,
- channel=n.channel,
- status=n.status,
- subject=n.subject,
- body=n.body,
- action_url=None,
- created_at=n.created_at,
- read_at=None,
- severity=n.severity,
- tags=[],
- )
- lst = NotificationListResponse(notifications=[resp], total=1, unread_count=1)
- assert lst.unread_count == 1
-
-
-def test_subscription_models_and_stats():
- sub = NotificationSubscription(user_id="u1", channel=NotificationChannel.IN_APP)
- upd = SubscriptionUpdate(enabled=True)
- assert sub.enabled is True and upd.enabled is True
-
- now = datetime.now(UTC)
- stats = NotificationStats(start_date=now - timedelta(days=1), end_date=now)
- assert stats.total_sent == 0 and stats.delivery_rate == 0.0
diff --git a/backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py b/backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py
deleted file mode 100644
index 98fff483..00000000
--- a/backend/tests/unit/schemas_pydantic/test_replay_models_schemas.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from datetime import datetime, timezone
-
-from app.domain.enums.events import EventType
-from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
-from app.domain.replay.models import ReplayConfig as DomainReplayConfig, ReplayFilter as DomainReplayFilter
-from app.schemas_pydantic.replay_models import ReplayConfigSchema, ReplayFilterSchema, ReplaySession
-
-
-def test_replay_filter_schema_from_domain():
- df = DomainReplayFilter(
- execution_id="e1",
- event_types=[EventType.EXECUTION_REQUESTED],
- exclude_event_types=[EventType.POD_CREATED],
- start_time=datetime.now(timezone.utc),
- end_time=datetime.now(timezone.utc),
- user_id="u1",
- service_name="svc",
- custom_query={"x": 1},
- )
- sf = ReplayFilterSchema.from_domain(df)
- assert sf.event_types == [str(EventType.EXECUTION_REQUESTED)]
- assert sf.exclude_event_types == [str(EventType.POD_CREATED)]
-
-
-def test_replay_config_schema_from_domain_and_key_conversion():
- df = DomainReplayFilter(event_types=[EventType.EXECUTION_REQUESTED])
- cfg = DomainReplayConfig(
- replay_type=ReplayType.TIME_RANGE,
- target=ReplayTarget.KAFKA,
- filter=df,
- target_topics={EventType.EXECUTION_REQUESTED: "execution-events"},
- max_events=10,
- )
- sc = ReplayConfigSchema.model_validate(cfg)
- assert sc.target_topics == {str(EventType.EXECUTION_REQUESTED): "execution-events"}
- assert sc.max_events == 10
-
-
-def test_replay_session_coerces_config_from_domain():
- df = DomainReplayFilter()
- cfg = DomainReplayConfig(replay_type=ReplayType.TIME_RANGE, filter=df)
- session = ReplaySession(config=cfg)
- assert session.status == ReplayStatus.CREATED
- assert isinstance(session.config, ReplayConfigSchema)
diff --git a/backend/tests/unit/schemas_pydantic/test_saga_schemas.py b/backend/tests/unit/schemas_pydantic/test_saga_schemas.py
deleted file mode 100644
index 290446c4..00000000
--- a/backend/tests/unit/schemas_pydantic/test_saga_schemas.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from datetime import datetime, timezone
-
-from app.domain.enums.saga import SagaState
-from app.domain.saga.models import Saga
-from app.schemas_pydantic.saga import SagaStatusResponse
-
-
-def test_saga_status_response_from_domain():
- s = Saga(
- saga_id="s1",
- saga_name="exec-saga",
- execution_id="e1",
- state=SagaState.RUNNING,
- current_step="allocate",
- completed_steps=["validate"],
- compensated_steps=[],
- error_message=None,
- created_at=datetime.now(timezone.utc),
- updated_at=datetime.now(timezone.utc),
- completed_at=None,
- retry_count=1,
- )
- resp = SagaStatusResponse.from_domain(s)
- assert resp.saga_id == "s1" and resp.current_step == "allocate"
- assert resp.created_at.endswith("Z") is False # isoformat without enforced Z; just ensure string
-
From 981ae0a5c1f4445a9df4422c168a934eb5c2165b Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Tue, 30 Dec 2025 23:50:55 +0100
Subject: [PATCH 37/48] ruff + unit tests fixes
---
backend/app/core/container.py | 3 +++
backend/app/core/dishka_lifespan.py | 11 ++++-------
backend/app/core/providers.py | 18 ++++++++++++++++++
3 files changed, 25 insertions(+), 7 deletions(-)
diff --git a/backend/app/core/container.py b/backend/app/core/container.py
index 7976e615..c7f8b9d7 100644
--- a/backend/app/core/container.py
+++ b/backend/app/core/container.py
@@ -7,6 +7,7 @@
BusinessServicesProvider,
ConnectionProvider,
CoreServicesProvider,
+ DatabaseProvider,
EventProvider,
LoggingProvider,
MessagingProvider,
@@ -24,6 +25,7 @@ def create_app_container() -> AsyncContainer:
return make_async_container(
SettingsProvider(),
LoggingProvider(),
+ DatabaseProvider(),
RedisProvider(),
CoreServicesProvider(),
MessagingProvider(),
@@ -45,6 +47,7 @@ def create_result_processor_container() -> AsyncContainer:
return make_async_container(
SettingsProvider(),
LoggingProvider(),
+ DatabaseProvider(),
CoreServicesProvider(),
ConnectionProvider(),
RedisProvider(),
diff --git a/backend/app/core/dishka_lifespan.py b/backend/app/core/dishka_lifespan.py
index 99448676..038fd18d 100644
--- a/backend/app/core/dishka_lifespan.py
+++ b/backend/app/core/dishka_lifespan.py
@@ -6,8 +6,8 @@
from beanie import init_beanie
from dishka import AsyncContainer
from fastapi import FastAPI
-from pymongo.asynchronous.mongo_client import AsyncMongoClient
+from app.core.database_context import Database
from app.core.startup import initialize_metrics_context, initialize_rate_limits
from app.core.tracing import init_tracing
from app.db.docs import ALL_DOCUMENTS
@@ -69,11 +69,9 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
schema_registry = await container.get(SchemaRegistryManager)
await initialize_event_schemas(schema_registry)
- # Initialize Beanie ODM with PyMongo async database
- db_client: AsyncMongoClient[dict[str, object]] = AsyncMongoClient(
- settings.MONGODB_URL, tz_aware=True, serverSelectionTimeoutMS=5000
- )
- await init_beanie(database=db_client[settings.DATABASE_NAME], document_models=ALL_DOCUMENTS)
+ # Initialize Beanie ODM with database from DI container
+ database = await container.get(Database)
+ await init_beanie(database=database, document_models=ALL_DOCUMENTS)
logger.info(f"Beanie ODM initialized with {len(ALL_DOCUMENTS)} document models")
# Initialize metrics context with instances from DI container
@@ -93,7 +91,6 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
event_store_consumer = await container.get(EventStoreConsumer)
async with AsyncExitStack() as stack:
- stack.callback(db_client.close)
await stack.enter_async_context(sse_bridge)
logger.info("SSE Kafka→Redis bridge started with consumer pool")
await stack.enter_async_context(event_store_consumer)
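For reference, a minimal sketch (not part of the patch) of the shutdown ordering that AsyncExitStack gives the lifespan above: resources entered later are closed first, so the event store consumer stops before the SSE bridge. All names below are illustrative stand-ins.

import asyncio
from contextlib import AsyncExitStack, asynccontextmanager


@asynccontextmanager
async def resource(name: str):
    # Stand-in for sse_bridge / event_store_consumer style context managers.
    print(f"start {name}")
    try:
        yield name
    finally:
        print(f"stop {name}")


async def main() -> None:
    async with AsyncExitStack() as stack:
        await stack.enter_async_context(resource("sse-bridge"))
        await stack.enter_async_context(resource("event-store-consumer"))
        # On exit, "stop event-store-consumer" prints before "stop sse-bridge".


asyncio.run(main())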
diff --git a/backend/app/core/providers.py b/backend/app/core/providers.py
index 9733db3c..49f3c7d2 100644
--- a/backend/app/core/providers.py
+++ b/backend/app/core/providers.py
@@ -3,7 +3,9 @@
import redis.asyncio as redis
from dishka import Provider, Scope, provide
+from pymongo.asynchronous.mongo_client import AsyncMongoClient
+from app.core.database_context import Database
from app.core.k8s_clients import K8sClients, close_k8s_clients, create_k8s_clients
from app.core.logging import setup_logger
from app.core.metrics import (
@@ -118,6 +120,22 @@ def get_rate_limit_service(
return RateLimitService(redis_client, settings, rate_limit_metrics)
+class DatabaseProvider(Provider):
+ scope = Scope.APP
+
+ @provide
+ async def get_database(self, settings: Settings, logger: logging.Logger) -> AsyncIterator[Database]:
+ client: AsyncMongoClient[dict[str, object]] = AsyncMongoClient(
+ settings.MONGODB_URL, tz_aware=True, serverSelectionTimeoutMS=5000
+ )
+ database = client[settings.DATABASE_NAME]
+ logger.info(f"MongoDB connected: {settings.DATABASE_NAME}")
+ try:
+ yield database
+ finally:
+ client.close()
+
+
class CoreServicesProvider(Provider):
scope = Scope.APP
From 295436ceb918356a7c3a161abea78d2bb0d2ae90 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Tue, 30 Dec 2025 23:54:44 +0100
Subject: [PATCH 38/48] providers fix
---
backend/app/core/providers.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/app/core/providers.py b/backend/app/core/providers.py
index 49f3c7d2..0ab818d4 100644
--- a/backend/app/core/providers.py
+++ b/backend/app/core/providers.py
@@ -133,7 +133,7 @@ async def get_database(self, settings: Settings, logger: logging.Logger) -> Asyn
try:
yield database
finally:
- client.close()
+ await client.close()
class CoreServicesProvider(Provider):
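For reference, a sketch of the provider pattern the two patches above converge on, assuming Dishka's generator-based finalization (make_async_container, container.get and the yield/finally provider shape are taken from the diffs; everything else is illustrative): the APP-scoped provider owns the Mongo client and awaits close() when the container shuts down.

import asyncio
from collections.abc import AsyncIterator

from dishka import Provider, Scope, make_async_container, provide
from pymongo.asynchronous.mongo_client import AsyncMongoClient


class SketchDatabaseProvider(Provider):
    scope = Scope.APP

    @provide
    async def get_client(self) -> AsyncIterator[AsyncMongoClient]:
        # Built lazily; no connection is made until the client is first used.
        client: AsyncMongoClient = AsyncMongoClient("mongodb://localhost:27017", tz_aware=True)
        try:
            yield client
        finally:
            await client.close()  # matches the await added in PATCH 38


async def main() -> None:
    container = make_async_container(SketchDatabaseProvider())
    client = await container.get(AsyncMongoClient)  # resolved once for the APP scope
    _ = client["example_db"]  # the real lifespan hands this database to init_beanie
    await container.close()   # runs the finally block above


asyncio.run(main())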
From 87b2dffe1b285e2661d951e823d63f334e37a579 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 00:07:25 +0100
Subject: [PATCH 39/48] e2e test fix
---
backend/app/api/routes/auth.py | 10 ++++++++++
.../integration/k8s/test_k8s_worker_create_pod.py | 15 +++++----------
backend/tests/integration/test_saga_routes.py | 9 ++++-----
3 files changed, 19 insertions(+), 15 deletions(-)
diff --git a/backend/app/api/routes/auth.py b/backend/app/api/routes/auth.py
index e8263cd3..606d57f2 100644
--- a/backend/app/api/routes/auth.py
+++ b/backend/app/api/routes/auth.py
@@ -5,6 +5,7 @@
from dishka.integrations.fastapi import DishkaRoute
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.security import OAuth2PasswordRequestForm
+from pymongo.errors import DuplicateKeyError
from app.core.security import security_service
from app.core.utils import get_client_ip
@@ -181,6 +182,15 @@ async def register(
updated_at=created_user.updated_at,
)
+ except DuplicateKeyError as e:
+ logger.warning(
+ "Registration failed - duplicate email",
+ extra={
+ "username": user.username,
+ "client_ip": get_client_ip(request),
+ },
+ )
+ raise HTTPException(status_code=409, detail="Email already registered") from e
except Exception as e:
logger.error(
f"Registration failed - database error: {str(e)}",
diff --git a/backend/tests/integration/k8s/test_k8s_worker_create_pod.py b/backend/tests/integration/k8s/test_k8s_worker_create_pod.py
index 1222db83..732ce094 100644
--- a/backend/tests/integration/k8s/test_k8s_worker_create_pod.py
+++ b/backend/tests/integration/k8s/test_k8s_worker_create_pod.py
@@ -3,18 +3,15 @@
import uuid
import pytest
-from kubernetes.client.rest import ApiException
-
+from app.events.core import UnifiedProducer
+from app.events.event_store import EventStore
+from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.saga import CreatePodCommandEvent
+from app.services.idempotency import IdempotencyManager
from app.services.k8s_worker.config import K8sWorkerConfig
from app.services.k8s_worker.worker import KubernetesWorker
-
-from app.core.database_context import Database
-from app.events.event_store import EventStore
-from app.events.schema.schema_registry import SchemaRegistryManager
-from app.events.core import UnifiedProducer
-from app.services.idempotency import IdempotencyManager
+from kubernetes.client.rest import ApiException
pytestmark = [pytest.mark.integration, pytest.mark.k8s]
@@ -29,7 +26,6 @@ async def test_worker_creates_configmap_and_pod(scope, monkeypatch): # type: ig
ns = "integr8scode"
monkeypatch.setenv("K8S_NAMESPACE", ns)
- database: Database = await scope.get(Database)
schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
store: EventStore = await scope.get(EventStore)
producer: UnifiedProducer = await scope.get(UnifiedProducer)
@@ -38,7 +34,6 @@ async def test_worker_creates_configmap_and_pod(scope, monkeypatch): # type: ig
cfg = K8sWorkerConfig(namespace=ns, max_concurrent_pods=1)
worker = KubernetesWorker(
config=cfg,
- database=database,
producer=producer,
schema_registry_manager=schema,
event_store=store,
diff --git a/backend/tests/integration/test_saga_routes.py b/backend/tests/integration/test_saga_routes.py
index 81cb1379..b26d7b90 100644
--- a/backend/tests/integration/test_saga_routes.py
+++ b/backend/tests/integration/test_saga_routes.py
@@ -1,15 +1,14 @@
-import uuid
import asyncio
+import uuid
from typing import Dict
import pytest
-from httpx import AsyncClient
-
from app.domain.enums.saga import SagaState
from app.schemas_pydantic.saga import (
SagaListResponse,
SagaStatusResponse,
)
+from httpx import AsyncClient
class TestSagaRoutes:
@@ -34,7 +33,7 @@ async def test_get_saga_not_found(
saga_id = str(uuid.uuid4())
response = await client.get(f"/api/v1/sagas/{saga_id}")
assert response.status_code == 404
- assert "Saga not found" in response.json()["detail"]
+ assert "not found" in response.json()["detail"]
@pytest.mark.asyncio
async def test_get_execution_sagas_requires_auth(
@@ -196,7 +195,7 @@ async def test_cancel_saga_not_found(
saga_id = str(uuid.uuid4())
response = await client.post(f"/api/v1/sagas/{saga_id}/cancel")
assert response.status_code == 404
- assert "Saga not found" in response.json()["detail"]
+ assert "not found" in response.json()["detail"]
@pytest.mark.asyncio
async def test_saga_access_control(
From 690f44444b89b21352932c7b7eb4b08d7fff1acd Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 00:36:14 +0100
Subject: [PATCH 40/48] e2e test fix
---
.../app/db/repositories/event_repository.py | 6 +-
backend/app/services/kafka_event_service.py | 6 +-
backend/app/services/user_settings_service.py | 2 +-
.../test_admin_events_repository.py | 66 ++++++++---
.../test_admin_settings_repository.py | 8 +-
.../test_admin_user_repository.py | 112 +++++++++++-------
.../db/repositories/test_dlq_repository.py | 5 +-
.../db/repositories/test_event_repository.py | 27 +++--
.../repositories/test_execution_repository.py | 2 +-
.../test_notification_repository.py | 84 ++++++++++---
.../db/repositories/test_replay_repository.py | 42 +++++--
.../db/repositories/test_saga_repository.py | 42 +++++--
.../test_saved_script_repository.py | 9 +-
.../db/repositories/test_sse_repository.py | 24 ++--
.../db/repositories/test_user_repository.py | 14 ++-
.../test_user_settings_repository.py | 43 +++----
.../integration/events/test_admin_utils.py | 3 +-
.../events/test_consume_roundtrip.py | 8 +-
.../events/test_consumer_group_monitor.py | 4 +-
.../events/test_consumer_group_monitor_e2e.py | 15 ++-
.../integration/events/test_dlq_handler.py | 22 ++--
.../events/test_event_dispatcher.py | 3 +-
.../integration/events/test_event_store.py | 19 +--
.../events/test_event_store_consumer.py | 8 +-
.../test_event_store_consumer_flush_e2e.py | 17 ++-
.../events/test_event_store_e2e.py | 20 +---
.../integration/events/test_producer_e2e.py | 17 +--
.../events/test_schema_registry_e2e.py | 5 +-
.../events/test_schema_registry_real.py | 2 +-
29 files changed, 383 insertions(+), 252 deletions(-)
diff --git a/backend/app/db/repositories/event_repository.py b/backend/app/db/repositories/event_repository.py
index dc2a4e31..901f72ff 100644
--- a/backend/app/db/repositories/event_repository.py
+++ b/backend/app/db/repositories/event_repository.py
@@ -39,7 +39,8 @@ def _build_time_filter(self, start_time: datetime | None, end_time: datetime | N
async def store_event(self, event: Event) -> str:
data = asdict(event)
- data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in asdict(event.metadata).items()}
+ meta = event.metadata.model_dump() if hasattr(event.metadata, "model_dump") else asdict(event.metadata)
+ data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in meta.items()}
if not data.get("stored_at"):
data["stored_at"] = datetime.now(timezone.utc)
# Remove None values so EventDocument defaults can apply (e.g., ttl_expires_at)
@@ -64,7 +65,8 @@ async def store_events_batch(self, events: list[Event]) -> list[str]:
docs = []
for event in events:
data = asdict(event)
- data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in asdict(event.metadata).items()}
+ meta = event.metadata.model_dump() if hasattr(event.metadata, "model_dump") else asdict(event.metadata)
+ data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in meta.items()}
if not data.get("stored_at"):
data["stored_at"] = now
# Remove None values so EventDocument defaults can apply
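For reference, a sketch of the fallback the repository change above relies on: event.metadata may arrive as a Pydantic model (AvroEventMetadata) or as a dataclass (DomainEventMetadata), so serialization prefers model_dump() and falls back to asdict(). The classes below are simplified stand-ins, not the project's real definitions.

from dataclasses import asdict, dataclass, is_dataclass

from pydantic import BaseModel


class PydanticMeta(BaseModel):
    service_name: str
    service_version: str


@dataclass
class DataclassMeta:
    service_name: str
    service_version: str


def dump_metadata(meta: object) -> dict[str, object]:
    if hasattr(meta, "model_dump"):
        return meta.model_dump()  # Pydantic path
    if is_dataclass(meta):
        return asdict(meta)  # dataclass path
    raise TypeError(f"unsupported metadata type: {type(meta)!r}")


assert (
    dump_metadata(PydanticMeta(service_name="svc", service_version="1"))
    == dump_metadata(DataclassMeta(service_name="svc", service_version="1"))
)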
diff --git a/backend/app/services/kafka_event_service.py b/backend/app/services/kafka_event_service.py
index 07b76bf1..25a13f46 100644
--- a/backend/app/services/kafka_event_service.py
+++ b/backend/app/services/kafka_event_service.py
@@ -72,8 +72,10 @@ async def publish_event(
event_id = str(uuid4())
timestamp = datetime.now(timezone.utc)
- # Convert to domain metadata for storage
- domain_metadata = DomainEventMetadata(**avro_metadata.model_dump())
+ # Convert to domain metadata for storage (only include defined fields)
+ domain_metadata = DomainEventMetadata(
+ **avro_metadata.model_dump(include=set(DomainEventMetadata.__dataclass_fields__))
+ )
event = Event(
event_id=event_id,
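For reference, a sketch of the include-filter used in the conversion above: dumping only the keys that the target dataclass declares keeps Avro-only metadata fields out of the DomainEventMetadata constructor. The two classes here are simplified stand-ins.

from dataclasses import dataclass

from pydantic import BaseModel


class AvroMeta(BaseModel):
    service_name: str
    service_version: str
    schema_subject: str = "events-value"  # Avro-specific field with no domain counterpart


@dataclass
class DomainMeta:
    service_name: str
    service_version: str


avro = AvroMeta(service_name="svc", service_version="1.0")
allowed = set(DomainMeta.__dataclass_fields__)           # same trick as in the patch
domain = DomainMeta(**avro.model_dump(include=allowed))  # schema_subject is filtered out
assert domain == DomainMeta(service_name="svc", service_version="1.0")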
diff --git a/backend/app/services/user_settings_service.py b/backend/app/services/user_settings_service.py
index 12a7f3eb..ce51aaf2 100644
--- a/backend/app/services/user_settings_service.py
+++ b/backend/app/services/user_settings_service.py
@@ -332,7 +332,7 @@ async def _get_settings_events(
event_type=et,
timestamp=e.timestamp,
payload=e.payload,
- correlation_id=e.correlation_id,
+ correlation_id=e.metadata.correlation_id if e.metadata else None,
)
)
return out
diff --git a/backend/tests/integration/db/repositories/test_admin_events_repository.py b/backend/tests/integration/db/repositories/test_admin_events_repository.py
index b6a84d4a..de46ca1b 100644
--- a/backend/tests/integration/db/repositories/test_admin_events_repository.py
+++ b/backend/tests/integration/db/repositories/test_admin_events_repository.py
@@ -1,31 +1,46 @@
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
import pytest
-
-from app.db.docs import ReplaySessionDocument
from app.db.docs.replay import ReplayConfig, ReplayFilter
from app.db.repositories.admin.admin_events_repository import AdminEventsRepository
from app.domain.admin import ReplayQuery
from app.domain.admin.replay_updates import ReplaySessionUpdate
-from app.domain.enums.replay import ReplayStatus, ReplayType, ReplayTarget
-from app.domain.events.event_models import EventFilter, EventStatistics, Event
+from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
+from app.domain.events.event_models import Event, EventFilter, EventStatistics
+from app.domain.replay.models import ReplaySessionState
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
pytestmark = pytest.mark.integration
@pytest.fixture()
-def repo() -> AdminEventsRepository:
- return AdminEventsRepository()
+async def repo(scope) -> AdminEventsRepository: # type: ignore[valid-type]
+ return await scope.get(AdminEventsRepository)
@pytest.mark.asyncio
async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {"event_id": "e1", "event_type": "X", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()},
- {"event_id": "e2", "event_type": "X", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()},
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e1",
+ "event_type": "X",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(
+ service_name="svc", service_version="1", correlation_id="c1"
+ ).model_dump(),
+ },
+ {
+ "event_id": "e2",
+ "event_type": "X",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(
+ service_name="svc", service_version="1", correlation_id="c1"
+ ).model_dump(),
+ },
+ ]
+ )
res = await repo.browse_events(EventFilter())
assert res.total >= 2
detail = await repo.get_event_detail("e1")
@@ -38,13 +53,29 @@ async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db)
@pytest.mark.asyncio
async def test_event_stats_and_archive(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {"event_id": "e10", "event_type": "step.completed", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").to_dict()},
- ])
- await db.get_collection("executions").insert_one({"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}})
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e10",
+ "event_type": "step.completed",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").model_dump(),
+ },
+ ]
+ )
+ await db.get_collection("executions").insert_one(
+ {"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}}
+ )
stats = await repo.get_event_stats(hours=1)
assert isinstance(stats, EventStatistics)
- ev = Event(event_id="a1", event_type="X", event_version="1.0", timestamp=now, metadata=AvroEventMetadata(service_name="s", service_version="1"), payload={})
+ ev = Event(
+ event_id="a1",
+ event_type="X",
+ event_version="1.0",
+ timestamp=now,
+ metadata=AvroEventMetadata(service_name="s", service_version="1"),
+ payload={},
+ )
assert await repo.archive_event(ev, deleted_by="admin") is True
@@ -56,7 +87,7 @@ async def test_replay_session_flow_and_helpers(repo: AdminEventsRepository, db)
target=ReplayTarget.TEST,
filter=ReplayFilter(),
)
- session = ReplaySessionDocument(
+ session = ReplaySessionState(
session_id="s1",
config=config,
status=ReplayStatus.SCHEDULED,
@@ -76,4 +107,3 @@ async def test_replay_session_flow_and_helpers(repo: AdminEventsRepository, db)
assert await repo.count_events_for_replay(ReplayQuery()) >= 0
prev = await repo.get_replay_events_preview(event_ids=["e10"]) # from earlier insert
assert isinstance(prev, dict)
-
diff --git a/backend/tests/integration/db/repositories/test_admin_settings_repository.py b/backend/tests/integration/db/repositories/test_admin_settings_repository.py
index 11edf3ad..7c19cf50 100644
--- a/backend/tests/integration/db/repositories/test_admin_settings_repository.py
+++ b/backend/tests/integration/db/repositories/test_admin_settings_repository.py
@@ -1,17 +1,13 @@
-import logging
import pytest
-
from app.db.repositories.admin.admin_settings_repository import AdminSettingsRepository
from app.domain.admin import SystemSettings
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.admin_settings_repository")
-
@pytest.fixture()
-def repo(db) -> AdminSettingsRepository: # type: ignore[valid-type]
- return AdminSettingsRepository(db, logger=_test_logger)
+async def repo(scope) -> AdminSettingsRepository: # type: ignore[valid-type]
+ return await scope.get(AdminSettingsRepository)
@pytest.mark.asyncio
diff --git a/backend/tests/integration/db/repositories/test_admin_user_repository.py b/backend/tests/integration/db/repositories/test_admin_user_repository.py
index 10dd72c6..e65658aa 100644
--- a/backend/tests/integration/db/repositories/test_admin_user_repository.py
+++ b/backend/tests/integration/db/repositories/test_admin_user_repository.py
@@ -1,32 +1,34 @@
-import pytest
from datetime import datetime, timezone
-from app.db.repositories.admin.admin_user_repository import AdminUserRepository
-from app.domain.user import UserFields, UserUpdate, PasswordReset
+import pytest
from app.core.security import SecurityService
+from app.db.repositories.admin.admin_user_repository import AdminUserRepository
+from app.domain.user import PasswordReset, UserFields, UserUpdate
pytestmark = pytest.mark.integration
@pytest.fixture()
-def repo(db) -> AdminUserRepository: # type: ignore[valid-type]
- return AdminUserRepository(db)
+async def repo(scope) -> AdminUserRepository: # type: ignore[valid-type]
+ return await scope.get(AdminUserRepository)
@pytest.mark.asyncio
async def test_list_and_get_user(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
# Insert a user
- await db.get_collection("users").insert_one({
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "alice",
- UserFields.EMAIL: "alice@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- })
+ await db.get_collection("users").insert_one(
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "alice",
+ UserFields.EMAIL: "alice@example.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ }
+ )
res = await repo.list_users(limit=10)
assert res.total >= 1 and any(u.username == "alice" for u in res.users)
user = await repo.get_user_by_id("u1")
@@ -36,17 +38,19 @@ async def test_list_and_get_user(repo: AdminUserRepository, db) -> None: # type
@pytest.mark.asyncio
async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, monkeypatch: pytest.MonkeyPatch) -> None: # type: ignore[valid-type]
# Insert base user
- await db.get_collection("users").insert_one({
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- })
+ await db.get_collection("users").insert_one(
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "bob",
+ UserFields.EMAIL: "bob@example.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ }
+ )
# No updates → returns current
updated = await repo.update_user("u1", UserUpdate())
assert updated and updated.user_id == "u1"
@@ -54,17 +58,19 @@ async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, m
deleted = await repo.delete_user("u1", cascade=True)
assert deleted["user"] in (0, 1)
# Re-insert and reset password
- await db.get_collection("users").insert_one({
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- })
+ await db.get_collection("users").insert_one(
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "bob",
+ UserFields.EMAIL: "bob@example.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ }
+ )
monkeypatch.setattr(SecurityService, "get_password_hash", staticmethod(lambda p: "HASHED"))
pr = PasswordReset(user_id="u1", new_password="secret123")
assert await repo.reset_user_password(pr) is True
@@ -73,10 +79,32 @@ async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, m
@pytest.mark.asyncio
async def test_list_with_filters_and_reset_invalid(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
# Insert a couple of users
- await db.get_collection("users").insert_many([
- {UserFields.USER_ID: "u1", UserFields.USERNAME: "Alice", UserFields.EMAIL: "a@e.com", UserFields.ROLE: "user", UserFields.IS_ACTIVE: True, UserFields.IS_SUPERUSER: False, UserFields.HASHED_PASSWORD: "h", UserFields.CREATED_AT: datetime.now(timezone.utc), UserFields.UPDATED_AT: datetime.now(timezone.utc)},
- {UserFields.USER_ID: "u2", UserFields.USERNAME: "Bob", UserFields.EMAIL: "b@e.com", UserFields.ROLE: "admin", UserFields.IS_ACTIVE: True, UserFields.IS_SUPERUSER: True, UserFields.HASHED_PASSWORD: "h", UserFields.CREATED_AT: datetime.now(timezone.utc), UserFields.UPDATED_AT: datetime.now(timezone.utc)},
- ])
+ await db.get_collection("users").insert_many(
+ [
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "Alice",
+ UserFields.EMAIL: "a@e.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: False,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ },
+ {
+ UserFields.USER_ID: "u2",
+ UserFields.USERNAME: "Bob",
+ UserFields.EMAIL: "b@e.com",
+ UserFields.ROLE: "admin",
+ UserFields.IS_ACTIVE: True,
+ UserFields.IS_SUPERUSER: True,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.CREATED_AT: datetime.now(timezone.utc),
+ UserFields.UPDATED_AT: datetime.now(timezone.utc),
+ },
+ ]
+ )
res = await repo.list_users(limit=5, offset=0, search="Al", role=None)
assert any(u.username.lower().startswith("al") for u in res.users) or res.total >= 0
# invalid password reset (empty)
diff --git a/backend/tests/integration/db/repositories/test_dlq_repository.py b/backend/tests/integration/db/repositories/test_dlq_repository.py
index a2b06afb..07d3711f 100644
--- a/backend/tests/integration/db/repositories/test_dlq_repository.py
+++ b/backend/tests/integration/db/repositories/test_dlq_repository.py
@@ -1,12 +1,11 @@
-from datetime import datetime, timezone
import logging
+from datetime import datetime, timezone
import pytest
-
from app.db.docs import DLQMessageDocument
from app.db.repositories.dlq_repository import DLQRepository
-from app.domain.enums.events import EventType
from app.dlq import DLQMessageStatus
+from app.domain.enums.events import EventType
pytestmark = pytest.mark.integration
diff --git a/backend/tests/integration/db/repositories/test_event_repository.py b/backend/tests/integration/db/repositories/test_event_repository.py
index e1f9e192..feda7d47 100644
--- a/backend/tests/integration/db/repositories/test_event_repository.py
+++ b/backend/tests/integration/db/repositories/test_event_repository.py
@@ -1,20 +1,16 @@
-from datetime import datetime, timezone, timedelta
-import logging
+from datetime import datetime, timedelta, timezone
import pytest
-
from app.db.repositories.event_repository import EventRepository
-from app.domain.events.event_models import Event, EventFilter
+from app.domain.events.event_models import Event
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.event_repository")
-
@pytest.fixture()
-def repo(db) -> EventRepository: # type: ignore[valid-type]
- return EventRepository(db, logger=_test_logger)
+async def repo(scope) -> EventRepository: # type: ignore[valid-type]
+ return await scope.get(EventRepository)
def make_event(event_id: str, etype: str = "UserLoggedIn", user: str | None = "u1", agg: str | None = "agg1") -> Event:
@@ -54,9 +50,18 @@ async def test_store_get_and_queries(repo: EventRepository, db) -> None: # type
@pytest.mark.asyncio
async def test_statistics_and_search_and_delete(repo: EventRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {"event_id": "e3", "event_type": "C", "event_version": "1.0", "timestamp": now, "metadata": AvroEventMetadata(service_name="svc", service_version="1").to_dict(), "payload": {}},
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e3",
+ "event_type": "C",
+ "event_version": "1.0",
+ "timestamp": now,
+ "metadata": AvroEventMetadata(service_name="svc", service_version="1").model_dump(),
+ "payload": {},
+ },
+ ]
+ )
stats = await repo.get_event_statistics(start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
assert stats.total_events >= 1
diff --git a/backend/tests/integration/db/repositories/test_execution_repository.py b/backend/tests/integration/db/repositories/test_execution_repository.py
index 8fdaabd7..eb3bf2cb 100644
--- a/backend/tests/integration/db/repositories/test_execution_repository.py
+++ b/backend/tests/integration/db/repositories/test_execution_repository.py
@@ -1,7 +1,7 @@
import logging
-import pytest
from uuid import uuid4
+import pytest
from app.db.repositories.execution_repository import ExecutionRepository
from app.domain.enums.execution import ExecutionStatus
from app.domain.execution import DomainExecutionCreate, DomainExecutionUpdate
diff --git a/backend/tests/integration/db/repositories/test_notification_repository.py b/backend/tests/integration/db/repositories/test_notification_repository.py
index bbcda29e..83903557 100644
--- a/backend/tests/integration/db/repositories/test_notification_repository.py
+++ b/backend/tests/integration/db/repositories/test_notification_repository.py
@@ -1,10 +1,9 @@
-from datetime import datetime, UTC, timedelta
import logging
+from datetime import UTC, datetime, timedelta
import pytest
-
from app.db.repositories.notification_repository import NotificationRepository
-from app.domain.enums.notification import NotificationChannel, NotificationStatus, NotificationSeverity
+from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus
from app.domain.enums.notification import NotificationChannel as NC
from app.domain.enums.user import UserRole
from app.domain.notification import (
@@ -52,10 +51,32 @@ async def test_list_count_unread_and_pending(db) -> None: # type: ignore[no-unt
now = datetime.now(UTC)
# Seed notifications
- await db.get_collection("notifications").insert_many([
- {"notification_id": "n1", "user_id": "u1", "severity": NotificationSeverity.MEDIUM.value, "tags": ["execution"], "channel": NotificationChannel.IN_APP.value, "subject": "s", "body": "b", "status": NotificationStatus.PENDING.value, "created_at": now},
- {"notification_id": "n2", "user_id": "u1", "severity": NotificationSeverity.LOW.value, "tags": ["completed"], "channel": NotificationChannel.IN_APP.value, "subject": "s", "body": "b", "status": NotificationStatus.DELIVERED.value, "created_at": now},
- ])
+ await db.get_collection("notifications").insert_many(
+ [
+ {
+ "notification_id": "n1",
+ "user_id": "u1",
+ "severity": NotificationSeverity.MEDIUM.value,
+ "tags": ["execution"],
+ "channel": NotificationChannel.IN_APP.value,
+ "subject": "s",
+ "body": "b",
+ "status": NotificationStatus.PENDING.value,
+ "created_at": now,
+ },
+ {
+ "notification_id": "n2",
+ "user_id": "u1",
+ "severity": NotificationSeverity.LOW.value,
+ "tags": ["completed"],
+ "channel": NotificationChannel.IN_APP.value,
+ "subject": "s",
+ "body": "b",
+ "status": NotificationStatus.DELIVERED.value,
+ "created_at": now,
+ },
+ ]
+ )
lst = await repo.list_notifications("u1")
assert len(lst) >= 2
assert await repo.count_notifications("u1") >= 2
@@ -64,11 +85,20 @@ async def test_list_count_unread_and_pending(db) -> None: # type: ignore[no-unt
# Pending and scheduled
pending = await repo.find_pending_notifications()
assert any(n.status == NotificationStatus.PENDING for n in pending)
- await db.get_collection("notifications").insert_one({
- "notification_id": "n3", "user_id": "u1", "severity": NotificationSeverity.MEDIUM.value, "tags": ["execution"],
- "channel": NotificationChannel.IN_APP.value, "subject": "s", "body": "b", "status": NotificationStatus.PENDING.value,
- "created_at": now, "scheduled_for": now + timedelta(seconds=1)
- })
+ await db.get_collection("notifications").insert_one(
+ {
+ "notification_id": "n3",
+ "user_id": "u1",
+ "severity": NotificationSeverity.MEDIUM.value,
+ "tags": ["execution"],
+ "channel": NotificationChannel.IN_APP.value,
+ "subject": "s",
+ "body": "b",
+ "status": NotificationStatus.PENDING.value,
+ "created_at": now,
+ "scheduled_for": now + timedelta(seconds=1),
+ }
+ )
scheduled = await repo.find_scheduled_notifications()
assert isinstance(scheduled, list)
assert await repo.cleanup_old_notifications(days=0) >= 0
@@ -89,12 +119,32 @@ async def test_subscriptions_and_user_queries(db) -> None: # type: ignore[no-un
assert len(subs) == len(list(NC))
# Users by role and active users
- await db.get_collection("users").insert_many([
- {UserFields.USER_ID: "u1", UserFields.USERNAME: "A", UserFields.EMAIL: "a@e.com", UserFields.ROLE: "user", UserFields.IS_ACTIVE: True},
- {UserFields.USER_ID: "u2", UserFields.USERNAME: "B", UserFields.EMAIL: "b@e.com", UserFields.ROLE: "admin", UserFields.IS_ACTIVE: True},
- ])
+ await db.get_collection("users").insert_many(
+ [
+ {
+ UserFields.USER_ID: "u1",
+ UserFields.USERNAME: "A",
+ UserFields.EMAIL: "a@e.com",
+ UserFields.ROLE: "user",
+ UserFields.IS_ACTIVE: True,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.IS_SUPERUSER: False,
+ },
+ {
+ UserFields.USER_ID: "u2",
+ UserFields.USERNAME: "B",
+ UserFields.EMAIL: "b@e.com",
+ UserFields.ROLE: "admin",
+ UserFields.IS_ACTIVE: True,
+ UserFields.HASHED_PASSWORD: "h",
+ UserFields.IS_SUPERUSER: False,
+ },
+ ]
+ )
ids = await repo.get_users_by_roles([UserRole.USER])
assert "u1" in ids or isinstance(ids, list)
- await db.get_collection("executions").insert_one({"execution_id": "e1", "user_id": "u2", "created_at": datetime.now(UTC)})
+ await db.get_collection("executions").insert_one(
+ {"execution_id": "e1", "user_id": "u2", "created_at": datetime.now(UTC)}
+ )
active = await repo.get_active_users(days=1)
assert set(active) >= {"u2"} or isinstance(active, list)
diff --git a/backend/tests/integration/db/repositories/test_replay_repository.py b/backend/tests/integration/db/repositories/test_replay_repository.py
index 496b8649..7ab5bc72 100644
--- a/backend/tests/integration/db/repositories/test_replay_repository.py
+++ b/backend/tests/integration/db/repositories/test_replay_repository.py
@@ -1,8 +1,6 @@
from datetime import datetime, timezone
-import logging
import pytest
-
from app.db.repositories.replay_repository import ReplayRepository
from app.domain.admin.replay_updates import ReplaySessionUpdate
from app.domain.enums.replay import ReplayStatus, ReplayType
@@ -11,19 +9,19 @@
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.replay_repository")
-
@pytest.fixture()
-def repo(db) -> ReplayRepository: # type: ignore[valid-type]
- return ReplayRepository(db, logger=_test_logger)
+async def repo(scope) -> ReplayRepository: # type: ignore[valid-type]
+ return await scope.get(ReplayRepository)
@pytest.mark.asyncio
async def test_indexes_and_session_crud(repo: ReplayRepository) -> None:
await repo.create_indexes()
config = ReplayConfig(replay_type=ReplayType.EXECUTION, filter=ReplayFilter())
- session = ReplaySession(session_id="s1", status=ReplayStatus.CREATED, created_at=datetime.now(timezone.utc), config=config)
+ session = ReplaySession(
+ session_id="s1", status=ReplayStatus.CREATED, created_at=datetime.now(timezone.utc), config=config
+ )
await repo.save_session(session)
got = await repo.get_session("s1")
assert got and got.session_id == "s1"
@@ -38,11 +36,31 @@ async def test_indexes_and_session_crud(repo: ReplayRepository) -> None:
async def test_count_fetch_events_and_delete(repo: ReplayRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
# Insert events
- await db.get_collection("events").insert_many([
- {"event_id": "e1", "timestamp": now, "execution_id": "x1", "event_type": "T", "metadata": {"user_id": "u1"}},
- {"event_id": "e2", "timestamp": now, "execution_id": "x2", "event_type": "T", "metadata": {"user_id": "u1"}},
- {"event_id": "e3", "timestamp": now, "execution_id": "x3", "event_type": "U", "metadata": {"user_id": "u2"}},
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "event_id": "e1",
+ "timestamp": now,
+ "execution_id": "x1",
+ "event_type": "T",
+ "metadata": {"user_id": "u1"},
+ },
+ {
+ "event_id": "e2",
+ "timestamp": now,
+ "execution_id": "x2",
+ "event_type": "T",
+ "metadata": {"user_id": "u1"},
+ },
+ {
+ "event_id": "e3",
+ "timestamp": now,
+ "execution_id": "x3",
+ "event_type": "U",
+ "metadata": {"user_id": "u2"},
+ },
+ ]
+ )
cnt = await repo.count_events(ReplayFilter())
assert cnt >= 3
batches = []
diff --git a/backend/tests/integration/db/repositories/test_saga_repository.py b/backend/tests/integration/db/repositories/test_saga_repository.py
index c237d614..d78f82e0 100644
--- a/backend/tests/integration/db/repositories/test_saga_repository.py
+++ b/backend/tests/integration/db/repositories/test_saga_repository.py
@@ -1,27 +1,43 @@
from datetime import datetime, timezone
import pytest
-
from app.db.repositories.saga_repository import SagaRepository
from app.domain.enums.saga import SagaState
-from app.domain.saga.models import Saga, SagaFilter, SagaListResult
+from app.domain.saga.models import SagaFilter, SagaListResult
pytestmark = pytest.mark.integration
@pytest.fixture()
-def repo(db) -> SagaRepository: # type: ignore[valid-type]
- return SagaRepository(db)
+async def repo(scope) -> SagaRepository: # type: ignore[valid-type]
+ return await scope.get(SagaRepository)
@pytest.mark.asyncio
async def test_saga_crud_and_queries(repo: SagaRepository, db) -> None: # type: ignore[valid-type]
now = datetime.now(timezone.utc)
# Insert saga docs
- await db.get_collection("sagas").insert_many([
- {"saga_id": "s1", "saga_name": "test", "execution_id": "e1", "state": "running", "created_at": now, "updated_at": now},
- {"saga_id": "s2", "saga_name": "test2", "execution_id": "e2", "state": "completed", "created_at": now, "updated_at": now, "completed_at": now},
- ])
+ await db.get_collection("sagas").insert_many(
+ [
+ {
+ "saga_id": "s1",
+ "saga_name": "test",
+ "execution_id": "e1",
+ "state": "running",
+ "created_at": now,
+ "updated_at": now,
+ },
+ {
+ "saga_id": "s2",
+ "saga_name": "test2",
+ "execution_id": "e2",
+ "state": "completed",
+ "created_at": now,
+ "updated_at": now,
+ "completed_at": now,
+ },
+ ]
+ )
saga = await repo.get_saga("s1")
assert saga and saga.saga_id == "s1"
lst = await repo.get_sagas_by_execution("e1")
@@ -34,10 +50,12 @@ async def test_saga_crud_and_queries(repo: SagaRepository, db) -> None: # type:
assert await repo.update_saga_state("s1", SagaState.COMPLETED) in (True, False)
# user execution ids
- await db.get_collection("executions").insert_many([
- {"execution_id": "e1", "user_id": "u1"},
- {"execution_id": "e2", "user_id": "u1"},
- ])
+ await db.get_collection("executions").insert_many(
+ [
+ {"execution_id": "e1", "user_id": "u1"},
+ {"execution_id": "e2", "user_id": "u1"},
+ ]
+ )
ids = await repo.get_user_execution_ids("u1")
assert set(ids) == {"e1", "e2"}
diff --git a/backend/tests/integration/db/repositories/test_saved_script_repository.py b/backend/tests/integration/db/repositories/test_saved_script_repository.py
index 73f2d64f..85fc2b58 100644
--- a/backend/tests/integration/db/repositories/test_saved_script_repository.py
+++ b/backend/tests/integration/db/repositories/test_saved_script_repository.py
@@ -1,14 +1,17 @@
import pytest
-
from app.db.repositories.saved_script_repository import SavedScriptRepository
from app.domain.saved_script import DomainSavedScriptCreate, DomainSavedScriptUpdate
pytestmark = pytest.mark.integration
+@pytest.fixture()
+async def repo(scope) -> SavedScriptRepository: # type: ignore[valid-type]
+ return await scope.get(SavedScriptRepository)
+
+
@pytest.mark.asyncio
-async def test_create_get_update_delete_saved_script(db) -> None: # type: ignore[valid-type]
- repo = SavedScriptRepository(db)
+async def test_create_get_update_delete_saved_script(repo: SavedScriptRepository) -> None:
create = DomainSavedScriptCreate(name="n", lang="python", lang_version="3.11", description=None, script="print(1)")
created = await repo.create_saved_script(create, user_id="u1")
assert created.user_id == "u1" and created.script == "print(1)"
diff --git a/backend/tests/integration/db/repositories/test_sse_repository.py b/backend/tests/integration/db/repositories/test_sse_repository.py
index bd7556a4..b196ba43 100644
--- a/backend/tests/integration/db/repositories/test_sse_repository.py
+++ b/backend/tests/integration/db/repositories/test_sse_repository.py
@@ -1,14 +1,17 @@
import pytest
-
from app.db.repositories.sse_repository import SSERepository
from app.domain.enums.execution import ExecutionStatus
pytestmark = pytest.mark.integration
+@pytest.fixture()
+async def repo(scope) -> SSERepository: # type: ignore[valid-type]
+ return await scope.get(SSERepository)
+
+
@pytest.mark.asyncio
-async def test_get_execution_status(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
+async def test_get_execution_status(repo: SSERepository, db) -> None: # type: ignore[valid-type]
await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "running"})
status = await repo.get_execution_status("e1")
assert status is not None
@@ -17,25 +20,18 @@ async def test_get_execution_status(db) -> None: # type: ignore[valid-type]
@pytest.mark.asyncio
-async def test_get_execution_status_none(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
+async def test_get_execution_status_none(repo: SSERepository, db) -> None: # type: ignore[valid-type]
assert await repo.get_execution_status("missing") is None
@pytest.mark.asyncio
-async def test_get_execution(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
- await db.get_collection("executions").insert_one({
- "execution_id": "e1",
- "status": "queued",
- "resource_usage": {}
- })
+async def test_get_execution(repo: SSERepository, db) -> None: # type: ignore[valid-type]
+ await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "queued", "resource_usage": {}})
doc = await repo.get_execution("e1")
assert doc is not None
assert doc.execution_id == "e1"
@pytest.mark.asyncio
-async def test_get_execution_not_found(db) -> None: # type: ignore[valid-type]
- repo = SSERepository(db)
+async def test_get_execution_not_found(repo: SSERepository, db) -> None: # type: ignore[valid-type]
assert await repo.get_execution("missing") is None
diff --git a/backend/tests/integration/db/repositories/test_user_repository.py b/backend/tests/integration/db/repositories/test_user_repository.py
index fca2873e..12f86725 100644
--- a/backend/tests/integration/db/repositories/test_user_repository.py
+++ b/backend/tests/integration/db/repositories/test_user_repository.py
@@ -1,17 +1,21 @@
-import pytest
from datetime import datetime, timezone
+import pytest
from app.db.repositories.user_repository import UserRepository
-from app.domain.user.user_models import User as DomainUser, UserUpdate
from app.domain.enums.user import UserRole
+from app.domain.user.user_models import User as DomainUser
+from app.domain.user.user_models import UserUpdate
pytestmark = pytest.mark.integration
-@pytest.mark.asyncio
-async def test_create_get_update_delete_user(db) -> None: # type: ignore[valid-type]
- repo = UserRepository(db)
+@pytest.fixture()
+async def repo(scope) -> UserRepository: # type: ignore[valid-type]
+ return await scope.get(UserRepository)
+
+@pytest.mark.asyncio
+async def test_create_get_update_delete_user(repo: UserRepository) -> None:
# Create user
user = DomainUser(
user_id="", # let repo assign
diff --git a/backend/tests/integration/db/repositories/test_user_settings_repository.py b/backend/tests/integration/db/repositories/test_user_settings_repository.py
index 8b647eee..83bf6a6e 100644
--- a/backend/tests/integration/db/repositories/test_user_settings_repository.py
+++ b/backend/tests/integration/db/repositories/test_user_settings_repository.py
@@ -1,21 +1,20 @@
-from datetime import datetime, timezone, timedelta
-import logging
+from datetime import datetime, timedelta, timezone
import pytest
-
from app.db.repositories.user_settings_repository import UserSettingsRepository
from app.domain.enums.events import EventType
from app.domain.user.settings_models import DomainUserSettings
pytestmark = pytest.mark.integration
-_test_logger = logging.getLogger("test.db.repositories.user_settings_repository")
+@pytest.fixture()
+async def repo(scope) -> UserSettingsRepository: # type: ignore[valid-type]
+ return await scope.get(UserSettingsRepository)
-@pytest.mark.asyncio
-async def test_user_settings_snapshot_and_events(db) -> None: # type: ignore[valid-type]
- repo = UserSettingsRepository(db, logger=_test_logger)
+@pytest.mark.asyncio
+async def test_user_settings_snapshot_and_events(repo: UserSettingsRepository, db) -> None: # type: ignore[valid-type]
# Create indexes (should not raise)
await repo.create_indexes()
@@ -27,20 +26,22 @@ async def test_user_settings_snapshot_and_events(db) -> None: # type: ignore[va
# Insert events and query
now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many([
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_SETTINGS_UPDATED),
- "timestamp": now,
- "payload": {}
- },
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_THEME_CHANGED),
- "timestamp": now,
- "payload": {}
- },
- ])
+ await db.get_collection("events").insert_many(
+ [
+ {
+ "aggregate_id": "user_settings_u1",
+ "event_type": str(EventType.USER_SETTINGS_UPDATED),
+ "timestamp": now,
+ "payload": {},
+ },
+ {
+ "aggregate_id": "user_settings_u1",
+ "event_type": str(EventType.USER_THEME_CHANGED),
+ "timestamp": now,
+ "payload": {},
+ },
+ ]
+ )
evs = await repo.get_settings_events("u1", [EventType.USER_SETTINGS_UPDATED], since=now - timedelta(days=1))
assert any(e.event_type == EventType.USER_SETTINGS_UPDATED for e in evs)
diff --git a/backend/tests/integration/events/test_admin_utils.py b/backend/tests/integration/events/test_admin_utils.py
index 689fa705..7ab34509 100644
--- a/backend/tests/integration/events/test_admin_utils.py
+++ b/backend/tests/integration/events/test_admin_utils.py
@@ -2,7 +2,6 @@
import os
import pytest
-
from app.events.admin_utils import AdminUtils
_test_logger = logging.getLogger("test.events.admin_utils")
@@ -12,7 +11,7 @@
@pytest.mark.asyncio
async def test_admin_utils_real_topic_checks() -> None:
prefix = os.environ.get("KAFKA_TOPIC_PREFIX", "test.")
- topic = f"{prefix}adminutils.{os.environ.get('PYTEST_SESSION_ID','sid')}"
+ topic = f"{prefix}adminutils.{os.environ.get('PYTEST_SESSION_ID', 'sid')}"
au = AdminUtils(logger=_test_logger)
# Ensure topic exists (idempotent)
diff --git a/backend/tests/integration/events/test_consume_roundtrip.py b/backend/tests/integration/events/test_consume_roundtrip.py
index 604bdbd9..185196e5 100644
--- a/backend/tests/integration/events/test_consume_roundtrip.py
+++ b/backend/tests/integration/events/test_consume_roundtrip.py
@@ -3,17 +3,15 @@
import uuid
import pytest
-
from app.domain.enums.events import EventType
+from app.domain.enums.kafka import KafkaTopic
from app.events.core import UnifiedConsumer, UnifiedProducer
+from app.events.core.dispatcher import EventDispatcher
from app.events.core.types import ConsumerConfig
from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas
-from app.domain.enums.kafka import KafkaTopic
-from tests.helpers import make_execution_requested_event
-from app.core.metrics.context import get_event_metrics
-from app.events.core.dispatcher import EventDispatcher
from app.settings import get_settings
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
diff --git a/backend/tests/integration/events/test_consumer_group_monitor.py b/backend/tests/integration/events/test_consumer_group_monitor.py
index 150e1236..cfab3017 100644
--- a/backend/tests/integration/events/test_consumer_group_monitor.py
+++ b/backend/tests/integration/events/test_consumer_group_monitor.py
@@ -1,7 +1,7 @@
import logging
-import pytest
-from app.events.consumer_group_monitor import NativeConsumerGroupMonitor, ConsumerGroupHealth
+import pytest
+from app.events.consumer_group_monitor import ConsumerGroupHealth, NativeConsumerGroupMonitor
_test_logger = logging.getLogger("test.events.consumer_group_monitor")
diff --git a/backend/tests/integration/events/test_consumer_group_monitor_e2e.py b/backend/tests/integration/events/test_consumer_group_monitor_e2e.py
index b901c6e0..1be58358 100644
--- a/backend/tests/integration/events/test_consumer_group_monitor_e2e.py
+++ b/backend/tests/integration/events/test_consumer_group_monitor_e2e.py
@@ -1,16 +1,13 @@
-import asyncio
import logging
from uuid import uuid4
import pytest
-
from app.events.consumer_group_monitor import (
ConsumerGroupHealth,
ConsumerGroupStatus,
NativeConsumerGroupMonitor,
)
-
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
_test_logger = logging.getLogger("test.events.consumer_group_monitor_e2e")
@@ -34,8 +31,16 @@ def test_assess_group_health_branches():
m = NativeConsumerGroupMonitor(logger=_test_logger)
# Error state
s = ConsumerGroupStatus(
- group_id="g", state="ERROR", protocol="p", protocol_type="ptype", coordinator="c",
- members=[], member_count=0, assigned_partitions=0, partition_distribution={}, total_lag=0
+ group_id="g",
+ state="ERROR",
+ protocol="p",
+ protocol_type="ptype",
+ coordinator="c",
+ members=[],
+ member_count=0,
+ assigned_partitions=0,
+ partition_distribution={},
+ total_lag=0,
)
h, msg = m._assess_group_health(s) # noqa: SLF001
assert h is ConsumerGroupHealth.UNHEALTHY and "error" in msg.lower()
diff --git a/backend/tests/integration/events/test_dlq_handler.py b/backend/tests/integration/events/test_dlq_handler.py
index 3e4d0e18..5659529b 100644
--- a/backend/tests/integration/events/test_dlq_handler.py
+++ b/backend/tests/integration/events/test_dlq_handler.py
@@ -1,9 +1,7 @@
import logging
import pytest
-
-from app.events.core import create_dlq_error_handler, create_immediate_dlq_handler
-from app.events.core import UnifiedProducer
+from app.events.core import UnifiedProducer, create_dlq_error_handler, create_immediate_dlq_handler
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.saga import SagaStartedEvent
@@ -22,8 +20,13 @@ async def _record_send_to_dlq(original_event, original_topic, error, retry_count
monkeypatch.setattr(p, "send_to_dlq", _record_send_to_dlq)
h = create_dlq_error_handler(p, original_topic="t", max_retries=2, logger=_test_logger)
- e = SagaStartedEvent(saga_id="s", saga_name="n", execution_id="x", initial_event_id="i",
- metadata=AvroEventMetadata(service_name="a", service_version="1"))
+ e = SagaStartedEvent(
+ saga_id="s",
+ saga_name="n",
+ execution_id="x",
+ initial_event_id="i",
+ metadata=AvroEventMetadata(service_name="a", service_version="1"),
+ )
# Call 1 and 2 should not send to DLQ
await h(RuntimeError("boom"), e)
await h(RuntimeError("boom"), e)
@@ -44,7 +47,12 @@ async def _record_send_to_dlq(original_event, original_topic, error, retry_count
monkeypatch.setattr(p, "send_to_dlq", _record_send_to_dlq)
h = create_immediate_dlq_handler(p, original_topic="t", logger=_test_logger)
- e = SagaStartedEvent(saga_id="s2", saga_name="n", execution_id="x", initial_event_id="i",
- metadata=AvroEventMetadata(service_name="a", service_version="1"))
+ e = SagaStartedEvent(
+ saga_id="s2",
+ saga_name="n",
+ execution_id="x",
+ initial_event_id="i",
+ metadata=AvroEventMetadata(service_name="a", service_version="1"),
+ )
await h(RuntimeError("x"), e)
assert calls and calls[0][3] == 0
diff --git a/backend/tests/integration/events/test_event_dispatcher.py b/backend/tests/integration/events/test_event_dispatcher.py
index c79ef290..c88e3fa6 100644
--- a/backend/tests/integration/events/test_event_dispatcher.py
+++ b/backend/tests/integration/events/test_event_dispatcher.py
@@ -3,16 +3,15 @@
import uuid
import pytest
-
from app.domain.enums.events import EventType
from app.domain.enums.kafka import KafkaTopic
from app.events.core import UnifiedConsumer, UnifiedProducer
from app.events.core.dispatcher import EventDispatcher
from app.events.core.types import ConsumerConfig
from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas
-from tests.helpers import make_execution_requested_event
from app.settings import get_settings
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
diff --git a/backend/tests/integration/events/test_event_store.py b/backend/tests/integration/events/test_event_store.py
index 49822e57..470beb91 100644
--- a/backend/tests/integration/events/test_event_store.py
+++ b/backend/tests/integration/events/test_event_store.py
@@ -1,27 +1,17 @@
-from datetime import datetime, timezone, timedelta
-import logging
+from datetime import datetime, timedelta, timezone
import pytest
-
from app.events.event_store import EventStore
-from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.pod import PodCreatedEvent
from app.infrastructure.kafka.events.user import UserLoggedInEvent
-from app.core.database_context import Database
pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-_test_logger = logging.getLogger("test.events.event_store")
-
@pytest.fixture()
async def event_store(scope) -> EventStore: # type: ignore[valid-type]
- db: Database = await scope.get(Database)
- schema_registry: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
- store = EventStore(db=db, schema_registry=schema_registry, logger=_test_logger)
- await store.initialize()
- return store
+ return await scope.get(EventStore)
@pytest.mark.asyncio
@@ -57,8 +47,9 @@ async def test_store_and_query_events(event_store: EventStore) -> None:
@pytest.mark.asyncio
async def test_replay_events(event_store: EventStore) -> None:
- ev = UserLoggedInEvent(user_id="u1", login_method="password",
- metadata=AvroEventMetadata(service_name="svc", service_version="1"))
+ ev = UserLoggedInEvent(
+ user_id="u1", login_method="password", metadata=AvroEventMetadata(service_name="svc", service_version="1")
+ )
await event_store.store_event(ev)
called = {"n": 0}
diff --git a/backend/tests/integration/events/test_event_store_consumer.py b/backend/tests/integration/events/test_event_store_consumer.py
index 25bea134..111d6fe2 100644
--- a/backend/tests/integration/events/test_event_store_consumer.py
+++ b/backend/tests/integration/events/test_event_store_consumer.py
@@ -1,19 +1,15 @@
-import asyncio
import logging
import uuid
import pytest
-
from app.core.database_context import Database
-
from app.domain.enums.kafka import KafkaTopic
from app.events.core import UnifiedProducer
-from app.events.event_store_consumer import EventStoreConsumer, create_event_store_consumer
from app.events.event_store import EventStore
+from app.events.event_store_consumer import EventStoreConsumer, create_event_store_consumer
from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas
-from app.infrastructure.kafka.events.user import UserLoggedInEvent
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-
+from app.infrastructure.kafka.events.user import UserLoggedInEvent
pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb]
diff --git a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py b/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
index 458ab3eb..38610f51 100644
--- a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
+++ b/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
@@ -1,37 +1,36 @@
-import asyncio
-import logging
from uuid import uuid4
import pytest
from app.core.database_context import Database
-
from app.domain.enums.events import EventType
from app.domain.enums.kafka import KafkaTopic
+from app.events.core import UnifiedProducer
from app.events.event_store import EventStore
from app.events.event_store_consumer import create_event_store_consumer
-from app.events.core import UnifiedProducer
from app.events.schema.schema_registry import SchemaRegistryManager
+
from tests.helpers import make_execution_requested_event
from tests.helpers.eventually import eventually
pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb]
-_test_logger = logging.getLogger("test.events.event_store_consumer_flush_e2e")
+
+@pytest.fixture()
+async def store(scope) -> EventStore: # type: ignore[valid-type]
+ return await scope.get(EventStore)
@pytest.mark.asyncio
-async def test_event_store_consumer_flush_on_timeout(scope): # type: ignore[valid-type]
+async def test_event_store_consumer_flush_on_timeout(scope, store: EventStore) -> None: # type: ignore[valid-type]
producer: UnifiedProducer = await scope.get(UnifiedProducer)
schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
db: Database = await scope.get(Database)
- store = EventStore(db=db, schema_registry=schema, logger=_test_logger)
- await store.initialize()
consumer = create_event_store_consumer(
event_store=store,
topics=[KafkaTopic.EXECUTION_EVENTS],
schema_registry_manager=schema,
- logger=_test_logger,
+ logger=store.logger,
producer=producer,
batch_size=100,
batch_timeout_seconds=0.2,
diff --git a/backend/tests/integration/events/test_event_store_e2e.py b/backend/tests/integration/events/test_event_store_e2e.py
index 7ad8c583..25f5be90 100644
--- a/backend/tests/integration/events/test_event_store_e2e.py
+++ b/backend/tests/integration/events/test_event_store_e2e.py
@@ -1,27 +1,19 @@
-from datetime import datetime, timezone, timedelta
-import logging
-
import pytest
-from app.core.database_context import Database
-
from app.domain.enums.events import EventType
from app.events.event_store import EventStore
-from app.events.schema.schema_registry import SchemaRegistryManager
-from tests.helpers import make_execution_requested_event
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-_test_logger = logging.getLogger("test.events.event_store_e2e")
+@pytest.fixture()
+async def store(scope) -> EventStore: # type: ignore[valid-type]
+ return await scope.get(EventStore)
-@pytest.mark.asyncio
-async def test_event_store_initialize_and_crud(scope): # type: ignore[valid-type]
- schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
- db: Database = await scope.get(Database)
- store = EventStore(db=db, schema_registry=schema, ttl_days=1, logger=_test_logger)
- await store.initialize()
+@pytest.mark.asyncio
+async def test_event_store_initialize_and_crud(store: EventStore) -> None:
# Store single event
ev = make_execution_requested_event(execution_id="e-1")
assert await store.store_event(ev) is True
diff --git a/backend/tests/integration/events/test_producer_e2e.py b/backend/tests/integration/events/test_producer_e2e.py
index 898042bb..eedbfaa0 100644
--- a/backend/tests/integration/events/test_producer_e2e.py
+++ b/backend/tests/integration/events/test_producer_e2e.py
@@ -1,14 +1,12 @@
-import asyncio
import json
import logging
from uuid import uuid4
import pytest
-
-from app.events.core import UnifiedProducer, ProducerConfig
+from app.events.core import ProducerConfig, UnifiedProducer
from app.events.schema.schema_registry import SchemaRegistryManager
-from tests.helpers import make_execution_requested_event
+from tests.helpers import make_execution_requested_event
pytestmark = [pytest.mark.integration, pytest.mark.kafka]
@@ -36,17 +34,14 @@ async def test_unified_producer_start_produce_send_to_dlq_stop(scope): # type:
def test_producer_handle_stats_path():
# Directly run stats parsing to cover branch logic; avoid relying on timing
- from app.events.core.producer import UnifiedProducer as UP, ProducerMetrics
+ from app.events.core.producer import ProducerMetrics
+ from app.events.core.producer import UnifiedProducer as UP
+
m = ProducerMetrics()
p = object.__new__(UP) # bypass __init__ safely for method call
# Inject required attributes
p._metrics = m # type: ignore[attr-defined]
p._stats_callback = None # type: ignore[attr-defined]
- payload = json.dumps({
- "msg_cnt": 1,
- "topics": {
- "t": {"partitions": {"0": {"msgq_cnt": 2, "rtt": {"avg": 5}}}}
- }
- })
+ payload = json.dumps({"msg_cnt": 1, "topics": {"t": {"partitions": {"0": {"msgq_cnt": 2, "rtt": {"avg": 5}}}}}})
UP._handle_stats(p, payload) # type: ignore[misc]
assert m.queue_size == 1 and m.avg_latency_ms > 0
diff --git a/backend/tests/integration/events/test_schema_registry_e2e.py b/backend/tests/integration/events/test_schema_registry_e2e.py
index 2a8df443..44c5f827 100644
--- a/backend/tests/integration/events/test_schema_registry_e2e.py
+++ b/backend/tests/integration/events/test_schema_registry_e2e.py
@@ -1,13 +1,10 @@
-import asyncio
import logging
-import struct
import pytest
+from app.events.schema.schema_registry import MAGIC_BYTE, SchemaRegistryManager
-from app.events.schema.schema_registry import SchemaRegistryManager, MAGIC_BYTE
from tests.helpers import make_execution_requested_event
-
pytestmark = [pytest.mark.integration]
_test_logger = logging.getLogger("test.events.schema_registry_e2e")
diff --git a/backend/tests/integration/events/test_schema_registry_real.py b/backend/tests/integration/events/test_schema_registry_real.py
index dedf3264..895f109d 100644
--- a/backend/tests/integration/events/test_schema_registry_real.py
+++ b/backend/tests/integration/events/test_schema_registry_real.py
@@ -1,6 +1,6 @@
import logging
-import pytest
+import pytest
from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.metadata import AvroEventMetadata
from app.infrastructure.kafka.events.pod import PodCreatedEvent
From 527da5c2f93a04601827a619d26208305967777f Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 00:46:31 +0100
Subject: [PATCH 41/48] removed outdated tests
---
.../test_admin_events_repository.py | 109 -------------
.../test_admin_user_repository.py | 112 -------------
.../db/repositories/test_event_repository.py | 74 ---------
.../test_notification_repository.py | 150 ------------------
.../db/repositories/test_replay_repository.py | 71 ---------
.../db/repositories/test_saga_repository.py | 66 --------
.../db/repositories/test_sse_repository.py | 37 -----
.../db/repositories/test_user_repository.py | 53 -------
.../test_user_settings_repository.py | 50 ------
.../integration/events/test_event_store.py | 62 --------
.../test_event_store_consumer_flush_e2e.py | 55 -------
.../events/test_event_store_e2e.py | 40 -----
.../events/test_event_service_integration.py | 63 --------
13 files changed, 942 deletions(-)
delete mode 100644 backend/tests/integration/db/repositories/test_admin_events_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_admin_user_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_event_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_notification_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_replay_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_saga_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_sse_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_user_repository.py
delete mode 100644 backend/tests/integration/db/repositories/test_user_settings_repository.py
delete mode 100644 backend/tests/integration/events/test_event_store.py
delete mode 100644 backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
delete mode 100644 backend/tests/integration/events/test_event_store_e2e.py
delete mode 100644 backend/tests/integration/services/events/test_event_service_integration.py
diff --git a/backend/tests/integration/db/repositories/test_admin_events_repository.py b/backend/tests/integration/db/repositories/test_admin_events_repository.py
deleted file mode 100644
index de46ca1b..00000000
--- a/backend/tests/integration/db/repositories/test_admin_events_repository.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.db.docs.replay import ReplayConfig, ReplayFilter
-from app.db.repositories.admin.admin_events_repository import AdminEventsRepository
-from app.domain.admin import ReplayQuery
-from app.domain.admin.replay_updates import ReplaySessionUpdate
-from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
-from app.domain.events.event_models import Event, EventFilter, EventStatistics
-from app.domain.replay.models import ReplaySessionState
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> AdminEventsRepository: # type: ignore[valid-type]
- return await scope.get(AdminEventsRepository)
-
-
-@pytest.mark.asyncio
-async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e1",
- "event_type": "X",
- "timestamp": now,
- "metadata": AvroEventMetadata(
- service_name="svc", service_version="1", correlation_id="c1"
- ).model_dump(),
- },
- {
- "event_id": "e2",
- "event_type": "X",
- "timestamp": now,
- "metadata": AvroEventMetadata(
- service_name="svc", service_version="1", correlation_id="c1"
- ).model_dump(),
- },
- ]
- )
- res = await repo.browse_events(EventFilter())
- assert res.total >= 2
- detail = await repo.get_event_detail("e1")
- assert detail and detail.event.event_id == "e1"
- assert await repo.delete_event("e2") is True
- rows = await repo.export_events_csv(EventFilter())
- assert isinstance(rows, list) and len(rows) >= 1
-
-
-@pytest.mark.asyncio
-async def test_event_stats_and_archive(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e10",
- "event_type": "step.completed",
- "timestamp": now,
- "metadata": AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").model_dump(),
- },
- ]
- )
- await db.get_collection("executions").insert_one(
- {"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}}
- )
- stats = await repo.get_event_stats(hours=1)
- assert isinstance(stats, EventStatistics)
- ev = Event(
- event_id="a1",
- event_type="X",
- event_version="1.0",
- timestamp=now,
- metadata=AvroEventMetadata(service_name="s", service_version="1"),
- payload={},
- )
- assert await repo.archive_event(ev, deleted_by="admin") is True
-
-
-@pytest.mark.asyncio
-async def test_replay_session_flow_and_helpers(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type]
- # create/get/update
- config = ReplayConfig(
- replay_type=ReplayType.QUERY,
- target=ReplayTarget.TEST,
- filter=ReplayFilter(),
- )
- session = ReplaySessionState(
- session_id="s1",
- config=config,
- status=ReplayStatus.SCHEDULED,
- total_events=1,
- correlation_id="corr",
- created_at=datetime.now(timezone.utc) - timedelta(seconds=5),
- dry_run=False,
- )
- sid = await repo.create_replay_session(session)
- assert sid == "s1"
- got = await repo.get_replay_session("s1")
- assert got and got.session_id == "s1"
- session_update = ReplaySessionUpdate(status=ReplayStatus.RUNNING)
- assert await repo.update_replay_session("s1", session_update) is True
- detail = await repo.get_replay_status_with_progress("s1")
- assert detail and detail.session.session_id == "s1"
- assert await repo.count_events_for_replay(ReplayQuery()) >= 0
- prev = await repo.get_replay_events_preview(event_ids=["e10"]) # from earlier insert
- assert isinstance(prev, dict)
diff --git a/backend/tests/integration/db/repositories/test_admin_user_repository.py b/backend/tests/integration/db/repositories/test_admin_user_repository.py
deleted file mode 100644
index e65658aa..00000000
--- a/backend/tests/integration/db/repositories/test_admin_user_repository.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.core.security import SecurityService
-from app.db.repositories.admin.admin_user_repository import AdminUserRepository
-from app.domain.user import PasswordReset, UserFields, UserUpdate
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> AdminUserRepository: # type: ignore[valid-type]
- return await scope.get(AdminUserRepository)
-
-
-@pytest.mark.asyncio
-async def test_list_and_get_user(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
- # Insert a user
- await db.get_collection("users").insert_one(
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "alice",
- UserFields.EMAIL: "alice@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- }
- )
- res = await repo.list_users(limit=10)
- assert res.total >= 1 and any(u.username == "alice" for u in res.users)
- user = await repo.get_user_by_id("u1")
- assert user and user.user_id == "u1"
-
-
-@pytest.mark.asyncio
-async def test_update_delete_and_reset_password(repo: AdminUserRepository, db, monkeypatch: pytest.MonkeyPatch) -> None: # type: ignore[valid-type]
- # Insert base user
- await db.get_collection("users").insert_one(
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- }
- )
- # No updates → returns current
- updated = await repo.update_user("u1", UserUpdate())
- assert updated and updated.user_id == "u1"
- # Delete cascade (collections empty → zeros)
- deleted = await repo.delete_user("u1", cascade=True)
- assert deleted["user"] in (0, 1)
- # Re-insert and reset password
- await db.get_collection("users").insert_one(
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "bob",
- UserFields.EMAIL: "bob@example.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- }
- )
- monkeypatch.setattr(SecurityService, "get_password_hash", staticmethod(lambda p: "HASHED"))
- pr = PasswordReset(user_id="u1", new_password="secret123")
- assert await repo.reset_user_password(pr) is True
-
-
-@pytest.mark.asyncio
-async def test_list_with_filters_and_reset_invalid(repo: AdminUserRepository, db) -> None: # type: ignore[valid-type]
- # Insert a couple of users
- await db.get_collection("users").insert_many(
- [
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "Alice",
- UserFields.EMAIL: "a@e.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: False,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- },
- {
- UserFields.USER_ID: "u2",
- UserFields.USERNAME: "Bob",
- UserFields.EMAIL: "b@e.com",
- UserFields.ROLE: "admin",
- UserFields.IS_ACTIVE: True,
- UserFields.IS_SUPERUSER: True,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.CREATED_AT: datetime.now(timezone.utc),
- UserFields.UPDATED_AT: datetime.now(timezone.utc),
- },
- ]
- )
- res = await repo.list_users(limit=5, offset=0, search="Al", role=None)
- assert any(u.username.lower().startswith("al") for u in res.users) or res.total >= 0
- # invalid password reset (empty)
- with pytest.raises(ValueError):
- await repo.reset_user_password(PasswordReset(user_id="u1", new_password=""))
diff --git a/backend/tests/integration/db/repositories/test_event_repository.py b/backend/tests/integration/db/repositories/test_event_repository.py
deleted file mode 100644
index feda7d47..00000000
--- a/backend/tests/integration/db/repositories/test_event_repository.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.db.repositories.event_repository import EventRepository
-from app.domain.events.event_models import Event
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> EventRepository: # type: ignore[valid-type]
- return await scope.get(EventRepository)
-
-
-def make_event(event_id: str, etype: str = "UserLoggedIn", user: str | None = "u1", agg: str | None = "agg1") -> Event:
- return Event(
- event_id=event_id,
- event_type=etype,
- event_version="1.0",
- timestamp=datetime.now(timezone.utc),
- metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id=user, correlation_id="c1"),
- payload={"k": 1, "execution_id": agg} if agg else {"k": 1},
- aggregate_id=agg,
- )
-
-
-@pytest.mark.asyncio
-async def test_store_get_and_queries(repo: EventRepository, db) -> None: # type: ignore[valid-type]
- e1 = make_event("e1", etype="A", agg="x1")
- e2 = make_event("e2", etype="B", agg="x2")
- await repo.store_event(e1)
- await repo.store_events_batch([e2])
- got = await repo.get_event("e1")
- assert got and got.event_id == "e1"
-
- now = datetime.now(timezone.utc)
- by_type = await repo.get_events_by_type("A", start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
- assert any(ev.event_id == "e1" for ev in by_type)
- by_agg = await repo.get_events_by_aggregate("x2")
- assert any(ev.event_id == "e2" for ev in by_agg)
- by_corr = await repo.get_events_by_correlation("c1")
- assert len(by_corr.events) >= 2
- by_user = await repo.get_events_by_user("u1", limit=10)
- assert len(by_user) >= 2
- exec_events = await repo.get_execution_events("x1")
- assert any(ev.event_id == "e1" for ev in exec_events.events)
-
-
-@pytest.mark.asyncio
-async def test_statistics_and_search_and_delete(repo: EventRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e3",
- "event_type": "C",
- "event_version": "1.0",
- "timestamp": now,
- "metadata": AvroEventMetadata(service_name="svc", service_version="1").model_dump(),
- "payload": {},
- },
- ]
- )
- stats = await repo.get_event_statistics(start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
- assert stats.total_events >= 1
-
- # search requires text index; guard if index not present
- try:
- res = await repo.search_events("test", filters=None, limit=10, skip=0)
- assert isinstance(res, list)
- except Exception:
- # Accept environments without text index
- pass
diff --git a/backend/tests/integration/db/repositories/test_notification_repository.py b/backend/tests/integration/db/repositories/test_notification_repository.py
deleted file mode 100644
index 83903557..00000000
--- a/backend/tests/integration/db/repositories/test_notification_repository.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import logging
-from datetime import UTC, datetime, timedelta
-
-import pytest
-from app.db.repositories.notification_repository import NotificationRepository
-from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus
-from app.domain.enums.notification import NotificationChannel as NC
-from app.domain.enums.user import UserRole
-from app.domain.notification import (
- DomainNotificationCreate,
- DomainNotificationUpdate,
- DomainSubscriptionUpdate,
-)
-from app.domain.user import UserFields
-
-pytestmark = pytest.mark.integration
-
-_test_logger = logging.getLogger("test.db.repositories.notification_repository")
-
-
-@pytest.mark.asyncio
-async def test_create_and_crud() -> None:
- repo = NotificationRepository(logger=_test_logger)
-
- create_data = DomainNotificationCreate(
- user_id="u1",
- channel=NotificationChannel.IN_APP,
- subject="sub",
- body="body",
- severity=NotificationSeverity.MEDIUM,
- tags=["execution", "completed"],
- )
- n = await repo.create_notification(create_data)
- assert n.notification_id
-
- # Update
- update_data = DomainNotificationUpdate(status=NotificationStatus.DELIVERED)
- assert await repo.update_notification(n.notification_id, n.user_id, update_data) is True
-
- got = await repo.get_notification(n.notification_id, n.user_id)
- assert got and got.notification_id == n.notification_id and got.status == NotificationStatus.DELIVERED
-
- assert await repo.mark_as_read(n.notification_id, n.user_id) is True
- assert await repo.mark_all_as_read(n.user_id) >= 0
- assert await repo.delete_notification(n.notification_id, n.user_id) is True
-
-
-@pytest.mark.asyncio
-async def test_list_count_unread_and_pending(db) -> None: # type: ignore[no-untyped-def]
- repo = NotificationRepository(logger=_test_logger)
- now = datetime.now(UTC)
-
- # Seed notifications
- await db.get_collection("notifications").insert_many(
- [
- {
- "notification_id": "n1",
- "user_id": "u1",
- "severity": NotificationSeverity.MEDIUM.value,
- "tags": ["execution"],
- "channel": NotificationChannel.IN_APP.value,
- "subject": "s",
- "body": "b",
- "status": NotificationStatus.PENDING.value,
- "created_at": now,
- },
- {
- "notification_id": "n2",
- "user_id": "u1",
- "severity": NotificationSeverity.LOW.value,
- "tags": ["completed"],
- "channel": NotificationChannel.IN_APP.value,
- "subject": "s",
- "body": "b",
- "status": NotificationStatus.DELIVERED.value,
- "created_at": now,
- },
- ]
- )
- lst = await repo.list_notifications("u1")
- assert len(lst) >= 2
- assert await repo.count_notifications("u1") >= 2
- assert await repo.get_unread_count("u1") >= 0
-
- # Pending and scheduled
- pending = await repo.find_pending_notifications()
- assert any(n.status == NotificationStatus.PENDING for n in pending)
- await db.get_collection("notifications").insert_one(
- {
- "notification_id": "n3",
- "user_id": "u1",
- "severity": NotificationSeverity.MEDIUM.value,
- "tags": ["execution"],
- "channel": NotificationChannel.IN_APP.value,
- "subject": "s",
- "body": "b",
- "status": NotificationStatus.PENDING.value,
- "created_at": now,
- "scheduled_for": now + timedelta(seconds=1),
- }
- )
- scheduled = await repo.find_scheduled_notifications()
- assert isinstance(scheduled, list)
- assert await repo.cleanup_old_notifications(days=0) >= 0
-
-
-@pytest.mark.asyncio
-async def test_subscriptions_and_user_queries(db) -> None: # type: ignore[no-untyped-def]
- repo = NotificationRepository(logger=_test_logger)
-
- update_data = DomainSubscriptionUpdate(enabled=True, severities=[])
- sub = await repo.upsert_subscription("u1", NotificationChannel.IN_APP, update_data)
- assert sub.user_id == "u1"
-
- got = await repo.get_subscription("u1", NotificationChannel.IN_APP)
- assert got and got.user_id == "u1"
-
- subs = await repo.get_all_subscriptions("u1")
- assert len(subs) == len(list(NC))
-
- # Users by role and active users
- await db.get_collection("users").insert_many(
- [
- {
- UserFields.USER_ID: "u1",
- UserFields.USERNAME: "A",
- UserFields.EMAIL: "a@e.com",
- UserFields.ROLE: "user",
- UserFields.IS_ACTIVE: True,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.IS_SUPERUSER: False,
- },
- {
- UserFields.USER_ID: "u2",
- UserFields.USERNAME: "B",
- UserFields.EMAIL: "b@e.com",
- UserFields.ROLE: "admin",
- UserFields.IS_ACTIVE: True,
- UserFields.HASHED_PASSWORD: "h",
- UserFields.IS_SUPERUSER: False,
- },
- ]
- )
- ids = await repo.get_users_by_roles([UserRole.USER])
- assert "u1" in ids or isinstance(ids, list)
- await db.get_collection("executions").insert_one(
- {"execution_id": "e1", "user_id": "u2", "created_at": datetime.now(UTC)}
- )
- active = await repo.get_active_users(days=1)
- assert set(active) >= {"u2"} or isinstance(active, list)
diff --git a/backend/tests/integration/db/repositories/test_replay_repository.py b/backend/tests/integration/db/repositories/test_replay_repository.py
deleted file mode 100644
index 7ab5bc72..00000000
--- a/backend/tests/integration/db/repositories/test_replay_repository.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.db.repositories.replay_repository import ReplayRepository
-from app.domain.admin.replay_updates import ReplaySessionUpdate
-from app.domain.enums.replay import ReplayStatus, ReplayType
-from app.domain.replay import ReplayConfig, ReplayFilter
-from app.schemas_pydantic.replay_models import ReplaySession
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> ReplayRepository: # type: ignore[valid-type]
- return await scope.get(ReplayRepository)
-
-
-@pytest.mark.asyncio
-async def test_indexes_and_session_crud(repo: ReplayRepository) -> None:
- await repo.create_indexes()
- config = ReplayConfig(replay_type=ReplayType.EXECUTION, filter=ReplayFilter())
- session = ReplaySession(
- session_id="s1", status=ReplayStatus.CREATED, created_at=datetime.now(timezone.utc), config=config
- )
- await repo.save_session(session)
- got = await repo.get_session("s1")
- assert got and got.session_id == "s1"
- lst = await repo.list_sessions(limit=5)
- assert any(s.session_id == "s1" for s in lst)
- assert await repo.update_session_status("s1", ReplayStatus.RUNNING) is True
- session_update = ReplaySessionUpdate(status=ReplayStatus.COMPLETED)
- assert await repo.update_replay_session("s1", session_update) is True
-
-
-@pytest.mark.asyncio
-async def test_count_fetch_events_and_delete(repo: ReplayRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- # Insert events
- await db.get_collection("events").insert_many(
- [
- {
- "event_id": "e1",
- "timestamp": now,
- "execution_id": "x1",
- "event_type": "T",
- "metadata": {"user_id": "u1"},
- },
- {
- "event_id": "e2",
- "timestamp": now,
- "execution_id": "x2",
- "event_type": "T",
- "metadata": {"user_id": "u1"},
- },
- {
- "event_id": "e3",
- "timestamp": now,
- "execution_id": "x3",
- "event_type": "U",
- "metadata": {"user_id": "u2"},
- },
- ]
- )
- cnt = await repo.count_events(ReplayFilter())
- assert cnt >= 3
- batches = []
- async for b in repo.fetch_events(ReplayFilter(), batch_size=2):
- batches.append(b)
- assert sum(len(b) for b in batches) >= 3
- # Delete old sessions (none match date predicate likely)
- assert await repo.delete_old_sessions(datetime(2000, 1, 1, tzinfo=timezone.utc)) >= 0
diff --git a/backend/tests/integration/db/repositories/test_saga_repository.py b/backend/tests/integration/db/repositories/test_saga_repository.py
deleted file mode 100644
index d78f82e0..00000000
--- a/backend/tests/integration/db/repositories/test_saga_repository.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.db.repositories.saga_repository import SagaRepository
-from app.domain.enums.saga import SagaState
-from app.domain.saga.models import SagaFilter, SagaListResult
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> SagaRepository: # type: ignore[valid-type]
- return await scope.get(SagaRepository)
-
-
-@pytest.mark.asyncio
-async def test_saga_crud_and_queries(repo: SagaRepository, db) -> None: # type: ignore[valid-type]
- now = datetime.now(timezone.utc)
- # Insert saga docs
- await db.get_collection("sagas").insert_many(
- [
- {
- "saga_id": "s1",
- "saga_name": "test",
- "execution_id": "e1",
- "state": "running",
- "created_at": now,
- "updated_at": now,
- },
- {
- "saga_id": "s2",
- "saga_name": "test2",
- "execution_id": "e2",
- "state": "completed",
- "created_at": now,
- "updated_at": now,
- "completed_at": now,
- },
- ]
- )
- saga = await repo.get_saga("s1")
- assert saga and saga.saga_id == "s1"
- lst = await repo.get_sagas_by_execution("e1")
- assert len(lst.sagas) >= 1
-
- f = SagaFilter(execution_ids=["e1"])
- result = await repo.list_sagas(f, limit=2)
- assert isinstance(result, SagaListResult)
-
- assert await repo.update_saga_state("s1", SagaState.COMPLETED) in (True, False)
-
- # user execution ids
- await db.get_collection("executions").insert_many(
- [
- {"execution_id": "e1", "user_id": "u1"},
- {"execution_id": "e2", "user_id": "u1"},
- ]
- )
- ids = await repo.get_user_execution_ids("u1")
- assert set(ids) == {"e1", "e2"}
-
- counts = await repo.count_sagas_by_state()
- assert isinstance(counts, dict) and ("running" in counts or "completed" in counts)
-
- stats = await repo.get_saga_statistics()
- assert isinstance(stats, dict) and "total" in stats
diff --git a/backend/tests/integration/db/repositories/test_sse_repository.py b/backend/tests/integration/db/repositories/test_sse_repository.py
deleted file mode 100644
index b196ba43..00000000
--- a/backend/tests/integration/db/repositories/test_sse_repository.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import pytest
-from app.db.repositories.sse_repository import SSERepository
-from app.domain.enums.execution import ExecutionStatus
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> SSERepository: # type: ignore[valid-type]
- return await scope.get(SSERepository)
-
-
-@pytest.mark.asyncio
-async def test_get_execution_status(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "running"})
- status = await repo.get_execution_status("e1")
- assert status is not None
- assert status.status == ExecutionStatus.RUNNING
- assert status.execution_id == "e1"
-
-
-@pytest.mark.asyncio
-async def test_get_execution_status_none(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- assert await repo.get_execution_status("missing") is None
-
-
-@pytest.mark.asyncio
-async def test_get_execution(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- await db.get_collection("executions").insert_one({"execution_id": "e1", "status": "queued", "resource_usage": {}})
- doc = await repo.get_execution("e1")
- assert doc is not None
- assert doc.execution_id == "e1"
-
-
-@pytest.mark.asyncio
-async def test_get_execution_not_found(repo: SSERepository, db) -> None: # type: ignore[valid-type]
- assert await repo.get_execution("missing") is None
diff --git a/backend/tests/integration/db/repositories/test_user_repository.py b/backend/tests/integration/db/repositories/test_user_repository.py
deleted file mode 100644
index 12f86725..00000000
--- a/backend/tests/integration/db/repositories/test_user_repository.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from datetime import datetime, timezone
-
-import pytest
-from app.db.repositories.user_repository import UserRepository
-from app.domain.enums.user import UserRole
-from app.domain.user.user_models import User as DomainUser
-from app.domain.user.user_models import UserUpdate
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> UserRepository: # type: ignore[valid-type]
- return await scope.get(UserRepository)
-
-
-@pytest.mark.asyncio
-async def test_create_get_update_delete_user(repo: UserRepository) -> None:
- # Create user
- user = DomainUser(
- user_id="", # let repo assign
- username="alice",
- email="alice@example.com",
- role=UserRole.USER,
- is_active=True,
- is_superuser=False,
- hashed_password="h",
- created_at=datetime.now(timezone.utc),
- updated_at=datetime.now(timezone.utc),
- )
- created = await repo.create_user(user)
- assert created.user_id
-
- # Get by username
- fetched = await repo.get_user("alice")
- assert fetched and fetched.username == "alice"
-
- # Get by id
- by_id = await repo.get_user_by_id(created.user_id)
- assert by_id and by_id.user_id == created.user_id
-
- # List with search + role
- users = await repo.list_users(limit=10, offset=0, search="ali", role=UserRole.USER)
- assert any(u.username == "alice" for u in users)
-
- # Update
- upd = UserUpdate(email="alice2@example.com")
- updated = await repo.update_user(created.user_id, upd)
- assert updated and updated.email == "alice2@example.com"
-
- # Delete
- assert await repo.delete_user(created.user_id) is True
- assert await repo.get_user("alice") is None
diff --git a/backend/tests/integration/db/repositories/test_user_settings_repository.py b/backend/tests/integration/db/repositories/test_user_settings_repository.py
deleted file mode 100644
index 83bf6a6e..00000000
--- a/backend/tests/integration/db/repositories/test_user_settings_repository.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.db.repositories.user_settings_repository import UserSettingsRepository
-from app.domain.enums.events import EventType
-from app.domain.user.settings_models import DomainUserSettings
-
-pytestmark = pytest.mark.integration
-
-
-@pytest.fixture()
-async def repo(scope) -> UserSettingsRepository: # type: ignore[valid-type]
- return await scope.get(UserSettingsRepository)
-
-
-@pytest.mark.asyncio
-async def test_user_settings_snapshot_and_events(repo: UserSettingsRepository, db) -> None: # type: ignore[valid-type]
- # Create indexes (should not raise)
- await repo.create_indexes()
-
- # Snapshot CRUD
- us = DomainUserSettings(user_id="u1")
- await repo.create_snapshot(us)
- got = await repo.get_snapshot("u1")
- assert got and got.user_id == "u1"
-
- # Insert events and query
- now = datetime.now(timezone.utc)
- await db.get_collection("events").insert_many(
- [
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_SETTINGS_UPDATED),
- "timestamp": now,
- "payload": {},
- },
- {
- "aggregate_id": "user_settings_u1",
- "event_type": str(EventType.USER_THEME_CHANGED),
- "timestamp": now,
- "payload": {},
- },
- ]
- )
- evs = await repo.get_settings_events("u1", [EventType.USER_SETTINGS_UPDATED], since=now - timedelta(days=1))
- assert any(e.event_type == EventType.USER_SETTINGS_UPDATED for e in evs)
-
- # Counting helpers
- assert await repo.count_events_for_user("u1") >= 2
- assert await repo.count_events_since_snapshot("u1") >= 0
diff --git a/backend/tests/integration/events/test_event_store.py b/backend/tests/integration/events/test_event_store.py
deleted file mode 100644
index 470beb91..00000000
--- a/backend/tests/integration/events/test_event_store.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from app.events.event_store import EventStore
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-from app.infrastructure.kafka.events.pod import PodCreatedEvent
-from app.infrastructure.kafka.events.user import UserLoggedInEvent
-
-pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-
-
-@pytest.fixture()
-async def event_store(scope) -> EventStore: # type: ignore[valid-type]
- return await scope.get(EventStore)
-
-
-@pytest.mark.asyncio
-async def test_store_and_query_events(event_store: EventStore) -> None:
- ev1 = PodCreatedEvent(
- execution_id="x1",
- pod_name="pod1",
- namespace="ns",
- metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="cid"),
- )
- assert await event_store.store_event(ev1) is True
-
- ev2 = PodCreatedEvent(
- execution_id="x2",
- pod_name="pod2",
- namespace="ns",
- metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1"),
- )
- res = await event_store.store_batch([ev1, ev2])
- assert res["total"] == 2 and res["stored"] >= 1
-
- items = await event_store.get_events_by_type(ev1.event_type)
- assert any(getattr(e, "execution_id", None) == "x1" for e in items)
- exec_items = await event_store.get_execution_events("x1")
- assert any(getattr(e, "execution_id", None) == "x1" for e in exec_items)
- user_items = await event_store.get_user_events("u1")
- assert len(user_items) >= 2
- chain = await event_store.get_correlation_chain("cid")
- assert isinstance(chain, list)
- # Security types (may be empty)
- _ = await event_store.get_security_events()
-
-
-@pytest.mark.asyncio
-async def test_replay_events(event_store: EventStore) -> None:
- ev = UserLoggedInEvent(
- user_id="u1", login_method="password", metadata=AvroEventMetadata(service_name="svc", service_version="1")
- )
- await event_store.store_event(ev)
-
- called = {"n": 0}
-
- async def cb(_): # noqa: ANN001
- called["n"] += 1
-
- start = datetime.now(timezone.utc) - timedelta(days=1)
- cnt = await event_store.replay_events(start_time=start, callback=cb)
- assert cnt >= 1 and called["n"] >= 1
diff --git a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py b/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
deleted file mode 100644
index 38610f51..00000000
--- a/backend/tests/integration/events/test_event_store_consumer_flush_e2e.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from uuid import uuid4
-
-import pytest
-from app.core.database_context import Database
-from app.domain.enums.events import EventType
-from app.domain.enums.kafka import KafkaTopic
-from app.events.core import UnifiedProducer
-from app.events.event_store import EventStore
-from app.events.event_store_consumer import create_event_store_consumer
-from app.events.schema.schema_registry import SchemaRegistryManager
-
-from tests.helpers import make_execution_requested_event
-from tests.helpers.eventually import eventually
-
-pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb]
-
-
-@pytest.fixture()
-async def store(scope) -> EventStore: # type: ignore[valid-type]
- return await scope.get(EventStore)
-
-
-@pytest.mark.asyncio
-async def test_event_store_consumer_flush_on_timeout(scope, store: EventStore) -> None: # type: ignore[valid-type]
- producer: UnifiedProducer = await scope.get(UnifiedProducer)
- schema: SchemaRegistryManager = await scope.get(SchemaRegistryManager)
- db: Database = await scope.get(Database)
-
- consumer = create_event_store_consumer(
- event_store=store,
- topics=[KafkaTopic.EXECUTION_EVENTS],
- schema_registry_manager=schema,
- logger=store.logger,
- producer=producer,
- batch_size=100,
- batch_timeout_seconds=0.2,
- )
- await consumer.start()
- try:
- # Directly invoke handler to enqueue
- exec_ids = []
- for _ in range(3):
- x = f"exec-{uuid4().hex[:6]}"
- exec_ids.append(x)
- ev = make_execution_requested_event(execution_id=x)
- await consumer._handle_event(ev) # noqa: SLF001
-
- async def _all_present() -> None:
- docs = await db[store.collection_name].find({"event_type": str(EventType.EXECUTION_REQUESTED)}).to_list(50)
- have = {d.get("execution_id") for d in docs}
- assert set(exec_ids).issubset(have)
-
- await eventually(_all_present, timeout=5.0, interval=0.2)
- finally:
- await consumer.stop()
diff --git a/backend/tests/integration/events/test_event_store_e2e.py b/backend/tests/integration/events/test_event_store_e2e.py
deleted file mode 100644
index 25f5be90..00000000
--- a/backend/tests/integration/events/test_event_store_e2e.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import pytest
-from app.domain.enums.events import EventType
-from app.events.event_store import EventStore
-
-from tests.helpers import make_execution_requested_event
-
-pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-
-
-@pytest.fixture()
-async def store(scope) -> EventStore: # type: ignore[valid-type]
- return await scope.get(EventStore)
-
-
-@pytest.mark.asyncio
-async def test_event_store_initialize_and_crud(store: EventStore) -> None:
- # Store single event
- ev = make_execution_requested_event(execution_id="e-1")
- assert await store.store_event(ev) is True
-
- # Duplicate insert should be treated as success True (DuplicateKey swallowed)
- assert await store.store_event(ev) is True
-
- # Batch store with duplicates
- ev2 = ev.model_copy(update={"event_id": "new-2", "execution_id": "e-2"})
- res = await store.store_batch([ev, ev2])
- assert res["total"] == 2 and res["stored"] >= 1
-
- # Queries
- by_id = await store.get_event(ev.event_id)
- assert by_id is not None and by_id.event_id == ev.event_id
-
- by_type = await store.get_events_by_type(EventType.EXECUTION_REQUESTED, limit=10)
- assert any(e.event_id == ev.event_id for e in by_type)
-
- by_exec = await store.get_execution_events("e-1")
- assert any(e.event_id == ev.event_id for e in by_exec)
-
- by_user = await store.get_user_events("u-unknown", limit=10)
- assert isinstance(by_user, list)
diff --git a/backend/tests/integration/services/events/test_event_service_integration.py b/backend/tests/integration/services/events/test_event_service_integration.py
deleted file mode 100644
index 21b7895e..00000000
--- a/backend/tests/integration/services/events/test_event_service_integration.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from datetime import datetime, timezone, timedelta
-
-import pytest
-
-from app.db.repositories import EventRepository
-from app.domain.events.event_models import Event, EventFilter
-from app.domain.enums.common import SortOrder
-from app.domain.enums.user import UserRole
-from app.infrastructure.kafka.events.metadata import AvroEventMetadata
-from app.domain.enums.events import EventType
-from app.services.event_service import EventService
-
-pytestmark = [pytest.mark.integration, pytest.mark.mongodb]
-
-
-@pytest.mark.asyncio
-async def test_event_service_access_and_queries(scope) -> None: # type: ignore[valid-type]
- repo: EventRepository = await scope.get(EventRepository)
- svc: EventService = await scope.get(EventService)
-
- now = datetime.now(timezone.utc)
- # Seed some events (domain Event, not infra BaseEvent)
- md1 = AvroEventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="c1")
- md2 = AvroEventMetadata(service_name="svc", service_version="1", user_id="u2", correlation_id="c1")
- e1 = Event(event_id="e1", event_type=str(EventType.USER_LOGGED_IN), event_version="1.0", timestamp=now,
- metadata=md1, payload={"user_id": "u1", "login_method": "password"}, aggregate_id="agg1")
- e2 = Event(event_id="e2", event_type=str(EventType.USER_LOGGED_IN), event_version="1.0", timestamp=now,
- metadata=md2, payload={"user_id": "u2", "login_method": "password"}, aggregate_id="agg2")
- await repo.store_event(e1)
- await repo.store_event(e2)
-
- # get_execution_events returns None when non-admin for different user; then admin sees
- events_user = await svc.get_execution_events("agg1", "u2", UserRole.USER)
- assert events_user is None
- events_admin = await svc.get_execution_events("agg1", "admin", UserRole.ADMIN)
- assert any(ev.aggregate_id == "agg1" for ev in events_admin.events)
-
- # query_events_advanced: basic run (empty filters) should return a result structure
- res = await svc.query_events_advanced("u1", UserRole.USER, filters=EventFilter(), sort_by="correlation_id", sort_order=SortOrder.ASC)
- assert res is not None
-
- # get_events_by_correlation filters non-admin to their own user_id
- by_corr_user = await svc.get_events_by_correlation("c1", user_id="u1", user_role=UserRole.USER, include_all_users=False)
- assert all(ev.metadata.user_id == "u1" for ev in by_corr_user.events)
- by_corr_admin = await svc.get_events_by_correlation("c1", user_id="admin", user_role=UserRole.ADMIN, include_all_users=True)
- assert len(by_corr_admin.events) >= 2
-
- # get_event_statistics (time window)
- _ = await svc.get_event_statistics("u1", UserRole.USER, start_time=now - timedelta(days=1), end_time=now + timedelta(days=1))
-
- # get_event enforces access control
- one_allowed = await svc.get_event(e1.event_id, user_id="u1", user_role=UserRole.USER)
- assert one_allowed is not None
- one_denied = await svc.get_event(e1.event_id, user_id="u2", user_role=UserRole.USER)
- assert one_denied is None
-
- # aggregate_events injects user filter for non-admin
- pipe = [{"$match": {"event_type": str(e1.event_type)}}]
- _ = await svc.aggregate_events("u1", UserRole.USER, pipe)
-
- # list_event_types returns at least one type
- types = await svc.list_event_types("u1", UserRole.USER)
- assert isinstance(types, list) and len(types) >= 1
From 9cc16c4d57839784af6b4dc73d19d623d69f8826 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 01:27:02 +0100
Subject: [PATCH 42/48] moved from stdlib dataclass to pydantic dataclass
---
backend/app/domain/admin/overview_models.py | 4 +-
backend/app/domain/admin/replay_models.py | 4 +-
backend/app/domain/admin/replay_updates.py | 3 +-
backend/app/domain/admin/settings_models.py | 4 +-
backend/app/domain/events/event_metadata.py | 4 +-
backend/app/domain/events/event_models.py | 4 +-
backend/app/domain/execution/models.py | 4 +-
backend/app/domain/idempotency/models.py | 3 +-
backend/app/domain/notification/models.py | 4 +-
.../domain/rate_limit/rate_limit_models.py | 4 +-
backend/app/domain/replay/models.py | 3 +-
backend/app/domain/saga/models.py | 4 +-
backend/app/domain/saved_script/models.py | 4 +-
backend/app/domain/sse/models.py | 3 +-
backend/app/domain/user/settings_models.py | 4 +-
backend/app/domain/user/user_models.py | 3 +-
docs/architecture/pydantic-dataclasses.md | 173 ++++++++++++++++++
mkdocs.yml | 1 +
18 files changed, 217 insertions(+), 16 deletions(-)
create mode 100644 docs/architecture/pydantic-dataclasses.md
diff --git a/backend/app/domain/admin/overview_models.py b/backend/app/domain/admin/overview_models.py
index a208c953..23f91408 100644
--- a/backend/app/domain/admin/overview_models.py
+++ b/backend/app/domain/admin/overview_models.py
@@ -1,8 +1,10 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from typing import List
+from pydantic.dataclasses import dataclass
+
from app.domain.events import Event, EventStatistics
from app.domain.user import User as DomainAdminUser
diff --git a/backend/app/domain/admin/replay_models.py b/backend/app/domain/admin/replay_models.py
index 59bd7209..44d7d79c 100644
--- a/backend/app/domain/admin/replay_models.py
+++ b/backend/app/domain/admin/replay_models.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime
from typing import Any
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.replay import ReplayStatus
from app.domain.events.event_models import EventSummary
from app.domain.replay.models import ReplaySessionState
diff --git a/backend/app/domain/admin/replay_updates.py b/backend/app/domain/admin/replay_updates.py
index 075a34b8..c326565b 100644
--- a/backend/app/domain/admin/replay_updates.py
+++ b/backend/app/domain/admin/replay_updates.py
@@ -1,8 +1,9 @@
"""Domain models for replay session updates."""
-from dataclasses import dataclass
from datetime import datetime
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.replay import ReplayStatus
diff --git a/backend/app/domain/admin/settings_models.py b/backend/app/domain/admin/settings_models.py
index d8ffc1c1..cad09f3c 100644
--- a/backend/app/domain/admin/settings_models.py
+++ b/backend/app/domain/admin/settings_models.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
diff --git a/backend/app/domain/events/event_metadata.py b/backend/app/domain/events/event_metadata.py
index 9bca16cb..c3a57440 100644
--- a/backend/app/domain/events/event_metadata.py
+++ b/backend/app/domain/events/event_metadata.py
@@ -1,6 +1,8 @@
-from dataclasses import dataclass, field, replace
+from dataclasses import field, replace
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.common import Environment
diff --git a/backend/app/domain/events/event_models.py b/backend/app/domain/events/event_models.py
index eda1fefd..3dc57627 100644
--- a/backend/app/domain/events/event_models.py
+++ b/backend/app/domain/events/event_models.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime
from typing import Any
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
from app.domain.enums.events import EventType
from app.domain.events.event_metadata import EventMetadata
diff --git a/backend/app/domain/execution/models.py b/backend/app/domain/execution/models.py
index 2b6e8eb6..2bd30956 100644
--- a/backend/app/domain/execution/models.py
+++ b/backend/app/domain/execution/models.py
@@ -1,10 +1,12 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any, Optional
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.execution import ExecutionStatus
from app.domain.enums.storage import ExecutionErrorType
diff --git a/backend/app/domain/idempotency/models.py b/backend/app/domain/idempotency/models.py
index f3001c8f..38fba578 100644
--- a/backend/app/domain/idempotency/models.py
+++ b/backend/app/domain/idempotency/models.py
@@ -1,9 +1,10 @@
from __future__ import annotations
-from dataclasses import dataclass
from datetime import datetime
from typing import Dict, Optional
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
diff --git a/backend/app/domain/notification/models.py b/backend/app/domain/notification/models.py
index d697b253..8a1bac45 100644
--- a/backend/app/domain/notification/models.py
+++ b/backend/app/domain/notification/models.py
@@ -1,10 +1,12 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import UTC, datetime
from typing import Any
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.notification import (
NotificationChannel,
NotificationSeverity,
diff --git a/backend/app/domain/rate_limit/rate_limit_models.py b/backend/app/domain/rate_limit/rate_limit_models.py
index 59713554..08ef9460 100644
--- a/backend/app/domain/rate_limit/rate_limit_models.py
+++ b/backend/app/domain/rate_limit/rate_limit_models.py
@@ -1,8 +1,10 @@
import re
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Dict, List, Optional
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
diff --git a/backend/app/domain/replay/models.py b/backend/app/domain/replay/models.py
index 80331c1d..17e241b3 100644
--- a/backend/app/domain/replay/models.py
+++ b/backend/app/domain/replay/models.py
@@ -1,9 +1,10 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any, Dict, List
from uuid import uuid4
from pydantic import BaseModel, Field, PrivateAttr
+from pydantic.dataclasses import dataclass
from app.domain.enums.events import EventType
from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
diff --git a/backend/app/domain/saga/models.py b/backend/app/domain/saga/models.py
index 06c2ccac..a885c3bd 100644
--- a/backend/app/domain/saga/models.py
+++ b/backend/app/domain/saga/models.py
@@ -1,8 +1,10 @@
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any
from uuid import uuid4
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.saga import SagaState
diff --git a/backend/app/domain/saved_script/models.py b/backend/app/domain/saved_script/models.py
index ba819cbd..08622426 100644
--- a/backend/app/domain/saved_script/models.py
+++ b/backend/app/domain/saved_script/models.py
@@ -1,8 +1,10 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
+from pydantic.dataclasses import dataclass
+
@dataclass
class DomainSavedScriptBase:
diff --git a/backend/app/domain/sse/models.py b/backend/app/domain/sse/models.py
index e4dfa5fe..c8a59e8c 100644
--- a/backend/app/domain/sse/models.py
+++ b/backend/app/domain/sse/models.py
@@ -1,8 +1,9 @@
from __future__ import annotations
-from dataclasses import dataclass
from datetime import datetime
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.execution import ExecutionStatus
diff --git a/backend/app/domain/user/settings_models.py b/backend/app/domain/user/settings_models.py
index 171f1b17..10a730d2 100644
--- a/backend/app/domain/user/settings_models.py
+++ b/backend/app/domain/user/settings_models.py
@@ -1,9 +1,11 @@
from __future__ import annotations
-from dataclasses import dataclass, field
+from dataclasses import field
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional
+from pydantic.dataclasses import dataclass
+
from app.domain.enums.common import Theme
from app.domain.enums.events import EventType
from app.domain.enums.notification import NotificationChannel
diff --git a/backend/app/domain/user/user_models.py b/backend/app/domain/user/user_models.py
index 242529ff..fa34d066 100644
--- a/backend/app/domain/user/user_models.py
+++ b/backend/app/domain/user/user_models.py
@@ -1,8 +1,9 @@
import re
-from dataclasses import dataclass
from datetime import datetime
from typing import List
+from pydantic.dataclasses import dataclass
+
from app.core.utils import StringEnum
from app.domain.enums.user import UserRole
diff --git a/docs/architecture/pydantic-dataclasses.md b/docs/architecture/pydantic-dataclasses.md
new file mode 100644
index 00000000..b169958c
--- /dev/null
+++ b/docs/architecture/pydantic-dataclasses.md
@@ -0,0 +1,173 @@
+# Pydantic dataclasses
+
+This document explains why domain models use `pydantic.dataclasses.dataclass` instead of the standard library
+`dataclasses.dataclass`. It covers the problem with nested dict conversion, the solution, and migration considerations.
+
+## Why pydantic dataclasses
+
+Domain models are dataclasses that represent business entities like `DomainUserSettings`, `DomainExecution`, and `Saga`.
+These models often have nested structures - for example, `DomainUserSettings` contains `DomainNotificationSettings` and
+`DomainEditorSettings` as nested dataclasses.
+
+The problem appears when loading data from MongoDB. Beanie documents are Pydantic models, and calling `model_dump()` on
+them returns plain Python dicts, including nested dicts for nested models. When you pass these dicts to a stdlib
+dataclass constructor, nested dicts stay as dicts instead of being converted to their proper dataclass types.
+
+```python
+# Data from MongoDB via Beanie document.model_dump()
+data = {
+ "user_id": "user123",
+ "notifications": {
+ "execution_completed": False,
+ "execution_failed": True
+ }
+}
+
+# With a stdlib dataclass: construction succeeds, but the nested dict stays a dict
+settings = DomainUserSettings(**data)
+settings.notifications.execution_completed  # AttributeError: 'dict' object has no attribute 'execution_completed'
+
+# With a pydantic dataclass: the nested dict is converted to DomainNotificationSettings
+settings = DomainUserSettings(**data)
+settings.notifications.execution_completed  # False
+```
+
+Pydantic dataclasses use type annotations to automatically convert nested dicts into the correct dataclass instances. No
+reflection, no isinstance checks, no manual conversion code.
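+
+For reference, a minimal sketch of the shapes involved. The field names are illustrative, not the full
+definitions from `app/domain/user/settings_models.py`:
+
+```python
+from dataclasses import field
+from pydantic.dataclasses import dataclass
+
+@dataclass
+class DomainNotificationSettings:
+    execution_completed: bool = True
+    execution_failed: bool = True
+
+@dataclass
+class DomainUserSettings:
+    user_id: str
+    notifications: DomainNotificationSettings = field(default_factory=DomainNotificationSettings)
+
+# The nested dict (e.g. from model_dump()) is converted on construction
+s = DomainUserSettings(user_id="u1", notifications={"execution_completed": False, "execution_failed": True})
+assert isinstance(s.notifications, DomainNotificationSettings)
+```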
+
+## What pydantic dataclasses provide
+
+Pydantic dataclasses are a drop-in replacement for stdlib dataclasses with added features:
+
+| Feature | stdlib | pydantic |
+|------------------------|--------|----------|
+| Nested dict conversion | No | Yes |
+| Enum from string | No | Yes |
+| Type validation | No | Yes |
+| String-to-int coercion | No | Yes |
+| `asdict()` | Yes | Yes |
+| `is_dataclass()` | Yes | Yes |
+| `__dataclass_fields__` | Yes | Yes |
+| `field()` | Yes | Yes |
+| `__post_init__` | Yes | Yes |
+| `replace()` | Yes | Yes |
+| frozen/eq/hash | Yes | Yes |
+| Inheritance | Yes | Yes |
+
+The migration requires changing one import:
+
+```python
+# Before
+from dataclasses import dataclass
+
+# After
+from pydantic.dataclasses import dataclass
+```
+
+Everything else stays the same. The `field` function still comes from stdlib `dataclasses`.
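+
+As a quick sanity check that the stdlib dataclass machinery keeps working, here is a small illustrative
+example (the classes are made up for the snippet, not taken from the codebase):
+
+```python
+from dataclasses import asdict, is_dataclass, replace
+from enum import Enum
+
+from pydantic.dataclasses import dataclass
+
+class Color(Enum):
+    RED = "red"
+    BLUE = "blue"
+
+@dataclass
+class Marker:
+    color: Color
+    size: int
+
+m = Marker(color="red", size="3")  # "red" -> Color.RED, "3" -> 3 (pydantic coercion)
+assert is_dataclass(m)
+assert asdict(m) == {"color": Color.RED, "size": 3}
+m2 = replace(m, size=5)            # stdlib replace() re-runs validation via __init__
+print(m2)
+```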
+
+## Performance
+
+Pydantic dataclasses add validation overhead at construction time:
+
+| Operation | stdlib | pydantic | Ratio |
+|--------------------|-------------|-------------|-------------|
+| Creation from dict | 0.2 µs      | 1.4 µs      | ~7x slower  |
+| Attribute access | 4.1 ms/100k | 4.6 ms/100k | 1.1x slower |
+
+The creation overhead is negligible for typical usage patterns - domain models are created during request handling, not
+in tight loops. Attribute access after construction has no meaningful overhead.
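+
+A rough way to reproduce a comparison like this locally; the classes below are throwaway examples, and
+absolute figures depend on hardware, Python, and pydantic versions:
+
+```python
+import timeit
+from dataclasses import dataclass as std_dataclass
+
+from pydantic.dataclasses import dataclass as pyd_dataclass
+
+@std_dataclass
+class StdPoint:
+    x: int
+    y: int
+
+@pyd_dataclass
+class PydPoint:
+    x: int
+    y: int
+
+data = {"x": 1, "y": 2}
+print("stdlib  :", timeit.timeit(lambda: StdPoint(**data), number=100_000))
+print("pydantic:", timeit.timeit(lambda: PydPoint(**data), number=100_000))
+```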
+
+## Domain model locations
+
+All domain models live in `app/domain/` and use pydantic dataclasses:
+
+| Module | File | Key models |
+|--------------|---------------------------------------|----------------------------------------------------------------------------|
+| User | `app/domain/user/settings_models.py` | `DomainUserSettings`, `DomainNotificationSettings`, `DomainEditorSettings` |
+| User | `app/domain/user/user_models.py` | `User`, `UserCreation`, `UserUpdate` |
+| Execution | `app/domain/execution/models.py` | `DomainExecution`, `ExecutionResultDomain` |
+| Events | `app/domain/events/event_models.py` | `Event`, `EventFilter`, `EventQuery` |
+| Events | `app/domain/events/event_metadata.py` | `EventMetadata` |
+| Saga | `app/domain/saga/models.py` | `Saga`, `SagaInstance`, `SagaConfig` |
+| Replay | `app/domain/replay/models.py` | `ReplaySessionState` |
+| Notification | `app/domain/notification/models.py` | `DomainNotification`, `DomainNotificationSubscription` |
+| Admin | `app/domain/admin/settings_models.py` | `SystemSettings`, `ExecutionLimits` |
+
+## Using domain models in repositories
+
+Repositories that load from MongoDB convert Beanie documents to domain models:
+
+```python
+from app.domain.user.settings_models import DomainUserSettings
+
+class UserSettingsRepository:
+ async def get_snapshot(self, user_id: str) -> DomainUserSettings | None:
+ doc = await UserSettingsDocument.find_one({"user_id": user_id})
+ if not doc:
+ return None
+ # Pydantic dataclass handles nested conversion automatically
+ return DomainUserSettings(**doc.model_dump(exclude={"id", "revision_id"}))
+```
+
+No manual conversion of nested fields needed. The type annotations on `DomainUserSettings` tell pydantic how to convert
+each nested dict.
+
+## Validation behavior
+
+Pydantic dataclasses validate input data at construction time. Invalid data raises `ValidationError`:
+
+```python
+# Invalid enum value
+DomainUserSettings(user_id="u1", theme="invalid_theme")
+# ValidationError: Input should be 'light', 'dark' or 'auto'
+
+# Invalid type
+DomainNotificationSettings(execution_completed="not_a_bool")
+# ValidationError: Input should be a valid boolean
+```
+
+This catches data problems at the boundary where data enters the domain, rather than later during processing. Services
+can trust that domain models contain valid data.
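+
+Where untrusted data crosses that boundary, the error can be handled explicitly. A hypothetical helper
+(not part of the codebase) might look like this:
+
+```python
+from pydantic import ValidationError
+
+from app.domain.user.settings_models import DomainUserSettings
+
+def load_settings(raw: dict) -> DomainUserSettings | None:
+    """Convert a raw dict into the domain model, rejecting invalid data."""
+    try:
+        return DomainUserSettings(**raw)
+    except ValidationError as exc:
+        # Log and reject rather than letting bad data propagate into the domain
+        print(f"invalid settings payload: {exc}")
+        return None
+```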
+
+## What stays as Pydantic BaseModel
+
+Some classes still use `pydantic.BaseModel` instead of dataclasses:
+
+- Beanie documents (require BaseModel for ODM features)
+- Request/response schemas (FastAPI integration)
+- Configuration models with complex validation
+- Classes that need `model_validate()`, `model_json_schema()`, or other BaseModel methods
+
+The rule: use pydantic dataclasses for domain models that represent business entities. Use BaseModel for infrastructure
+concerns like documents, schemas, and configs.
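+
+A minimal illustration of the split, with hypothetical names:
+
+```python
+from pydantic import BaseModel
+from pydantic.dataclasses import dataclass
+
+# Domain entity: pydantic dataclass, validated on construction
+@dataclass
+class DomainWidget:
+    widget_id: str
+    size: int
+
+# API request schema: BaseModel, so FastAPI integration and methods like
+# model_validate() / model_json_schema() are available
+class WidgetCreateRequest(BaseModel):
+    size: int
+```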
+
+## Adding new domain models
+
+When creating a new domain model:
+
+1. Import dataclass from pydantic: `from pydantic.dataclasses import dataclass`
+2. Import field from stdlib if needed: `from dataclasses import field`
+3. Define the class with `@dataclass` decorator
+4. Use type annotations - pydantic uses them for conversion and validation
+5. Put nested dataclasses before the parent class that uses them
+
+```python
+from dataclasses import field
+from datetime import datetime, timezone
+from pydantic.dataclasses import dataclass
+
+@dataclass
+class NestedModel:
+ value: int
+ label: str = "default"
+
+@dataclass
+class ParentModel:
+ id: str
+ nested: NestedModel
+ items: list[str] = field(default_factory=list)
+    created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+```
+
+The model automatically handles nested dict conversion, enum parsing, and type coercion.
diff --git a/mkdocs.yml b/mkdocs.yml
index 1078a3f8..aabc3c45 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -104,6 +104,7 @@ nav:
- Overview: architecture/overview.md
- Services: architecture/services-overview.md
- Domain Exceptions: architecture/domain-exceptions.md
+ - Pydantic Dataclasses: architecture/pydantic-dataclasses.md
- Frontend Build: architecture/frontend-build.md
- Svelte 5 Migration: architecture/svelte5-migration.md
- Kafka Topics: architecture/kafka-topic-architecture.md
From 31b09d839b68cd1e7a653121568f789cd6101c42 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 14:44:36 +0100
Subject: [PATCH 43/48] fixes
---
backend/app/api/routes/admin/events.py | 6 +-
backend/app/db/docs/__init__.py | 3 -
backend/app/db/docs/event.py | 73 +-------
.../admin/admin_events_repository.py | 102 +++++------
.../app/db/repositories/event_repository.py | 87 ++--------
.../db/repositories/execution_repository.py | 31 +---
.../app/db/repositories/replay_repository.py | 28 ++-
backend/app/db/repositories/sse_repository.py | 11 +-
backend/app/domain/admin/__init__.py | 2 -
backend/app/domain/admin/replay_models.py | 16 +-
backend/app/domain/replay/models.py | 39 ++++-
backend/app/events/event_store.py | 160 +++++++++---------
.../services/admin/admin_events_service.py | 14 +-
docs/architecture/event-storage.md | 151 ++++++++++-------
frontend/src/styles/components.css | 8 +-
15 files changed, 295 insertions(+), 436 deletions(-)
diff --git a/backend/app/api/routes/admin/events.py b/backend/app/api/routes/admin/events.py
index 6dbda0e0..7e610192 100644
--- a/backend/app/api/routes/admin/events.py
+++ b/backend/app/api/routes/admin/events.py
@@ -10,8 +10,8 @@
from app.api.dependencies import admin_user
from app.core.correlation import CorrelationContext
-from app.domain.admin import ReplayQuery
from app.domain.enums.events import EventType
+from app.domain.replay import ReplayFilter
from app.domain.events.event_models import EventFilter
from app.schemas_pydantic.admin_events import (
EventBrowseRequest,
@@ -153,7 +153,7 @@ async def replay_events(
) -> EventReplayResponse:
try:
replay_correlation_id = f"replay_{CorrelationContext.get_correlation_id()}"
- rq = ReplayQuery(
+ replay_filter = ReplayFilter(
event_ids=request.event_ids,
correlation_id=request.correlation_id,
aggregate_id=request.aggregate_id,
@@ -162,7 +162,7 @@ async def replay_events(
)
try:
result = await service.prepare_or_schedule_replay(
- replay_query=rq,
+ replay_filter=replay_filter,
dry_run=request.dry_run,
replay_correlation_id=replay_correlation_id,
target_service=request.target_service,
diff --git a/backend/app/db/docs/__init__.py b/backend/app/db/docs/__init__.py
index 8d23bbc0..909343a1 100644
--- a/backend/app/db/docs/__init__.py
+++ b/backend/app/db/docs/__init__.py
@@ -9,7 +9,6 @@
from app.db.docs.event import (
EventArchiveDocument,
EventDocument,
- EventStoreDocument,
)
from app.db.docs.execution import ExecutionDocument, ResourceUsage
from app.db.docs.notification import (
@@ -44,7 +43,6 @@
SagaDocument,
DLQMessageDocument,
EventDocument,
- EventStoreDocument,
EventArchiveDocument,
ReplaySessionDocument,
ResourceAllocationDocument,
@@ -74,7 +72,6 @@
"DLQMessageDocument",
# Event
"EventDocument",
- "EventStoreDocument",
"EventArchiveDocument",
# Replay
"ReplaySessionDocument",
diff --git a/backend/app/db/docs/event.py b/backend/app/db/docs/event.py
index ac64ad4e..87f2b2d6 100644
--- a/backend/app/db/docs/event.py
+++ b/backend/app/db/docs/event.py
@@ -1,5 +1,5 @@
from datetime import datetime, timedelta, timezone
-from typing import Any, Dict
+from typing import Any
from uuid import uuid4
import pymongo
@@ -28,10 +28,10 @@ class EventMetadata(BaseModel):
class EventDocument(Document):
- """Event document as stored in database.
+ """Event document for event browsing/admin system.
- Copied from EventInDB schema. Uses extra="allow" to store
- additional fields from polymorphic BaseEvent subclasses.
+    Uses a payload dict for flexible event data storage. It is now the single event
+    document; the former EventStoreDocument (separate event_store collection) was removed.
"""
event_id: Indexed(str, unique=True) = Field(default_factory=lambda: str(uuid4())) # type: ignore[valid-type]
@@ -40,7 +40,7 @@ class EventDocument(Document):
timestamp: Indexed(datetime) = Field(default_factory=lambda: datetime.now(timezone.utc)) # type: ignore[valid-type]
aggregate_id: Indexed(str) | None = None # type: ignore[valid-type]
metadata: EventMetadata
- payload: Dict[str, Any] = Field(default_factory=dict)
+ payload: dict[str, Any] = Field(default_factory=dict)
stored_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
ttl_expires_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc) + timedelta(days=30))
@@ -86,68 +86,10 @@ class Settings:
]
-class EventStoreDocument(Document):
- """Event store document for permanent event storage.
-
- Same structure as EventDocument but in event_store collection.
- Uses extra="allow" to store additional fields from polymorphic events.
- No TTL index since this is permanent storage.
- """
-
- event_id: Indexed(str, unique=True) = Field(default_factory=lambda: str(uuid4())) # type: ignore[valid-type]
- event_type: EventType # Indexed via Settings.indexes
- event_version: str = "1.0"
- timestamp: Indexed(datetime) = Field(default_factory=lambda: datetime.now(timezone.utc)) # type: ignore[valid-type]
- aggregate_id: Indexed(str) | None = None # type: ignore[valid-type]
- metadata: EventMetadata
- payload: Dict[str, Any] = Field(default_factory=dict)
- stored_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
- ttl_expires_at: datetime | None = None
-
- model_config = ConfigDict(from_attributes=True, extra="allow")
-
- class Settings:
- name = "event_store"
- use_state_management = True
- indexes = [
- # Compound indexes for common query patterns
- IndexModel([("event_type", ASCENDING), ("timestamp", DESCENDING)], name="idx_event_type_ts"),
- IndexModel([("aggregate_id", ASCENDING), ("timestamp", DESCENDING)], name="idx_aggregate_ts"),
- IndexModel([("metadata.correlation_id", ASCENDING)], name="idx_meta_correlation"),
- IndexModel([("metadata.user_id", ASCENDING), ("timestamp", DESCENDING)], name="idx_meta_user_ts"),
- IndexModel([("metadata.service_name", ASCENDING), ("timestamp", DESCENDING)], name="idx_meta_service_ts"),
- # Payload sparse indexes
- IndexModel([("payload.execution_id", ASCENDING)], name="idx_payload_execution", sparse=True),
- IndexModel([("payload.pod_name", ASCENDING)], name="idx_payload_pod", sparse=True),
- # Additional compound indexes for query optimization
- IndexModel([("event_type", ASCENDING), ("aggregate_id", ASCENDING)], name="idx_events_type_agg"),
- IndexModel([("aggregate_id", ASCENDING), ("timestamp", ASCENDING)], name="idx_events_agg_ts"),
- IndexModel([("event_type", ASCENDING), ("timestamp", ASCENDING)], name="idx_events_type_ts_asc"),
- IndexModel([("metadata.user_id", ASCENDING), ("timestamp", ASCENDING)], name="idx_events_user_ts"),
- IndexModel([("metadata.user_id", ASCENDING), ("event_type", ASCENDING)], name="idx_events_user_type"),
- IndexModel(
- [("event_type", ASCENDING), ("metadata.user_id", ASCENDING), ("timestamp", DESCENDING)],
- name="idx_events_type_user_ts",
- ),
- # Text search index
- IndexModel(
- [
- ("event_type", pymongo.TEXT),
- ("metadata.service_name", pymongo.TEXT),
- ("metadata.user_id", pymongo.TEXT),
- ("payload", pymongo.TEXT),
- ],
- name="idx_text_search",
- language_override="none",
- default_language="english",
- ),
- ]
-
-
class EventArchiveDocument(Document):
"""Archived event with deletion metadata.
- Uses extra="allow" to preserve all fields from polymorphic events.
+ Mirrors EventDocument structure with additional archive metadata.
"""
event_id: Indexed(str, unique=True) # type: ignore[valid-type]
@@ -156,13 +98,14 @@ class EventArchiveDocument(Document):
timestamp: Indexed(datetime) # type: ignore[valid-type]
aggregate_id: str | None = None
metadata: EventMetadata
- payload: Dict[str, Any] = Field(default_factory=dict)
+ payload: dict[str, Any] = Field(default_factory=dict)
stored_at: datetime | None = None
ttl_expires_at: datetime | None = None
# Archive metadata
deleted_at: Indexed(datetime) = Field(default_factory=lambda: datetime.now(timezone.utc)) # type: ignore[valid-type]
deleted_by: str | None = None
+ deletion_reason: str | None = None
model_config = ConfigDict(from_attributes=True, extra="allow")
diff --git a/backend/app/db/repositories/admin/admin_events_repository.py b/backend/app/db/repositories/admin/admin_events_repository.py
index 1190752e..6c5ce164 100644
--- a/backend/app/db/repositories/admin/admin_events_repository.py
+++ b/backend/app/db/repositories/admin/admin_events_repository.py
@@ -7,11 +7,11 @@
from app.db.docs import (
EventArchiveDocument,
- EventStoreDocument,
+ EventDocument,
ExecutionDocument,
ReplaySessionDocument,
)
-from app.domain.admin import ReplayQuery, ReplaySessionData, ReplaySessionStatusDetail
+from app.domain.admin import ReplaySessionData, ReplaySessionStatusDetail
from app.domain.admin.replay_updates import ReplaySessionUpdate
from app.domain.enums.replay import ReplayStatus
from app.domain.events import EventMetadata as DomainEventMetadata
@@ -27,35 +27,24 @@
UserEventCount,
)
from app.domain.events.query_builders import EventStatsAggregation
-from app.domain.replay.models import ReplayConfig, ReplaySessionState
+from app.domain.replay.models import ReplayConfig, ReplayFilter, ReplaySessionState
class AdminEventsRepository:
def _event_filter_conditions(self, f: EventFilter) -> list[Any]:
- """Build Beanie query conditions from EventFilter for EventStoreDocument."""
+ """Build Beanie query conditions from EventFilter for EventDocument."""
conditions = [
- In(EventStoreDocument.event_type, f.event_types) if f.event_types else None,
- EventStoreDocument.aggregate_id == f.aggregate_id if f.aggregate_id else None,
- EventStoreDocument.metadata.correlation_id == f.correlation_id if f.correlation_id else None,
- EventStoreDocument.metadata.user_id == f.user_id if f.user_id else None,
- EventStoreDocument.metadata.service_name == f.service_name if f.service_name else None,
- GTE(EventStoreDocument.timestamp, f.start_time) if f.start_time else None,
- LTE(EventStoreDocument.timestamp, f.end_time) if f.end_time else None,
+ In(EventDocument.event_type, f.event_types) if f.event_types else None,
+ EventDocument.aggregate_id == f.aggregate_id if f.aggregate_id else None,
+ EventDocument.metadata.correlation_id == f.correlation_id if f.correlation_id else None,
+ EventDocument.metadata.user_id == f.user_id if f.user_id else None,
+ EventDocument.metadata.service_name == f.service_name if f.service_name else None,
+ GTE(EventDocument.timestamp, f.start_time) if f.start_time else None,
+ LTE(EventDocument.timestamp, f.end_time) if f.end_time else None,
Text(f.search_text) if f.search_text else None,
]
return [c for c in conditions if c is not None]
- def _replay_conditions_for_store(self, q: ReplayQuery) -> list[Any]:
- """Build Beanie query conditions from ReplayQuery for EventStoreDocument."""
- conditions = [
- In(EventStoreDocument.event_id, q.event_ids) if q.event_ids else None,
- EventStoreDocument.metadata.correlation_id == q.correlation_id if q.correlation_id else None,
- EventStoreDocument.aggregate_id == q.aggregate_id if q.aggregate_id else None,
- GTE(EventStoreDocument.timestamp, q.start_time) if q.start_time else None,
- LTE(EventStoreDocument.timestamp, q.end_time) if q.end_time else None,
- ]
- return [c for c in conditions if c is not None]
-
async def browse_events(
self,
event_filter: EventFilter,
@@ -65,31 +54,37 @@ async def browse_events(
sort_order: SortDirection = SortDirection.DESCENDING,
) -> EventBrowseResult:
conditions = self._event_filter_conditions(event_filter)
- query = EventStoreDocument.find(*conditions)
+ query = EventDocument.find(*conditions)
total = await query.count()
docs = await query.sort([(sort_by, sort_order)]).skip(skip).limit(limit).to_list()
- doc_fields = set(EventStoreDocument.model_fields.keys()) - {"id", "revision_id"}
events = [
- Event(**{**d.model_dump(include=doc_fields), "metadata": DomainEventMetadata(**d.metadata.model_dump())})
+ Event(
+ **{
+ **d.model_dump(exclude={"id", "revision_id"}),
+ "metadata": DomainEventMetadata(**d.metadata.model_dump()),
+ }
+ )
for d in docs
]
return EventBrowseResult(events=events, total=total, skip=skip, limit=limit)
async def get_event_detail(self, event_id: str) -> EventDetail | None:
- doc = await EventStoreDocument.find_one({"event_id": event_id})
+ doc = await EventDocument.find_one({"event_id": event_id})
if not doc:
return None
- doc_fields = set(EventStoreDocument.model_fields.keys()) - {"id", "revision_id"}
event = Event(
- **{**doc.model_dump(include=doc_fields), "metadata": DomainEventMetadata(**doc.metadata.model_dump())}
+ **{
+ **doc.model_dump(exclude={"id", "revision_id"}),
+ "metadata": DomainEventMetadata(**doc.metadata.model_dump()),
+ }
)
related_query = {"metadata.correlation_id": doc.metadata.correlation_id, "event_id": {"$ne": event_id}}
related_docs = await (
- EventStoreDocument.find(related_query).sort([("timestamp", SortDirection.ASCENDING)]).limit(10).to_list()
+ EventDocument.find(related_query).sort([("timestamp", SortDirection.ASCENDING)]).limit(10).to_list()
)
related_events = [
EventSummary(
@@ -105,7 +100,7 @@ async def get_event_detail(self, event_id: str) -> EventDetail | None:
return EventDetail(event=event, related_events=related_events, timeline=timeline)
async def delete_event(self, event_id: str) -> bool:
- doc = await EventStoreDocument.find_one({"event_id": event_id})
+ doc = await EventDocument.find_one({"event_id": event_id})
if not doc:
return False
await doc.delete()
@@ -115,7 +110,7 @@ async def get_event_stats(self, hours: int = 24) -> EventStatistics:
start_time = datetime.now(timezone.utc) - timedelta(hours=hours)
overview_pipeline = EventStatsAggregation.build_overview_pipeline(start_time)
- overview_result = await EventStoreDocument.aggregate(overview_pipeline).to_list()
+ overview_result = await EventDocument.aggregate(overview_pipeline).to_list()
stats = (
overview_result[0]
@@ -123,7 +118,7 @@ async def get_event_stats(self, hours: int = 24) -> EventStatistics:
else {"total_events": 0, "event_type_count": 0, "unique_user_count": 0, "service_count": 0}
)
- error_count = await EventStoreDocument.find(
+ error_count = await EventDocument.find(
{
"timestamp": {"$gte": start_time},
"event_type": {"$regex": "failed|error|timeout", "$options": "i"},
@@ -133,17 +128,17 @@ async def get_event_stats(self, hours: int = 24) -> EventStatistics:
error_rate = (error_count / stats["total_events"] * 100) if stats["total_events"] > 0 else 0
type_pipeline = EventStatsAggregation.build_event_types_pipeline(start_time)
- top_types = await EventStoreDocument.aggregate(type_pipeline).to_list()
+ top_types = await EventDocument.aggregate(type_pipeline).to_list()
events_by_type = {t["_id"]: t["count"] for t in top_types}
hourly_pipeline = EventStatsAggregation.build_hourly_events_pipeline(start_time)
- hourly_result = await EventStoreDocument.aggregate(hourly_pipeline).to_list()
+ hourly_result = await EventDocument.aggregate(hourly_pipeline).to_list()
events_by_hour: list[HourlyEventCount | dict[str, Any]] = [
HourlyEventCount(hour=doc["_id"], count=doc["count"]) for doc in hourly_result
]
user_pipeline = EventStatsAggregation.build_top_users_pipeline(start_time)
- top_users_result = await EventStoreDocument.aggregate(user_pipeline).to_list()
+ top_users_result = await EventDocument.aggregate(user_pipeline).to_list()
top_users = [
UserEventCount(user_id=doc["_id"], event_count=doc["count"]) for doc in top_users_result if doc["_id"]
]
@@ -176,7 +171,7 @@ async def get_event_stats(self, hours: int = 24) -> EventStatistics:
async def export_events_csv(self, event_filter: EventFilter) -> list[EventExportRow]:
conditions = self._event_filter_conditions(event_filter)
docs = await (
- EventStoreDocument.find(*conditions).sort([("timestamp", SortDirection.DESCENDING)]).limit(10000).to_list()
+ EventDocument.find(*conditions).sort([("timestamp", SortDirection.DESCENDING)]).limit(10000).to_list()
)
return [
@@ -279,7 +274,7 @@ async def get_replay_status_with_progress(self, session_id: str) -> ReplaySessio
execution_results: list[dict[str, Any]] = []
if doc.config and doc.config.filter and doc.config.filter.custom_query:
original_query = doc.config.filter.custom_query
- original_events = await EventStoreDocument.find(original_query).limit(10).to_list()
+ original_events = await EventDocument.find(original_query).limit(10).to_list()
execution_ids = set()
for event in original_events:
@@ -315,13 +310,11 @@ async def get_replay_status_with_progress(self, session_id: str) -> ReplaySessio
execution_results=execution_results,
)
- async def count_events_for_replay(self, replay_query: ReplayQuery) -> int:
- conditions = self._replay_conditions_for_store(replay_query)
- return await EventStoreDocument.find(*conditions).count()
+ async def count_events_for_replay(self, replay_filter: ReplayFilter) -> int:
+ return await EventDocument.find(replay_filter.to_mongo_query()).count()
- async def get_events_preview_for_replay(self, replay_query: ReplayQuery, limit: int = 100) -> list[EventSummary]:
- conditions = self._replay_conditions_for_store(replay_query)
- docs = await EventStoreDocument.find(*conditions).limit(limit).to_list()
+ async def get_events_preview_for_replay(self, replay_filter: ReplayFilter, limit: int = 100) -> list[EventSummary]:
+ docs = await EventDocument.find(replay_filter.to_mongo_query()).limit(limit).to_list()
return [
EventSummary(
event_id=doc.event_id,
@@ -333,9 +326,9 @@ async def get_events_preview_for_replay(self, replay_query: ReplayQuery, limit:
]
async def prepare_replay_session(
- self, replay_query: ReplayQuery, dry_run: bool, replay_correlation_id: str, max_events: int = 1000
+ self, replay_filter: ReplayFilter, dry_run: bool, replay_correlation_id: str, max_events: int = 1000
) -> ReplaySessionData:
- event_count = await self.count_events_for_replay(replay_query)
+ event_count = await self.count_events_for_replay(replay_filter)
if event_count == 0:
raise ValueError("No events found matching the criteria")
if event_count > max_events and not dry_run:
@@ -343,32 +336,27 @@ async def prepare_replay_session(
events_preview: list[EventSummary] = []
if dry_run:
- events_preview = await self.get_events_preview_for_replay(replay_query, limit=100)
+ events_preview = await self.get_events_preview_for_replay(replay_filter, limit=100)
return ReplaySessionData(
total_events=event_count,
replay_correlation_id=replay_correlation_id,
dry_run=dry_run,
- query=replay_query,
+ filter=replay_filter,
events_preview=events_preview,
)
async def get_replay_events_preview(
self, event_ids: list[str] | None = None, correlation_id: str | None = None, aggregate_id: str | None = None
) -> dict[str, Any]:
- replay_query = ReplayQuery(event_ids=event_ids, correlation_id=correlation_id, aggregate_id=aggregate_id)
- conditions = self._replay_conditions_for_store(replay_query)
+ replay_filter = ReplayFilter(event_ids=event_ids, correlation_id=correlation_id, aggregate_id=aggregate_id)
+ query = replay_filter.to_mongo_query()
- if not conditions:
+ if not query:
return {"events": [], "total": 0}
- total = await EventStoreDocument.find(*conditions).count()
- docs = (
- await EventStoreDocument.find(*conditions)
- .sort([("timestamp", SortDirection.ASCENDING)])
- .limit(100)
- .to_list()
- )
+ total = await EventDocument.find(query).count()
+ docs = await EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).limit(100).to_list()
events = [doc.model_dump() for doc in docs]
return {"events": events, "total": total}
diff --git a/backend/app/db/repositories/event_repository.py b/backend/app/db/repositories/event_repository.py
index 901f72ff..b7cbc245 100644
--- a/backend/app/db/repositories/event_repository.py
+++ b/backend/app/db/repositories/event_repository.py
@@ -11,7 +11,6 @@
from app.db.docs import EventArchiveDocument, EventDocument
from app.domain.enums.events import EventType
from app.domain.events import Event
-from app.domain.events import EventMetadata as DomainEventMetadata
from app.domain.events.event_models import (
ArchivedEvent,
EventAggregationResult,
@@ -39,8 +38,6 @@ def _build_time_filter(self, start_time: datetime | None, end_time: datetime | N
async def store_event(self, event: Event) -> str:
data = asdict(event)
- meta = event.metadata.model_dump() if hasattr(event.metadata, "model_dump") else asdict(event.metadata)
- data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in meta.items()}
if not data.get("stored_at"):
data["stored_at"] = datetime.now(timezone.utc)
# Remove None values so EventDocument defaults can apply (e.g., ttl_expires_at)
@@ -65,8 +62,6 @@ async def store_events_batch(self, events: list[Event]) -> list[str]:
docs = []
for event in events:
data = asdict(event)
- meta = event.metadata.model_dump() if hasattr(event.metadata, "model_dump") else asdict(event.metadata)
- data["metadata"] = {k: (v.value if hasattr(v, "value") else v) for k, v in meta.items()}
if not data.get("stored_at"):
data["stored_at"] = now
# Remove None values so EventDocument defaults can apply
@@ -81,9 +76,7 @@ async def get_event(self, event_id: str) -> Event | None:
doc = await EventDocument.find_one({"event_id": event_id})
if not doc:
return None
- data = doc.model_dump(exclude={"id", "revision_id"})
- data["metadata"] = DomainEventMetadata(**data["metadata"])
- return Event(**data)
+ return Event(**doc.model_dump(exclude={"id", "revision_id"}))
async def get_events_by_type(
self,
@@ -104,15 +97,7 @@ async def get_events_by_type(
.limit(limit)
.to_list()
)
- return [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
+ return [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
async def get_events_by_aggregate(
self, aggregate_id: str, event_types: list[EventType] | None = None, limit: int = 100
@@ -125,29 +110,13 @@ async def get_events_by_aggregate(
docs = (
await EventDocument.find(*conditions).sort([("timestamp", SortDirection.ASCENDING)]).limit(limit).to_list()
)
- return [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
+ return [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
async def get_events_by_correlation(self, correlation_id: str, limit: int = 100, skip: int = 0) -> EventListResult:
query = EventDocument.find(EventDocument.metadata.correlation_id == correlation_id)
total_count = await query.count()
docs = await query.sort([("timestamp", SortDirection.ASCENDING)]).skip(skip).limit(limit).to_list()
- events = [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
+ events = [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
return EventListResult(
events=events,
total=total_count,
@@ -178,15 +147,7 @@ async def get_events_by_user(
.limit(limit)
.to_list()
)
- return [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
+ return [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
async def get_execution_events(
self, execution_id: str, limit: int = 100, skip: int = 0, exclude_system_events: bool = False
@@ -202,15 +163,7 @@ async def get_execution_events(
query = EventDocument.find(*conditions)
total_count = await query.count()
docs = await query.sort([("timestamp", SortDirection.ASCENDING)]).skip(skip).limit(limit).to_list()
- events = [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
+ events = [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
return EventListResult(
events=events,
total=total_count,
@@ -328,16 +281,7 @@ async def get_user_events_paginated(
total_count = await query.count()
sort_direction = SortDirection.DESCENDING if sort_order == "desc" else SortDirection.ASCENDING
docs = await query.sort([("timestamp", sort_direction)]).skip(skip).limit(limit).to_list()
- events = [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
-
+ events = [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
return EventListResult(
events=events,
total=total_count,
@@ -360,15 +304,7 @@ async def query_events(
cursor = EventDocument.find(query)
total_count = await cursor.count()
docs = await cursor.sort([(sort_field, SortDirection.DESCENDING)]).skip(skip).limit(limit).to_list()
- events = [
- Event(
- **{
- **d.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**d.metadata.model_dump()),
- }
- )
- for d in docs
- ]
+ events = [Event(**d.model_dump(exclude={"id", "revision_id"})) for d in docs]
return EventListResult(
events=events, total=total_count, skip=skip, limit=limit, has_more=(skip + limit) < total_count
)
@@ -418,10 +354,7 @@ async def delete_event_with_archival(
await archived_doc.insert()
await doc.delete()
return ArchivedEvent(
- **{
- **doc.model_dump(exclude={"id", "revision_id"}),
- "metadata": DomainEventMetadata(**doc.metadata.model_dump()),
- },
+ **doc.model_dump(exclude={"id", "revision_id"}),
deleted_at=deleted_at,
deleted_by=deleted_by,
deletion_reason=deletion_reason,
@@ -448,7 +381,7 @@ async def get_aggregate_replay_info(self, aggregate_id: str) -> EventReplayInfo
]
async for doc in EventDocument.aggregate(pipeline):
- events = [Event(**{**e, "metadata": DomainEventMetadata(**e["metadata"])}) for e in doc["events"]]
+ events = [Event(**e) for e in doc["events"]]
return EventReplayInfo(
events=events,
event_count=doc["event_count"],
diff --git a/backend/app/db/repositories/execution_repository.py b/backend/app/db/repositories/execution_repository.py
index f0a8fcb6..e7b62e67 100644
--- a/backend/app/db/repositories/execution_repository.py
+++ b/backend/app/db/repositories/execution_repository.py
@@ -11,7 +11,6 @@
DomainExecutionCreate,
DomainExecutionUpdate,
ExecutionResultDomain,
- ResourceUsageDomain,
)
@@ -24,14 +23,7 @@ async def create_execution(self, create_data: DomainExecutionCreate) -> DomainEx
self.logger.info("Inserting execution into MongoDB", extra={"execution_id": doc.execution_id})
await doc.insert()
self.logger.info("Inserted execution", extra={"execution_id": doc.execution_id})
- return DomainExecution(
- **{
- **doc.model_dump(exclude={"id"}),
- "resource_usage": ResourceUsageDomain(**doc.resource_usage.model_dump())
- if doc.resource_usage
- else None,
- }
- )
+ return DomainExecution(**doc.model_dump(exclude={"id"}))
async def get_execution(self, execution_id: str) -> DomainExecution | None:
self.logger.info("Searching for execution in MongoDB", extra={"execution_id": execution_id})
@@ -41,14 +33,7 @@ async def get_execution(self, execution_id: str) -> DomainExecution | None:
return None
self.logger.info("Found execution in MongoDB", extra={"execution_id": execution_id})
- return DomainExecution(
- **{
- **doc.model_dump(exclude={"id"}),
- "resource_usage": ResourceUsageDomain(**doc.resource_usage.model_dump())
- if doc.resource_usage
- else None,
- }
- )
+ return DomainExecution(**doc.model_dump(exclude={"id"}))
async def update_execution(self, execution_id: str, update_data: DomainExecutionUpdate) -> bool:
doc = await ExecutionDocument.find_one({"execution_id": execution_id})
@@ -93,17 +78,7 @@ async def get_executions(
]
find_query = find_query.sort(beanie_sort)
docs = await find_query.skip(skip).limit(limit).to_list()
- return [
- DomainExecution(
- **{
- **doc.model_dump(exclude={"id"}),
- "resource_usage": ResourceUsageDomain(**doc.resource_usage.model_dump())
- if doc.resource_usage
- else None,
- }
- )
- for doc in docs
- ]
+ return [DomainExecution(**doc.model_dump(exclude={"id"})) for doc in docs]
async def count_executions(self, query: dict[str, Any]) -> int:
return await ExecutionDocument.find(query).count()
diff --git a/backend/app/db/repositories/replay_repository.py b/backend/app/db/repositories/replay_repository.py
index e2c07846..387f489a 100644
--- a/backend/app/db/repositories/replay_repository.py
+++ b/backend/app/db/repositories/replay_repository.py
@@ -6,10 +6,10 @@
from beanie.odm.enums import SortDirection
from beanie.operators import LT, In
-from app.db.docs import EventStoreDocument, ReplaySessionDocument
+from app.db.docs import EventDocument, ReplaySessionDocument
from app.domain.admin.replay_updates import ReplaySessionUpdate
from app.domain.enums.replay import ReplayStatus
-from app.domain.replay.models import ReplayConfig, ReplayFilter, ReplaySessionState
+from app.domain.replay.models import ReplayFilter, ReplaySessionState
class ReplayRepository:
@@ -18,10 +18,7 @@ def __init__(self, logger: logging.Logger) -> None:
async def save_session(self, session: ReplaySessionState) -> None:
existing = await ReplaySessionDocument.find_one({"session_id": session.session_id})
- data = asdict(session)
- # config is a Pydantic model, convert to dict for document
- data["config"] = session.config.model_dump()
- doc = ReplaySessionDocument(**data)
+ doc = ReplaySessionDocument(**asdict(session))
if existing:
doc.id = existing.id
await doc.save()
@@ -30,9 +27,7 @@ async def get_session(self, session_id: str) -> ReplaySessionState | None:
doc = await ReplaySessionDocument.find_one({"session_id": session_id})
if not doc:
return None
- data = doc.model_dump(exclude={"id", "revision_id"})
- data["config"] = ReplayConfig.model_validate(data["config"])
- return ReplaySessionState(**data)
+ return ReplaySessionState(**doc.model_dump(exclude={"id", "revision_id"}))
async def list_sessions(
self, status: ReplayStatus | None = None, user_id: str | None = None, limit: int = 100, skip: int = 0
@@ -49,12 +44,7 @@ async def list_sessions(
.limit(limit)
.to_list()
)
- results = []
- for doc in docs:
- data = doc.model_dump(exclude={"id", "revision_id"})
- data["config"] = ReplayConfig.model_validate(data["config"])
- results.append(ReplaySessionState(**data))
- return results
+ return [ReplaySessionState(**doc.model_dump(exclude={"id", "revision_id"})) for doc in docs]
async def update_session_status(self, session_id: str, status: ReplayStatus) -> bool:
doc = await ReplaySessionDocument.find_one({"session_id": session_id})
@@ -91,17 +81,19 @@ async def update_replay_session(self, session_id: str, updates: ReplaySessionUpd
async def count_events(self, replay_filter: ReplayFilter) -> int:
query = replay_filter.to_mongo_query()
- return await EventStoreDocument.find(query).count()
+ return await EventDocument.find(query).count()
async def fetch_events(
self, replay_filter: ReplayFilter, batch_size: int = 100, skip: int = 0
) -> AsyncIterator[list[dict[str, Any]]]:
query = replay_filter.to_mongo_query()
- cursor = EventStoreDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).skip(skip)
+ cursor = EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).skip(skip)
batch = []
async for doc in cursor:
- batch.append(doc.model_dump(exclude={"id", "revision_id", "stored_at"}))
+ # Merge payload to top level for schema_registry deserialization
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ batch.append({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})})
if len(batch) >= batch_size:
yield batch
batch = []
diff --git a/backend/app/db/repositories/sse_repository.py b/backend/app/db/repositories/sse_repository.py
index 1c46dbe2..0979842d 100644
--- a/backend/app/db/repositories/sse_repository.py
+++ b/backend/app/db/repositories/sse_repository.py
@@ -1,7 +1,7 @@
from datetime import datetime, timezone
from app.db.docs import ExecutionDocument
-from app.domain.execution import DomainExecution, ResourceUsageDomain
+from app.domain.execution import DomainExecution
from app.domain.sse import SSEExecutionStatusDomain
@@ -20,11 +20,4 @@ async def get_execution(self, execution_id: str) -> DomainExecution | None:
doc = await ExecutionDocument.find_one({"execution_id": execution_id})
if not doc:
return None
- return DomainExecution(
- **{
- **doc.model_dump(exclude={"id", "revision_id"}),
- "resource_usage": ResourceUsageDomain(**doc.resource_usage.model_dump())
- if doc.resource_usage
- else None,
- }
- )
+ return DomainExecution(**doc.model_dump(exclude={"id", "revision_id"}))
diff --git a/backend/app/domain/admin/__init__.py b/backend/app/domain/admin/__init__.py
index beb7ab03..4912ec4d 100644
--- a/backend/app/domain/admin/__init__.py
+++ b/backend/app/domain/admin/__init__.py
@@ -4,7 +4,6 @@
RateLimitSummaryDomain,
)
from .replay_models import (
- ReplayQuery,
ReplaySessionData,
ReplaySessionStatusDetail,
ReplaySessionStatusInfo,
@@ -33,7 +32,6 @@
"SystemSettings",
"AuditLogEntry",
# Replay
- "ReplayQuery",
"ReplaySessionData",
"ReplaySessionStatusDetail",
"ReplaySessionStatusInfo",
diff --git a/backend/app/domain/admin/replay_models.py b/backend/app/domain/admin/replay_models.py
index 44d7d79c..b20d1762 100644
--- a/backend/app/domain/admin/replay_models.py
+++ b/backend/app/domain/admin/replay_models.py
@@ -6,7 +6,7 @@
from app.domain.enums.replay import ReplayStatus
from app.domain.events.event_models import EventSummary
-from app.domain.replay.models import ReplaySessionState
+from app.domain.replay.models import ReplayFilter, ReplaySessionState
@dataclass
@@ -36,18 +36,6 @@ class ReplaySessionStatusInfo:
progress_percentage: float = 0.0
-@dataclass
-class ReplayQuery:
- event_ids: list[str] | None = None
- correlation_id: str | None = None
- aggregate_id: str | None = None
- start_time: datetime | None = None
- end_time: datetime | None = None
-
- def is_empty(self) -> bool:
- return not any([self.event_ids, self.correlation_id, self.aggregate_id, self.start_time, self.end_time])
-
-
@dataclass
class ReplaySessionData:
"""Unified replay session data for both preview and actual replay."""
@@ -55,5 +43,5 @@ class ReplaySessionData:
total_events: int
replay_correlation_id: str
dry_run: bool
- query: ReplayQuery
+ filter: ReplayFilter
events_preview: list[EventSummary] = field(default_factory=list)
diff --git a/backend/app/domain/replay/models.py b/backend/app/domain/replay/models.py
index 17e241b3..429df321 100644
--- a/backend/app/domain/replay/models.py
+++ b/backend/app/domain/replay/models.py
@@ -11,20 +11,55 @@
class ReplayFilter(BaseModel):
+ # Event selection filters
+ event_ids: List[str] | None = None
execution_id: str | None = None
+ correlation_id: str | None = None
+ aggregate_id: str | None = None
event_types: List[EventType] | None = None
+ exclude_event_types: List[EventType] | None = None
+
+ # Time range
start_time: datetime | None = None
end_time: datetime | None = None
+
+ # Metadata filters
user_id: str | None = None
service_name: str | None = None
+
+ # Escape hatch for complex queries
custom_query: Dict[str, Any] | None = None
- exclude_event_types: List[EventType] | None = None
+
+ def is_empty(self) -> bool:
+ return not any(
+ [
+ self.event_ids,
+ self.execution_id,
+ self.correlation_id,
+ self.aggregate_id,
+ self.event_types,
+ self.start_time,
+ self.end_time,
+ self.user_id,
+ self.service_name,
+ self.custom_query,
+ ]
+ )
def to_mongo_query(self) -> Dict[str, Any]:
query: Dict[str, Any] = {}
+ if self.event_ids:
+ query["event_id"] = {"$in": self.event_ids}
+
if self.execution_id:
- query["execution_id"] = str(self.execution_id)
+ query["payload.execution_id"] = str(self.execution_id)
+
+ if self.correlation_id:
+ query["metadata.correlation_id"] = self.correlation_id
+
+ if self.aggregate_id:
+ query["aggregate_id"] = self.aggregate_id
if self.event_types:
query["event_type"] = {"$in": [str(et) for et in self.event_types]}
diff --git a/backend/app/events/event_store.py b/backend/app/events/event_store.py
index 491d2c90..7605dca8 100644
--- a/backend/app/events/event_store.py
+++ b/backend/app/events/event_store.py
@@ -1,8 +1,8 @@
import asyncio
import logging
from collections.abc import Awaitable, Callable
-from datetime import datetime, timezone
-from typing import Any, Dict, List
+from datetime import datetime, timedelta, timezone
+from typing import Any
from beanie.odm.enums import SortDirection
from pymongo.errors import BulkWriteError, DuplicateKeyError
@@ -10,12 +10,14 @@
from app.core.metrics.context import get_event_metrics
from app.core.tracing import EventAttributes
from app.core.tracing.utils import add_span_attributes
-from app.db.docs import EventStoreDocument
-from app.db.docs.event import EventMetadata
+from app.db.docs import EventDocument
from app.domain.enums.events import EventType
from app.events.schema.schema_registry import SchemaRegistryManager
from app.infrastructure.kafka.events.base import BaseEvent
+# Base fields stored at document level (everything else goes into payload)
+_BASE_FIELDS = {"event_id", "event_type", "event_version", "timestamp", "aggregate_id", "metadata"}
+
class EventStore:
def __init__(
@@ -32,7 +34,6 @@ def __init__(
self.batch_size = batch_size
self._initialized = False
- self._PROJECTION = {"stored_at": 0, "_id": 0}
self._SECURITY_TYPES = [
EventType.USER_LOGIN,
EventType.USER_LOGGED_OUT,
@@ -42,43 +43,16 @@ def __init__(
async def initialize(self) -> None:
if self._initialized:
return
- # Beanie handles index creation via Document.Settings.indexes
self._initialized = True
self.logger.info("Event store initialized with Beanie")
- def _event_to_doc(self, event: BaseEvent) -> EventStoreDocument:
- """Convert BaseEvent to EventStoreDocument."""
- event_dict = event.model_dump()
- metadata_dict = event_dict.pop("metadata", {})
- metadata = EventMetadata(**metadata_dict)
- base_fields = set(BaseEvent.model_fields.keys())
- payload = {k: v for k, v in event_dict.items() if k not in base_fields}
-
- return EventStoreDocument(
- event_id=event.event_id,
- event_type=event.event_type,
- event_version=event.event_version,
- timestamp=event.timestamp,
- aggregate_id=event.aggregate_id,
- metadata=metadata,
- payload=payload,
- stored_at=datetime.now(timezone.utc),
- )
-
- def _doc_to_dict(self, doc: EventStoreDocument) -> Dict[str, Any]:
- """Convert EventStoreDocument to dict for schema_registry deserialization."""
- result: Dict[str, Any] = doc.model_dump(exclude={"id", "revision_id", "stored_at"})
- # Ensure metadata is a dict for schema_registry
- if isinstance(result.get("metadata"), dict):
- pass # Already a dict
- elif hasattr(result.get("metadata"), "model_dump"):
- result["metadata"] = result["metadata"].model_dump()
- return result
-
async def store_event(self, event: BaseEvent) -> bool:
start = asyncio.get_event_loop().time()
try:
- doc = self._event_to_doc(event)
+ now = datetime.now(timezone.utc)
+ data = event.model_dump(exclude={"topic"})
+ payload = {k: data.pop(k) for k in list(data) if k not in _BASE_FIELDS}
+ doc = EventDocument(**data, payload=payload, stored_at=now, ttl_expires_at=now + timedelta(days=self.ttl_days))
await doc.insert()
add_span_attributes(
@@ -101,22 +75,27 @@ async def store_event(self, event: BaseEvent) -> bool:
self.metrics.record_event_store_failed(event.event_type, type(e).__name__)
return False
- async def store_batch(self, events: List[BaseEvent]) -> Dict[str, int]:
+ async def store_batch(self, events: list[BaseEvent]) -> dict[str, int]:
start = asyncio.get_event_loop().time()
results = {"total": len(events), "stored": 0, "duplicates": 0, "failed": 0}
if not events:
return results
+ now = datetime.now(timezone.utc)
+ ttl = now + timedelta(days=self.ttl_days)
try:
- docs = [self._event_to_doc(e) for e in events]
+ docs = []
+ for e in events:
+ data = e.model_dump(exclude={"topic"})
+ payload = {k: data.pop(k) for k in list(data) if k not in _BASE_FIELDS}
+ docs.append(EventDocument(**data, payload=payload, stored_at=now, ttl_expires_at=ttl))
try:
- await EventStoreDocument.insert_many(docs)
+ await EventDocument.insert_many(docs)
results["stored"] = len(docs)
except Exception as e:
if isinstance(e, BulkWriteError) and e.details:
- errs = e.details.get("writeErrors", [])
- for err in errs:
+ for err in e.details.get("writeErrors", []):
if err.get("code") == 11000:
results["duplicates"] += 1
else:
@@ -139,12 +118,12 @@ async def store_batch(self, events: List[BaseEvent]) -> Dict[str, int]:
async def get_event(self, event_id: str) -> BaseEvent | None:
start = asyncio.get_event_loop().time()
- doc = await EventStoreDocument.find_one({"event_id": event_id})
+ doc = await EventDocument.find_one({"event_id": event_id})
if not doc:
return None
- event_dict = self._doc_to_dict(doc)
- event = self.schema_registry.deserialize_json(event_dict)
+ data = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ event = self.schema_registry.deserialize_json({**{k: v for k, v in data.items() if k != "payload"}, **data.get("payload", {})})
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_by_id", "event_store")
@@ -157,20 +136,23 @@ async def get_events_by_type(
end_time: datetime | None = None,
limit: int = 100,
offset: int = 0,
- ) -> List[BaseEvent]:
+ ) -> list[BaseEvent]:
start = asyncio.get_event_loop().time()
- query: Dict[str, Any] = {"event_type": event_type}
+ query: dict[str, Any] = {"event_type": event_type}
if tr := self._time_range(start_time, end_time):
query["timestamp"] = tr
docs = await (
- EventStoreDocument.find(query)
+ EventDocument.find(query)
.sort([("timestamp", SortDirection.DESCENDING)])
.skip(offset)
.limit(limit)
.to_list()
)
- events = [self.schema_registry.deserialize_json(self._doc_to_dict(d)) for d in docs]
+ events = []
+ for doc in docs:
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_by_type", "event_store")
@@ -179,15 +161,20 @@ async def get_events_by_type(
async def get_execution_events(
self,
execution_id: str,
- event_types: List[EventType] | None = None,
- ) -> List[BaseEvent]:
+ event_types: list[EventType] | None = None,
+ ) -> list[BaseEvent]:
start = asyncio.get_event_loop().time()
- query: Dict[str, Any] = {"execution_id": execution_id}
+ query: dict[str, Any] = {
+ "$or": [{"payload.execution_id": execution_id}, {"aggregate_id": execution_id}]
+ }
if event_types:
query["event_type"] = {"$in": event_types}
- docs = await EventStoreDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).to_list()
- events = [self.schema_registry.deserialize_json(self._doc_to_dict(d)) for d in docs]
+ docs = await EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).to_list()
+ events = []
+ for doc in docs:
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_execution_events", "event_store")
@@ -196,22 +183,23 @@ async def get_execution_events(
async def get_user_events(
self,
user_id: str,
- event_types: List[EventType] | None = None,
+ event_types: list[EventType] | None = None,
start_time: datetime | None = None,
end_time: datetime | None = None,
limit: int = 100,
- ) -> List[BaseEvent]:
+ ) -> list[BaseEvent]:
start = asyncio.get_event_loop().time()
- query: Dict[str, Any] = {"metadata.user_id": str(user_id)}
+ query: dict[str, Any] = {"metadata.user_id": str(user_id)}
if event_types:
query["event_type"] = {"$in": event_types}
if tr := self._time_range(start_time, end_time):
query["timestamp"] = tr
- docs = (
- await EventStoreDocument.find(query).sort([("timestamp", SortDirection.DESCENDING)]).limit(limit).to_list()
- )
- events = [self.schema_registry.deserialize_json(self._doc_to_dict(d)) for d in docs]
+ docs = await EventDocument.find(query).sort([("timestamp", SortDirection.DESCENDING)]).limit(limit).to_list()
+ events = []
+ for doc in docs:
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_user_events", "event_store")
@@ -223,31 +211,35 @@ async def get_security_events(
end_time: datetime | None = None,
user_id: str | None = None,
limit: int = 100,
- ) -> List[BaseEvent]:
+ ) -> list[BaseEvent]:
start = asyncio.get_event_loop().time()
- query: Dict[str, Any] = {"event_type": {"$in": self._SECURITY_TYPES}}
+ query: dict[str, Any] = {"event_type": {"$in": self._SECURITY_TYPES}}
if user_id:
query["metadata.user_id"] = str(user_id)
if tr := self._time_range(start_time, end_time):
query["timestamp"] = tr
- docs = (
- await EventStoreDocument.find(query).sort([("timestamp", SortDirection.DESCENDING)]).limit(limit).to_list()
- )
- events = [self.schema_registry.deserialize_json(self._doc_to_dict(d)) for d in docs]
+ docs = await EventDocument.find(query).sort([("timestamp", SortDirection.DESCENDING)]).limit(limit).to_list()
+ events = []
+ for doc in docs:
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_security_events", "event_store")
return events
- async def get_correlation_chain(self, correlation_id: str) -> List[BaseEvent]:
+ async def get_correlation_chain(self, correlation_id: str) -> list[BaseEvent]:
start = asyncio.get_event_loop().time()
docs = await (
- EventStoreDocument.find({"metadata.correlation_id": str(correlation_id)})
+ EventDocument.find({"metadata.correlation_id": str(correlation_id)})
.sort([("timestamp", SortDirection.ASCENDING)])
.to_list()
)
- events = [self.schema_registry.deserialize_json(self._doc_to_dict(d)) for d in docs]
+ events = []
+ for doc in docs:
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_correlation_chain", "event_store")
@@ -257,22 +249,22 @@ async def replay_events(
self,
start_time: datetime,
end_time: datetime | None = None,
- event_types: List[EventType] | None = None,
+ event_types: list[EventType] | None = None,
callback: Callable[[BaseEvent], Awaitable[None]] | None = None,
) -> int:
start = asyncio.get_event_loop().time()
count = 0
try:
- query: Dict[str, Any] = {"timestamp": {"$gte": start_time}}
+ query: dict[str, Any] = {"timestamp": {"$gte": start_time}}
if end_time:
query["timestamp"]["$lte"] = end_time
if event_types:
query["event_type"] = {"$in": event_types}
- async for doc in EventStoreDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]):
- event_dict = self._doc_to_dict(doc)
- event = self.schema_registry.deserialize_json(event_dict)
+ async for doc in EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]):
+ d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+ event = self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})})
if callback:
await callback(event)
count += 1
@@ -289,10 +281,10 @@ async def get_event_stats(
self,
start_time: datetime | None = None,
end_time: datetime | None = None,
- ) -> Dict[str, Any]:
- pipeline: List[Dict[str, Any]] = []
+ ) -> dict[str, Any]:
+ pipeline: list[dict[str, Any]] = []
if start_time or end_time:
- match: Dict[str, Any] = {}
+ match: dict[str, Any] = {}
if start_time:
match["timestamp"] = {"$gte": start_time}
if end_time:
@@ -313,8 +305,8 @@ async def get_event_stats(
]
)
- stats: Dict[str, Any] = {"total_events": 0, "event_types": {}, "start_time": start_time, "end_time": end_time}
- async for r in EventStoreDocument.aggregate(pipeline):
+ stats: dict[str, Any] = {"total_events": 0, "event_types": {}, "start_time": start_time, "end_time": end_time}
+ async for r in EventDocument.aggregate(pipeline):
et = r["_id"]
c = r["count"]
stats["event_types"][et] = {
@@ -325,23 +317,23 @@ async def get_event_stats(
stats["total_events"] += c
return stats
- def _time_range(self, start_time: datetime | None, end_time: datetime | None) -> Dict[str, Any] | None:
+ def _time_range(self, start_time: datetime | None, end_time: datetime | None) -> dict[str, Any] | None:
if not start_time and not end_time:
return None
- tr: Dict[str, Any] = {}
+ tr: dict[str, Any] = {}
if start_time:
tr["$gte"] = start_time
if end_time:
tr["$lte"] = end_time
return tr
- async def health_check(self) -> Dict[str, Any]:
+ async def health_check(self) -> dict[str, Any]:
try:
- event_count = await EventStoreDocument.count()
+ event_count = await EventDocument.count()
return {
"healthy": True,
"event_count": event_count,
- "collection": "event_store",
+ "collection": "events",
"initialized": self._initialized,
}
except Exception as e:
diff --git a/backend/app/services/admin/admin_events_service.py b/backend/app/services/admin/admin_events_service.py
index 684ab7d7..bbe6e442 100644
--- a/backend/app/services/admin/admin_events_service.py
+++ b/backend/app/services/admin/admin_events_service.py
@@ -9,7 +9,7 @@
from beanie.odm.enums import SortDirection
from app.db.repositories.admin import AdminEventsRepository
-from app.domain.admin import ReplayQuery, ReplaySessionStatusDetail
+from app.domain.admin import ReplaySessionStatusDetail
from app.domain.admin.replay_updates import ReplaySessionUpdate
from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType
from app.domain.events.event_models import (
@@ -95,12 +95,12 @@ async def get_event_stats(self, *, hours: int) -> EventStatistics:
async def prepare_or_schedule_replay(
self,
*,
- replay_query: ReplayQuery,
+ replay_filter: ReplayFilter,
dry_run: bool,
replay_correlation_id: str,
target_service: str | None,
) -> AdminReplayResult:
- if replay_query.is_empty():
+ if replay_filter.is_empty():
raise ValueError("Must specify at least one filter for replay")
# Prepare and optionally preview
@@ -112,7 +112,7 @@ async def prepare_or_schedule_replay(
},
)
session_data = await self._repo.prepare_replay_session(
- replay_query=replay_query,
+ replay_filter=replay_filter,
dry_run=dry_run,
replay_correlation_id=replay_correlation_id,
max_events=1000,
@@ -145,11 +145,7 @@ async def prepare_or_schedule_replay(
)
return result
- # Build config for actual replay and create session via replay service
- replay_filter = ReplayFilter(
- start_time=replay_query.start_time,
- end_time=replay_query.end_time,
- )
+ # Build config for actual replay - filter is already unified, just pass it
config = ReplayConfig(
replay_type=ReplayType.QUERY,
target=ReplayTarget.KAFKA if target_service else ReplayTarget.TEST,
diff --git a/docs/architecture/event-storage.md b/docs/architecture/event-storage.md
index 287aa8da..800938ca 100644
--- a/docs/architecture/event-storage.md
+++ b/docs/architecture/event-storage.md
@@ -1,100 +1,129 @@
# Event storage architecture
-## Two collections, one purpose
+## Unified events collection
+
+The system stores all events in a single `events` MongoDB collection using `EventDocument`. This provides a unified
+approach where all event data—whether from Kafka consumers, API operations, or pod monitors—flows into one collection
+with consistent structure.
+
+## EventDocument structure
+
+`EventDocument` uses a flexible payload pattern:
+
+```python
+class EventDocument(Document):
+ event_id: str # Unique event identifier
+ event_type: EventType # Typed event classification
+ event_version: str # Schema version
+ timestamp: datetime # When event occurred
+ aggregate_id: str # Related entity (e.g., execution_id)
+ metadata: EventMetadata # Service info, correlation, user context
+ payload: dict[str, Any] # Event-specific data (flexible)
+ stored_at: datetime # When stored in MongoDB
+ ttl_expires_at: datetime # Auto-expiration time
+```
+
+**Base fields** (`event_id`, `event_type`, `event_version`, `timestamp`, `aggregate_id`, `metadata`) are stored at the
+document level for efficient indexing.
-The system maintains *two separate MongoDB collections* for events: `events` and `event_store`. This implements a hybrid CQRS pattern where writes and reads are optimized for different use cases.
+**Event-specific fields** go into the `payload` dict, allowing different event types to carry different data without
+schema changes.
-## EventDocument vs EventStoreDocument
+## Storage pattern
-**event_store** is the system's *permanent audit log* — an immutable append-only record of everything that happened:
+When storing events, base fields stay at the top level while everything else goes into the payload:
-- Sourced from Kafka via `EventStoreConsumer`
-- No TTL — events persist indefinitely
-- Used for replay, compliance, and forensics
-- Single writer: the event store consumer
+```python
+_BASE_FIELDS = {"event_id", "event_type", "event_version", "timestamp", "aggregate_id", "metadata"}
-**events** is an *operational projection* — a working copy optimized for day-to-day queries:
+data = event.model_dump(exclude={"topic"})
+payload = {k: data.pop(k) for k in list(data) if k not in _BASE_FIELDS}
+doc = EventDocument(**data, payload=payload, stored_at=now, ttl_expires_at=ttl)
+```
-- Sourced from application code via `KafkaEventService`
-- 30-day TTL — old events expire automatically
-- Used for admin dashboards, user-facing queries, analytics
-- Written by any service publishing events
+## Query pattern
-Both collections share identical schemas. The difference is *retention and purpose*.
+For typed deserialization, flatten the payload to the top level inline:
-## Write flow
+```python
+d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
+flat = {**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}
+event = schema_registry.deserialize_json(flat)
+```
+
+For MongoDB queries, access payload fields with dot notation:
+
+```python
+query["payload.execution_id"] = execution_id
+query["metadata.correlation_id"] = correlation_id
+```
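+
+A minimal sketch combining both patterns (the same `$or` lookup `EventStore.get_execution_events` uses), matching either
+the flattened payload field or the aggregate id:
+
+```python
+from beanie.odm.enums import SortDirection
+
+from app.db.docs import EventDocument
+
+
+async def events_for_execution(execution_id: str) -> list[dict]:
+    # Match either the payload field or the aggregate id, then order chronologically.
+    query = {"$or": [{"payload.execution_id": execution_id}, {"aggregate_id": execution_id}]}
+    docs = await EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).to_list()
+    return [doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"}) for doc in docs]
+```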
-When application code publishes an event, it flows through two paths:
+## Write flow
```mermaid
graph TD
App[Application Code] --> KES[KafkaEventService.publish_event]
- KES --> ER[EventRepository.store_event]
- ER --> Events[(events collection)]
+ KES --> ES[EventStore.store_event]
+ ES --> Events[(events collection)]
KES --> Producer[UnifiedProducer]
Producer --> Kafka[(Kafka)]
Kafka --> ESC[EventStoreConsumer]
- ESC --> ES[EventStore.store_batch]
- ES --> EventStore[(event_store collection)]
+ ESC --> ES
```
-1. `KafkaEventService.publish_event()` stores to `events` collection AND publishes to Kafka
-2. `EventStoreConsumer` consumes from Kafka and stores to `event_store` collection
-
-This dual-write ensures:
-
-- **Immediate availability**: Events appear in `events` instantly for operational queries
-- **Permanent record**: Events flow through Kafka to `event_store` for audit trail
-- **Decoupling**: If Kafka consumer falls behind, operational queries remain fast
+1. `KafkaEventService.publish_event()` stores to `events` AND publishes to Kafka
+2. `EventStoreConsumer` consumes from Kafka and stores to the same `events` collection
+3. Deduplication via unique `event_id` index handles double-writes gracefully
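+
+A minimal sketch of step 3, assuming the unique index on `event_id`; the real `store_event` additionally records metrics
+and tracing attributes:
+
+```python
+from pymongo.errors import DuplicateKeyError
+
+from app.db.docs import EventDocument
+
+
+async def store_once(doc: EventDocument) -> bool:
+    try:
+        await doc.insert()
+        return True   # first write wins
+    except DuplicateKeyError:
+        return False  # the other write path already stored this event_id
+```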
## Read patterns
-Different repositories query different collections based on use case:
-
-| Repository | Collection | Use Case |
-|------------|------------|----------|
-| `EventRepository` | events | User-facing queries, recent events |
-| `AdminEventsRepository` | events | Admin dashboard, analytics |
-| `EventStore` | event_store | Replay, audit, historical queries |
+All repositories query the same `events` collection:
-The admin console and user-facing features query `events` for fast access to recent data. The event store is reserved for replay scenarios and compliance needs.
+| Repository | Use Case |
+|-------------------------|------------------------------------------------------|
+| `EventStore` | Core event operations, replay, typed deserialization |
+| `AdminEventsRepository` | Admin dashboard, analytics, browsing |
+| `ReplayRepository` | Replay session management, event streaming |
-## Why not just one collection?
+## TTL and retention
-**Storage costs**: The `events` collection with 30-day TTL keeps storage bounded. Without TTL, event volume would grow unbounded — problematic for operational queries that scan recent data.
+Events have a configurable TTL (default 90 days). A MongoDB TTL index on the `ttl_expires_at` field removes expired
+documents automatically:
-**Query performance**: Operational queries (last 24 hours, user's recent events) benefit from a smaller, indexed dataset. Scanning a years-long audit log for recent events wastes resources.
+```python
+ttl_expires_at = datetime.now(timezone.utc) + timedelta(days=self.ttl_days)
+```
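+
+The matching TTL index is sketched below with pymongo; the actual declaration is assumed to live in `EventDocument`'s
+Beanie `Settings.indexes`. With `expireAfterSeconds=0`, MongoDB deletes a document as soon as `ttl_expires_at` is in
+the past:
+
+```python
+from pymongo import ASCENDING, IndexModel
+
+# expireAfterSeconds=0: expire each document individually at its ttl_expires_at value.
+ttl_index = IndexModel([("ttl_expires_at", ASCENDING)], expireAfterSeconds=0)
+```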
-**Retention policies**: Different data has different retention requirements. Operational data can expire. Audit logs often cannot.
+For permanent audit requirements, events can be archived to `EventArchiveDocument` before deletion.
-**Failure isolation**: If the event store consumer falls behind (Kafka lag), operational queries remain unaffected. The `events` collection stays current through direct writes.
+## ReplayFilter
-## Pod monitor integration
+`ReplayFilter` provides a unified way to query events across all use cases:
-The `PodMonitor` watches Kubernetes pods and publishes lifecycle events. These events must appear in both collections:
+```python
+class ReplayFilter(BaseModel):
+ event_ids: list[str] | None = None
+ execution_id: str | None = None
+ correlation_id: str | None = None
+ aggregate_id: str | None = None
+ event_types: list[EventType] | None = None
+ start_time: datetime | None = None
+ end_time: datetime | None = None
+ user_id: str | None = None
+ service_name: str | None = None
+ custom_query: dict[str, Any] | None = None
-```mermaid
-graph LR
- K8s[Kubernetes Watch] --> PM[PodMonitor]
- PM --> KES[KafkaEventService.publish_base_event]
- KES --> Events[(events)]
- KES --> Kafka[(Kafka)]
- Kafka --> EventStore[(event_store)]
+ def to_mongo_query(self) -> dict[str, Any]:
+ # Builds MongoDB query from filter fields
```
-`PodMonitor` uses `KafkaEventService.publish_base_event()` to:
-
-1. Store pre-built events to `events` collection
-2. Publish to Kafka for downstream consumers and `event_store`
-
-This ensures pod events appear in admin dashboards immediately while maintaining the permanent audit trail.
+All event querying—admin browse, replay preview, event export—uses `ReplayFilter.to_mongo_query()` for consistency.
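+
+A minimal usage sketch (the correlation id value is illustrative), following the same pattern as
+`ReplayRepository.count_events`:
+
+```python
+from app.db.docs import EventDocument
+from app.domain.replay.models import ReplayFilter
+
+
+async def count_for_correlation(correlation_id: str) -> int:
+    replay_filter = ReplayFilter(correlation_id=correlation_id)
+    query = replay_filter.to_mongo_query()  # {"metadata.correlation_id": correlation_id}
+    return await EventDocument.find(query).count()
+```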
## Key files
-- `db/docs/event.py` — `EventDocument` and `EventStoreDocument` definitions
-- `db/repositories/event_repository.py` — operational event queries
+- `db/docs/event.py` — `EventDocument` and `EventArchiveDocument` definitions
+- `domain/replay/models.py` — `ReplayFilter`, `ReplayConfig`, `ReplaySessionState`
+- `events/event_store.py` — event storage and retrieval operations
+- `db/repositories/replay_repository.py` — replay-specific queries
- `db/repositories/admin/admin_events_repository.py` — admin dashboard queries
-- `events/event_store.py` — permanent event store operations
-- `events/event_store_consumer.py` — Kafka to event_store consumer
- `services/kafka_event_service.py` — unified publish (store + Kafka)
-- `services/pod_monitor/monitor.py` — pod lifecycle events
diff --git a/frontend/src/styles/components.css b/frontend/src/styles/components.css
index ffd324fe..9aaf1698 100644
--- a/frontend/src/styles/components.css
+++ b/frontend/src/styles/components.css
@@ -150,11 +150,11 @@
}
.modal-container {
- @apply bg-surface-overlay dark:bg-dark-surface-overlay rounded-lg shadow-xl max-w-4xl w-full max-h-[95vh] sm:max-h-[90vh] overflow-hidden;
+ @apply bg-surface-overlay dark:bg-dark-surface-overlay rounded-lg shadow-xl max-w-4xl w-full max-h-[95vh] sm:max-h-[90vh] flex flex-col;
}
.modal-header {
- @apply p-4 sm:p-6 border-b border-border-default dark:border-dark-border-default flex justify-between items-center;
+ @apply p-4 sm:p-6 border-b border-border-default dark:border-dark-border-default flex justify-between items-center shrink-0;
}
.modal-title {
@@ -166,11 +166,11 @@
}
.modal-body {
- @apply p-4 sm:p-6 overflow-y-auto max-h-[calc(95vh-100px)] sm:max-h-[calc(90vh-120px)];
+ @apply p-4 sm:p-6 overflow-y-auto flex-1 min-h-0;
}
.modal-footer {
- @apply p-4 sm:p-6 border-t border-border-default dark:border-dark-border-default flex gap-3 justify-end;
+ @apply p-4 sm:p-6 border-t border-border-default dark:border-dark-border-default flex gap-3 justify-end shrink-0;
}
/* Badge/Pill Styles */
From 759a186f2b3c7454351143f8462beb00d0d3e42e Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 14:50:42 +0100
Subject: [PATCH 44/48] ruff fixes
---
backend/app/events/event_store.py | 45 ++++++++++++-------------------
1 file changed, 17 insertions(+), 28 deletions(-)
diff --git a/backend/app/events/event_store.py b/backend/app/events/event_store.py
index 7605dca8..e6b633b4 100644
--- a/backend/app/events/event_store.py
+++ b/backend/app/events/event_store.py
@@ -17,6 +17,13 @@
# Base fields stored at document level (everything else goes into payload)
_BASE_FIELDS = {"event_id", "event_type", "event_version", "timestamp", "aggregate_id", "metadata"}
+_EXCLUDE_FIELDS = {"id", "revision_id", "stored_at", "ttl_expires_at"}
+
+
+def _flatten_doc(doc: "EventDocument") -> dict[str, Any]:
+ """Flatten EventDocument payload to top level for schema registry deserialization."""
+ d = doc.model_dump(exclude=_EXCLUDE_FIELDS)
+ return {**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}
class EventStore:
@@ -52,7 +59,8 @@ async def store_event(self, event: BaseEvent) -> bool:
now = datetime.now(timezone.utc)
data = event.model_dump(exclude={"topic"})
payload = {k: data.pop(k) for k in list(data) if k not in _BASE_FIELDS}
- doc = EventDocument(**data, payload=payload, stored_at=now, ttl_expires_at=now + timedelta(days=self.ttl_days))
+ ttl = now + timedelta(days=self.ttl_days)
+ doc = EventDocument(**data, payload=payload, stored_at=now, ttl_expires_at=ttl)
await doc.insert()
add_span_attributes(
@@ -122,8 +130,7 @@ async def get_event(self, event_id: str) -> BaseEvent | None:
if not doc:
return None
- data = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- event = self.schema_registry.deserialize_json({**{k: v for k, v in data.items() if k != "payload"}, **data.get("payload", {})})
+ event = self.schema_registry.deserialize_json(_flatten_doc(doc))
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_by_id", "event_store")
@@ -149,10 +156,7 @@ async def get_events_by_type(
.limit(limit)
.to_list()
)
- events = []
- for doc in docs:
- d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
+ events = [self.schema_registry.deserialize_json(_flatten_doc(doc)) for doc in docs]
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_by_type", "event_store")
@@ -164,17 +168,12 @@ async def get_execution_events(
event_types: list[EventType] | None = None,
) -> list[BaseEvent]:
start = asyncio.get_event_loop().time()
- query: dict[str, Any] = {
- "$or": [{"payload.execution_id": execution_id}, {"aggregate_id": execution_id}]
- }
+ query: dict[str, Any] = {"$or": [{"payload.execution_id": execution_id}, {"aggregate_id": execution_id}]}
if event_types:
query["event_type"] = {"$in": event_types}
docs = await EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]).to_list()
- events = []
- for doc in docs:
- d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
+ events = [self.schema_registry.deserialize_json(_flatten_doc(doc)) for doc in docs]
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_execution_events", "event_store")
@@ -196,10 +195,7 @@ async def get_user_events(
query["timestamp"] = tr
docs = await EventDocument.find(query).sort([("timestamp", SortDirection.DESCENDING)]).limit(limit).to_list()
- events = []
- for doc in docs:
- d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
+ events = [self.schema_registry.deserialize_json(_flatten_doc(doc)) for doc in docs]
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_user_events", "event_store")
@@ -220,10 +216,7 @@ async def get_security_events(
query["timestamp"] = tr
docs = await EventDocument.find(query).sort([("timestamp", SortDirection.DESCENDING)]).limit(limit).to_list()
- events = []
- for doc in docs:
- d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
+ events = [self.schema_registry.deserialize_json(_flatten_doc(doc)) for doc in docs]
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_security_events", "event_store")
@@ -236,10 +229,7 @@ async def get_correlation_chain(self, correlation_id: str) -> list[BaseEvent]:
.sort([("timestamp", SortDirection.ASCENDING)])
.to_list()
)
- events = []
- for doc in docs:
- d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- events.append(self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})}))
+ events = [self.schema_registry.deserialize_json(_flatten_doc(doc)) for doc in docs]
duration = asyncio.get_event_loop().time() - start
self.metrics.record_event_query_duration(duration, "get_correlation_chain", "event_store")
@@ -263,8 +253,7 @@ async def replay_events(
query["event_type"] = {"$in": event_types}
async for doc in EventDocument.find(query).sort([("timestamp", SortDirection.ASCENDING)]):
- d = doc.model_dump(exclude={"id", "revision_id", "stored_at", "ttl_expires_at"})
- event = self.schema_registry.deserialize_json({**{k: v for k, v in d.items() if k != "payload"}, **d.get("payload", {})})
+ event = self.schema_registry.deserialize_json(_flatten_doc(doc))
if callback:
await callback(event)
count += 1
From 8f00ede48fab887812daafc42b754b4102d88efa Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 14:54:04 +0100
Subject: [PATCH 45/48] ruff fixes
---
backend/app/api/routes/admin/events.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/app/api/routes/admin/events.py b/backend/app/api/routes/admin/events.py
index 7e610192..1e22a218 100644
--- a/backend/app/api/routes/admin/events.py
+++ b/backend/app/api/routes/admin/events.py
@@ -11,8 +11,8 @@
from app.api.dependencies import admin_user
from app.core.correlation import CorrelationContext
from app.domain.enums.events import EventType
-from app.domain.replay import ReplayFilter
from app.domain.events.event_models import EventFilter
+from app.domain.replay import ReplayFilter
from app.schemas_pydantic.admin_events import (
EventBrowseRequest,
EventBrowseResponse,
From 473e4db41a02d41e2c031f2a412a7c11e304a619 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 17:23:10 +0100
Subject: [PATCH 46/48] user settings fixes
---
backend/app/domain/enums/events.py | 3 -
backend/app/domain/enums/kafka.py | 3 -
.../infrastructure/kafka/events/__init__.py | 6 -
.../app/infrastructure/kafka/events/base.py | 2 +-
.../app/infrastructure/kafka/events/user.py | 61 ++--
backend/app/infrastructure/kafka/mappings.py | 6 -
backend/app/infrastructure/kafka/topics.py | 24 --
backend/app/services/user_settings_service.py | 339 +++++-------------
docs/architecture/event-storage.md | 5 +
docs/architecture/pydantic-dataclasses.md | 5 +
docs/architecture/services-overview.md | 2 +-
docs/architecture/user-settings-events.md | 179 +++++++++
frontend/src/routes/Settings.svelte | 93 ++---
mkdocs.yml | 3 +
14 files changed, 364 insertions(+), 367 deletions(-)
create mode 100644 docs/architecture/user-settings-events.md
diff --git a/backend/app/domain/enums/events.py b/backend/app/domain/enums/events.py
index 624196e8..021ca6bb 100644
--- a/backend/app/domain/enums/events.py
+++ b/backend/app/domain/enums/events.py
@@ -34,9 +34,6 @@ class EventType(StringEnum):
# User settings events
USER_SETTINGS_UPDATED = "user_settings_updated"
- USER_THEME_CHANGED = "user_theme_changed"
- USER_NOTIFICATION_SETTINGS_UPDATED = "user_notification_settings_updated"
- USER_EDITOR_SETTINGS_UPDATED = "user_editor_settings_updated"
# Notification events
NOTIFICATION_CREATED = "notification_created"
diff --git a/backend/app/domain/enums/kafka.py b/backend/app/domain/enums/kafka.py
index 148d29c0..2824a4fc 100644
--- a/backend/app/domain/enums/kafka.py
+++ b/backend/app/domain/enums/kafka.py
@@ -27,9 +27,6 @@ class KafkaTopic(StringEnum):
USER_EVENTS = "user_events"
USER_NOTIFICATIONS = "user_notifications"
USER_SETTINGS_EVENTS = "user_settings_events"
- USER_SETTINGS_THEME_EVENTS = "user_settings_theme_events"
- USER_SETTINGS_NOTIFICATION_EVENTS = "user_settings_notification_events"
- USER_SETTINGS_EDITOR_EVENTS = "user_settings_editor_events"
# Script topics
SCRIPT_EVENTS = "script_events"
diff --git a/backend/app/infrastructure/kafka/events/__init__.py b/backend/app/infrastructure/kafka/events/__init__.py
index f06fb640..9539d470 100644
--- a/backend/app/infrastructure/kafka/events/__init__.py
+++ b/backend/app/infrastructure/kafka/events/__init__.py
@@ -57,13 +57,10 @@
)
from app.infrastructure.kafka.events.user import (
UserDeletedEvent,
- UserEditorSettingsUpdatedEvent,
UserLoggedInEvent,
UserLoggedOutEvent,
- UserNotificationSettingsUpdatedEvent,
UserRegisteredEvent,
UserSettingsUpdatedEvent,
- UserThemeChangedEvent,
UserUpdatedEvent,
)
@@ -96,9 +93,6 @@
"UserUpdatedEvent",
"UserDeletedEvent",
"UserSettingsUpdatedEvent",
- "UserThemeChangedEvent",
- "UserNotificationSettingsUpdatedEvent",
- "UserEditorSettingsUpdatedEvent",
# Notification
"NotificationCreatedEvent",
"NotificationSentEvent",
diff --git a/backend/app/infrastructure/kafka/events/base.py b/backend/app/infrastructure/kafka/events/base.py
index 6af405d5..23592c41 100644
--- a/backend/app/infrastructure/kafka/events/base.py
+++ b/backend/app/infrastructure/kafka/events/base.py
@@ -3,7 +3,7 @@
from uuid import uuid4
from pydantic import ConfigDict, Field, field_serializer
-from pydantic_avro import AvroBase # type: ignore[attr-defined]
+from pydantic_avro.to_avro.base import AvroBase
from app.domain.enums.events import EventType
from app.domain.enums.kafka import KafkaTopic
diff --git a/backend/app/infrastructure/kafka/events/user.py b/backend/app/infrastructure/kafka/events/user.py
index 32d98abf..3443019f 100644
--- a/backend/app/infrastructure/kafka/events/user.py
+++ b/backend/app/infrastructure/kafka/events/user.py
@@ -1,11 +1,34 @@
from typing import ClassVar, Literal
-from app.domain.enums.auth import LoginMethod, SettingsType
+from pydantic_avro.to_avro.base import AvroBase
+
+from app.domain.enums.auth import LoginMethod
from app.domain.enums.events import EventType
from app.domain.enums.kafka import KafkaTopic
from app.infrastructure.kafka.events.base import BaseEvent
+class NotificationSettingsPayload(AvroBase):
+ """Avro-compatible payload for notification settings changes."""
+
+ execution_completed: bool | None = None
+ execution_failed: bool | None = None
+ system_updates: bool | None = None
+ security_alerts: bool | None = None
+ channels: list[str] | None = None
+
+
+class EditorSettingsPayload(AvroBase):
+ """Avro-compatible payload for editor settings changes."""
+
+ theme: str | None = None
+ font_size: int | None = None
+ tab_size: int | None = None
+ use_tabs: bool | None = None
+ word_wrap: bool | None = None
+ show_line_numbers: bool | None = None
+
+
class UserRegisteredEvent(BaseEvent):
event_type: Literal[EventType.USER_REGISTERED] = EventType.USER_REGISTERED
topic: ClassVar[KafkaTopic] = KafkaTopic.USER_EVENTS
@@ -47,31 +70,17 @@ class UserDeletedEvent(BaseEvent):
class UserSettingsUpdatedEvent(BaseEvent):
+ """Unified event for all user settings changes with typed payloads."""
+
event_type: Literal[EventType.USER_SETTINGS_UPDATED] = EventType.USER_SETTINGS_UPDATED
topic: ClassVar[KafkaTopic] = KafkaTopic.USER_SETTINGS_EVENTS
user_id: str
- settings_type: SettingsType
- updated: dict[str, str]
-
-
-class UserThemeChangedEvent(BaseEvent):
- event_type: Literal[EventType.USER_THEME_CHANGED] = EventType.USER_THEME_CHANGED
- topic: ClassVar[KafkaTopic] = KafkaTopic.USER_SETTINGS_THEME_EVENTS
- user_id: str
- old_theme: str
- new_theme: str
-
-
-class UserNotificationSettingsUpdatedEvent(BaseEvent):
- event_type: Literal[EventType.USER_NOTIFICATION_SETTINGS_UPDATED] = EventType.USER_NOTIFICATION_SETTINGS_UPDATED
- topic: ClassVar[KafkaTopic] = KafkaTopic.USER_SETTINGS_NOTIFICATION_EVENTS
- user_id: str
- settings: dict[str, bool]
- channels: list[str] | None = None
-
-
-class UserEditorSettingsUpdatedEvent(BaseEvent):
- event_type: Literal[EventType.USER_EDITOR_SETTINGS_UPDATED] = EventType.USER_EDITOR_SETTINGS_UPDATED
- topic: ClassVar[KafkaTopic] = KafkaTopic.USER_SETTINGS_EDITOR_EVENTS
- user_id: str
- settings: dict[str, str | int | bool]
+ changed_fields: list[str]
+ # Typed fields for each settings category (Avro-compatible)
+ theme: str | None = None
+ timezone: str | None = None
+ date_format: str | None = None
+ time_format: str | None = None
+ notifications: NotificationSettingsPayload | None = None
+ editor: EditorSettingsPayload | None = None
+ reason: str | None = None
diff --git a/backend/app/infrastructure/kafka/mappings.py b/backend/app/infrastructure/kafka/mappings.py
index 431f1c86..b1dcfe98 100644
--- a/backend/app/infrastructure/kafka/mappings.py
+++ b/backend/app/infrastructure/kafka/mappings.py
@@ -60,13 +60,10 @@
)
from app.infrastructure.kafka.events.user import (
UserDeletedEvent,
- UserEditorSettingsUpdatedEvent,
UserLoggedInEvent,
UserLoggedOutEvent,
- UserNotificationSettingsUpdatedEvent,
UserRegisteredEvent,
UserSettingsUpdatedEvent,
- UserThemeChangedEvent,
UserUpdatedEvent,
)
@@ -99,9 +96,6 @@ def get_event_class_for_type(event_type: EventType) -> Type[BaseEvent] | None:
EventType.USER_UPDATED: UserUpdatedEvent,
EventType.USER_DELETED: UserDeletedEvent,
EventType.USER_SETTINGS_UPDATED: UserSettingsUpdatedEvent,
- EventType.USER_THEME_CHANGED: UserThemeChangedEvent,
- EventType.USER_NOTIFICATION_SETTINGS_UPDATED: UserNotificationSettingsUpdatedEvent,
- EventType.USER_EDITOR_SETTINGS_UPDATED: UserEditorSettingsUpdatedEvent,
# Notification events
EventType.NOTIFICATION_CREATED: NotificationCreatedEvent,
EventType.NOTIFICATION_SENT: NotificationSentEvent,
diff --git a/backend/app/infrastructure/kafka/topics.py b/backend/app/infrastructure/kafka/topics.py
index 389c4121..c82ed2c5 100644
--- a/backend/app/infrastructure/kafka/topics.py
+++ b/backend/app/infrastructure/kafka/topics.py
@@ -127,30 +127,6 @@ def get_topic_configs() -> dict[KafkaTopic, dict[str, Any]]:
"compression.type": "gzip",
},
},
- KafkaTopic.USER_SETTINGS_THEME_EVENTS: {
- "num_partitions": 3,
- "replication_factor": 1,
- "config": {
- "retention.ms": "2592000000", # 30 days
- "compression.type": "gzip",
- },
- },
- KafkaTopic.USER_SETTINGS_NOTIFICATION_EVENTS: {
- "num_partitions": 3,
- "replication_factor": 1,
- "config": {
- "retention.ms": "2592000000", # 30 days
- "compression.type": "gzip",
- },
- },
- KafkaTopic.USER_SETTINGS_EDITOR_EVENTS: {
- "num_partitions": 3,
- "replication_factor": 1,
- "config": {
- "retention.ms": "2592000000", # 30 days
- "compression.type": "gzip",
- },
- },
# Script topics
KafkaTopic.SCRIPT_EVENTS: {
"num_partitions": 3,
diff --git a/backend/app/services/user_settings_service.py b/backend/app/services/user_settings_service.py
index ce51aaf2..d90bfd42 100644
--- a/backend/app/services/user_settings_service.py
+++ b/backend/app/services/user_settings_service.py
@@ -1,16 +1,13 @@
-import asyncio
-import json
import logging
from datetime import datetime, timedelta, timezone
from typing import Any, List
from cachetools import TTLCache
+from pydantic import TypeAdapter
from app.db.repositories.user_settings_repository import UserSettingsRepository
from app.domain.enums import Theme
-from app.domain.enums.auth import SettingsType
from app.domain.enums.events import EventType
-from app.domain.enums.notification import NotificationChannel
from app.domain.user import (
DomainEditorSettings,
DomainNotificationSettings,
@@ -19,9 +16,14 @@
DomainUserSettings,
DomainUserSettingsUpdate,
)
+from app.infrastructure.kafka.events.user import EditorSettingsPayload, NotificationSettingsPayload
from app.services.event_bus import EventBusEvent, EventBusManager
from app.services.kafka_event_service import KafkaEventService
+# TypeAdapters for dict-based settings updates
+_settings_adapter = TypeAdapter(DomainUserSettings)
+_update_adapter = TypeAdapter(DomainUserSettingsUpdate)
+
class UserSettingsService:
def __init__(
@@ -62,8 +64,7 @@ async def initialize(self, event_bus_manager: EventBusManager) -> None:
async def _handle(evt: EventBusEvent) -> None:
uid = evt.payload.get("user_id")
if uid:
- # Use asyncio.to_thread for the sync operation to make it properly async
- await asyncio.to_thread(self.invalidate_cache, str(uid))
+ await self.invalidate_cache(str(uid))
self._subscription_id = await bus.subscribe("user.settings.updated*", _handle)
@@ -89,128 +90,54 @@ async def update_user_settings(
self, user_id: str, updates: DomainUserSettingsUpdate, reason: str | None = None
) -> DomainUserSettings:
"""Upsert provided fields into current settings, publish minimal event, and cache."""
- s = await self.get_user_settings(user_id)
- updated: dict[str, object] = {}
- old_theme = s.theme
- # Top-level
- if updates.theme is not None:
- s.theme = updates.theme
- updated["theme"] = str(updates.theme)
- if updates.timezone is not None:
- s.timezone = updates.timezone
- updated["timezone"] = updates.timezone
- if updates.date_format is not None:
- s.date_format = updates.date_format
- updated["date_format"] = updates.date_format
- if updates.time_format is not None:
- s.time_format = updates.time_format
- updated["time_format"] = updates.time_format
- # Nested
- if updates.notifications is not None:
- n = updates.notifications
- s.notifications = n
- updated["notifications"] = {
- "execution_completed": n.execution_completed,
- "execution_failed": n.execution_failed,
- "system_updates": n.system_updates,
- "security_alerts": n.security_alerts,
- "channels": [str(c) for c in n.channels],
- }
- if updates.editor is not None:
- e = updates.editor
- s.editor = e
- updated["editor"] = {
- "theme": e.theme,
- "font_size": e.font_size,
- "tab_size": e.tab_size,
- "use_tabs": e.use_tabs,
- "word_wrap": e.word_wrap,
- "show_line_numbers": e.show_line_numbers,
- }
- if updates.custom_settings is not None:
- s.custom_settings = updates.custom_settings
- updated["custom_settings"] = updates.custom_settings
-
- if not updated:
- return s
-
- s.updated_at = datetime.now(timezone.utc)
- s.version = (s.version or 0) + 1
-
- # Choose appropriate event payload
- if "theme" in updated and len(updated) == 1:
- await self.event_service.publish_event(
- event_type=EventType.USER_THEME_CHANGED,
- aggregate_id=f"user_settings_{user_id}",
- payload={
- "user_id": user_id,
- "old_theme": str(old_theme),
- "new_theme": str(s.theme),
- "reason": reason,
- },
- metadata=None,
- )
- elif "notifications" in updated and len(updated) == 1:
- # Only notification settings changed
- notif = updated["notifications"]
- channels = notif.pop("channels", None) if isinstance(notif, dict) else None
- await self.event_service.publish_event(
- event_type=EventType.USER_NOTIFICATION_SETTINGS_UPDATED,
- aggregate_id=f"user_settings_{user_id}",
- payload={
- "user_id": user_id,
- "settings": notif,
- "channels": channels,
- "reason": reason,
- },
- metadata=None,
- )
- elif "editor" in updated and len(updated) == 1:
- # Only editor settings changed
- await self.event_service.publish_event(
- event_type=EventType.USER_EDITOR_SETTINGS_UPDATED,
- aggregate_id=f"user_settings_{user_id}",
- payload={
- "user_id": user_id,
- "settings": updated["editor"],
- "reason": reason,
- },
- metadata=None,
- )
- else:
- # Multiple fields changed or other fields
- if "notifications" in updated:
- settings_type = SettingsType.NOTIFICATION
- elif "editor" in updated:
- settings_type = SettingsType.EDITOR
- elif "theme" in updated:
- settings_type = SettingsType.DISPLAY
- else:
- settings_type = SettingsType.PREFERENCES
- # Stringify all values for Avro compatibility (nested dicts become JSON strings)
- updated_stringified: dict[str, str] = {
- k: json.dumps(v) if isinstance(v, dict) else str(v) for k, v in updated.items()
- }
- await self.event_service.publish_event(
- event_type=EventType.USER_SETTINGS_UPDATED,
- aggregate_id=f"user_settings_{user_id}",
- payload={
- "user_id": user_id,
- "settings_type": settings_type,
- "updated": updated_stringified,
- "reason": reason,
- },
- metadata=None,
- )
+ current = await self.get_user_settings(user_id)
+
+ # Get only fields that were explicitly set (non-None)
+ changes = _update_adapter.dump_python(updates, exclude_none=True)
+ if not changes:
+ return current
+
+ # Merge current settings with changes and update metadata
+ current_dict = _settings_adapter.dump_python(current)
+ merged = {**current_dict, **changes}
+ merged["version"] = (current.version or 0) + 1
+ merged["updated_at"] = datetime.now(timezone.utc)
+ # Reconstruct settings object (TypeAdapter handles nested dict → dataclass)
+ new_settings = _settings_adapter.validate_python(merged)
+
+ # Publish event with JSON-serializable payload (enums → strings)
+ changes_json = _update_adapter.dump_python(updates, exclude_none=True, mode="json")
+ await self._publish_settings_event(user_id, changes_json, reason)
+
+ # Notify event bus for cache invalidation
if self._event_bus_manager is not None:
bus = await self._event_bus_manager.get_event_bus()
await bus.publish("user.settings.updated", {"user_id": user_id})
- self._add_to_cache(user_id, s)
+ self._add_to_cache(user_id, new_settings)
if (await self.repository.count_events_since_snapshot(user_id)) >= 10:
- await self.repository.create_snapshot(s)
- return s
+ await self.repository.create_snapshot(new_settings)
+ return new_settings
+
+ # Mapping for nested settings → typed Avro payloads
+ _payload_types = {"notifications": NotificationSettingsPayload, "editor": EditorSettingsPayload}
+
+ async def _publish_settings_event(
+ self, user_id: str, changes: dict[str, Any], reason: str | None
+ ) -> None:
+ """Publish settings update event with typed payload fields."""
+ await self.event_service.publish_event(
+ event_type=EventType.USER_SETTINGS_UPDATED,
+ aggregate_id=f"user_settings_{user_id}",
+ payload={
+ "user_id": user_id,
+ "changed_fields": list(changes.keys()),
+ "reason": reason,
+ **{k: self._payload_types[k](**v) if k in self._payload_types else v for k, v in changes.items()},
+ },
+ metadata=None,
+ )
async def update_theme(self, user_id: str, theme: Theme) -> DomainUserSettings:
"""Update user's theme preference"""
@@ -246,35 +173,20 @@ async def update_custom_setting(self, user_id: str, key: str, value: Any) -> Dom
)
async def get_settings_history(self, user_id: str, limit: int = 50) -> List[DomainSettingsHistoryEntry]:
- """Get history from changed paths recorded in events."""
+ """Get history from changed fields recorded in events."""
events = await self._get_settings_events(user_id, limit=limit)
history: list[DomainSettingsHistoryEntry] = []
for event in events:
- if event.event_type == EventType.USER_THEME_CHANGED:
- history.append(
- DomainSettingsHistoryEntry(
- timestamp=event.timestamp,
- event_type=event.event_type,
- field="/theme",
- old_value=event.payload.get("old_theme"),
- new_value=event.payload.get("new_theme"),
- reason=event.payload.get("reason"),
- correlation_id=event.correlation_id,
- )
- )
- continue
-
- upd = event.payload.get("updated", {})
- if not upd:
- continue
- for path in (f"/{k}" for k in upd.keys()):
+ changed_fields = event.payload.get("changed_fields", [])
+ changes = event.payload.get("changes", {})
+ for field in changed_fields:
history.append(
DomainSettingsHistoryEntry(
timestamp=event.timestamp,
event_type=event.event_type,
- field=path,
+ field=f"/{field}",
old_value=None,
- new_value=None,
+ new_value=changes.get(field),
reason=event.payload.get("reason"),
correlation_id=event.correlation_id,
)
@@ -295,14 +207,15 @@ async def restore_settings_to_point(self, user_id: str, timestamp: datetime) ->
await self.repository.create_snapshot(settings)
self._add_to_cache(user_id, settings)
- # Publish restoration event (generic settings update form)
+ # Publish restoration event
await self.event_service.publish_event(
event_type=EventType.USER_SETTINGS_UPDATED,
aggregate_id=f"user_settings_{user_id}",
payload={
"user_id": user_id,
- "settings_type": SettingsType.PREFERENCES,
- "updated": {"restored_to": timestamp.isoformat()},
+ "changed_fields": ["restored"],
+ "changes": {"restored_to": timestamp.isoformat()},
+ "reason": f"Settings restored to {timestamp.isoformat()}",
},
metadata=None,
)
@@ -312,122 +225,39 @@ async def restore_settings_to_point(self, user_id: str, timestamp: datetime) ->
async def _get_settings_events(
self, user_id: str, since: datetime | None = None, until: datetime | None = None, limit: int | None = None
) -> List[DomainSettingsEvent]:
- """Get settings-related events for a user"""
- event_types = [
- EventType.USER_SETTINGS_UPDATED,
- EventType.USER_THEME_CHANGED,
- EventType.USER_NOTIFICATION_SETTINGS_UPDATED,
- EventType.USER_EDITOR_SETTINGS_UPDATED,
- ]
-
+ """Get settings-related events for a user."""
raw = await self.repository.get_settings_events(
- user_id=user_id, event_types=event_types, since=since, until=until, limit=limit
+ user_id=user_id,
+ event_types=[EventType.USER_SETTINGS_UPDATED],
+ since=since,
+ until=until,
+ limit=limit,
)
- # map to domain
- out: list[DomainSettingsEvent] = []
- for e in raw:
- et = EventType(e.event_type)
- out.append(
- DomainSettingsEvent(
- event_type=et,
- timestamp=e.timestamp,
- payload=e.payload,
- correlation_id=e.metadata.correlation_id if e.metadata else None,
- )
+ return [
+ DomainSettingsEvent(
+ event_type=EventType.USER_SETTINGS_UPDATED,
+ timestamp=e.timestamp,
+ payload=e.payload,
+ correlation_id=e.metadata.correlation_id if e.metadata else None,
)
- return out
+ for e in raw
+ ]
def _apply_event(self, settings: DomainUserSettings, event: DomainSettingsEvent) -> DomainUserSettings:
- if event.event_type == EventType.USER_THEME_CHANGED:
- new_theme = event.payload.get("new_theme")
- if new_theme:
- settings.theme = Theme(new_theme)
- return settings
-
- if event.event_type == EventType.USER_NOTIFICATION_SETTINGS_UPDATED:
- n = event.payload.get("settings", {})
- channels_raw = event.payload.get("channels", [])
- channels: list[NotificationChannel] = [NotificationChannel(c) for c in channels_raw] if channels_raw else []
- settings.notifications = DomainNotificationSettings(
- execution_completed=n.get("execution_completed", settings.notifications.execution_completed),
- execution_failed=n.get("execution_failed", settings.notifications.execution_failed),
- system_updates=n.get("system_updates", settings.notifications.system_updates),
- security_alerts=n.get("security_alerts", settings.notifications.security_alerts),
- channels=channels or settings.notifications.channels,
- )
- settings.updated_at = event.timestamp
+ """Apply a settings update event using TypeAdapter merge."""
+ changes = event.payload.get("changes", {})
+ if not changes:
return settings
- if event.event_type == EventType.USER_EDITOR_SETTINGS_UPDATED:
- e = event.payload.get("settings", {})
- settings.editor = DomainEditorSettings(
- theme=e.get("theme", settings.editor.theme),
- font_size=e.get("font_size", settings.editor.font_size),
- tab_size=e.get("tab_size", settings.editor.tab_size),
- use_tabs=e.get("use_tabs", settings.editor.use_tabs),
- word_wrap=e.get("word_wrap", settings.editor.word_wrap),
- show_line_numbers=e.get("show_line_numbers", settings.editor.show_line_numbers),
- )
- settings.updated_at = event.timestamp
- return settings
+ current_dict = _settings_adapter.dump_python(settings)
+ merged = {**current_dict, **changes}
+ merged["updated_at"] = event.timestamp
- upd = event.payload.get("updated")
- if not upd:
- return settings
-
- # Helper to parse JSON strings or return dict as-is
- def parse_value(val: object) -> object:
- if isinstance(val, str):
- try:
- return json.loads(val)
- except (json.JSONDecodeError, ValueError):
- return val
- return val
-
- # Top-level
- if "theme" in upd:
- settings.theme = Theme(str(upd["theme"]))
- if "timezone" in upd:
- settings.timezone = str(upd["timezone"])
- if "date_format" in upd:
- settings.date_format = str(upd["date_format"])
- if "time_format" in upd:
- settings.time_format = str(upd["time_format"])
- # Nested (may be JSON strings or dicts)
- if "notifications" in upd:
- n = parse_value(upd["notifications"])
- if isinstance(n, dict):
- notif_channels: list[NotificationChannel] = [NotificationChannel(c) for c in n.get("channels", [])]
- settings.notifications = DomainNotificationSettings(
- execution_completed=n.get("execution_completed", settings.notifications.execution_completed),
- execution_failed=n.get("execution_failed", settings.notifications.execution_failed),
- system_updates=n.get("system_updates", settings.notifications.system_updates),
- security_alerts=n.get("security_alerts", settings.notifications.security_alerts),
- channels=notif_channels or settings.notifications.channels,
- )
- if "editor" in upd:
- e = parse_value(upd["editor"])
- if isinstance(e, dict):
- settings.editor = DomainEditorSettings(
- theme=e.get("theme", settings.editor.theme),
- font_size=e.get("font_size", settings.editor.font_size),
- tab_size=e.get("tab_size", settings.editor.tab_size),
- use_tabs=e.get("use_tabs", settings.editor.use_tabs),
- word_wrap=e.get("word_wrap", settings.editor.word_wrap),
- show_line_numbers=e.get("show_line_numbers", settings.editor.show_line_numbers),
- )
- if "custom_settings" in upd:
- cs = parse_value(upd["custom_settings"])
- if isinstance(cs, dict):
- settings.custom_settings = cs
- settings.version = event.payload.get("version", settings.version)
- settings.updated_at = event.timestamp
- return settings
+ return _settings_adapter.validate_python(merged)
- def invalidate_cache(self, user_id: str) -> None:
- """Invalidate cached settings for a user"""
- removed = self._cache.pop(user_id, None) is not None
- if removed:
+ async def invalidate_cache(self, user_id: str) -> None:
+ """Invalidate cached settings for a user."""
+ if self._cache.pop(user_id, None) is not None:
self.logger.debug(f"Invalidated cache for user {user_id}", extra={"cache_size": len(self._cache)})
def _add_to_cache(self, user_id: str, settings: DomainUserSettings) -> None:
@@ -446,8 +276,7 @@ def get_cache_stats(self) -> dict[str, Any]:
async def reset_user_settings(self, user_id: str) -> None:
"""Reset user settings by deleting all data and cache."""
- # Clear from cache
- self.invalidate_cache(user_id)
+ await self.invalidate_cache(user_id)
# Delete from database
await self.repository.delete_user_settings(user_id)
diff --git a/docs/architecture/event-storage.md b/docs/architecture/event-storage.md
index 800938ca..fd21d31e 100644
--- a/docs/architecture/event-storage.md
+++ b/docs/architecture/event-storage.md
@@ -127,3 +127,8 @@ All event querying—admin browse, replay preview, event export—uses `ReplayFi
- `db/repositories/replay_repository.py` — replay-specific queries
- `db/repositories/admin/admin_events_repository.py` — admin dashboard queries
- `services/kafka_event_service.py` — unified publish (store + Kafka)
+
+## Related docs
+
+- [User Settings Events](user-settings-events.md) — event sourcing pattern for user settings with TypeAdapter merging
+- [Pydantic Dataclasses](pydantic-dataclasses.md) — why domain models use pydantic dataclasses for nested conversion
diff --git a/docs/architecture/pydantic-dataclasses.md b/docs/architecture/pydantic-dataclasses.md
index b169958c..22bc1523 100644
--- a/docs/architecture/pydantic-dataclasses.md
+++ b/docs/architecture/pydantic-dataclasses.md
@@ -171,3 +171,8 @@ class ParentModel:
```
The model automatically handles nested dict conversion, enum parsing, and type coercion.
+
+## Related docs
+
+- [User Settings Events](user-settings-events.md) — practical example of TypeAdapter usage for event sourcing
+- [Model Conversion Patterns](model-conversion.md) — general patterns for converting between model types
diff --git a/docs/architecture/services-overview.md b/docs/architecture/services-overview.md
index 6b625792..bce59980 100644
--- a/docs/architecture/services-overview.md
+++ b/docs/architecture/services-overview.md
@@ -36,7 +36,7 @@ The replay_service.py and event_replay/ provide tools and workers for replaying
The notification_service.py sends and stores notifications, exposes subscription management, and integrates with metrics and optional channels like webhook and Slack with delivery measurements and retries. See [Notification Types](../operations/notification-types.md) for the notification model.
-The user_settings_service.py provides CRUD plus event-sourced history for user settings with a small in-proc cache and helpers to compute what changed.
+The user_settings_service.py provides CRUD plus event-sourced history for user settings, with a small in-process cache and TypeAdapter-based merging. See [User Settings Events](user-settings-events.md) for the event sourcing pattern.
The saved_script_service.py handles CRUD for saved scripts with ownership checks and validations, integrating with the API for run-saved-script flows.
diff --git a/docs/architecture/user-settings-events.md b/docs/architecture/user-settings-events.md
new file mode 100644
index 00000000..3b319f25
--- /dev/null
+++ b/docs/architecture/user-settings-events.md
@@ -0,0 +1,179 @@
+# User settings events
+
+This document explains how user settings are stored, updated, and reconstructed using event sourcing with a unified event type and TypeAdapter-based merging.
+
+## Unified event approach
+
+All user settings changes emit a single `USER_SETTINGS_UPDATED` event type. There are no specialized events for theme, notifications, or editor settings. This eliminates branching in both publishing and consuming code.
+
+```python
+class UserSettingsUpdatedEvent(BaseEvent):
+ event_type: Literal[EventType.USER_SETTINGS_UPDATED] = EventType.USER_SETTINGS_UPDATED
+ topic: ClassVar[KafkaTopic] = KafkaTopic.USER_SETTINGS_EVENTS
+ user_id: str
+ changed_fields: list[str]
+ changes: dict[str, str | int | bool | list | dict | None]
+ reason: str | None = None
+```
+
+The `changed_fields` list identifies which settings changed. The `changes` dict contains the new values in JSON-serializable form (enums as strings, nested objects as dicts).
+
+## Event payload structure
+
+When updating settings, the service publishes:
+
+```python
+payload = {
+ "user_id": "user_123",
+ "changed_fields": ["theme", "notifications"],
+ "changes": {
+ "theme": "dark",
+ "notifications": {
+ "execution_completed": True,
+ "channels": ["email", "in_app"]
+ }
+ },
+ "reason": "User updated preferences"
+}
+```
+
+No old values are tracked. If needed, previous state can be reconstructed by replaying events up to a specific timestamp.
+
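+As an illustration, here is a minimal sketch of such a point-in-time rebuild. It assumes `DomainUserSettings` can be constructed from defaults given only a `user_id`, and it reuses the service's `_get_settings_events` / `_apply_event` helpers (shown below):
+
+```python
+async def settings_as_of(self, user_id: str, timestamp: datetime) -> DomainUserSettings:
+    # Start from defaults, then replay every settings event up to the requested point in time.
+    settings = DomainUserSettings(user_id=user_id)
+    for event in await self._get_settings_events(user_id, until=timestamp):
+        settings = self._apply_event(settings, event)
+    return settings
+```
+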
+## TypeAdapter pattern
+
+The service uses Pydantic's `TypeAdapter` to dump, merge, and re-validate settings as plain dicts, avoiding per-field branching and manual field copying:
+
+```python
+from pydantic import TypeAdapter
+
+_settings_adapter = TypeAdapter(DomainUserSettings)
+_update_adapter = TypeAdapter(DomainUserSettingsUpdate)
+```
+
+### Updating settings
+
+```python
+async def update_user_settings(self, user_id: str, updates: DomainUserSettingsUpdate, reason: str | None = None) -> DomainUserSettings:
+ current = await self.get_user_settings(user_id)
+
+ # Get only fields that were explicitly set
+ changes = _update_adapter.dump_python(updates, exclude_none=True)
+ if not changes:
+ return current
+
+ # Merge via dict unpacking
+ current_dict = _settings_adapter.dump_python(current)
+ merged = {**current_dict, **changes}
+ merged["version"] = (current.version or 0) + 1
+ merged["updated_at"] = datetime.now(timezone.utc)
+
+ # Reconstruct with nested dataclass conversion
+ new_settings = _settings_adapter.validate_python(merged)
+
+ # Publish with JSON-serializable payload
+ changes_json = _update_adapter.dump_python(updates, exclude_none=True, mode="json")
+ await self._publish_settings_event(user_id, changes_json, reason)
+
+ return new_settings
+```
+
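+A call site might then look roughly like this (the exact fields on `DomainUserSettingsUpdate` are assumptions; only explicitly set fields end up in the published `changes`):
+
+```python
+updated = await settings_service.update_user_settings(
+    "user_123",
+    DomainUserSettingsUpdate(theme=Theme.DARK),  # hypothetical field; unset fields are ignored
+    reason="User switched theme",
+)
+```
+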
+### Applying events
+
+```python
+def _apply_event(self, settings: DomainUserSettings, event: DomainSettingsEvent) -> DomainUserSettings:
+ changes = event.payload.get("changes", {})
+ if not changes:
+ return settings
+
+ current_dict = _settings_adapter.dump_python(settings)
+ merged = {**current_dict, **changes}
+ merged["updated_at"] = event.timestamp
+
+ return _settings_adapter.validate_python(merged)
+```
+
+The `validate_python` call handles nested dict-to-dataclass conversion, enum parsing, and type coercion automatically. See [Pydantic Dataclasses](pydantic-dataclasses.md) for details.
+
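+As a self-contained illustration of the same mechanism (toy models, not the project's own), `TypeAdapter` round-trips a dataclass through a plain dict and rebuilds nested values and enums on the way back:
+
+```python
+from dataclasses import field
+from enum import Enum
+
+from pydantic import TypeAdapter
+from pydantic.dataclasses import dataclass
+
+
+class Theme(str, Enum):
+    LIGHT = "light"
+    DARK = "dark"
+
+
+@dataclass
+class Editor:
+    font_size: int = 14
+
+
+@dataclass
+class Settings:
+    theme: Theme = Theme.LIGHT
+    editor: Editor = field(default_factory=Editor)
+
+
+adapter = TypeAdapter(Settings)
+current = adapter.dump_python(Settings())                      # plain dict; nested Editor becomes a dict
+merged = {**current, "theme": "dark", "editor": {"font_size": 16}}
+restored = adapter.validate_python(merged)                     # Settings(theme=Theme.DARK, editor=Editor(font_size=16))
+```
+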
+## Settings reconstruction
+
+User settings are rebuilt from a snapshot plus events:
+
+```
+┌─────────────────────────────────────────────────────────────┐
+│ get_user_settings(user_id) │
+├─────────────────────────────────────────────────────────────┤
+│ 1. Check cache → return if hit │
+│ 2. Load snapshot from DB (if exists) │
+│ 3. Query events since snapshot.updated_at │
+│ 4. Apply each event via _apply_event() │
+│ 5. Cache result, return │
+└─────────────────────────────────────────────────────────────┘
+```
+
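+In code, the reconstruction loop looks roughly like this (simplified; `_load_snapshot_or_defaults` is a stand-in name for the actual snapshot/default handling):
+
+```python
+async def get_user_settings(self, user_id: str) -> DomainUserSettings:
+    if (cached := self._cache.get(user_id)) is not None:
+        return cached                                            # 1. cache hit
+    settings = await self._load_snapshot_or_defaults(user_id)    # 2. snapshot or defaults (helper name assumed)
+    events = await self._get_settings_events(user_id, since=settings.updated_at)  # 3. events since snapshot
+    for event in events:
+        settings = self._apply_event(settings, event)            # 4. replay
+    self._add_to_cache(user_id, settings)                        # 5. cache and return
+    return settings
+```
+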
+Snapshots are created automatically once the number of events since the last snapshot reaches a threshold:
+
+```python
+if (await self.repository.count_events_since_snapshot(user_id)) >= 10:
+ await self.repository.create_snapshot(new_settings)
+```
+
+This bounds reconstruction cost while preserving full event history for auditing.
+
+## Cache layer
+
+Settings are cached with a TTL to avoid repeated reconstruction:
+
+```python
+self._cache: TTLCache[str, DomainUserSettings] = TTLCache(
+ maxsize=1000,
+ ttl=timedelta(minutes=5).total_seconds(),
+)
+```
+
+Cache invalidation happens via event bus subscription:
+
+```python
+async def initialize(self, event_bus_manager: EventBusManager) -> None:
+ bus = await event_bus_manager.get_event_bus()
+
+ async def _handle(evt: EventBusEvent) -> None:
+ uid = evt.payload.get("user_id")
+ if uid:
+ await self.invalidate_cache(str(uid))
+
+ await bus.subscribe("user.settings.updated*", _handle)
+```
+
+After each update, the service publishes to the event bus, triggering cache invalidation across instances.
+
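+The publish side is not shown above; a hypothetical sketch of it follows (the `bus.publish` call, the `EventBusEvent` fields, and the `_event_bus_manager` attribute are assumptions, not the actual service code):
+
+```python
+async def _notify_settings_updated(self, user_id: str) -> None:
+    # Hypothetical helper: broadcast a lightweight invalidation signal so every
+    # instance subscribed to "user.settings.updated*" drops its cached copy.
+    bus = await self._event_bus_manager.get_event_bus()
+    await bus.publish(EventBusEvent(event_type="user.settings.updated", payload={"user_id": user_id}))
+```
+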
+## Settings history
+
+The `get_settings_history` method returns a list of changes extracted from events:
+
+```python
+async def get_settings_history(self, user_id: str, limit: int = 50) -> List[DomainSettingsHistoryEntry]:
+ events = await self._get_settings_events(user_id, limit=limit)
+ history = []
+ for event in events:
+ changed_fields = event.payload.get("changed_fields", [])
+ changes = event.payload.get("changes", {})
+ for field in changed_fields:
+ history.append(
+ DomainSettingsHistoryEntry(
+ timestamp=event.timestamp,
+ event_type=event.event_type,
+ field=f"/{field}",
+ new_value=changes.get(field),
+ reason=event.payload.get("reason"),
+ )
+ )
+ return history
+```
+
+## Key files
+
+- `services/user_settings_service.py` — settings service with caching and event sourcing
+- `domain/user/settings_models.py` — `DomainUserSettings`, `DomainUserSettingsUpdate` dataclasses
+- `infrastructure/kafka/events/user.py` — `UserSettingsUpdatedEvent` definition
+- `db/repositories/user_settings_repository.py` — snapshot and event queries
+- `domain/enums/events.py` — `EventType.USER_SETTINGS_UPDATED`
diff --git a/frontend/src/routes/Settings.svelte b/frontend/src/routes/Settings.svelte
index 47810919..e5206a81 100644
--- a/frontend/src/routes/Settings.svelte
+++ b/frontend/src/routes/Settings.svelte
@@ -15,7 +15,6 @@
import Spinner from '$components/Spinner.svelte';
import { ChevronDown } from '@lucide/svelte';
- let settings = $state(null);
let loading = $state(true);
let saving = $state(false);
let activeTab = $state('general');
@@ -27,10 +26,7 @@
let showThemeDropdown = $state(false);
let showEditorThemeDropdown = $state(false);
- // Debounce timer for auto-save
- let saveDebounceTimer: ReturnType<typeof setTimeout> | null = null;
-
- // Local state for form
+ // Form state (single source of truth for UI)
let formData = $state({
theme: 'auto',
notifications: {
@@ -49,6 +45,9 @@
show_line_numbers: true,
}
});
+
+ // Snapshot for change detection (stored as a JSON string to avoid shared-reference issues)
+ let savedSnapshot = $state('');
const tabs = [
{ id: 'general', label: 'General' },
@@ -99,70 +98,81 @@
const { data, error } = await getUserSettingsApiV1UserSettingsGet({});
if (error) throw error;
- settings = data;
- setUserSettings(settings);
+ setUserSettings(data);
formData = {
- theme: settings.theme || 'auto',
+ theme: data.theme || 'auto',
notifications: {
- execution_completed: settings.notifications?.execution_completed ?? true,
- execution_failed: settings.notifications?.execution_failed ?? true,
- system_updates: settings.notifications?.system_updates ?? true,
- security_alerts: settings.notifications?.security_alerts ?? true,
- channels: settings.notifications?.channels || ['in_app']
+ execution_completed: data.notifications?.execution_completed ?? true,
+ execution_failed: data.notifications?.execution_failed ?? true,
+ system_updates: data.notifications?.system_updates ?? true,
+ security_alerts: data.notifications?.security_alerts ?? true,
+ channels: [...(data.notifications?.channels || ['in_app'])]
},
editor: {
- theme: settings.editor?.theme || 'auto',
- font_size: settings.editor?.font_size || 14,
- tab_size: settings.editor?.tab_size || 4,
- use_tabs: settings.editor?.use_tabs ?? false,
- word_wrap: settings.editor?.word_wrap ?? true,
- show_line_numbers: settings.editor?.show_line_numbers ?? true,
+ theme: data.editor?.theme || 'auto',
+ font_size: data.editor?.font_size || 14,
+ tab_size: data.editor?.tab_size || 4,
+ use_tabs: data.editor?.use_tabs ?? false,
+ word_wrap: data.editor?.word_wrap ?? true,
+ show_line_numbers: data.editor?.show_line_numbers ?? true,
}
};
+ savedSnapshot = JSON.stringify(formData);
} catch (err) {
console.error('Failed to load settings:', err);
addToast('Failed to load settings. Using defaults.', 'error');
- settings = {};
} finally {
loading = false;
}
}
- // Simple JSON-based deep equality check - sufficient for plain settings objects
- const deepEqual = (a: object | null | undefined, b: object | null | undefined): boolean =>
- JSON.stringify(a) === JSON.stringify(b);
-
async function saveSettings() {
+ const currentState = JSON.stringify(formData);
+ if (currentState === savedSnapshot) {
+ addToast('No changes to save', 'info');
+ return;
+ }
+
saving = true;
try {
- const updates = {};
-
- if (formData.theme !== settings.theme) updates.theme = formData.theme;
+ const original = JSON.parse(savedSnapshot);
+ const updates: Record<string, unknown> = {};
- if (!deepEqual(formData.notifications, settings.notifications)) {
+ if (formData.theme !== original.theme) {
+ updates.theme = formData.theme;
+ }
+ if (JSON.stringify(formData.notifications) !== JSON.stringify(original.notifications)) {
updates.notifications = formData.notifications;
}
- if (!deepEqual(formData.editor, settings.editor)) {
+ if (JSON.stringify(formData.editor) !== JSON.stringify(original.editor)) {
updates.editor = formData.editor;
}
- if (Object.keys(updates).length === 0) {
- addToast('No changes to save', 'info');
- return;
- }
-
const { data, error } = await updateUserSettingsApiV1UserSettingsPut({ body: updates });
if (error) throw error;
- settings = data;
- setUserSettings(settings);
+ setUserSettings(data);
formData = {
- theme: settings.theme || 'auto',
- notifications: settings.notifications || formData.notifications,
- editor: settings.editor || formData.editor
+ theme: data.theme || 'auto',
+ notifications: {
+ execution_completed: data.notifications?.execution_completed ?? true,
+ execution_failed: data.notifications?.execution_failed ?? true,
+ system_updates: data.notifications?.system_updates ?? true,
+ security_alerts: data.notifications?.security_alerts ?? true,
+ channels: [...(data.notifications?.channels || ['in_app'])]
+ },
+ editor: {
+ theme: data.editor?.theme || 'auto',
+ font_size: data.editor?.font_size || 14,
+ tab_size: data.editor?.tab_size || 4,
+ use_tabs: data.editor?.use_tabs ?? false,
+ word_wrap: data.editor?.word_wrap ?? true,
+ show_line_numbers: data.editor?.show_line_numbers ?? true,
+ }
};
+ savedSnapshot = JSON.stringify(formData);
addToast('Settings saved successfully', 'success');
} catch (err) {
@@ -224,14 +234,13 @@
});
if (error) throw error;
- settings = data;
historyCache = null;
historyCacheTime = 0;
await loadSettings();
- if (settings.theme) {
- setTheme(settings.theme);
+ if (data.theme) {
+ setTheme(data.theme);
}
showHistory = false;
diff --git a/mkdocs.yml b/mkdocs.yml
index aabc3c45..07d57116 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -105,6 +105,9 @@ nav:
- Services: architecture/services-overview.md
- Domain Exceptions: architecture/domain-exceptions.md
- Pydantic Dataclasses: architecture/pydantic-dataclasses.md
+ - Model Conversion: architecture/model-conversion.md
+ - Event Storage: architecture/event-storage.md
+ - User Settings Events: architecture/user-settings-events.md
- Frontend Build: architecture/frontend-build.md
- Svelte 5 Migration: architecture/svelte5-migration.md
- Kafka Topics: architecture/kafka-topic-architecture.md
From 43c2b1c3da4ac3faf547267f9e8f52a1abf6dbf4 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 17:41:28 +0100
Subject: [PATCH 47/48] user settings fixes
---
backend/app/services/user_settings_service.py | 20 +++++++++----------
1 file changed, 9 insertions(+), 11 deletions(-)
diff --git a/backend/app/services/user_settings_service.py b/backend/app/services/user_settings_service.py
index d90bfd42..1c9a888d 100644
--- a/backend/app/services/user_settings_service.py
+++ b/backend/app/services/user_settings_service.py
@@ -16,7 +16,6 @@
DomainUserSettings,
DomainUserSettingsUpdate,
)
-from app.infrastructure.kafka.events.user import EditorSettingsPayload, NotificationSettingsPayload
from app.services.event_bus import EventBusEvent, EventBusManager
from app.services.kafka_event_service import KafkaEventService
@@ -120,9 +119,6 @@ async def update_user_settings(
await self.repository.create_snapshot(new_settings)
return new_settings
- # Mapping for nested settings → typed Avro payloads
- _payload_types = {"notifications": NotificationSettingsPayload, "editor": EditorSettingsPayload}
-
async def _publish_settings_event(
self, user_id: str, changes: dict[str, Any], reason: str | None
) -> None:
@@ -134,7 +130,7 @@ async def _publish_settings_event(
"user_id": user_id,
"changed_fields": list(changes.keys()),
"reason": reason,
- **{k: self._payload_types[k](**v) if k in self._payload_types else v for k, v in changes.items()},
+ **changes,
},
metadata=None,
)
@@ -178,7 +174,6 @@ async def get_settings_history(self, user_id: str, limit: int = 50) -> List[Doma
history: list[DomainSettingsHistoryEntry] = []
for event in events:
changed_fields = event.payload.get("changed_fields", [])
- changes = event.payload.get("changes", {})
for field in changed_fields:
history.append(
DomainSettingsHistoryEntry(
@@ -186,7 +181,7 @@ async def get_settings_history(self, user_id: str, limit: int = 50) -> List[Doma
event_type=event.event_type,
field=f"/{field}",
old_value=None,
- new_value=changes.get(field),
+ new_value=event.payload.get(field),
reason=event.payload.get("reason"),
correlation_id=event.correlation_id,
)
@@ -207,14 +202,13 @@ async def restore_settings_to_point(self, user_id: str, timestamp: datetime) ->
await self.repository.create_snapshot(settings)
self._add_to_cache(user_id, settings)
- # Publish restoration event
+ # Publish restoration event (marker only, no field changes)
await self.event_service.publish_event(
event_type=EventType.USER_SETTINGS_UPDATED,
aggregate_id=f"user_settings_{user_id}",
payload={
"user_id": user_id,
- "changed_fields": ["restored"],
- "changes": {"restored_to": timestamp.isoformat()},
+ "changed_fields": [],
"reason": f"Settings restored to {timestamp.isoformat()}",
},
metadata=None,
@@ -243,9 +237,13 @@ async def _get_settings_events(
for e in raw
]
+ # Fields that are stored directly in event payload (not in nested 'changes')
+ _settings_fields = {"theme", "timezone", "date_format", "time_format", "notifications", "editor"}
+
def _apply_event(self, settings: DomainUserSettings, event: DomainSettingsEvent) -> DomainUserSettings:
"""Apply a settings update event using TypeAdapter merge."""
- changes = event.payload.get("changes", {})
+ # Extract changes from typed fields in payload
+ changes = {k: v for k, v in event.payload.items() if k in self._settings_fields and v is not None}
if not changes:
return settings
From cb98fdfb56850dc54d530411d0673eee00a767e8 Mon Sep 17 00:00:00 2001
From: HardMax71
Date: Wed, 31 Dec 2025 17:50:28 +0100
Subject: [PATCH 48/48] user settings fixes
---
.../user_settings/test_user_settings_service_integration.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/tests/integration/services/user_settings/test_user_settings_service_integration.py b/backend/tests/integration/services/user_settings/test_user_settings_service_integration.py
index 61edfd7f..40be7478 100644
--- a/backend/tests/integration/services/user_settings/test_user_settings_service_integration.py
+++ b/backend/tests/integration/services/user_settings/test_user_settings_service_integration.py
@@ -21,7 +21,7 @@ async def test_get_update_and_history(scope) -> None: # type: ignore[valid-type
s1 = await svc.get_user_settings(user_id)
s2 = await svc.get_user_settings(user_id)
assert s1.user_id == s2.user_id
- svc.invalidate_cache(user_id)
+ await svc.invalidate_cache(user_id)
s3 = await svc.get_user_settings(user_id)
assert s3.user_id == user_id