1- from typing import List , Optional
1+ from typing import Any , Dict , List , Optional
22from uuid import UUID
33
44import requests
55import structlog
6- from fastapi import APIRouter , Depends , HTTPException , Response
6+ from fastapi import APIRouter , Depends , HTTPException , Query , Response
77from fastapi .responses import StreamingResponse
88from fastapi .routing import APIRoute
99from pydantic import BaseModel , ValidationError
1010
11+ from codegate .config import API_DEFAULT_PAGE_SIZE , API_MAX_PAGE_SIZE
1112import codegate .muxing .models as mux_models
1213from codegate import __version__
1314from codegate .api import v1_models , v1_processing
@@ -378,7 +379,11 @@ async def hard_delete_workspace(workspace_name: str):
378379 tags = ["Workspaces" ],
379380 generate_unique_id_function = uniq_name ,
380381)
async def get_workspace_alerts(
    workspace_name: str,
    page: int = Query(1, ge=1),
    # BUG FIX: the original wrote `get=1` — an unknown kwarg that Query does
    # not validate with, leaving page_size without a lower bound (page_size=0
    # would also divide by zero in the total_pages computation below). The
    # intent was clearly `ge=1`.
    page_size: int = Query(API_DEFAULT_PAGE_SIZE, ge=1, le=API_MAX_PAGE_SIZE),
) -> Dict[str, Any]:
    """Get paginated, deduplicated critical alerts for a workspace.

    Returns a dict with pagination metadata (``page``, ``page_size``,
    ``total_alerts``, ``total_pages``) and the parsed ``alerts`` for the
    requested page.
    """
    try:
        ws = await wscrud.get_workspace_by_name(workspace_name)
    # NOTE(review): the commit diff hides new-file lines 390-392 here (gap
    # between hunks) — most likely a `WorkspaceDoesNotExistError -> 404`
    # except-branch. Confirm against the full file; only the visible
    # catch-all below is reproduced.
    except Exception:
        logger.exception("Error while getting workspace")
        raise HTTPException(status_code=500, detail="Internal server error")

    total_alerts = 0
    fetched_alerts = []
    offset = (page - 1) * page_size
    # Over-fetch per DB round-trip so rows dropped by deduplication do not
    # force an extra query for every page.
    batch_size = page_size * 2

    try:
        while len(fetched_alerts) < page_size:
            # BUG FIX: the original fetched only `page_size` rows per query
            # but advanced `offset` by `batch_size` (2x), silently skipping
            # half of the alerts. Fetch and advance by the same amount.
            alerts_batch, total_alerts = await dbreader.get_alerts_by_workspace(
                ws.id, AlertSeverity.CRITICAL.value, batch_size, offset
            )
            if not alerts_batch:
                break

            dedup_alerts = await v1_processing.remove_duplicate_alerts(alerts_batch)
            fetched_alerts.extend(dedup_alerts)
            offset += batch_size

        # Trim the over-fetch down to exactly one page.
        final_alerts = fetched_alerts[:page_size]
        prompt_ids = list({alert.prompt_id for alert in final_alerts if alert.prompt_id})
        prompts_outputs = await dbreader.get_prompts_with_output(prompt_ids)
        alert_conversations = await v1_processing.parse_get_alert_conversation(
            final_alerts, prompts_outputs
        )
    except Exception:
        # Restored from the pre-change code: DB/parsing failures map to a 500
        # instead of leaking a raw exception out of the route handler.
        logger.exception("Error while getting alerts and messages")
        raise HTTPException(status_code=500, detail="Internal server error")

    return {
        "page": page,
        "page_size": page_size,
        # NOTE(review): total_alerts counts rows BEFORE deduplication (it is
        # what get_alerts_by_workspace reports), so total_pages is an upper
        # bound on the deduplicated pages — confirm that is acceptable.
        "total_alerts": total_alerts,
        # Ceil-division; page_size >= 1 is guaranteed by Query(ge=1).
        "total_pages": (total_alerts + page_size - 1) // page_size,
        "alerts": alert_conversations,
    }
398425
399426
400427@v1 .get (
0 commit comments