Skip to content

Commit

Permalink
Merge pull request #947 from fractal-analytics-platform/933-implement-a-first-set-of-superuser-restricted-monitoring-endpoints
Browse files Browse the repository at this point in the history

Monitoring endpoints
  • Loading branch information
tcompa authored Nov 16, 2023
2 parents 0b3268f + b22dd33 commit 196cab7
Show file tree
Hide file tree
Showing 5 changed files with 504 additions and 5 deletions.
14 changes: 9 additions & 5 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,15 @@
# 1.4.0 (unreleased)

* API:
* Make it possible to delete a `Dataset`, `Workflow` or `Project`, even when it is in relationship to an `ApplyWorkflow` (\#927).
* New `GET` endpoints `api/v1/project/job/` and `api/v1/project/{project_id}/workflow/{workflow_id}/job/` (\#969).
* Change response of `/api/v1/project/{project_id}/job/{job_id}/stop/` endpoint to 204 no-content (\#967).
* Include `workflow_list` and `job_list` in `ProjectRead` response (\#927).
* Fix construction of `ApplyWorkflow.workflow_dump`, within apply endpoint (\#968).
* New endpoints:
* New monitoring endpoints restricted to superusers at `/monitoring` (\#947).
* New `GET` endpoints `api/v1/project/job/` and `api/v1/project/{project_id}/workflow/{workflow_id}/job/` (\#969).
* New behaviors or responses of existing endpoints:
* Change response of `/api/v1/project/{project_id}/job/{job_id}/stop/` endpoint to 204 no-content (\#967).
* Include `workflow_list` and `job_list` attributes for `ProjectRead`, which affects all `GET`-project endpoints (\#927).
* Make it possible to delete a `Dataset`, `Workflow` or `Project`, even when it is in relationship to an `ApplyWorkflow` (\#927).
* Internal changes:
* Fix construction of `ApplyWorkflow.workflow_dump`, within apply endpoint (\#968).
* Database:
* Make foreign-keys of `ApplyWorkflow` (`project_id`, `workflow_id`, `input_dataset_id`, `output_dataset_id`) optional (\#927).
* Add columns `input_dataset_dump`, `output_dataset_dump` and `user_email` to `ApplyWorkflow` (\#927).
Expand Down
1 change: 1 addition & 0 deletions fractal_server/app/api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from ...syringe import Inject
from .v1.dataset import router as dataset_router
from .v1.job import router as job_router
from .v1.monitoring import router as router_monitoring # noqa
from .v1.project import router as project_router
from .v1.task import router as task_router
from .v1.task_collection import router as taskcollection_router
Expand Down
150 changes: 150 additions & 0 deletions fractal_server/app/api/v1/monitoring.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@
from datetime import datetime
from typing import Optional

from fastapi import APIRouter
from fastapi import Depends
from sqlalchemy import func
from sqlmodel import select

from ...db import AsyncSession
from ...db import get_db
from ...models import ApplyWorkflow
from ...models import Dataset
from ...models import JobStatusType
from ...models import Project
from ...models import Workflow
from ...schemas import ApplyWorkflowRead
from ...schemas import DatasetRead
from ...schemas import ProjectRead
from ...schemas import WorkflowRead
from ...security import current_active_superuser
from ...security import User


router = APIRouter()


@router.get("/project/", response_model=list[ProjectRead])
async def monitor_project(
    id: Optional[int] = None,
    user_id: Optional[int] = None,
    user: User = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_db),
) -> list[ProjectRead]:
    """
    Query the `Project` table (superuser-restricted endpoint).

    Optional query parameters narrow the result set: `id` selects a single
    project by primary key; `user_id` keeps only projects whose user list
    contains that user.
    """
    filters = []
    if id is not None:
        filters.append(Project.id == id)
    if user_id is not None:
        filters.append(Project.user_list.any(User.id == user_id))

    query = select(Project)
    if filters:
        # Multiple criteria passed to a single .where() are AND-ed together
        query = query.where(*filters)

    res = await db.execute(query)
    projects = res.scalars().all()
    # Explicitly release the connection before returning the response
    await db.close()

    return projects


@router.get("/workflow/", response_model=list[WorkflowRead])
async def monitor_workflow(
    id: Optional[int] = None,
    project_id: Optional[int] = None,
    name_contains: Optional[str] = None,
    user: User = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_db),
) -> list[WorkflowRead]:
    """
    Query the `Workflow` table (superuser-restricted endpoint).

    Optional filters: exact `id`, exact `project_id`, and a
    case-insensitive substring match of `name_contains` against the
    workflow name.
    """
    query = select(Workflow)

    if id is not None:
        query = query.where(Workflow.id == id)
    if project_id is not None:
        query = query.where(Workflow.project_id == project_id)
    if name_contains is not None:
        # Case-insensitive substring match; SQLAlchemy 2 also offers
        # `icontains` for this
        needle = name_contains.lower()
        query = query.where(func.lower(Workflow.name).contains(needle))

    res = await db.execute(query)
    matching_workflows = res.scalars().all()
    # Explicitly release the connection before returning the response
    await db.close()

    return matching_workflows


@router.get("/dataset/", response_model=list[DatasetRead])
async def monitor_dataset(
    id: Optional[int] = None,
    project_id: Optional[int] = None,
    name_contains: Optional[str] = None,
    type: Optional[str] = None,
    user: User = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_db),
) -> list[DatasetRead]:
    """
    Query the `Dataset` table (superuser-restricted endpoint).

    Optional filters: exact `id`, exact `project_id`, exact `type`, and a
    case-insensitive substring match of `name_contains` against the
    dataset name.
    """
    query = select(Dataset)

    if id is not None:
        query = query.where(Dataset.id == id)
    if project_id is not None:
        query = query.where(Dataset.project_id == project_id)
    if name_contains is not None:
        # Case-insensitive substring match; SQLAlchemy 2 also offers
        # `icontains` for this
        needle = name_contains.lower()
        query = query.where(func.lower(Dataset.name).contains(needle))
    if type is not None:
        query = query.where(Dataset.type == type)

    res = await db.execute(query)
    matching_datasets = res.scalars().all()
    # Explicitly release the connection before returning the response
    await db.close()

    return matching_datasets


@router.get("/job/", response_model=list[ApplyWorkflowRead])
async def monitor_job(
    id: Optional[int] = None,
    project_id: Optional[int] = None,
    input_dataset_id: Optional[int] = None,
    output_dataset_id: Optional[int] = None,
    workflow_id: Optional[int] = None,
    status: Optional[JobStatusType] = None,
    start_timestamp_min: Optional[datetime] = None,
    start_timestamp_max: Optional[datetime] = None,
    end_timestamp_min: Optional[datetime] = None,
    end_timestamp_max: Optional[datetime] = None,
    user: User = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_db),
) -> list[ApplyWorkflowRead]:
    """
    Query the `ApplyWorkflow` (job) table (superuser-restricted endpoint).

    Supports exact-match filters on ids and `status`, plus inclusive
    lower/upper bounds on the start and end timestamps
    (`*_min` <= timestamp <= `*_max`).
    """
    query = select(ApplyWorkflow)

    # Exact-match filters: apply each (value, column) pair that was provided
    equality_filters = [
        (id, ApplyWorkflow.id),
        (project_id, ApplyWorkflow.project_id),
        (input_dataset_id, ApplyWorkflow.input_dataset_id),
        (output_dataset_id, ApplyWorkflow.output_dataset_id),
        (workflow_id, ApplyWorkflow.workflow_id),
        (status, ApplyWorkflow.status),
    ]
    for value, column in equality_filters:
        if value is not None:
            query = query.where(column == value)

    # Inclusive lower bounds (column >= bound)
    lower_bounds = [
        (start_timestamp_min, ApplyWorkflow.start_timestamp),
        (end_timestamp_min, ApplyWorkflow.end_timestamp),
    ]
    for bound, column in lower_bounds:
        if bound is not None:
            query = query.where(column >= bound)

    # Inclusive upper bounds (column <= bound)
    upper_bounds = [
        (start_timestamp_max, ApplyWorkflow.start_timestamp),
        (end_timestamp_max, ApplyWorkflow.end_timestamp),
    ]
    for bound, column in upper_bounds:
        if bound is not None:
            query = query.where(column <= bound)

    res = await db.execute(query)
    matching_jobs = res.scalars().all()
    # Explicitly release the connection before returning the response
    await db.close()

    return matching_jobs
5 changes: 5 additions & 0 deletions fractal_server/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,15 @@ def collect_routers(app: FastAPI) -> None:
"""
from .app.api import router_default
from .app.api import router_v1
from .app.api import router_monitoring

from .app.security import auth_router

app.include_router(router_default, prefix="/api")
app.include_router(router_v1, prefix="/api/v1")
app.include_router(
router_monitoring, prefix="/monitoring", tags=["Monitoring"]
)
app.include_router(auth_router, prefix="/auth", tags=["auth"])


Expand Down
Loading

0 comments on commit 196cab7

Please sign in to comment.