Merge branch 'main' into 935-review-enum-related-behavior
tcompa committed Nov 16, 2023
2 parents 8316069 + 79a8988 commit 3503a6c
Showing 6 changed files with 128 additions and 13 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -4,8 +4,9 @@

* API:
* Make it possible to delete a `Dataset`, `Workflow` or `Project`, even when it is in relationship to an `ApplyWorkflow` (\#927).
* Include `workflow_list` and `job_list` in `ProjectRead` response (\#927).
* New `GET` endpoints `api/v1/project/job/` and `api/v1/project/{project_id}/workflow/{workflow_id}/job/` (\#969).
* Change response of `/api/v1/project/{project_id}/job/{job_id}/stop/` endpoint to 204 no-content (\#967).
* Include `workflow_list` and `job_list` in `ProjectRead` response (\#927).
* Fix construction of `ApplyWorkflow.workflow_dump`, within apply endpoint (\#968).
* Database:
* Make foreign-keys of `ApplyWorkflow` (`project_id`, `workflow_id`, `input_dataset_id`, `output_dataset_id`) optional (\#927).
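As a rough client-side illustration of the API changelog entries above — the base URL, auth token, and IDs are hypothetical and not part of this commit:

```python
import httpx

BASE = "http://localhost:8000/api/v1"  # hypothetical deployment
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth token

with httpx.Client(headers=HEADERS) as client:
    # New in #969: all jobs of the current user, across all their projects.
    res = client.get(f"{BASE}/project/job/")
    assert res.status_code == 200
    user_jobs = res.json()  # list of ApplyWorkflowRead-shaped objects

    # New in #969: all jobs of a single workflow (IDs are made up here).
    res = client.get(f"{BASE}/project/1/workflow/1/job/")
    assert res.status_code == 200

    # Changed in #967: stopping a job now returns 204 with an empty body
    # (the HTTP verb is assumed to be GET in this sketch).
    res = client.get(f"{BASE}/project/1/job/1/stop/")
    assert res.status_code == 204
```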
39 changes: 39 additions & 0 deletions fractal_server/app/api/v1/job.py
@@ -23,11 +23,50 @@
from ...security import User
from ._aux_functions import _get_job_check_owner
from ._aux_functions import _get_project_check_owner
from ._aux_functions import _get_workflow_check_owner


router = APIRouter()


@router.get("/project/job/", response_model=list[ApplyWorkflowRead])
async def get_user_jobs(
user: User = Depends(current_active_user),
) -> list[ApplyWorkflowRead]:
"""
Returns all the jobs of the current user
"""

job_list = [
job for project in user.project_list for job in project.job_list
]

return job_list


@router.get(
"/project/{project_id}/workflow/{workflow_id}/job/",
response_model=list[ApplyWorkflowRead],
)
async def get_workflow_jobs(
project_id: int,
workflow_id: int,
user: User = Depends(current_active_user),
db: AsyncSession = Depends(get_db),
) -> Optional[list[ApplyWorkflowRead]]:
"""
Returns all the jobs related to a specific workflow
"""

workflow = await _get_workflow_check_owner(
project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
)
job_list = workflow.job_list
await db.close()

return job_list


@router.get(
"/project/{project_id}/job/{job_id}",
response_model=ApplyWorkflowRead,
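The new handlers above delegate access control to `_get_workflow_check_owner`, which is imported at the top of the file but defined in `_aux_functions.py`, outside this diff. A hypothetical sketch of what such a helper typically verifies (the real implementation may differ in signature, return value, and error details):

```python
from fastapi import HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession

from fractal_server.app.models import Workflow
# Assumed absolute import path, mirroring the relative import in job.py.
from fractal_server.app.api.v1._aux_functions import _get_project_check_owner


async def _get_workflow_check_owner_sketch(
    *,
    project_id: int,
    workflow_id: int,
    user_id: int,
    db: AsyncSession,
) -> Workflow:
    # Reuse the project-level ownership check imported in job.py
    # (its exact signature and return value are assumed here).
    project = await _get_project_check_owner(
        project_id=project_id, user_id=user_id, db=db
    )

    workflow = await db.get(Workflow, workflow_id)
    if workflow is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow {workflow_id} not found",
        )
    if workflow.project_id != project.id:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Workflow {workflow_id} is not part of project {project_id}",
        )
    return workflow
```

Under that assumption, `get_workflow_jobs` simply returns `workflow.job_list` once the check passes.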
12 changes: 5 additions & 7 deletions poetry.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions pyproject.toml
@@ -48,7 +48,7 @@ gunicorn = ["gunicorn"]
asgi-lifespan = "^2"
pytest = "^7.2"
httpx = "^0.23"
devtools = "^0.10"
devtools = "^0.12"
pytest-asyncio = "^0.20"
bumpver = "^2022.1120"
pre-commit = "^2.19"
@@ -107,5 +107,5 @@ relative_files = true
omit = ["tests/*"]

[[tool.mypy.overrides]]
module = ["devtools", "uvicorn"]
module = ["devtools", "uvicorn", "pytest", "asgi_lifespan", "asyncpg"]
ignore_missing_imports = true
16 changes: 13 additions & 3 deletions tests/fixtures_server.py
@@ -344,6 +344,7 @@ async def project_factory(db):
"""
Factory that adds a project to the database
"""

from fractal_server.app.models import Project

async def __project_factory(user, **kwargs):
@@ -433,6 +434,7 @@ async def job_factory(db: AsyncSession):
Insert job in db
"""
from fractal_server.app.models import Dataset
from fractal_server.app.models import Project
from fractal_server.app.models import ApplyWorkflow
from fractal_server.app.models import Workflow
from fractal_server.app.runner.common import set_start_and_last_task_index
@@ -447,7 +449,7 @@ async def __job_factory(
**kwargs,
):
workflow = await db.get(Workflow, workflow_id)
if not workflow:
if workflow is None:
raise IndexError(
"Error from job_factory: "
f"Workflow {workflow_id} does not exist."
@@ -460,17 +462,23 @@
)

input_dataset = await db.get(Dataset, input_dataset_id)
if not input_dataset:
if input_dataset is None:
raise IndexError(
"Error from job_factory: "
f"Dataset {input_dataset_id} does not exist."
)
output_dataset = await db.get(Dataset, output_dataset_id)
if not output_dataset:
if output_dataset is None:
raise IndexError(
"Error from job_factory: "
f"Dataset {input_dataset_id} does not exist."
)
project = await db.get(Project, project_id)
if project is None:
raise IndexError(
"Error from job_factory: "
f"Project {project_id} does not exist."
)

args = dict(
project_id=project_id,
@@ -498,6 +506,8 @@ async def __job_factory(
args.update(**kwargs)
job = ApplyWorkflow(**args)
db.add(job)
project.job_list.append(job)
db.add(project)
await db.commit()
await db.refresh(job)
return job
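The `job_factory` fixture now also fetches the parent `Project` and appends the new job to `project.job_list`, so jobs created by the fixture are reachable through the `user.project_list → project.job_list` traversal used by the new `/project/job/` endpoint. A rough sketch of a test relying on that behaviour (illustrative only; it assumes the standard fixtures from this file and eagerly loaded relationships):

```python
async def test_job_factory_links_job_to_project(
    MockCurrentUser,
    project_factory,
    workflow_factory,
    task_factory,
    dataset_factory,
    job_factory,
    db,
    tmp_path,
):
    async with MockCurrentUser(persist=True) as user:
        project = await project_factory(user)
        workflow = await workflow_factory(project_id=project.id)
        task = await task_factory()
        await workflow.insert_task(task_id=task.id, db=db)
        dataset = await dataset_factory(project_id=project.id)

        job = await job_factory(
            working_dir=tmp_path.as_posix(),
            project_id=project.id,
            input_dataset_id=dataset.id,
            output_dataset_id=dataset.id,
            workflow_id=workflow.id,
        )

        # job_factory appends the job to project.job_list before committing,
        # so the relationship should already be populated here (depending on
        # session configuration, a db.refresh(project) may be needed).
        assert job.id in [j.id for j in project.job_list]
```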
67 changes: 67 additions & 0 deletions tests/test_job_api.py
@@ -188,6 +188,8 @@ async def test_get_job_list(
assert len(res.json()) == 0

workflow = await workflow_factory(project_id=project.id)
workflow2 = await workflow_factory(project_id=project.id)

t = await task_factory()
await workflow.insert_task(task_id=t.id, db=db)
dataset = await dataset_factory(project_id=project.id)
@@ -206,3 +208,68 @@
debug(res)
assert res.status_code == 200
assert len(res.json()) == N

res = await client.get(
f"{PREFIX}/project/{project.id}/workflow/{workflow.id}/job/"
)
assert res.status_code == 200
assert len(res.json()) == N
res = await client.get(
f"{PREFIX}/project/{project.id}/workflow/{workflow2.id}/job/"
)
assert res.status_code == 200
assert len(res.json()) == 0


async def test_get_user_jobs(
MockCurrentUser,
project_factory,
dataset_factory,
workflow_factory,
task_factory,
job_factory,
db,
client,
tmp_path,
):

async with MockCurrentUser(persist=True, user_kwargs={"id": 123}) as user:

task = await task_factory()

project = await project_factory(user)
workflow = await workflow_factory(project_id=project.id)
await workflow.insert_task(task_id=task.id, db=db)
dataset = await dataset_factory(project_id=project.id)

for _ in range(3):
await job_factory(
working_dir=tmp_path.as_posix(),
project_id=project.id,
input_dataset_id=dataset.id,
output_dataset_id=dataset.id,
workflow_id=workflow.id,
)

project2 = await project_factory(user)
workflow2 = await workflow_factory(project_id=project2.id)
await workflow2.insert_task(task_id=task.id, db=db)
dataset2 = await dataset_factory(project_id=project2.id)

for _ in range(2):
await job_factory(
working_dir=tmp_path.as_posix(),
project_id=project2.id,
input_dataset_id=dataset2.id,
output_dataset_id=dataset2.id,
workflow_id=workflow2.id,
)

res = await client.get(f"{PREFIX}/project/job/")
assert res.status_code == 200
assert len(res.json()) == 5

async with MockCurrentUser(persist=True, user_kwargs={"id": 321}):
res = await client.get(f"{PREFIX}/project/job/")
assert res.status_code == 200
assert len(res.json()) == 0
