From a2aa908d26e1c83eb36e06f4aa6ef2df70cdf3df Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Thu, 16 Nov 2023 10:16:56 +0100
Subject: [PATCH 1/9] GET project/{p.id}/workflow/{w.id}/job/

---
 fractal_server/app/api/v1/job.py | 24 ++++++++++++++++++++++++
 tests/test_job_api.py            | 13 +++++++++++++
 2 files changed, 37 insertions(+)

diff --git a/fractal_server/app/api/v1/job.py b/fractal_server/app/api/v1/job.py
index b8cb4636bf..ff7e261d67 100644
--- a/fractal_server/app/api/v1/job.py
+++ b/fractal_server/app/api/v1/job.py
@@ -22,11 +22,35 @@
 from ...security import User
 from ._aux_functions import _get_job_check_owner
 from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_workflow_check_owner
 
 
 router = APIRouter()
 
 
+@router.get(
+    "/project/{project_id}/workflow/{workflow_id}/job/",
+    response_model=list[ApplyWorkflowRead],
+)
+async def get_workflow_jobs(
+    project_id: int,
+    workflow_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_db),
+) -> list[ApplyWorkflowRead]:
+    """
+    Returns all the jobs related to a specific workflow
+    """
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+    )
+    job_list = workflow.job_list
+    await db.close()
+
+    return job_list
+
+
 @router.get(
     "/project/{project_id}/job/{job_id}",
     response_model=ApplyWorkflowRead,
diff --git a/tests/test_job_api.py b/tests/test_job_api.py
index 3b6ceb4142..b89723d407 100644
--- a/tests/test_job_api.py
+++ b/tests/test_job_api.py
@@ -188,6 +188,8 @@ async def test_get_job_list(
     assert len(res.json()) == 0
 
     workflow = await workflow_factory(project_id=project.id)
+    workflow2 = await workflow_factory(project_id=project.id)
+
     t = await task_factory()
     await workflow.insert_task(task_id=t.id, db=db)
     dataset = await dataset_factory(project_id=project.id)
@@ -206,3 +208,14 @@ async def test_get_job_list(
     debug(res)
     assert res.status_code == 200
     assert len(res.json()) == N
+
+    res = await client.get(
+        f"{PREFIX}/project/{project.id}/workflow/{workflow.id}/job/"
+    )
+    assert res.status_code == 200
+    assert len(res.json()) == N
+    res = await client.get(
+        f"{PREFIX}/project/{project.id}/workflow/{workflow2.id}/job/"
+    )
+    assert res.status_code == 200
+    assert len(res.json()) == 0

From 2d412a9fe90895354cd86158f5ac9a87a8c6da7c Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Thu, 16 Nov 2023 10:57:07 +0100
Subject: [PATCH 2/9] GET project/job/

---
 fractal_server/app/api/v1/job.py | 22 +++++++++++++
 tests/test_job_api.py            | 56 ++++++++++++++++++++++++++++++++
 2 files changed, 78 insertions(+)

diff --git a/fractal_server/app/api/v1/job.py b/fractal_server/app/api/v1/job.py
index ff7e261d67..54372dc91f 100644
--- a/fractal_server/app/api/v1/job.py
+++ b/fractal_server/app/api/v1/job.py
@@ -28,6 +28,28 @@
 router = APIRouter()
 
 
+@router.get("/project/job/", response_model=list[ApplyWorkflowRead])
+async def get_user_jobs(
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_db),
+) -> list[ApplyWorkflowRead]:
+    """
+    Returns all the jobs of the current user
+    """
+
+    job_list = [
+        (
+            await _get_job_check_owner(
+                project_id=project.id, job_id=job.id, user_id=user.id, db=db
+            )
+        )["job"]
+        for project in user.project_list
+        for job in project.job_list
+    ]
+
+    return job_list
+
+
 @router.get(
     "/project/{project_id}/workflow/{workflow_id}/job/",
     response_model=list[ApplyWorkflowRead],
diff --git a/tests/test_job_api.py b/tests/test_job_api.py
index b89723d407..f8faf0d7fd 100644
--- a/tests/test_job_api.py
+++ b/tests/test_job_api.py
@@ -219,3 +219,59 @@ async def test_get_job_list(
     )
     assert res.status_code == 200
     assert len(res.json()) == 0
+
+
+async def test_get_user_jobs(
+    MockCurrentUser,
+    project_factory,
+    dataset_factory,
+    workflow_factory,
+    task_factory,
+    job_factory,
+    db,
+    client,
+    tmp_path,
+):
+
+    async with MockCurrentUser(persist=True, user_kwargs={"id": 123}) as user:
+
+        task = await task_factory()
+
+        project = await project_factory(user)
+        workflow = await workflow_factory(project_id=project.id)
+        await workflow.insert_task(task_id=task.id, db=db)
+        dataset = await dataset_factory(project_id=project.id)
+
+        for _ in range(3):
+            await job_factory(
+                working_dir=tmp_path.as_posix(),
+                project_id=project.id,
+                input_dataset_id=dataset.id,
+                output_dataset_id=dataset.id,
+                workflow_id=workflow.id,
+            )
+
+        project2 = await project_factory(user)
+        workflow2 = await workflow_factory(project_id=project.id)
+        await workflow2.insert_task(task_id=task.id, db=db)
+        dataset2 = await dataset_factory(project_id=project.id)
+
+        await db.refresh(project)
+        for _ in range(2):
+            await job_factory(
+                working_dir=tmp_path.as_posix(),
+                project_id=project2.id,
+                input_dataset_id=dataset2.id,
+                output_dataset_id=dataset2.id,
+                workflow_id=workflow2.id,
+            )
+
+        await db.refresh(project2)
+        res = await client.get(f"{PREFIX}/project/job/")
+        assert res.status_code == 200
+        assert len(res.json()) == 5
+
+    async with MockCurrentUser(persist=True, user_kwargs={"id": 321}):
+        res = await client.get(f"{PREFIX}/project/job/")
+        assert res.status_code == 200
+        assert len(res.json()) == 0

From abf86083a5befcac804f907596621aa40bd9c2a6 Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Thu, 16 Nov 2023 11:09:36 +0100
Subject: [PATCH 3/9] simplify list comprehension

---
 fractal_server/app/api/v1/job.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/fractal_server/app/api/v1/job.py b/fractal_server/app/api/v1/job.py
index 54372dc91f..d239faaaf8 100644
--- a/fractal_server/app/api/v1/job.py
+++ b/fractal_server/app/api/v1/job.py
@@ -38,13 +38,7 @@ async def get_user_jobs(
     """
 
     job_list = [
-        (
-            await _get_job_check_owner(
-                project_id=project.id, job_id=job.id, user_id=user.id, db=db
-            )
-        )["job"]
-        for project in user.project_list
-        for job in project.job_list
+        job for project in user.project_list for job in project.job_list
     ]
 
     return job_list

From d53b4638902a588d2276ba8c8f705fe5eae18a15 Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Thu, 16 Nov 2023 11:11:09 +0100
Subject: [PATCH 4/9] rm db from /project/job/

---
 fractal_server/app/api/v1/job.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/fractal_server/app/api/v1/job.py b/fractal_server/app/api/v1/job.py
index d239faaaf8..7cef68c8dc 100644
--- a/fractal_server/app/api/v1/job.py
+++ b/fractal_server/app/api/v1/job.py
@@ -31,7 +31,6 @@
 @router.get("/project/job/", response_model=list[ApplyWorkflowRead])
 async def get_user_jobs(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
 ) -> list[ApplyWorkflowRead]:
     """
     Returns all the jobs of the current user

From c7ef2c24cb784f753f4ed3168e62d9f7881113f2 Mon Sep 17 00:00:00 2001
From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com>
Date: Thu, 16 Nov 2023 11:20:14 +0100
Subject: [PATCH 5/9] Fix `job_factory` fixture and update `test_get_user_jobs`

---
 tests/fixtures_server.py | 16 +++++++++++++---
 tests/test_job_api.py    |  2 --
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/tests/fixtures_server.py b/tests/fixtures_server.py
index c5079d3744..f988902b68 100644
--- a/tests/fixtures_server.py
+++ b/tests/fixtures_server.py
@@ -344,6 +344,7 @@ async def project_factory(db):
     """
     Factory that adds a project to the database
    """
+
     from fractal_server.app.models import Project
 
     async def __project_factory(user, **kwargs):
@@ -433,6 +434,7 @@ async def job_factory(db: AsyncSession):
     Insert job in db
     """
     from fractal_server.app.models import Dataset
+    from fractal_server.app.models import Project
     from fractal_server.app.models import ApplyWorkflow
     from fractal_server.app.models import Workflow
     from fractal_server.app.runner.common import set_start_and_last_task_index
@@ -447,7 +449,7 @@ async def __job_factory(
         **kwargs,
     ):
         workflow = await db.get(Workflow, workflow_id)
-        if not workflow:
+        if workflow is None:
             raise IndexError(
                 "Error from job_factory: "
                 f"Workflow {workflow_id} does not exist."
             )
@@ -460,17 +462,23 @@ async def __job_factory(
         )
 
         input_dataset = await db.get(Dataset, input_dataset_id)
-        if not input_dataset:
+        if input_dataset is None:
             raise IndexError(
                 "Error from job_factory: "
                 f"Dataset {input_dataset_id} does not exist."
             )
         output_dataset = await db.get(Dataset, output_dataset_id)
-        if not output_dataset:
+        if output_dataset is None:
             raise IndexError(
                 "Error from job_factory: "
                 f"Dataset {input_dataset_id} does not exist."
             )
+        project = await db.get(Project, project_id)
+        if project is None:
+            raise IndexError(
+                "Error from job_factory: "
+                f"Project {project_id} does not exist."
+            )
 
         args = dict(
             project_id=project_id,
@@ -498,6 +506,8 @@ async def __job_factory(
         args.update(**kwargs)
         job = ApplyWorkflow(**args)
         db.add(job)
+        project.job_list.append(job)
+        db.add(project)
         await db.commit()
         await db.refresh(job)
         return job
diff --git a/tests/test_job_api.py b/tests/test_job_api.py
index f8faf0d7fd..48f413acee 100644
--- a/tests/test_job_api.py
+++ b/tests/test_job_api.py
@@ -256,7 +256,6 @@ async def test_get_user_jobs(
         await workflow2.insert_task(task_id=task.id, db=db)
         dataset2 = await dataset_factory(project_id=project.id)
 
-        await db.refresh(project)
         for _ in range(2):
             await job_factory(
                 working_dir=tmp_path.as_posix(),
@@ -266,7 +265,6 @@ async def test_get_user_jobs(
                 workflow_id=workflow2.id,
             )
 
-        await db.refresh(project2)
         res = await client.get(f"{PREFIX}/project/job/")
         assert res.status_code == 200
         assert len(res.json()) == 5

From d6da4144093c5b0072b648bc76d3727f548f5f03 Mon Sep 17 00:00:00 2001
From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com>
Date: Thu, 16 Nov 2023 11:20:38 +0100
Subject: [PATCH 6/9] Update devtools version

---
 poetry.lock    | 12 +++++-------
 pyproject.toml |  2 +-
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index bfcfe8f774..dc0debc9ad 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -553,21 +553,19 @@ test-randomorder = ["pytest-randomly"]
 
 [[package]]
 name = "devtools"
-version = "0.10.0"
+version = "0.12.2"
 description = "Python's missing debug print command, and more."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "devtools-0.10.0-py3-none-any.whl", hash = "sha256:b0bc02043bb032cdfb93e227226e2fea1aaea8f5a31fca25fabc4eadca22f228"},
-    {file = "devtools-0.10.0.tar.gz", hash = "sha256:6eb7c4fa7c4b90e5cfe623537a9961d1dc3199d8be0981802c6931cd8f02418f"},
+    {file = "devtools-0.12.2-py3-none-any.whl", hash = "sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7"},
+    {file = "devtools-0.12.2.tar.gz", hash = "sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507"},
 ]
 
 [package.dependencies]
 asttokens = ">=2.0.0,<3.0.0"
 executing = ">=1.1.1"
-
-[package.extras]
-pygments = ["pygments (>=2.2.0)"]
+pygments = ">=2.15.0"
 
 [[package]]
 name = "distlib"
@@ -2284,4 +2282,4 @@ slurm = ["cloudpickle", "clusterfutures"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "8a81731248d501a9d7de382f675ac1a0585a241909432705e8966a7d7b4a8166"
+content-hash = "dcaf99a3011b0746c60b366dbb49faefd03c1c1925b6ff9eb8d370d9cfdd9e80"
diff --git a/pyproject.toml b/pyproject.toml
index a553852002..31aee1f8a9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -48,7 +48,7 @@ gunicorn = ["gunicorn"]
 asgi-lifespan = "^2"
 pytest = "^7.2"
 httpx = "^0.23"
-devtools = "^0.10"
+devtools = "^0.12"
 pytest-asyncio = "^0.20"
 bumpver = "^2022.1120"
 pre-commit = "^2.19"

From 336b3b57471a029863363e5b7a287edca365e989 Mon Sep 17 00:00:00 2001
From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com>
Date: Thu, 16 Nov 2023 11:21:00 +0100
Subject: [PATCH 7/9] Add overrides to mypy config

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 31aee1f8a9..d0553e3f57 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -107,5 +107,5 @@ relative_files = true
 omit = ["tests/*"]
 
 [[tool.mypy.overrides]]
-module = ["devtools", "uvicorn"]
+module = ["devtools", "uvicorn", "pytest", "asgi_lifespan", "asyncpg"]
 ignore_missing_imports = true

From 462b8a9b359c0070d6b26cb0a8b76c643d106deb Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Thu, 16 Nov 2023 12:32:44 +0100
Subject: [PATCH 8/9] add optional and update changelog

---
 CHANGELOG.md                     | 2 ++
 fractal_server/app/api/v1/job.py | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 81cf726555..964c2168f8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,8 @@
 * API:
     * Make it possible to delete a `Dataset`, `Workflow` or `Project`, even when it is in relationship to an `ApplyWorkflow` (\#927).
     * Include `workflow_list` and `job_list` in `ProjectRead` response (\#927).
+    * New enpoints: `GET jobs` by `User` and by `Workflow` (\#969).
+
 * Database:
     * Make foreign-keys of `ApplyWorkflow` (`project_id`, `workflow_id`, `input_dataset_id`, `output_dataset_id`) optional (\#927).
     * Add columns `input_dataset_dump`, `output_dataset_dump` and `user_email` to `ApplyWorkflow` (\#927).
diff --git a/fractal_server/app/api/v1/job.py b/fractal_server/app/api/v1/job.py
index 7cef68c8dc..34ed753d51 100644
--- a/fractal_server/app/api/v1/job.py
+++ b/fractal_server/app/api/v1/job.py
@@ -52,7 +52,7 @@ async def get_workflow_jobs(
     workflow_id: int,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_db),
-) -> list[ApplyWorkflowRead]:
+) -> Optional[list[ApplyWorkflowRead]]:
     """
     Returns all the jobs related to a specific workflow
     """

From a6eb348d28f6250a7ffa9e360a5255b81245020b Mon Sep 17 00:00:00 2001
From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com>
Date: Thu, 16 Nov 2023 13:25:40 +0100
Subject: [PATCH 9/9] Update CHANGELOG [skip ci]

---
 CHANGELOG.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 35881238de..48ac2d5994 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,10 +4,10 @@
 
 * API:
     * Make it possible to delete a `Dataset`, `Workflow` or `Project`, even when it is in relationship to an `ApplyWorkflow` (\#927).
-    * Include `workflow_list` and `job_list` in `ProjectRead` response (\#927).
+    * New `GET` endpoints `api/v1/project/job/` and `api/v1/project/{project_id}/workflow/{workflow_id}/job/` (\#969).
     * Change response of `/api/v1/project/{project_id}/job/{job_id}/stop/` endpoint to 204 no-content (\#967).
+    * Include `workflow_list` and `job_list` in `ProjectRead` response (\#927).
     * Fix construction of `ApplyWorkflow.workflow_dump`, within apply endpoint (\#968).
-    * New enpoints: `GET jobs` by `User` and by `Workflow` (\#969).
 * Database:
     * Make foreign-keys of `ApplyWorkflow` (`project_id`, `workflow_id`, `input_dataset_id`, `output_dataset_id`) optional (\#927).
     * Add columns `input_dataset_dump`, `output_dataset_dump` and `user_email` to `ApplyWorkflow` (\#927).
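
For quick reference, here is a minimal client-side sketch of calling the two endpoints introduced by this series, written with httpx (already a test dependency in pyproject.toml). The endpoint paths and the `api/v1` prefix come from the patches above; the host/port, the token handling, and the example IDs are illustrative assumptions, not part of the patches.

import httpx

# Assumed deployment details; adjust to your own fractal-server instance.
BASE_URL = "http://localhost:8000/api/v1"  # assumed host and port
TOKEN = "<bearer-token>"  # placeholder, obtained via the usual login flow

headers = {"Authorization": f"Bearer {TOKEN}"}

with httpx.Client(headers=headers) as client:
    # GET /project/job/ : all jobs owned by the current user, across projects.
    user_jobs = client.get(f"{BASE_URL}/project/job/").json()

    # GET /project/{project_id}/workflow/{workflow_id}/job/ : jobs of a single
    # workflow within a single project (the IDs below are made-up examples).
    project_id, workflow_id = 1, 1
    workflow_jobs = client.get(
        f"{BASE_URL}/project/{project_id}/workflow/{workflow_id}/job/"
    ).json()

    print(f"user jobs: {len(user_jobs)}, workflow jobs: {len(workflow_jobs)}")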