Skip to content

Commit

Permalink
Merge pull request #408 from fractal-analytics-platform/407-make-validate_arguments-decorator-required
Browse files Browse the repository at this point in the history

407 make validate arguments decorator required
  • Loading branch information
tcompa authored Jun 8, 2023
2 parents 9cc9078 + ed7aad5 commit b359457
Show file tree
Hide file tree
Showing 16 changed files with 101 additions and 44 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,10 @@
* JSON Schemas for task arguments:
* Add JSON schemas for task arguments in the package manifest (\#369, \#384).
* Remove `TaskArguments` models and switch to Pydantic V1 `validate_arguments` (\#369).
* Make coercing&validating task arguments required, rather than optional (\#408).
* Remove `default_args` from manifest (\#379, \#393).
* Other:
* Make pydantic dependency required for running tasks, and pin it to V1 (\#408).
* Remove legacy executor definitions from manifest (\#361).
* Add GitHub action for testing `pip install` with/without `fractal-tasks` extra (\#390).
* Remove `sqlmodel` from dev dependencies (\#374).
Expand Down
23 changes: 5 additions & 18 deletions fractal_tasks_core/tasks/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,16 +40,12 @@ def default(self, value):
def run_fractal_task(
*,
task_function: Callable,
coerce_and_validate: bool = False,
logger_name: str = None,
):
"""
Implement standard task interface and call task_function. If
`coerce_and_validate`, coerce and validate arguments via
`pydantic.decorator.coerce_and_validate_arguments`.
Implement standard task interface and call task_function
:param task_function: the callable function that runs the task
:param coerce_and_validate: TBD
:logger_name: TBD
"""

Expand Down Expand Up @@ -79,19 +75,10 @@ def run_fractal_task(
with open(args.json, "r") as f:
pars = json.load(f)

if not coerce_and_validate:
# Run task without validating arguments' types
logger.info(f"START {task_function.__name__} task")
metadata_update = task_function(**pars)
logger.info(f"END {task_function.__name__} task")
else:
from pydantic.decorator import validate_arguments

# Validating arguments' types and run task
logger.info(f"START {task_function.__name__} task")
vf = validate_arguments(task_function)
metadata_update = vf(**pars)
logger.info(f"END {task_function.__name__} task")
# Run task
logger.info(f"START {task_function.__name__} task")
metadata_update = task_function(**pars)
logger.info(f"END {task_function.__name__} task")

# Write output metadata to file, with custom JSON encoder
with open(args.metadata_out, "w") as fout:
Expand Down
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/cellpose_segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import zarr
from anndata.experimental import write_elem
from cellpose import models
from pydantic.decorator import validate_arguments

import fractal_tasks_core
from fractal_tasks_core.lib_channels import ChannelNotFoundError
Expand Down Expand Up @@ -140,6 +141,7 @@ def segment_ROI(
return mask.astype(label_dtype)


@validate_arguments
def cellpose_segmentation(
*,
# Fractal arguments
Expand Down Expand Up @@ -667,6 +669,5 @@ def cellpose_segmentation(

run_fractal_task(
task_function=cellpose_segmentation,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/copy_ome_zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import anndata as ad
import zarr
from anndata.experimental import write_elem
from pydantic.decorator import validate_arguments

import fractal_tasks_core
from fractal_tasks_core.lib_regions_of_interest import (
Expand All @@ -37,6 +38,7 @@
__OME_NGFF_VERSION__ = fractal_tasks_core.__OME_NGFF_VERSION__


@validate_arguments
def copy_ome_zarr(
*,
input_paths: Sequence[str],
Expand Down Expand Up @@ -201,6 +203,5 @@ def copy_ome_zarr(

run_fractal_task(
task_function=copy_ome_zarr,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/create_ome_zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import pandas as pd
import zarr
from anndata.experimental import write_elem
from pydantic.decorator import validate_arguments

import fractal_tasks_core
from fractal_tasks_core.lib_channels import check_well_channel_labels
Expand All @@ -44,6 +45,7 @@
logger = logging.getLogger(__name__)


@validate_arguments
def create_ome_zarr(
*,
input_paths: Sequence[str],
Expand Down Expand Up @@ -434,6 +436,5 @@ def create_ome_zarr(

run_fractal_task(
task_function=create_ome_zarr,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/create_ome_zarr_multiplex.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import pandas as pd
import zarr
from anndata.experimental import write_elem
from pydantic.decorator import validate_arguments

import fractal_tasks_core
from fractal_tasks_core.lib_channels import check_well_channel_labels
Expand All @@ -46,6 +47,7 @@
logger = logging.getLogger(__name__)


@validate_arguments
def create_ome_zarr_multiplex(
*,
input_paths: Sequence[str],
Expand Down Expand Up @@ -485,6 +487,5 @@ def create_ome_zarr_multiplex(

run_fractal_task(
task_function=create_ome_zarr_multiplex,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/illumination_correction.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import dask.array as da
import numpy as np
import zarr
from pydantic.decorator import validate_arguments
from skimage.io import imread

from fractal_tasks_core.lib_channels import get_omero_channel_list
Expand Down Expand Up @@ -91,6 +92,7 @@ def correct(
return new_img_stack.astype(dtype)


@validate_arguments
def illumination_correction(
*,
input_paths: Sequence[str],
Expand Down Expand Up @@ -275,6 +277,5 @@ def illumination_correction(

run_fractal_task(
task_function=illumination_correction,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/maximum_intensity_projection.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

import anndata as ad
import dask.array as da
from pydantic.decorator import validate_arguments

from fractal_tasks_core.lib_pyramid_creation import build_pyramid
from fractal_tasks_core.lib_regions_of_interest import (
Expand All @@ -32,6 +33,7 @@
logger = logging.getLogger(__name__)


@validate_arguments
def maximum_intensity_projection(
*,
input_paths: Sequence[str],
Expand Down Expand Up @@ -136,6 +138,5 @@ def maximum_intensity_projection(

run_fractal_task(
task_function=maximum_intensity_projection,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/napari_workflows_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import zarr
from anndata.experimental import write_elem
from napari_workflows._io_yaml_v1 import load_workflow
from pydantic.decorator import validate_arguments

import fractal_tasks_core
from fractal_tasks_core.lib_channels import get_channel_from_image_zarr
Expand All @@ -56,6 +57,7 @@ class OutOfTaskScopeError(NotImplementedError):
pass


@validate_arguments
def napari_workflows_wrapper(
*,
# Default arguments for fractal tasks:
Expand Down Expand Up @@ -607,6 +609,5 @@ def napari_workflows_wrapper(

run_fractal_task(
task_function=napari_workflows_wrapper,
coerce_and_validate=True,
logger_name=logger.name,
)
3 changes: 2 additions & 1 deletion fractal_tasks_core/tasks/yokogawa_to_ome_zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import zarr
from anndata import read_zarr
from dask.array.image import imread
from pydantic.decorator import validate_arguments

from fractal_tasks_core.lib_channels import get_omero_channel_list
from fractal_tasks_core.lib_glob import glob_with_multiple_patterns
Expand Down Expand Up @@ -56,6 +57,7 @@ def sort_fun(filename: str):
return [site, z_index]


@validate_arguments
def yokogawa_to_ome_zarr(
*,
input_paths: Sequence[str],
Expand Down Expand Up @@ -220,6 +222,5 @@ def yokogawa_to_ome_zarr(

run_fractal_task(
task_function=yokogawa_to_ome_zarr,
coerce_and_validate=True,
logger_name=logger.name,
)
2 changes: 1 addition & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ numpy = "~1.23.5"
pandas = "^1.2.0"
defusedxml = "^0.7.1"
lxml = "^4.9.1"
pydantic = "~1.10.2"
pydantic = "<2"
docstring-parser = "^0.15"
anndata = "^0.8.0"

Expand Down
22 changes: 12 additions & 10 deletions tests/test_unit_napari_workflows_wrapper.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import logging

import pytest
from devtools import debug
from pydantic.error_wrappers import ValidationError

from fractal_tasks_core.tasks.napari_workflows_wrapper import (
napari_workflows_wrapper,
Expand All @@ -21,10 +23,10 @@ def test_input_specs(tmp_path, testdata_path):
output_specs = {
"output_label": {"type": "label", "label_name": "label_DAPI"}
}
with pytest.raises(ValueError):
with pytest.raises(ValidationError):
napari_workflows_wrapper(
input_paths=[tmp_path],
output_path=tmp_path,
input_paths=[str(tmp_path)],
output_path=str(tmp_path),
metadata={},
component="component",
input_specs=input_specs,
Expand All @@ -48,23 +50,23 @@ def test_output_specs(tmp_path, testdata_path, caplog):
input_specs = {
"input_image": {"type": "image", "wavelength_id": "A01_C01"}
}
output_specs = {"asd": "asd"}
output_specs = {"asd": {"asd": "asd"}}

try:
napari_workflows_wrapper(
input_paths=[tmp_path],
output_path=tmp_path,
input_paths=[str(tmp_path)],
output_path=str(tmp_path),
metadata={},
component="component",
input_specs=input_specs,
output_specs=output_specs,
workflow_file=workflow_file,
input_ROI_table="FOV_ROI_table",
)
except Exception:
except Exception as e:
# The task will now fail for some other reason (its arguments are not
# valid), but we only care about the warning
pass
debug(e)

assert "WARNING" in caplog.text
assert "Some item of wf.leafs" in caplog.text
Expand Down Expand Up @@ -94,8 +96,8 @@ def test_level_setting_in_non_labeling_worfklow(tmp_path, testdata_path):

with pytest.raises(NotImplementedError):
napari_workflows_wrapper(
input_paths=[tmp_path],
output_path=tmp_path,
input_paths=[str(tmp_path)],
output_path=str(tmp_path),
metadata={},
component="component",
input_specs=input_specs,
Expand Down
58 changes: 58 additions & 0 deletions tests/test_unit_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,3 +41,61 @@ def test_create_ome_zarr(tmp_path, testdata_path):
data = json.load(f)
debug(data)
assert len(data["plate"]["wells"]) == 1


def test_run_fractal_tasks(tmp_path, testdata_path, monkeypatch):
    """
    Run a task function through run_fractal_task, after mocking the argparse
    interface
    """

import fractal_tasks_core.tasks._utils

# Write arguments to a file
args = {}
args["input_paths"] = [str(testdata_path / "png/")]
args["output_path"] = str(tmp_path)
args["allowed_channels"] = [{"wavelength_id": "A01_C01"}]
args["image_extension"] = "png"
args["metadata"] = {}
debug(args)
args_path = tmp_path / "args.json"
with args_path.open("w") as f:
json.dump(args, f, indent=2)

# Mock argparse.ArgumentParser
class MockArgumentParser:
def add_argument(self, *args, **kwargs):
pass

def parse_args(self, *args, **kwargs):
class Args(object):
def __init__(self):
debug("INIT")
self.metadata_out = str(tmp_path / "metadiff.json")
self.json = str(args_path)

return Args()

monkeypatch.setattr(
"fractal_tasks_core.tasks._utils.ArgumentParser",
MockArgumentParser,
)

# Run the task
out = fractal_tasks_core.tasks._utils.run_fractal_task(
task_function=create_ome_zarr
)

# Check that the task wrote some output to args.metadata_out
with (tmp_path / "metadiff.json").open("r") as f:
out = json.load(f)
debug(out)
assert out

# Check that the output zarr exists and includes a well
zattrs = Path(args["output_path"]) / "myplate.zarr/.zattrs"
with open(zattrs) as f:
data = json.load(f)
debug(data)
assert len(data["plate"]["wells"]) == 1
8 changes: 4 additions & 4 deletions tests/test_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,8 +214,8 @@ def test_MIP(
# MIP
for component in metadata["image"]:
maximum_intensity_projection(
input_paths=[zarr_path_mip],
output_path=zarr_path_mip,
input_paths=[str(zarr_path_mip)],
output_path=str(zarr_path_mip),
metadata=metadata,
component=component,
)
Expand Down Expand Up @@ -283,8 +283,8 @@ def test_MIP_subset_of_images(
# MIP
for component in metadata["image"]:
maximum_intensity_projection(
input_paths=[zarr_path_mip],
output_path=zarr_path_mip,
input_paths=[str(zarr_path_mip)],
output_path=str(zarr_path_mip),
metadata=metadata,
component=component,
)
Expand Down
Loading

0 comments on commit b359457

Please sign in to comment.