Merge pull request #30 from etirelli/crio
Removing test that checks for CRI-O enable/disable, as it no longer applies to KFP v2
etirelli authored Aug 29, 2024
2 parents 8807cfe + e369a3a commit 822f643
Showing 1 changed file with 0 additions and 159 deletions.
elyra/tests/pipeline/kfp/test_processor_kfp.py (159 changes: 0 additions & 159 deletions)
@@ -27,12 +27,6 @@
from elyra.pipeline.catalog_connector import FilesystemComponentCatalogConnector
from elyra.pipeline.component import Component
from elyra.pipeline.kfp.kfp_properties import KfpPipelineParameter
from elyra.pipeline.kfp.processor_kfp import CRIO_VOL_DEF_MEDIUM
from elyra.pipeline.kfp.processor_kfp import CRIO_VOL_DEF_NAME
from elyra.pipeline.kfp.processor_kfp import CRIO_VOL_DEF_SIZE
from elyra.pipeline.kfp.processor_kfp import CRIO_VOL_MOUNT_PATH
from elyra.pipeline.kfp.processor_kfp import CRIO_VOL_PYTHON_PATH
from elyra.pipeline.kfp.processor_kfp import CRIO_VOL_WORKDIR_PATH
from elyra.pipeline.kfp.processor_kfp import KfpPipelineProcessor
from elyra.pipeline.kfp.processor_kfp import RUN_ID_PLACEHOLDER
from elyra.pipeline.kfp.processor_kfp import WorkflowEngineType
@@ -889,159 +883,6 @@ def enable_and_disable_crio(request):
        del os.environ["CRIO_RUNTIME"]


@pytest.mark.parametrize("enable_and_disable_crio", [False, True], indirect=True)
@pytest.mark.parametrize(
"metadata_dependencies",
[
{
"pipeline_file": Path(__file__).parent
/ ".."
/ "resources"
/ "test_pipelines"
/ "kfp"
/ "kfp-one-node-generic.pipeline",
"workflow_engine": WorkflowEngineType.ARGO,
},
],
indirect=True,
)
@pytest.mark.skip(
reason="This test is not compatible with KFP v2: The expected assertions cannot be verified in the generated YAML."
)
def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_component_crio(
monkeypatch, processor: KfpPipelineProcessor, metadata_dependencies: Dict[str, Any], tmpdir, enable_and_disable_crio
):
"""
This test validates that the output of _generate_pipeline_dsl and _compile_pipeline_dsl
yields the expected results for a generic node when the CRIO_RUNTIME environment variable
is set to a valid string representation of the boolean value True (/true/i).
Test assumptions:
- Enabling CRIO_RUNTIME has the same effect for all supported workflow engines
- The test pipeline contains at least one generic node
With CRIO_RUNTIME enabled, the compiled output must include the following properties:
- in spec.templates[].volumes:
- emptyDir: {medium: '', sizeLimit: 20Gi}
name: workspace
"""
crio_runtime_enabled = os.environ.get("CRIO_RUNTIME", "").lower() == "true"

# Obtain artifacts from metadata_dependencies fixture
test_pipeline_file = metadata_dependencies["pipeline_file"]
pipeline = metadata_dependencies["pipeline_object"]
assert pipeline is not None
runtime_config = metadata_dependencies["runtime_config"]
assert runtime_config is not None

workflow_engine = WorkflowEngineType.get_instance_by_value(runtime_config.metadata["engine"])

# Mock calls that require access to object storage, because their side effects
# have no bearing on the outcome of this test.
monkeypatch.setattr(processor, "_upload_dependencies_to_object_store", lambda w, x, y, prefix: True)
monkeypatch.setattr(processor, "_verify_cos_connectivity", lambda x: True)

# Mock pipeline to not include any parameters
monkeypatch.setattr(pipeline, "_pipeline_parameters", ElyraPropertyList([]))

# Test begins here

compiled_output_file = Path(tmpdir) / test_pipeline_file.with_suffix(".yaml").name
compiled_output_file_name = str(compiled_output_file.absolute())

# generate Python DSL for the specified workflow engine
pipeline_version = f"{pipeline.name}-test-0"
pipeline_instance_id = f"{pipeline.name}-{datetime.now().strftime('%m%d%H%M%S')}"
experiment_name = f"{pipeline.name}-test-0"

generated_dsl = processor._generate_pipeline_dsl(
pipeline=pipeline,
pipeline_name=pipeline.name,
workflow_engine=workflow_engine,
pipeline_version=pipeline_version,
pipeline_instance_id=pipeline_instance_id,
experiment_name=experiment_name,
)

# Compile the DSL
processor._compile_pipeline_dsl(
dsl=generated_dsl,
workflow_engine=workflow_engine,
output_file=compiled_output_file_name,
pipeline_conf=None,
)

# Load compiled workflow
with open(compiled_output_file_name) as f:
compiled_spec = yaml.safe_load(f.read())

# There should be multiple templates, one for the DAG and one for every generic node.
assert len(compiled_spec["spec"]["templates"]) >= 2
if crio_runtime_enabled:
for template in compiled_spec["spec"]["templates"]:
if template["name"] == compiled_spec["spec"]["entrypoint"]:
continue
# Check volume definition
assert template.get("volumes") is not None, template
entry_found = False
for volume_entry in template["volumes"]:
if volume_entry["name"] != CRIO_VOL_DEF_NAME:
continue
assert (
volume_entry.get("emptyDir") is not None
), f"Unexpected volume entry '{CRIO_VOL_DEF_NAME}': {volume_entry} "
assert volume_entry["emptyDir"]["sizeLimit"] == CRIO_VOL_DEF_SIZE
assert volume_entry["emptyDir"]["medium"] == CRIO_VOL_DEF_MEDIUM
entry_found = True
assert entry_found, f"Missing volume entry '{CRIO_VOL_DEF_NAME}' for CRI-O in {template['volumes']}"
# Check volume mount definition
assert template["container"].get("volumeMounts") is not None, template["container"]
for volumemount_entry in template["container"]["volumeMounts"]:
entry_found = False
if volumemount_entry["name"] != CRIO_VOL_DEF_NAME:
continue
assert volumemount_entry["mountPath"] == CRIO_VOL_MOUNT_PATH
entry_found = True
break
assert (
entry_found
), f"Missing volume mount entry '{CRIO_VOL_DEF_NAME}' for CRI-O in {template['container']['volumeMounts']}"
# Check PYTHONPATH environment variable (python_user_lib_path)
assert template["container"].get("env") is not None, template["container"]
for env_entry in template["container"]["env"]:
entry_found = False
if env_entry["name"] != "PYTHONPATH":
continue
assert env_entry["value"] == CRIO_VOL_PYTHON_PATH
entry_found = True
break
assert entry_found, f"Missing env variable entry 'PYTHONPATH' for CRI-O in {template['container']['env']}"
# Check the container command argument list
assert len(template["container"]["args"]) == 1
assert f"mkdir -p {CRIO_VOL_WORKDIR_PATH}" in template["container"]["args"][0]
assert f"--target={CRIO_VOL_PYTHON_PATH}" in template["container"]["args"][0]
assert f"--user-volume-path '{CRIO_VOL_PYTHON_PATH}' " in template["container"]["args"][0]
else:
for template in compiled_spec["spec"]["templates"]:
if template["name"] == compiled_spec["spec"]["entrypoint"]:
continue
# Check if a volume was defined
for volume_entry in template.get("volumes", []):
if volume_entry["name"] == CRIO_VOL_DEF_NAME:
# if a volume with the 'reserved' name exist there could be a problem
assert volume_entry.get("emptyDir") is None
# Check volume mount definition
for volumemount_entry in template["container"].get("volumeMounts", []):
if volumemount_entry["name"] == CRIO_VOL_DEF_NAME:
assert volumemount_entry["mountPath"] != CRIO_VOL_MOUNT_PATH
# Check PYTHONPATH environment variable
for env_entry in template["container"].get("env", []):
assert env_entry["name"] != "PYTHONPATH"
# Check the container command argument list
assert "mkdir -p ./jupyter-work-dir" in template["container"]["args"][0]
assert f"--target={CRIO_VOL_PYTHON_PATH}" not in template["container"]["args"][0]
assert "--user-volume-path" not in template["container"]["args"][0]


@pytest.mark.parametrize(
    "metadata_dependencies",
    [
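For reference, here is the core check of the deleted test distilled into a standalone helper: a minimal sketch assuming the Argo v1 workflow layout (spec.templates[].volumes) that the assertions targeted. The constant values are quoted from the deleted docstring (volume name "workspace", sizeLimit "20Gi", medium ""); the test itself imported them as the CRIO_VOL_DEF_* constants from elyra.pipeline.kfp.processor_kfp, and the function name below is hypothetical.

import yaml

# Values quoted from the deleted test's docstring; the original test imported
# these constants from elyra.pipeline.kfp.processor_kfp instead.
CRIO_VOL_DEF_NAME = "workspace"
CRIO_VOL_DEF_SIZE = "20Gi"
CRIO_VOL_DEF_MEDIUM = ""


def assert_crio_volume_present(compiled_yaml_path: str) -> None:
    """Assert that every non-entrypoint template defines the CRI-O emptyDir volume."""
    with open(compiled_yaml_path) as f:
        spec = yaml.safe_load(f)
    entrypoint = spec["spec"]["entrypoint"]
    for template in spec["spec"]["templates"]:
        if template["name"] == entrypoint:
            continue  # the DAG template itself runs no container
        volumes = template.get("volumes") or []
        matches = [v for v in volumes if v["name"] == CRIO_VOL_DEF_NAME]
        assert matches, f"Missing volume '{CRIO_VOL_DEF_NAME}' in {volumes}"
        empty_dir = matches[0].get("emptyDir")
        assert empty_dir is not None, f"Volume '{CRIO_VOL_DEF_NAME}' is not an emptyDir"
        assert empty_dir["sizeLimit"] == CRIO_VOL_DEF_SIZE
        assert empty_dir["medium"] == CRIO_VOL_DEF_MEDIUM

As the skip reason and commit message note, KFP v2 compiles pipelines to a differently shaped document, so porting this check would mean locating the equivalent volume settings in the v2 output rather than in spec.templates.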
