Skip to content

Execution

zenml.execution

Step and pipeline execution.

Modules

pipeline

Pipeline execution.

Modules
dynamic

Dynamic pipeline execution.

Modules
outputs

Dynamic pipeline execution outputs.

Classes
ArtifactFuture(wrapped: Future[StepRunOutputs], invocation_id: str, index: int)

Bases: _BaseStepRunFuture

Future for a step run output artifact.

Initialize the future.

Parameters:

Name Type Description Default
wrapped Future[StepRunOutputs]

The wrapped future object.

required
invocation_id str

The invocation ID of the step run.

required
index int

The index of the output artifact.

required
Source code in src/zenml/execution/pipeline/dynamic/outputs.py
73
74
75
76
77
78
79
80
81
82
83
84
def __init__(
    self, wrapped: Future[StepRunOutputs], invocation_id: str, index: int
) -> None:
    """Initialize the future.

    Args:
        wrapped: The wrapped future object.
        invocation_id: The invocation ID of the step run.
        index: The index of the output artifact.
    """
    super().__init__(wrapped=wrapped, invocation_id=invocation_id)
    # Position of this artifact within the step's output tuple; used by
    # `result()` to select the right artifact once the future resolves.
    self._index = index
Functions
load(disable_cache: bool = False) -> Any

Load the step run output artifact data.

Parameters:

Name Type Description Default
disable_cache bool

Whether to disable the artifact cache.

False

Returns:

Type Description
Any

The step run output artifact data.

Source code in src/zenml/execution/pipeline/dynamic/outputs.py
106
107
108
109
110
111
112
113
114
115
def load(self, disable_cache: bool = False) -> Any:
    """Load the data of this step run output artifact.

    Args:
        disable_cache: Whether to disable the artifact cache.

    Returns:
        The step run output artifact data.
    """
    # Block until the step finished, then materialize the artifact data.
    artifact = self.result()
    return artifact.load(disable_cache=disable_cache)
result() -> OutputArtifact

Get the step run output artifact.

Raises:

Type Description
RuntimeError

If the future returned an invalid output.

Returns:

Type Description
OutputArtifact

The step run output artifact.

Source code in src/zenml/execution/pipeline/dynamic/outputs.py
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
def result(self) -> OutputArtifact:
    """Get the step run output artifact.

    Raises:
        RuntimeError: If the future returned an invalid output.

    Returns:
        The step run output artifact.
    """
    outputs = self._wrapped.result()
    if isinstance(outputs, OutputArtifact):
        # Single-output step: the future resolves directly to the artifact.
        return outputs
    if isinstance(outputs, tuple):
        # Multi-output step: select the artifact at this future's index.
        return outputs[self._index]
    raise RuntimeError(
        f"Step {self._invocation_id} returned an invalid output: "
        f"{outputs}."
    )
OutputArtifact

Bases: ArtifactVersionResponse

Dynamic step run output artifact.

StepRunOutputsFuture(wrapped: Future[StepRunOutputs], invocation_id: str, output_keys: List[str])

Bases: _BaseStepRunFuture

Future for a step run output.

Initialize the future.

Parameters:

Name Type Description Default
wrapped Future[StepRunOutputs]

The wrapped future object.

required
invocation_id str

The invocation ID of the step run.

required
output_keys List[str]

The output keys of the step run.

required
Source code in src/zenml/execution/pipeline/dynamic/outputs.py
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
def __init__(
    self,
    wrapped: Future[StepRunOutputs],
    invocation_id: str,
    output_keys: List[str],
) -> None:
    """Initialize the future.

    Args:
        wrapped: The wrapped future object.
        invocation_id: The invocation ID of the step run.
        output_keys: The output keys of the step run.
    """
    super().__init__(wrapped=wrapped, invocation_id=invocation_id)
    # Ordered output names; `get_artifact()` maps a key to its positional
    # index in the resolved output tuple via this list.
    self._output_keys = output_keys
Functions
artifacts() -> StepRunOutputs

Get the step run output artifacts.

Returns:

Type Description
StepRunOutputs

The step run output artifacts.

Source code in src/zenml/execution/pipeline/dynamic/outputs.py
161
162
163
164
165
166
167
def artifacts(self) -> StepRunOutputs:
    """Get the step run output artifacts.

    Returns:
        The step run output artifacts.
    """
    # Blocks until the wrapped future has resolved.
    wrapped = self._wrapped
    return wrapped.result()
get_artifact(key: str) -> ArtifactFuture

Get an artifact future by key.

Parameters:

Name Type Description Default
key str

The key of the artifact future.

required

Raises:

Type Description
KeyError

If no artifact for the given name exists.

Returns:

Type Description
ArtifactFuture

The artifact future.

Source code in src/zenml/execution/pipeline/dynamic/outputs.py
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
def get_artifact(self, key: str) -> ArtifactFuture:
    """Get an artifact future by key.

    Args:
        key: The key of the artifact future.

    Raises:
        KeyError: If no artifact for the given name exists.

    Returns:
        The artifact future.
    """
    if key not in self._output_keys:
        raise KeyError(
            f"Step run {self._invocation_id} does not have an output with "
            f"the name: {key}."
        )

    # The artifact's position in the output tuple matches the position of
    # its key in the ordered output keys.
    artifact_index = self._output_keys.index(key)
    return ArtifactFuture(
        wrapped=self._wrapped,
        invocation_id=self._invocation_id,
        index=artifact_index,
    )
load(disable_cache: bool = False) -> Any

Get the step run output artifact data.

Parameters:

Name Type Description Default
disable_cache bool

Whether to disable the artifact cache.

False

Raises:

Type Description
ValueError

If the step run output is invalid.

Returns:

Type Description
Any

The step run output artifact data.

Source code in src/zenml/execution/pipeline/dynamic/outputs.py
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
def load(self, disable_cache: bool = False) -> Any:
    """Get the step run output artifact data.

    Args:
        disable_cache: Whether to disable the artifact cache.

    Raises:
        ValueError: If the step run output is invalid.

    Returns:
        The step run output artifact data.
    """
    outputs = self.artifacts()

    if outputs is None:
        # The step produced no outputs.
        return None
    if isinstance(outputs, ArtifactVersionResponse):
        # Single output: load it directly.
        return outputs.load(disable_cache=disable_cache)
    if isinstance(outputs, tuple):
        # Multiple outputs: load each artifact, preserving order.
        loaded = [
            artifact.load(disable_cache=disable_cache)
            for artifact in outputs
        ]
        return tuple(loaded)
    raise ValueError(f"Invalid step run output: {outputs}")
Functions
run_context

Dynamic pipeline run context.

Classes
DynamicPipelineRunContext(pipeline: DynamicPipeline, snapshot: PipelineSnapshotResponse, run: PipelineRunResponse, runner: DynamicPipelineRunner)

Bases: BaseContext

Dynamic pipeline run context.

Initialize the dynamic pipeline run context.

Parameters:

Name Type Description Default
pipeline DynamicPipeline

The dynamic pipeline that is being executed.

required
snapshot PipelineSnapshotResponse

The snapshot of the pipeline.

required
run PipelineRunResponse

The pipeline run.

required
runner DynamicPipelineRunner

The dynamic pipeline runner.

required
Source code in src/zenml/execution/pipeline/dynamic/run_context.py
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
def __init__(
    self,
    pipeline: "DynamicPipeline",
    snapshot: "PipelineSnapshotResponse",
    run: "PipelineRunResponse",
    runner: "DynamicPipelineRunner",
) -> None:
    """Initialize the dynamic pipeline run context.

    Args:
        pipeline: The dynamic pipeline that is being executed.
        snapshot: The snapshot of the pipeline.
        run: The pipeline run.
        runner: The dynamic pipeline runner.
    """
    super().__init__()
    # Stored privately; exposed read-only via the `pipeline`, `snapshot`,
    # `run` and `runner` properties.
    self._pipeline = pipeline
    self._snapshot = snapshot
    self._run = run
    self._runner = runner
Attributes
pipeline: DynamicPipeline property

The pipeline that is being executed.

Returns:

Type Description
DynamicPipeline

The pipeline that is being executed.

run: PipelineRunResponse property

The pipeline run.

Returns:

Type Description
PipelineRunResponse

The pipeline run.

runner: DynamicPipelineRunner property

The runner executing the pipeline.

Returns:

Type Description
DynamicPipelineRunner

The runner executing the pipeline.

snapshot: PipelineSnapshotResponse property

The snapshot of the pipeline.

Returns:

Type Description
PipelineSnapshotResponse

The snapshot of the pipeline.

Functions
Modules
runner

Dynamic pipeline runner.

Classes
DynamicPipelineRunner(snapshot: PipelineSnapshotResponse, run: Optional[PipelineRunResponse])

Dynamic pipeline runner.

Initialize the dynamic pipeline runner.

Parameters:

Name Type Description Default
snapshot PipelineSnapshotResponse

The snapshot of the pipeline.

required
run Optional[PipelineRunResponse]

The pipeline run.

required

Raises:

Type Description
RuntimeError

If the snapshot has no associated stack.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
def __init__(
    self,
    snapshot: "PipelineSnapshotResponse",
    run: Optional["PipelineRunResponse"],
) -> None:
    """Initialize the dynamic pipeline runner.

    Args:
        snapshot: The snapshot of the pipeline.
        run: The pipeline run.

    Raises:
        RuntimeError: If the snapshot has no associated stack.
    """
    if not snapshot.stack:
        raise RuntimeError("Missing stack for snapshot.")

    # Dynamic pipelines only implement fail-fast semantics; any other
    # configured execution mode is ignored with a warning.
    if (
        snapshot.pipeline_configuration.execution_mode
        != ExecutionMode.STOP_ON_FAILURE
    ):
        logger.warning(
            "Only the `%s` execution mode is supported for "
            "dynamic pipelines right now. "
            "The execution mode `%s` will be ignored.",
            ExecutionMode.STOP_ON_FAILURE,
            snapshot.pipeline_configuration.execution_mode,
        )

    self._snapshot = snapshot
    self._run = run
    # TODO: make this configurable
    # Thread pool used by `launch_step` to run steps concurrently.
    self._executor = ThreadPoolExecutor(max_workers=10)
    # Set lazily — presumably resolved on first access by the `pipeline`
    # property; TODO confirm.
    self._pipeline: Optional["DynamicPipeline"] = None
    self._orchestrator = Stack.from_model(snapshot.stack).orchestrator
    self._orchestrator_run_id = (
        self._orchestrator.get_orchestrator_run_id()
    )
    # Futures of concurrently launched steps that haven't been awaited yet.
    self._futures: List[StepRunOutputsFuture] = []
Attributes
pipeline: DynamicPipeline property

The pipeline that the runner is executing.

Raises:

Type Description
RuntimeError

If the pipeline can't be loaded.

Returns:

Type Description
DynamicPipeline

The pipeline that the runner is executing.

Functions
await_all_step_run_futures() -> None

Await all step run output futures.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
280
281
282
283
284
def await_all_step_run_futures(self) -> None:
    """Block until every tracked step run future has finished.

    Resets the tracked futures only after all of them completed, so a
    failing future leaves the remaining ones tracked.
    """
    for pending in self._futures:
        pending._wait()
    self._futures = []
launch_step(step: BaseStep, id: Optional[str], args: Tuple[Any], kwargs: Dict[str, Any], after: Union[StepRunFuture, Sequence[StepRunFuture], None] = None, concurrent: bool = False) -> Union[StepRunOutputs, StepRunOutputsFuture]
launch_step(
    step: BaseStep,
    id: Optional[str],
    args: Tuple[Any],
    kwargs: Dict[str, Any],
    after: Union[
        StepRunFuture, Sequence[StepRunFuture], None
    ] = None,
    concurrent: Literal[False] = False,
) -> StepRunOutputs
launch_step(
    step: BaseStep,
    id: Optional[str],
    args: Tuple[Any],
    kwargs: Dict[str, Any],
    after: Union[
        StepRunFuture, Sequence[StepRunFuture], None
    ] = None,
    concurrent: Literal[True] = True,
) -> StepRunOutputsFuture

Launch a step.

Parameters:

Name Type Description Default
step BaseStep

The step to launch.

required
id Optional[str]

The invocation ID of the step.

required
args Tuple[Any]

The arguments for the step function.

required
kwargs Dict[str, Any]

The keyword arguments for the step function.

required
after Union[StepRunFuture, Sequence[StepRunFuture], None]

The step run output futures to wait for.

None
concurrent bool

Whether to launch the step concurrently.

False

Returns:

Type Description
Union[StepRunOutputs, StepRunOutputsFuture]

The step run outputs or a future for the step run outputs.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
def launch_step(
    self,
    step: "BaseStep",
    id: Optional[str],
    args: Tuple[Any],
    kwargs: Dict[str, Any],
    after: Union["StepRunFuture", Sequence["StepRunFuture"], None] = None,
    concurrent: bool = False,
) -> Union[StepRunOutputs, "StepRunOutputsFuture"]:
    """Launch a step.

    Args:
        step: The step to launch.
        id: The invocation ID of the step.
        args: The arguments for the step function.
        kwargs: The keyword arguments for the step function.
        after: The step run output futures to wait for.
        concurrent: Whether to launch the step concurrently. If True, a
            future is returned instead of the resolved outputs.

    Returns:
        The step run outputs or a future for the step run outputs.
    """
    # Work on a copy so this invocation doesn't mutate the caller's step
    # object — presumably to keep per-invocation config isolated.
    step = step.copy()
    compiled_step = compile_dynamic_step_invocation(
        snapshot=self._snapshot,
        pipeline=self.pipeline,
        step=step,
        id=id,
        args=args,
        kwargs=kwargs,
        after=after,
    )

    def _launch() -> StepRunOutputs:
        """Run the compiled step and load its outputs."""
        step_run = launch_step(
            snapshot=self._snapshot,
            step=compiled_step,
            orchestrator_run_id=self._orchestrator_run_id,
            retry=_should_retry_locally(
                compiled_step,
                self._snapshot.pipeline_configuration.docker_settings,
            ),
        )
        return _load_step_run_outputs(step_run.id)

    if concurrent:
        # Copy the current context so context variables (e.g. the active
        # run context) are visible inside the worker thread.
        ctx = contextvars.copy_context()
        future = self._executor.submit(ctx.run, _launch)
        # NOTE: removed a no-op expression statement that evaluated
        # `compiled_step.config.outputs` without using the result.
        step_run_future = StepRunOutputsFuture(
            wrapped=future,
            invocation_id=compiled_step.spec.invocation_id,
            output_keys=list(compiled_step.config.outputs),
        )
        self._futures.append(step_run_future)
        return step_run_future
    else:
        return _launch()
run_pipeline() -> None

Run the pipeline.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
def run_pipeline(self) -> None:
    """Run the pipeline.

    Sets up logging and an in-memory artifact cache, creates a placeholder
    run if none was provided, then calls the pipeline entrypoint inside a
    `DynamicPipelineRunContext` and publishes the final run status.
    """
    with setup_pipeline_logging(
        source="orchestrator",
        snapshot=self._snapshot,
        run_id=self._run.id if self._run else None,
    ) as logs_request:
        with InMemoryArtifactCache():
            # Reuse the run passed to the constructor if there is one,
            # otherwise create a fresh placeholder run.
            run = self._run or create_placeholder_run(
                snapshot=self._snapshot,
                orchestrator_run_id=self._orchestrator_run_id,
                logs=logs_request,
            )

            assert (
                self._snapshot.pipeline_spec
            )  # Always exists for new snapshots
            pipeline_parameters = self._snapshot.pipeline_spec.parameters

            with DynamicPipelineRunContext(
                pipeline=self.pipeline,
                run=run,
                snapshot=self._snapshot,
                runner=self,
            ):
                self._orchestrator.run_init_hook(snapshot=self._snapshot)
                try:
                    # TODO: step logging isn't threadsafe
                    # TODO: what should be allowed as pipeline returns?
                    #  (artifacts, json serializable, anything?)
                    #  how do we show it in the UI?
                    self.pipeline._call_entrypoint(**pipeline_parameters)
                    # The pipeline function finished successfully, but some
                    # steps might still be running. We now wait for all of
                    # them and raise any exceptions that occurred.
                    self.await_all_step_run_futures()
                # NOTE(review): bare `except:` also catches
                # KeyboardInterrupt/SystemExit; it re-raises, so behavior
                # is preserved, but `except BaseException:` would state
                # the intent more clearly — consider changing.
                except:
                    publish_failed_pipeline_run(run.id)
                    logger.error(
                        "Pipeline run failed. All in-progress step runs "
                        "will still finish executing."
                    )
                    raise
                finally:
                    # Cleanup runs on success and failure alike; shutting
                    # down the executor cancels futures that never started.
                    self._orchestrator.run_cleanup_hook(
                        snapshot=self._snapshot
                    )
                    self._executor.shutdown(wait=True, cancel_futures=True)

                publish_successful_pipeline_run(run.id)
Functions
compile_dynamic_step_invocation(snapshot: PipelineSnapshotResponse, pipeline: DynamicPipeline, step: BaseStep, id: Optional[str], args: Tuple[Any], kwargs: Dict[str, Any], after: Union[StepRunFuture, Sequence[StepRunFuture], None] = None) -> Step

Compile a dynamic step invocation.

Parameters:

Name Type Description Default
snapshot PipelineSnapshotResponse

The snapshot.

required
pipeline DynamicPipeline

The dynamic pipeline.

required
step BaseStep

The step to compile.

required
id Optional[str]

Custom invocation ID.

required
args Tuple[Any]

The arguments for the step function.

required
kwargs Dict[str, Any]

The keyword arguments for the step function.

required
after Union[StepRunFuture, Sequence[StepRunFuture], None]

The step run output futures to wait for.

None

Returns:

Type Description
Step

The compiled step.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
def compile_dynamic_step_invocation(
    snapshot: "PipelineSnapshotResponse",
    pipeline: "DynamicPipeline",
    step: "BaseStep",
    id: Optional[str],
    args: Tuple[Any],
    kwargs: Dict[str, Any],
    after: Union["StepRunFuture", Sequence["StepRunFuture"], None] = None,
) -> "Step":
    """Compile a dynamic step invocation.

    Args:
        snapshot: The snapshot.
        pipeline: The dynamic pipeline.
        step: The step to compile.
        id: Custom invocation ID.
        args: The arguments for the step function.
        kwargs: The keyword arguments for the step function.
        after: The step run output futures to wait for.

    Returns:
        The compiled step.
    """
    upstream_steps = set()

    # Futures listed in `after` become explicit upstream dependencies;
    # wait for each before recording its invocation ID.
    if isinstance(after, _BaseStepRunFuture):
        after._wait()
        upstream_steps.add(after.invocation_id)
    elif isinstance(after, Sequence):
        for item in after:
            item._wait()
            upstream_steps.add(item.invocation_id)

    def _await_and_validate_input(input: Any) -> Any:
        """Resolve future inputs to artifacts and record their producers
        as upstream steps."""
        if isinstance(input, StepRunOutputsFuture):
            # A whole-step future is only a valid input if the step has
            # exactly one output.
            if len(input._output_keys) != 1:
                raise ValueError(
                    "Passing multiple step run outputs to another step is not "
                    "allowed."
                )
            input = input.artifacts()

        if isinstance(input, ArtifactFuture):
            input = input.result()

        if isinstance(input, OutputArtifact):
            # The producing step becomes an implicit upstream dependency.
            upstream_steps.add(input.step_name)

        return input

    args = tuple(_await_and_validate_input(arg) for arg in args)
    kwargs = {
        key: _await_and_validate_input(value) for key, value in kwargs.items()
    }

    # TODO: we can validate the type of the inputs that are passed as raw data
    # Bind the call to the entrypoint signature to recover parameter names;
    # defaults the caller did not explicitly pass are collected separately.
    signature = inspect.signature(step.entrypoint, follow_wrapped=True)
    bound_args = signature.bind_partial(*args, **kwargs)
    validated_args = bound_args.arguments
    bound_args.apply_defaults()
    default_parameters = {
        key: value
        for key, value in bound_args.arguments.items()
        if key not in validated_args
    }

    # Classify inputs: artifacts produced by earlier dynamic steps become
    # step input artifacts; everything else is wrapped as an external
    # artifact.
    input_artifacts = {}
    external_artifacts = {}
    for name, value in validated_args.items():
        if isinstance(value, OutputArtifact):
            input_artifacts[name] = StepArtifact(
                invocation_id=value.step_name,
                output_name=value.output_name,
                annotation=OutputSignature(resolved_annotation=Any),
                pipeline=pipeline,
            )
        elif isinstance(value, (ArtifactVersionResponse, ExternalArtifact)):
            external_artifacts[name] = value
        else:
            # TODO: should some of these be parameters?
            external_artifacts[name] = ExternalArtifact(value=value)

    # If the snapshot carries a config template for this step, apply it and
    # remember which invocation it came from.
    if template := get_config_template(snapshot, step, pipeline):
        step._configuration = template.config.model_copy(
            update={"template": template.spec.invocation_id}
        )

    invocation_id = pipeline.add_step_invocation(
        step=step,
        custom_id=id,
        allow_id_suffix=not id,
        input_artifacts=input_artifacts,
        external_artifacts=external_artifacts,
        upstream_steps=upstream_steps,
        default_parameters=default_parameters,
        parameters={},
        model_artifacts_or_metadata={},
        client_lazy_loaders={},
    )
    return Compiler()._compile_step_invocation(
        invocation=pipeline.invocations[invocation_id],
        stack=Client().active_stack,
        step_config=None,
        pipeline_configuration=pipeline.configuration,
    )
get_config_template(snapshot: PipelineSnapshotResponse, step: BaseStep, pipeline: DynamicPipeline) -> Optional[Step]

Get the config template for a step executed in a dynamic pipeline.

Parameters:

Name Type Description Default
snapshot PipelineSnapshotResponse

The snapshot of the pipeline.

required
step BaseStep

The step to get the config template for.

required
pipeline DynamicPipeline

The dynamic pipeline that the step is being executed in.

required

Returns:

Type Description
Optional[Step]

The config template for the step.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
def get_config_template(
    snapshot: "PipelineSnapshotResponse",
    step: "BaseStep",
    pipeline: "DynamicPipeline",
) -> Optional["Step"]:
    """Get the config template for a step executed in a dynamic pipeline.

    Args:
        snapshot: The snapshot of the pipeline.
        step: The step to get the config template for.
        pipeline: The dynamic pipeline that the step is being executed in.

    Returns:
        The config template for the step, or None if the step is not one
        of the pipeline's `depends_on` steps.
    """
    template_index = next(
        (
            position
            for position, candidate in enumerate(pipeline.depends_on)
            if candidate._static_id == step._static_id
        ),
        None,
    )
    if template_index is None:
        return None

    # Relies on the snapshot's step configurations being ordered the same
    # way as `pipeline.depends_on` — the lookup is purely positional.
    return list(snapshot.step_configurations.values())[template_index]
get_step_runtime(step: Step, pipeline_docker_settings: DockerSettings) -> StepRuntime

Determine the runtime in which a step should be executed.

Parameters:

Name Type Description Default
step Step

The step.

required
pipeline_docker_settings DockerSettings

The Docker settings of the parent pipeline.

required

Returns:

Type Description
StepRuntime

The runtime for the step.

Source code in src/zenml/execution/pipeline/dynamic/runner.py
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
def get_step_runtime(
    step: "Step", pipeline_docker_settings: "DockerSettings"
) -> StepRuntime:
    """Determine the runtime in which a step should be executed.

    Args:
        step: The step.
        pipeline_docker_settings: The Docker settings of the parent pipeline.

    Returns:
        The runtime for the step.
    """
    # A step operator always requires an isolated environment.
    if step.config.step_operator:
        return StepRuntime.ISOLATED

    # Orchestrators that can't launch dynamic steps leave inline execution
    # as the only option.
    if not Client().active_stack.orchestrator.can_launch_dynamic_steps:
        return StepRuntime.INLINE

    # An explicitly configured runtime wins over the heuristics below.
    if step.config.runtime is not None:
        return step.config.runtime

    # Custom resources or step-specific Docker settings require isolation.
    if not step.config.resource_settings.empty:
        return StepRuntime.ISOLATED
    if step.config.docker_settings != pipeline_docker_settings:
        return StepRuntime.ISOLATED

    return StepRuntime.INLINE
Modules
utils

Pipeline execution utilities.

Classes Functions
prevent_pipeline_execution() -> Generator[None, None, None]

Context manager to prevent pipeline execution.

Yields:

Type Description
None

None.

Source code in src/zenml/execution/pipeline/utils.py
60
61
62
63
64
65
66
67
68
69
70
71
@contextmanager
def prevent_pipeline_execution() -> Generator[None, None, None]:
    """Context manager to prevent pipeline execution.

    While active, `should_prevent_pipeline_execution()` returns True.

    Yields:
        None.
    """
    token = _prevent_pipeline_execution.set(True)
    try:
        yield
    finally:
        # Restore the previous value even if the body raised.
        _prevent_pipeline_execution.reset(token)
should_prevent_pipeline_execution() -> bool

Whether to prevent pipeline execution.

Returns:

Type Description
bool

Whether to prevent pipeline execution.

Source code in src/zenml/execution/pipeline/utils.py
51
52
53
54
55
56
57
def should_prevent_pipeline_execution() -> bool:
    """Whether to prevent pipeline execution.

    Returns:
        Whether to prevent pipeline execution.
    """
    # Reads the context-local flag toggled by `prevent_pipeline_execution`.
    return _prevent_pipeline_execution.get()
submit_pipeline(snapshot: PipelineSnapshotResponse, stack: Stack, placeholder_run: Optional[PipelineRunResponse] = None) -> None

Submit a snapshot for execution.

Parameters:

Name Type Description Default
snapshot PipelineSnapshotResponse

The snapshot to submit.

required
stack Stack

The stack on which to submit the snapshot.

required
placeholder_run Optional[PipelineRunResponse]

An optional placeholder run for the snapshot.

None
noqa: DAR401

Raises: BaseException: Any exception that happened while submitting or running (in case it happens synchronously) the pipeline.

Source code in src/zenml/execution/pipeline/utils.py
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
def submit_pipeline(
    snapshot: "PipelineSnapshotResponse",
    stack: "Stack",
    placeholder_run: Optional["PipelineRunResponse"] = None,
) -> None:
    """Submit a snapshot for execution.

    Args:
        snapshot: The snapshot to submit.
        stack: The stack on which to submit the snapshot.
        placeholder_run: An optional placeholder run for the snapshot.

    # noqa: DAR401
    Raises:
        BaseException: Any exception that happened while submitting or running
            (in case it happens synchronously) the pipeline.
    """
    # Prevent execution of nested pipelines which might lead to
    # unexpected behavior
    with prevent_pipeline_execution():
        try:
            stack.prepare_pipeline_submission(snapshot=snapshot)
            stack.submit_pipeline(
                snapshot=snapshot,
                placeholder_run=placeholder_run,
            )
        except RunMonitoringError as e:
            # Don't mark the run as failed if the error happened during
            # monitoring of the run.
            # `from None` hides the monitoring wrapper from the traceback.
            raise e.original_exception from None
        except BaseException as e:
            if (
                placeholder_run
                and not Client()
                .get_pipeline_run(placeholder_run.id, hydrate=False)
                .status.is_finished
            ):
                # We failed during/before the submission of the run, so we mark
                # the run as failed if it's still in an unfinished state.
                publish_failed_pipeline_run(placeholder_run.id)

            raise e

step

Step execution.

Modules
utils

Step execution utilities.

Classes Functions
launch_step(snapshot: PipelineSnapshotResponse, step: Step, orchestrator_run_id: str, retry: bool = False) -> StepRunResponse

Launch a step.

Parameters:

Name Type Description Default
snapshot PipelineSnapshotResponse

The snapshot.

required
step Step

The step to run.

required
orchestrator_run_id str

The orchestrator run ID.

required
retry bool

Whether to retry the step if it fails.

False

Raises:

Type Description
RunStoppedException

If the run was stopped.

BaseException

If the step failed all retries.

Returns:

Type Description
StepRunResponse

The step run response.

Source code in src/zenml/execution/step/utils.py
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
def launch_step(
    snapshot: "PipelineSnapshotResponse",
    step: "Step",
    orchestrator_run_id: str,
    retry: bool = False,
) -> StepRunResponse:
    """Launch a step.

    Args:
        snapshot: The snapshot.
        step: The step to run.
        orchestrator_run_id: The orchestrator run ID.
        retry: Whether to retry the step if it fails.

    Raises:
        RunStoppedException: If the run was stopped.
        BaseException: If the step failed all retries.

    Returns:
        The step run response.
    """

    def _attempt() -> StepRunResponse:
        """Run a single launch attempt with a fresh launcher."""
        return StepLauncher(
            snapshot=snapshot,
            step=step,
            orchestrator_run_id=orchestrator_run_id,
        ).launch()

    if not retry:
        return _attempt()

    retry_config = step.config.retry
    max_retries = retry_config.max_retries if retry_config else 0
    delay = retry_config.delay if retry_config else 0
    backoff = retry_config.backoff if retry_config else 1

    # One initial attempt plus up to `max_retries` retries, with an
    # exponentially growing delay between failures.
    for attempt in range(max_retries + 1):
        try:
            return _attempt()
        except RunStoppedException:
            # Don't retry if the run was stopped
            raise
        except BaseException:
            if attempt < max_retries:
                logger.info(
                    "Sleeping for %d seconds before retrying step `%s`.",
                    delay,
                    step.config.name,
                )
                time.sleep(delay)
                delay *= backoff
            else:
                if max_retries > 0:
                    logger.error(
                        "Failed to run step `%s` after %d retries.",
                        step.config.name,
                        max_retries,
                    )
                raise
    # Unreachable: the final loop iteration either returns or raises.