From e98a3cb10293de55deb3c63efcd08c27c095f763 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Mon, 27 Oct 2025 12:17:53 -0700 Subject: [PATCH 01/15] Initial pass --- Guide/src/dev_guide/dev_tools/xflowey.md | 876 ++++++++++++++++++++++- 1 file changed, 874 insertions(+), 2 deletions(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index 8fc74b016c..c117b9c517 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -10,13 +10,885 @@ for writing maintainable, cross-platform automation. Some particularly notable pipelines: - `cargo xflowey build-igvm` - primarily dev-tool used to build OpenHCL IGVM files locally -- `cargo xflowey ci checkin-gates` - runs the entire PR checkin suite locally - `cargo xflowey restore-packages` - restores external packages needed to compile and run OpenVMM / OpenHCL -### `xflowey` vs `xtask` +> **Note**: While `cargo xflowey` technically has the ability to run CI pipelines +> locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is +> currently broken and should not be relied upon. Use CI pipelines in their +> intended environments (Azure DevOps or GitHub Actions). + +## `xflowey` vs `xtask` In a nutshell: - `cargo xtask`: implements novel, standalone tools/utilities - `cargo xflowey`: orchestrates invoking a sequence of tools/utilities, without doing any non-trivial data processing itself + +--- + +# Flowey Developer Guide + +This guide explains the core concepts and architecture of flowey for developers +working on OpenVMM automation. + +## Table of Contents + +1. [Core Concepts](#core-concepts) +2. [Pipelines](#pipelines) +3. [Artifacts](#artifacts) +4. [Flowey Nodes](#flowey-nodes) +5. [Variables: ReadVar and WriteVar](#variables-readvar-and-writevar) +6. [Emitting Steps](#emitting-steps) +7. [Runtime Services](#runtime-services) +8. [Node Design Philosophy](#node-design-philosophy) +9. 
[Common Patterns](#common-patterns) + +--- + +## Core Concepts + +### Two-Phase Execution Model + +Flowey operates in two distinct phases: + +1. **Build-Time (Resolution Phase)**: When you run `cargo xflowey`, flowey + constructs a directed acyclic graph (DAG) of steps by: + - Instantiating all nodes + - Processing their requests + - Resolving dependencies between nodes via variables + - Determining the execution order + +2. **Runtime (Execution Phase)**: The generated flow is executed, and steps run + in the computed order. During runtime: + - Variables are read and written with actual values + - Commands are executed + - Artifacts are published/consumed + - Side effects occur + +This separation allows flowey to: +- Validate the entire workflow before execution +- Generate YAML for CI systems (ADO, GitHub Actions) +- Optimize step ordering and parallelization +- Catch dependency errors at build-time + +### Backend Abstraction + +Flowey supports multiple execution backends: + +- **Local**: Runs directly on your development machine via bash or direct + execution +- **ADO (Azure DevOps)**: Generates ADO Pipeline YAML +- **GitHub Actions**: Generates GitHub Actions workflow YAML + +**Important**: Nodes should be written to work across ALL backends whenever +possible. Relying on `ctx.backend()` to query the backend or manually emitting +backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be +avoided unless absolutely necessary. Most automation logic should be +backend-agnostic, using `emit_rust_step` for cross-platform Rust code that +works everywhere. + +--- + +## Pipelines + +A **Pipeline** is the top-level construct that defines a complete automation +workflow. Pipelines consist of one or more **Jobs**, each of which runs a set +of **Nodes** to accomplish specific tasks. 
+ +### Defining a Pipeline + +```rust +fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result { + let mut pipeline = Pipeline::new(); + + // Define a job that runs on Linux x86_64 + let job = pipeline + .new_job(FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu), FlowArch::X86_64, "build") + .finish(); + + Ok(pipeline) +} +``` + +### Pipeline Jobs + +Each `PipelineJob` represents a unit of work that: +- Runs on a specific platform and architecture +- Can depend on artifacts from other jobs +- Can be conditionally executed based on parameters +- Emits a sequence of steps that accomplish the job's goals + +Jobs are configured using a builder pattern: + +```rust +let job = pipeline + .new_job(platform, arch, "my-job") + .with_timeout_in_minutes(60) + .with_condition(some_param) + .ado_set_pool("my-pool") + .gh_set_pool(GhRunner::UbuntuLatest) + .dep_on(|ctx| { + // Define what nodes this job depends on + some_node::Request { /* ... */ } + }) + .finish(); +``` + +### Backend-Specific Configuration + +**ADO-specific:** +- `ado_set_pool()`: Specify agent pool +- `ado_set_pr_triggers()`: Configure PR triggers +- `ado_set_ci_triggers()`: Configure CI triggers +- `ado_add_resources_repository()`: Add repository resources + +**GitHub Actions-specific:** +- `gh_set_pool()`: Specify runner (GitHub-hosted or self-hosted) +- `gh_set_pr_triggers()`: Configure PR triggers +- `gh_set_ci_triggers()`: Configure CI triggers +- `gh_grant_permissions()`: Grant GITHUB_TOKEN permissions + +### Pipeline Parameters + +Parameters allow runtime configuration of pipelines: + +```rust +// Define a boolean parameter +let use_cache = pipeline.new_parameter_bool( + "use_cache", + "Whether to use caching", + ParameterKind::Stable, + Some(true) // default value +); + +// Use the parameter in a job +let job = pipeline.new_job(...) 
+ .dep_on(|ctx| { + let use_cache = ctx.use_parameter(use_cache); + // use_cache is now a ReadVar + }) + .finish(); +``` + +Parameter types: +- `new_parameter_bool()`: Boolean parameters +- `new_parameter_string()`: String parameters with optional validation +- `new_parameter_num()`: Numeric (i64) parameters with optional validation + +--- + +## Artifacts + +**Artifacts** are the mechanism for passing data between jobs in a pipeline. +When one job produces output that another job needs, that output is packaged as +an artifact. + +### Typed vs Untyped Artifacts + +**Typed artifacts (preferred)** provide type-safe artifact handling by defining +a custom type that implements the `Artifact` trait. This type describes the +structure of files that will be published and consumed. + +#### The Artifact Trait + +The `Artifact` trait is the foundation of typed artifacts: + +The trait works by serializing your type to JSON in a format that reflects a +directory structure: +- Each JSON key is a file name (use `#[serde(rename = "file.exe")]`) +- Each value is either a string containing the path to the file, or another + JSON object representing a subdirectory +- Optional fields allow for conditional file inclusion + +#### Example: Defining a Typed Artifact + +Here's a real-world example from the codebase: + +```rust +use flowey::node::prelude::*; + +#[derive(Serialize, Deserialize)] +#[serde(untagged)] +pub enum PipetteOutput { + LinuxBin { + #[serde(rename = "pipette")] + bin: PathBuf, + #[serde(rename = "pipette.dbg")] + dbg: PathBuf, + }, + WindowsBin { + #[serde(rename = "pipette.exe")] + exe: PathBuf, + #[serde(rename = "pipette.pdb")] + pdb: PathBuf, + }, +} + +impl Artifact for PipetteOutput {} +``` + +This enum represents either a Linux build (with `pipette` binary and `pipette.dbg` +debug symbols) or a Windows build (with `pipette.exe` and `pipette.pdb`). The +`#[serde(rename = "...")]` attributes specify the exact file names that will +appear in the published artifact. 
+ +#### Using Typed Artifacts in Pipelines + +```rust +// In pipeline definition - create the artifact handles +let (publish_pipette, use_pipette) = pipeline.new_typed_artifact::("pipette"); + +// In producer job - write the artifact +let job1 = pipeline.new_job(...) + .dep_on(|ctx| { + let pipette = ctx.publish_typed_artifact(publish_pipette); + // pipette is a WriteVar + + // In a node, write the appropriate variant: + ctx.emit_rust_step("build pipette", |ctx| { + let pipette = pipette.claim(ctx); + move |rt| { + let output = PipetteOutput::WindowsBin { + exe: PathBuf::from("path/to/pipette.exe"), + pdb: PathBuf::from("path/to/pipette.pdb"), + }; + rt.write(pipette, &output); + Ok(()) + } + }); + }) + .finish(); + +// In consumer job - read the artifact +let job2 = pipeline.new_job(...) + .dep_on(|ctx| { + let pipette = ctx.use_typed_artifact(&use_pipette); + // pipette is a ReadVar + + ctx.emit_rust_step("use pipette", |ctx| { + let pipette = pipette.claim(ctx); + move |rt| { + let output = rt.read(pipette); + match output { + PipetteOutput::WindowsBin { exe, pdb } => { + // Use the Windows binaries + } + PipetteOutput::LinuxBin { bin, dbg } => { + // Use the Linux binaries + } + } + Ok(()) + } + }); + }) + .finish(); +``` + +#### Untyped Artifacts + +**Untyped artifacts** provide simple directory-based artifacts for cases where +you don't need type safety: + +```rust +let (publish, use_artifact) = pipeline.new_artifact("my-artifact"); + +// Producer gets a path to an empty directory to populate +let artifact_dir = ctx.publish_artifact(publish); // ReadVar + +// Consumer gets a path to the populated directory +let artifact_dir = ctx.use_artifact(&use_artifact); // ReadVar +``` + +Use untyped artifacts when: +- The artifact structure is simple or ad-hoc +- You don't need compile-time guarantees about file names/structure +- The artifact is primarily used by a single node + +### How Artifacts Create Dependencies + +When you use an artifact in a job, flowey 
automatically: +1. Creates a dependency from the consuming job to the producing job +2. Ensures the producing job runs first +3. Handles artifact upload/download between jobs (on CI backends) + +--- + +## Flowey Nodes + +A **FlowNode** is a reusable unit of automation logic. Nodes process requests, +emit steps, and can depend on other nodes. + +### The Node/Request Pattern + +Every node has an associated **Request** type that defines what the node can do: + +```rust +// Define the node +new_flow_node!(struct Node); + +// Define requests using the flowey_request! macro +flowey_request! { + pub enum Request { + InstallRust(String), // Install specific version + EnsureInstalled(WriteVar), // Ensure it's installed + GetCargoHome(WriteVar), // Get CARGO_HOME path + } +} +``` + +### FlowNode vs SimpleFlowNode + +**Use `FlowNode`** when you need to: +- Aggregate multiple requests and process them together +- Resolve conflicts between requests +- Perform complex request validation + +**Use `SimpleFlowNode`** when: +- Each request can be processed independently +- No aggregation logic is needed +- Simpler, less boilerplate + +```rust +// FlowNode - processes all requests together +impl FlowNode for Node { + type Request = Request; + + fn imports(ctx: &mut ImportCtx<'_>) { + // Declare node dependencies + ctx.import::(); + } + + fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { + // Process all requests, aggregate common requirements + // Emit steps to accomplish the work + Ok(()) + } +} + +// SimpleFlowNode - processes one request at a time +impl SimpleFlowNode for Node { + type Request = Request; + + fn imports(ctx: &mut ImportCtx<'_>) { + ctx.import::(); + } + + fn process_request(request: Self::Request, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { + // Process single request + Ok(()) + } +} +``` + +### Node Registration + +Nodes are automatically registered using macros: +- `new_flow_node!(struct Node)` - registers a FlowNode +- 
`new_simple_flow_node!(struct Node)` - registers a SimpleFlowNode +- `flowey_request!` - defines the Request type and implements `IntoRequest` + +### The imports() Method + +The `imports()` method declares which other nodes this node might depend on: + +```rust +fn imports(ctx: &mut ImportCtx<'_>) { + ctx.import::(); + ctx.import::(); +} +``` + +This allows flowey to: +- Validate that all dependencies are available +- Build the complete dependency graph +- Catch missing dependencies at build-time + +### The emit() Method + +The `emit()` method is where the node's actual logic lives: + +```rust +fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { + // 1. Aggregate and validate requests + let mut version = None; + let mut ensure_installed = Vec::new(); + + for req in requests { + match req { + Request::Version(v) => same_across_all_reqs("Version", &mut version, v)?, + Request::EnsureInstalled(var) => ensure_installed.push(var), + } + } + + // 2. Emit steps to do the work + ctx.emit_rust_step("install rust", |ctx| { + let ensure_installed = ensure_installed.claim(ctx); + move |rt| { + // Runtime logic here + Ok(()) + } + }); + + Ok(()) +} +``` + +--- + +## Variables: ReadVar and WriteVar + +**ReadVar** and **WriteVar** are flowey's solution to the problem of declaring +variables at build-time that will hold values produced during pipeline runtime. + +### The Problem They Solve + +When constructing the pipeline graph, we don't yet know the values that will be +produced during execution (e.g., paths to built binaries, git commit hashes, +etc.). We need a way to: +1. Declare "this step will produce a value" +2. Declare "this step will consume that value" +3. Let flowey infer the execution order from these dependencies + +### Write-Once Semantics + +`WriteVar` can only be written to **once**. 
This is fundamental to flowey's +execution model: + +- Writing to a `WriteVar` consumes it (the type is not `Clone`) +- This ensures there's exactly one producer for each variable +- Flowey can use this to build a valid DAG (no cycles, no conflicts) + +```rust +let (read, write) = ctx.new_var::(); + +// Later, in a step: +rt.write(write, &"hello".to_string()); // write is consumed here +``` + +### Claiming Variables + +Before a step can use a `ReadVar` or `WriteVar`, it must **claim** it: + +```rust +ctx.emit_rust_step("my step", |ctx| { + // Claim variables for this step + let read_var = some_read_var.claim(ctx); + let write_var = some_write_var.claim(ctx); + + // Return the runtime closure + move |rt| { + let value = rt.read(read_var); + rt.write(write_var, &modified_value); + Ok(()) + } +}); +``` + +Claiming serves several purposes: +1. Registers that this step depends on (or produces) this variable +2. Converts `ReadVar` to `ReadVar` +3. Allows flowey to track variable usage for graph construction + +### ClaimedReadVar and ClaimedWriteVar + +These are type aliases for claimed variables: +- `ClaimedReadVar = ReadVar` +- `ClaimedWriteVar = WriteVar` + +Only claimed variables can be read/written at runtime. + +### Static Values vs Runtime Values + +Sometimes you know a value at build-time: + +```rust +// Create a ReadVar with a static value +let version = ReadVar::from_static("1.2.3".to_string()); + +// This is encoded directly in the pipeline, not computed at runtime +// WARNING: Never use this for secrets! 
+``` + +### Variable Operations + +ReadVar provides several useful operations: + +```rust +// Transform a value +let uppercase = lowercase.map(ctx, |s| s.to_uppercase()); + +// Combine two variables +let combined = var1.zip(ctx, var2); // ReadVar<(T, U)> + +// Discard the value, keep only the dependency +let side_effect = var.into_side_effect(); // ReadVar + +// Create a dependency without using the value +let dependent = var.depending_on(ctx, &other_var); +``` + +### The SideEffect Type + +`SideEffect` is an alias for `()` that represents a dependency without data: + +```rust +// This step produces a side effect (e.g., installs a tool) +let installed = ctx.emit_rust_step("install tool", |ctx| { + let done = done.claim(ctx); + move |rt| { + // install the tool + rt.write(done, &()); // SideEffect is () + Ok(()) + } +}); + +// Other steps can depend on this happening +ctx.emit_rust_step("use tool", |ctx| { + installed.claim(ctx); // Ensures install happens first + move |rt| { + // use the tool + Ok(()) + } +}); +``` + +--- + +## Emitting Steps + +Nodes emit **steps** - units of work that will be executed at runtime. Different +step types exist for different purposes. + +### Rust Steps + +**`emit_rust_step`**: Emits a step that runs Rust code at runtime. This is the +most common step type. + +```rust +ctx.emit_rust_step("build the project", |ctx| { + let source_dir = source_dir.claim(ctx); + let output = output.claim(ctx); + + move |rt| { + let source = rt.read(source_dir); + let result = build_project(&source)?; + rt.write(output, &result); + Ok(()) + } +}); +``` + +**`emit_minor_rust_step`**: Like `emit_rust_step`, but for steps that: +- Cannot fail (closure returns `T` not `anyhow::Result`) +- Don't need to be visible in CI logs as separate steps + +This reduces log clutter for trivial operations like variable transformations. 
+ +**`emit_rust_stepv`**: A convenience method that creates a new variable and +returns it: + +```rust +// Instead of: +let (read, write) = ctx.new_var(); +ctx.emit_rust_step("compute value", |ctx| { + let write = write.claim(ctx); + move |rt| { + rt.write(write, &compute()); + Ok(()) + } +}); + +// You can write: +let read = ctx.emit_rust_stepv("compute value", |ctx| { + move |rt| Ok(compute()) +}); +``` + +### ADO Steps + +**`emit_ado_step`**: Emits an Azure DevOps YAML step. + +```rust +ctx.emit_ado_step("checkout code", |ctx| { + move |rt| { + r#" + - checkout: self + clean: true + "#.to_string() + } +}); +``` + +### GitHub Steps + +**`emit_gh_step`**: Builds a GitHub Actions step using `GhStepBuilder`. + +```rust +ctx.emit_gh_step("Checkout code", "actions/checkout@v4") + .with("fetch-depth", "0") + .finish(ctx); +``` + +### Side Effect Steps + +**`emit_side_effect_step`**: Creates a dependency relationship without executing +any code. Useful for resolving multiple side effects into one. + +```rust +ctx.emit_side_effect_step( + vec![dependency1, dependency2], // use these + vec![output_side_effect], // resolve this +); +``` + +### StepCtx vs NodeCtx + +- **`NodeCtx`**: Used when emitting steps. Provides `emit_*` methods, `new_var()`, + `req()`, etc. + +- **`StepCtx`**: Used inside step closures. Provides access to `claim()` for + variables, and basic environment info (`backend()`, `platform()`). + +--- + +## Runtime Services + +Runtime services provide the API available during step execution (inside the +closures passed to `emit_rust_step`, etc.). 
+ +### RustRuntimeServices + +Available in Rust steps via the `rt` parameter: + +```rust +move |rt: &mut RustRuntimeServices<'_>| { + // Read variables + let value = rt.read(some_var); + + // Write variables + rt.write(output_var, &result); + rt.write_secret(secret_var, &secret); // Mark as secret + + // Query environment + let backend = rt.backend(); // Local, ADO, or Github + let platform = rt.platform(); // Windows, Linux, MacOs + let arch = rt.arch(); // X86_64, Aarch64 + + Ok(()) +} +``` + +**Important**: If a step reads a secret value, all subsequent writes from that +step are marked as secret by default (to prevent accidental leaks). Use +`write_not_secret()` if you need to override this. + +### AdoStepServices + +Available in ADO steps for interacting with ADO-specific features: + +```rust +move |rt: &mut AdoStepServices<'_>| { + // Get ADO variable as flowey var + rt.set_var(flowey_var, AdoRuntimeVar::BUILD__SOURCE_BRANCH); + + // Set ADO variable from flowey var + let ado_var = rt.get_var(flowey_var); + + // Resolve repository ID + let repo = rt.resolve_repository_id(repo_id); + + "- task: SomeTask@1".to_string() +} +``` + +### GhStepBuilder + +Builder for GitHub Actions steps: + +```rust +ctx.emit_gh_step("Azure Login", "Azure/login@v2") + .with("client-id", client_id) // Add parameter + .with("tenant-id", tenant_id) + .output("token", token_var) // Capture output + .run_after(some_side_effect) // Add dependency + .requires_permission( // Declare permission needed + GhPermission::IdToken, + GhPermissionValue::Write + ) + .finish(ctx); +``` + +--- + +## Node Design Philosophy + +Flowey nodes are designed around several key principles: + +### 1. Composability + +Nodes should be reusable building blocks that can be combined to build complex +workflows. Each node should have a single, well-defined responsibility. + +❌ **Bad**: A node that "builds and tests the project" +✅ **Good**: Separate nodes for "build project" and "run tests" + +### 2. 
Explicit Dependencies + +Dependencies between steps should be explicit through variables, not implicit +through side effects. + +❌ **Bad**: Assuming a tool is already installed +✅ **Good**: Taking a `ReadVar` that proves installation happened + +### 3. Backend Abstraction + +Nodes should work across all backends when possible. Backend-specific behavior +should be isolated and documented. + +```rust +match ctx.backend() { + FlowBackend::Local => { + // Local-specific logic + } + FlowBackend::Ado => { + // ADO-specific logic + } + FlowBackend::Github => { + // GitHub-specific logic + } +} +``` + +### 4. Separation of Concerns + +Keep node definition (request types, dependencies) separate from step +implementation (runtime logic): + +- **Node definition**: What the node does, what it depends on +- **Step implementation**: How it does it + +### 5. Type Safety + +Use Rust's type system to prevent errors at build-time: + +- Typed artifacts ensure type-safe data passing +- `WriteVar` can only be written once (enforced by the type system) +- `ClaimVar` ensures variables are claimed before use +- Request validation happens during `emit()`, not at runtime + +--- + +## Common Patterns + +### Request Aggregation and Validation + +When processing multiple requests, use helper functions to ensure consistency: + +```rust +fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { + let mut version = None; + let mut ensure_installed = Vec::new(); + + for req in requests { + match req { + Request::Version(v) => { + // Ensure all requests agree on the version + same_across_all_reqs("Version", &mut version, v)?; + } + Request::EnsureInstalled(v) => { + ensure_installed.push(v); + } + } + } + + let version = version.ok_or(anyhow::anyhow!("Missing required request: Version"))?; + + // ... 
emit steps using aggregated requests +} +``` + +### Conditional Execution Based on Backend/Platform + +```rust +// Only emit this step on Windows +if ctx.platform() == FlowPlatform::Windows { + ctx.emit_rust_step("windows-specific step", |ctx| { + move |rt| { + // Windows-specific logic + Ok(()) + } + }); +} + +// Different behavior per backend +match ctx.backend() { + FlowBackend::Local => { + // Check if tool is already installed + } + FlowBackend::Ado | FlowBackend::Github => { + // Always install the tool + } +} +``` + +### Working with Persistent Directories + +Some nodes need to persist data between runs (e.g., caches). Use +`ctx.persistent_dir()`: + +```rust +if let Some(cache_dir) = ctx.persistent_dir() { + // Have a persistent directory, can cache things + ctx.emit_rust_step("restore from cache", |ctx| { + let cache_dir = cache_dir.claim(ctx); + move |rt| { + let dir = rt.read(cache_dir); + // Restore from cache + Ok(()) + } + }); +} else { + // No persistent storage available, skip caching +} +``` + +### Using the flowey_request! Macro + +The `flowey_request!` macro supports several formats: + +```rust +// Enum with separate struct per variant (recommended for complex requests) +flowey_request! { + pub enum_struct Request { + Install { version: String, components: Vec }, + Check(pub WriteVar), + GetPath(pub WriteVar), + } +} +// This generates Request::Install(req::Install), Request::Check(req::Check), etc. + +// Simple enum (for simple requests) +flowey_request! { + pub enum Request { + Install { version: String }, + Check(WriteVar), + } +} + +// Struct (for nodes with a single request type) +flowey_request! 
{
    pub struct Request {
        pub input: ReadVar<String>,
        pub output: WriteVar<String>,
    }
}
**Build-Time (Resolution Phase)**: When you run `cargo xflowey`, flowey - constructs a directed acyclic graph (DAG) of steps by: - - Instantiating all nodes - - Processing their requests - - Resolving dependencies between nodes via variables - - Determining the execution order - -2. **Runtime (Execution Phase)**: The generated flow is executed, and steps run - in the computed order. During runtime: +1. **Build-Time (Resolution Phase)**: When you run `cargo xflowey regen`, flowey: + - Reads `.flowey.toml` to determine which pipelines to regenerate + - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build` + - Runs the flowey binary with `pipeline --out ` for each pipeline definition + - During this invocation, flowey constructs a directed acyclic graph (DAG) by: + - Instantiating all nodes defined in the pipeline + - Processing their requests + - Resolving dependencies between nodes via variables and artifacts + - Determining the execution order + - Performing flowey-specific validations (dependency resolution, type checking, etc.) + - Generates YAML files for CI systems (ADO, GitHub Actions) at the paths specified in `.flowey.toml` + +2. **Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey `). Steps run in the order determined at build-time: - Variables are read and written with actual values - Commands are executed - Artifacts are published/consumed - Side effects occur +```admonish note +**Understanding the Workflow:** + +The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: +```toml +[[pipeline.flowey_hvlite.github]] +file = ".github/workflows/openvmm-pr.yaml" +cmd = ["ci", "checkin-gates", "--config=pr"] +``` + +When you run `cargo xflowey regen`: +1. It reads `.flowey.toml` +2. Builds the `flowey-hvlite` binary +3. Runs `flowey-hvlite pipeline github --out .github/workflows/openvmm-pr.yaml ci checkin-gates --config=pr` +4. 
This generates/updates the YAML file with the resolved pipeline + +**Key Distinction:** +- `cargo build -p flowey-hvlite` - Only compiles the flowey code to verify it builds successfully. **Does not** construct the DAG or generate YAML files. +- `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. + +Always run `cargo xflowey regen` after modifying pipeline definitions to ensure the generated YAML files reflect your changes. +``` + This separation allows flowey to: - Validate the entire workflow before execution -- Generate YAML for CI systems (ADO, GitHub Actions) +- Generate static YAML for CI systems (ADO, GitHub Actions) - Optimize step ordering and parallelization -- Catch dependency errors at build-time +- Catch dependency errors at build-time rather than runtime ### Backend Abstraction @@ -81,13 +107,13 @@ Flowey supports multiple execution backends: - **ADO (Azure DevOps)**: Generates ADO Pipeline YAML - **GitHub Actions**: Generates GitHub Actions workflow YAML -**Important**: Nodes should be written to work across ALL backends whenever -possible. Relying on `ctx.backend()` to query the backend or manually emitting +```admonish warning: +Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. - +``` --- ## Pipelines @@ -96,20 +122,7 @@ A **Pipeline** is the top-level construct that defines a complete automation workflow. Pipelines consist of one or more **Jobs**, each of which runs a set of **Nodes** to accomplish specific tasks. 
-### Defining a Pipeline - -```rust -fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result { - let mut pipeline = Pipeline::new(); - - // Define a job that runs on Linux x86_64 - let job = pipeline - .new_job(FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu), FlowArch::X86_64, "build") - .finish(); - - Ok(pipeline) -} -``` +For detailed examples of defining pipelines, see the [IntoPipeline trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.IntoPipeline.html). ### Pipeline Jobs @@ -135,20 +148,6 @@ let job = pipeline .finish(); ``` -### Backend-Specific Configuration - -**ADO-specific:** -- `ado_set_pool()`: Specify agent pool -- `ado_set_pr_triggers()`: Configure PR triggers -- `ado_set_ci_triggers()`: Configure CI triggers -- `ado_add_resources_repository()`: Add repository resources - -**GitHub Actions-specific:** -- `gh_set_pool()`: Specify runner (GitHub-hosted or self-hosted) -- `gh_set_pr_triggers()`: Configure PR triggers -- `gh_set_ci_triggers()`: Configure CI triggers -- `gh_grant_permissions()`: Grant GITHUB_TOKEN permissions - ### Pipeline Parameters Parameters allow runtime configuration of pipelines: @@ -172,138 +171,56 @@ let job = pipeline.new_job(...) ``` Parameter types: -- `new_parameter_bool()`: Boolean parameters -- `new_parameter_string()`: String parameters with optional validation -- `new_parameter_num()`: Numeric (i64) parameters with optional validation +- Boolean parameters +- String parameters with optional validation +- Numeric (i64) parameters with optional validation ---- +#### Stable vs Unstable Parameters -## Artifacts +Every parameter in flowey must be declared as either **Stable** or **Unstable** using `ParameterKind`. This classification determines the parameter's visibility and API stability: -**Artifacts** are the mechanism for passing data between jobs in a pipeline. -When one job produces output that another job needs, that output is packaged as -an artifact. 
+**Stable Parameters (`ParameterKind::Stable`)** -### Typed vs Untyped Artifacts - -**Typed artifacts (preferred)** provide type-safe artifact handling by defining -a custom type that implements the `Artifact` trait. This type describes the -structure of files that will be published and consumed. +Stable parameters represent a **public, stable API** for the pipeline: -#### The Artifact Trait +- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. +- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. +- **Use Cases**: + - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) + - Parameters intended for use by other teams or external automation + - Parameters documented as part of the pipeline's public interface -The `Artifact` trait is the foundation of typed artifacts: +**Unstable Parameters (`ParameterKind::Unstable`)** -The trait works by serializing your type to JSON in a format that reflects a -directory structure: -- Each JSON key is a file name (use `#[serde(rename = "file.exe")]`) -- Each value is either a string containing the path to the file, or another - JSON object representing a subdirectory -- Optional fields allow for conditional file inclusion +Unstable parameters are for **internal use** and experimentation: -#### Example: Defining a Typed Artifact - -Here's a real-world example from the codebase: - -```rust -use flowey::node::prelude::*; - -#[derive(Serialize, Deserialize)] -#[serde(untagged)] -pub enum PipetteOutput { - LinuxBin { - #[serde(rename = "pipette")] - bin: PathBuf, - #[serde(rename = "pipette.dbg")] - dbg: PathBuf, - }, - WindowsBin { - #[serde(rename = "pipette.exe")] - exe: PathBuf, - #[serde(rename = "pipette.pdb")] - pdb: PathBuf, - }, -} - -impl Artifact for PipetteOutput 
{} -``` - -This enum represents either a Linux build (with `pipette` binary and `pipette.dbg` -debug symbols) or a Windows build (with `pipette.exe` and `pipette.pdb`). The -`#[serde(rename = "...")]` attributes specify the exact file names that will -appear in the published artifact. - -#### Using Typed Artifacts in Pipelines - -```rust -// In pipeline definition - create the artifact handles -let (publish_pipette, use_pipette) = pipeline.new_typed_artifact::("pipette"); +- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. +- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. +- **Use Cases**: + - Experimental features or debugging flags + - Internal pipeline configuration that may change frequently + - Parameters for development/testing that shouldn't be used in production -// In producer job - write the artifact -let job1 = pipeline.new_job(...) - .dep_on(|ctx| { - let pipette = ctx.publish_typed_artifact(publish_pipette); - // pipette is a WriteVar - - // In a node, write the appropriate variant: - ctx.emit_rust_step("build pipette", |ctx| { - let pipette = pipette.claim(ctx); - move |rt| { - let output = PipetteOutput::WindowsBin { - exe: PathBuf::from("path/to/pipette.exe"), - pdb: PathBuf::from("path/to/pipette.pdb"), - }; - rt.write(pipette, &output); - Ok(()) - } - }); - }) - .finish(); - -// In consumer job - read the artifact -let job2 = pipeline.new_job(...) 
- .dep_on(|ctx| { - let pipette = ctx.use_typed_artifact(&use_pipette); - // pipette is a ReadVar - - ctx.emit_rust_step("use pipette", |ctx| { - let pipette = pipette.claim(ctx); - move |rt| { - let output = rt.read(pipette); - match output { - PipetteOutput::WindowsBin { exe, pdb } => { - // Use the Windows binaries - } - PipetteOutput::LinuxBin { bin, dbg } => { - // Use the Linux binaries - } - } - Ok(()) - } - }); - }) - .finish(); -``` -#### Untyped Artifacts +## Artifacts -**Untyped artifacts** provide simple directory-based artifacts for cases where -you don't need type safety: +**Artifacts** are the mechanism for passing data between jobs in a pipeline. +When one job produces output that another job needs, that output is packaged as +an artifact. -```rust -let (publish, use_artifact) = pipeline.new_artifact("my-artifact"); +### Typed vs Untyped Artifacts -// Producer gets a path to an empty directory to populate -let artifact_dir = ctx.publish_artifact(publish); // ReadVar +**Typed artifacts (preferred)** provide type-safe artifact handling by defining +a custom type that implements the `Artifact` trait. **Untyped artifacts** provide +simple directory-based artifacts for simpler cases. -// Consumer gets a path to the populated directory -let artifact_dir = ctx.use_artifact(&use_artifact); // ReadVar -``` +For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.Artifact.html). 
-Use untyped artifacts when: -- The artifact structure is simple or ad-hoc -- You don't need compile-time guarantees about file names/structure -- The artifact is primarily used by a single node +Key concepts: +- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure +- Use `#[serde(rename = "file.exe")]` to specify exact file names +- Typed artifacts ensure compile-time type safety when passing data between jobs +- Untyped artifacts are simpler but don't provide type guarantees ### How Artifacts Create Dependencies @@ -321,118 +238,63 @@ emit steps, and can depend on other nodes. ### The Node/Request Pattern -Every node has an associated **Request** type that defines what the node can do: +Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the `flowey_request!` macro and registered with `new_flow_node!` or `new_simple_flow_node!` macros. -```rust -// Define the node -new_flow_node!(struct Node); - -// Define requests using the flowey_request! macro -flowey_request! { - pub enum Request { - InstallRust(String), // Install specific version - EnsureInstalled(WriteVar), // Ensure it's installed - GetCargoHome(WriteVar), // Get CARGO_HOME path - } -} -``` +**Key concepts:** +- Each node is a struct registered with `new_flow_node!` or `new_simple_flow_node!` +- Request types define the node's API using `flowey_request!` macro +- Requests often include `WriteVar` parameters for outputs + +For complete examples, see the [`FlowNode` trait documentation](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html). 
### FlowNode vs SimpleFlowNode -**Use `FlowNode`** when you need to: -- Aggregate multiple requests and process them together -- Resolve conflicts between requests -- Perform complex request validation +Flowey provides two node implementation patterns: -**Use `SimpleFlowNode`** when: -- Each request can be processed independently -- No aggregation logic is needed -- Simpler, less boilerplate +**FlowNode** - for nodes that need to process multiple requests together: +- Receives all requests as a `Vec` +- Can aggregate common requirements across requests and consolidate them into a single step to reduce repeated work +- Can resolve conflicts between requests -```rust -// FlowNode - processes all requests together -impl FlowNode for Node { - type Request = Request; - - fn imports(ctx: &mut ImportCtx<'_>) { - // Declare node dependencies - ctx.import::(); - } - - fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { - // Process all requests, aggregate common requirements - // Emit steps to accomplish the work - Ok(()) - } -} - -// SimpleFlowNode - processes one request at a time -impl SimpleFlowNode for Node { - type Request = Request; - - fn imports(ctx: &mut ImportCtx<'_>) { - ctx.import::(); - } - - fn process_request(request: Self::Request, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { - // Process single request - Ok(()) - } -} -``` +**SimpleFlowNode** - for nodes where each request is independent: +- Processes one request at a time +- Simpler implementation, less boilerplate +- Ideal for straightforward operations + +For detailed comparisons and examples, see the [`FlowNode`](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://docs.rs/flowey_core/latest/flowey_core/node/trait.SimpleFlowNode.html) documentation. 
### Node Registration -Nodes are automatically registered using macros: +Nodes are automatically registered using macros that handle most of the boilerplate: - `new_flow_node!(struct Node)` - registers a FlowNode - `new_simple_flow_node!(struct Node)` - registers a SimpleFlowNode - `flowey_request!` - defines the Request type and implements `IntoRequest` ### The imports() Method -The `imports()` method declares which other nodes this node might depend on: - -```rust -fn imports(ctx: &mut ImportCtx<'_>) { - ctx.import::(); - ctx.import::(); -} -``` - -This allows flowey to: +The `imports()` method declares which other nodes this node might depend on. This enables flowey to: - Validate that all dependencies are available - Build the complete dependency graph - Catch missing dependencies at build-time +```admonish warning +Flowey does not catch unused imports today as part of its build-time validation step. +``` + +**Why declare imports?** Flowey needs to know the full set of potentially-used nodes at compilation time to properly resolve the dependency graph. + +For more on node imports, see the [`FlowNode::imports` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html#tymethod.imports). + ### The emit() Method -The `emit()` method is where the node's actual logic lives: +The `emit()` method is where a node's actual logic lives. For `FlowNode`, it receives all requests together and must: +1. Aggregate and validate requests (ensuring consistency where needed) +2. Emit steps to perform the work +3. Wire up dependencies between steps via variables -```rust -fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { - // 1. Aggregate and validate requests - let mut version = None; - let mut ensure_installed = Vec::new(); - - for req in requests { - match req { - Request::Version(v) => same_across_all_reqs("Version", &mut version, v)?, - Request::EnsureInstalled(var) => ensure_installed.push(var), - } - } - - // 2. 
Emit steps to do the work - ctx.emit_rust_step("install rust", |ctx| { - let ensure_installed = ensure_installed.claim(ctx); - move |rt| { - // Runtime logic here - Ok(()) - } - }); - - Ok(()) -} -``` +For `SimpleFlowNode`, the equivalent `process_request()` method processes one request at a time. + +For complete implementation examples, see the [`FlowNode::emit` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html#tymethod.emit). --- @@ -459,32 +321,10 @@ execution model: - This ensures there's exactly one producer for each variable - Flowey can use this to build a valid DAG (no cycles, no conflicts) -```rust -let (read, write) = ctx.new_var::(); - -// Later, in a step: -rt.write(write, &"hello".to_string()); // write is consumed here -``` - ### Claiming Variables Before a step can use a `ReadVar` or `WriteVar`, it must **claim** it: -```rust -ctx.emit_rust_step("my step", |ctx| { - // Claim variables for this step - let read_var = some_read_var.claim(ctx); - let write_var = some_write_var.claim(ctx); - - // Return the runtime closure - move |rt| { - let value = rt.read(read_var); - rt.write(write_var, &modified_value); - Ok(()) - } -}); -``` - Claiming serves several purposes: 1. Registers that this step depends on (or produces) this variable 2. Converts `ReadVar` to `ReadVar` @@ -510,48 +350,34 @@ let version = ReadVar::from_static("1.2.3".to_string()); // WARNING: Never use this for secrets! ``` +This can be used as an escape hatch when you have Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. + ### Variable Operations -ReadVar provides several useful operations: +`ReadVar` provides several useful operations for transforming and combining variables: -```rust -// Transform a value -let uppercase = lowercase.map(ctx, |s| s.to_uppercase()); +**Transform operations:** +- **`map()`**: Apply a function to transform a `ReadVar` into a `ReadVar`. 
Useful for deriving new values from existing variables (e.g., extracting a filename from a path, converting to uppercase). -// Combine two variables -let combined = var1.zip(ctx, var2); // ReadVar<(T, U)> +**Combining operations:** +- **`zip()`**: Combine two ReadVars into a single `ReadVar<(T, U)>`. Useful when a step needs access to multiple values simultaneously. -// Discard the value, keep only the dependency -let side_effect = var.into_side_effect(); // ReadVar +**Dependency operations:** +- **`into_side_effect()`**: Discard the value but keep the dependency. Converts `ReadVar` to `ReadVar`, useful when you only care that a step ran, not what it produced. +- **`depending_on()`**: Create a new ReadVar that has an explicit dependency on another variable. Ensures ordering without actually using the dependent value. -// Create a dependency without using the value -let dependent = var.depending_on(ctx, &other_var); -``` +For detailed examples of each operation, see the [`ReadVar` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.ReadVar.html). ### The SideEffect Type -`SideEffect` is an alias for `()` that represents a dependency without data: +`SideEffect` is an alias for `()` that represents a dependency without data. It's used when you need to express that one step must run before another, but the first step doesn't produce any value that the second step needs to consume. 
-```rust -// This step produces a side effect (e.g., installs a tool) -let installed = ctx.emit_rust_step("install tool", |ctx| { - let done = done.claim(ctx); - move |rt| { - // install the tool - rt.write(done, &()); // SideEffect is () - Ok(()) - } -}); +**Key concepts:** +- Represents "something happened" without carrying data +- Enables explicit dependency ordering between steps +- Commonly used for installation, initialization, or cleanup steps -// Other steps can depend on this happening -ctx.emit_rust_step("use tool", |ctx| { - installed.claim(ctx); // Ensures install happens first - move |rt| { - // use the tool - Ok(()) - } -}); -``` +For examples of using SideEffect, see the [`SideEffect` type documentation](https://docs.rs/flowey_core/latest/flowey_core/node/type.SideEffect.html). --- @@ -562,85 +388,33 @@ step types exist for different purposes. ### Rust Steps -**`emit_rust_step`**: Emits a step that runs Rust code at runtime. This is the -most common step type. - -```rust -ctx.emit_rust_step("build the project", |ctx| { - let source_dir = source_dir.claim(ctx); - let output = output.claim(ctx); - - move |rt| { - let source = rt.read(source_dir); - let result = build_project(&source)?; - rt.write(output, &result); - Ok(()) - } -}); -``` - -**`emit_minor_rust_step`**: Like `emit_rust_step`, but for steps that: -- Cannot fail (closure returns `T` not `anyhow::Result`) -- Don't need to be visible in CI logs as separate steps +Rust steps execute Rust code at runtime and are the most common step type in flowey. -This reduces log clutter for trivial operations like variable transformations. +**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar` that other steps can use as a dependency. 
-**`emit_rust_stepv`**: A convenience method that creates a new variable and
-returns it:
+**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. Using minor steps also improves performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step.

-```rust
-// Instead of:
-let (read, write) = ctx.new_var();
-ctx.emit_rust_step("compute value", |ctx| {
-    let write = write.claim(ctx);
-    move |rt| {
-        rt.write(write, &compute());
-        Ok(())
-    }
-});
+**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable.

-// You can write:
-let read = ctx.emit_rust_stepv("compute value", |ctx| {
-    move |rt| Ok(compute())
-});
-```
+For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html).

### ADO Steps

-**`emit_ado_step`**: Emits an Azure DevOps YAML step.
+**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline.

-```rust
-ctx.emit_ado_step("checkout code", |ctx| {
-    move |rt| {
-        r#"
-        - checkout: self
-          clean: true
-        "#.to_string()
-    }
-});
-```
+For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step).

### GitHub Steps

-**`emit_gh_step`**: Builds a GitHub Actions step using `GhStepBuilder`.
+**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. 
Returns a builder that must be finalized with `.finish(ctx)`. -```rust -ctx.emit_gh_step("Checkout code", "actions/checkout@v4") - .with("fetch-depth", "0") - .finish(ctx); -``` +For GitHub step examples, see the [`GhStepBuilder` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/steps/github/struct.GhStepBuilder.html). ### Side Effect Steps -**`emit_side_effect_step`**: Creates a dependency relationship without executing -any code. Useful for resolving multiple side effects into one. +**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. -```rust -ctx.emit_side_effect_step( - vec![dependency1, dependency2], // use these - vec![output_side_effect], // resolve this -); -``` +For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). ### StepCtx vs NodeCtx @@ -659,65 +433,97 @@ closures passed to `emit_rust_step`, etc.). ### RustRuntimeServices -Available in Rust steps via the `rt` parameter: +`RustRuntimeServices` is the primary runtime service available in Rust steps. 
It provides: -```rust -move |rt: &mut RustRuntimeServices<'_>| { - // Read variables - let value = rt.read(some_var); - - // Write variables - rt.write(output_var, &result); - rt.write_secret(secret_var, &secret); // Mark as secret - - // Query environment - let backend = rt.backend(); // Local, ADO, or Github - let platform = rt.platform(); // Windows, Linux, MacOs - let arch = rt.arch(); // X86_64, Aarch64 - - Ok(()) -} +**Variable Operations:** +- Reading and writing flowey variables +- Secret handling (automatic secret propagation for safety) +- Support for reading values of any type that implements `ReadVarValue` + +**Environment Queries:** +- Backend identification (Local, ADO, or GitHub) +- Platform detection (Windows, Linux, macOS) +- Architecture information (x86_64, Aarch64) + +#### Secret Variables and CI Backend Integration + +Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. + +**How Secret Handling Works** + +When a variable is marked as secret, flowey ensures: +- The value is not logged or printed in step output +- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs +- Secret status is automatically propagated to prevent leaks + +**Automatic Secret Propagation** + +To prevent accidental leaks, flowey uses conservative automatic secret propagation: + +```admonish warning +If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. ``` -**Important**: If a step reads a secret value, all subsequent writes from that -step are marked as secret by default (to prevent accidental leaks). Use -`write_not_secret()` if you need to override this. 
+For example: 

-### AdoStepServices 

+```rust
+ctx.emit_rust_step("process token", |ctx| {
+    let secret_token = secret_token.claim(ctx);
+    let output_var = output_var.claim(ctx);
+    |rt| {
+        let token = rt.read(secret_token); // Reading a secret
+
+        // This write is AUTOMATICALLY marked as secret
+        // (even though we're just writing "done")
+        rt.write(output_var, &"done".to_string());
+
+        Ok(())
+    }
+});
+```

-Available in ADO steps for interacting with ADO-specific features: 

+If you need to write non-secret data after reading a secret, use `write_not_secret()`:

```rust
-move |rt: &mut AdoStepServices<'_>| {
-    // Get ADO variable as flowey var
-    rt.set_var(flowey_var, AdoRuntimeVar::BUILD__SOURCE_BRANCH);
-
-    // Set ADO variable from flowey var
-    let ado_var = rt.get_var(flowey_var);
-
-    // Resolve repository ID
-    let repo = rt.resolve_repository_id(repo_id);
-
-    "- task: SomeTask@1".to_string()
-}
+rt.write_not_secret(output_var, &"done".to_string());
```

+**Best Practices for Secrets**
+
+1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML
+2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys
+3. 
**Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary + +### AdoStepServices + +`AdoStepServices` provides integration with Azure DevOps-specific features when emitting ADO YAML steps: + +**ADO Variable Bridge:** +- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars +- Convert flowey vars back into ADO variables for use in YAML +- Handle secret variables appropriately + +**Repository Resources:** +- Resolve repository IDs declared as pipeline resources +- Access repository information in ADO-specific steps + ### GhStepBuilder -Builder for GitHub Actions steps: +`GhStepBuilder` is a fluent builder for constructing GitHub Actions steps with: -```rust -ctx.emit_gh_step("Azure Login", "Azure/login@v2") - .with("client-id", client_id) // Add parameter - .with("tenant-id", tenant_id) - .output("token", token_var) // Capture output - .run_after(some_side_effect) // Add dependency - .requires_permission( // Declare permission needed - GhPermission::IdToken, - GhPermissionValue::Write - ) - .finish(ctx); -``` +**Step Configuration:** +- Specifying the action to use (e.g., `actions/checkout@v4`) +- Adding input parameters via `.with()` +- Capturing step outputs into flowey variables +- Setting conditional execution based on variables + +**Dependency Management:** +- Declaring side-effect dependencies via `.run_after()` +- Ensuring steps run in the correct order + +**Permissions:** +- Declaring required GITHUB_TOKEN permissions +- Automatic permission aggregation at the job level --- @@ -746,20 +552,6 @@ through side effects. Nodes should work across all backends when possible. Backend-specific behavior should be isolated and documented. -```rust -match ctx.backend() { - FlowBackend::Local => { - // Local-specific logic - } - FlowBackend::Ado => { - // ADO-specific logic - } - FlowBackend::Github => { - // GitHub-specific logic - } -} -``` - ### 4. 
Separation of Concerns Keep node definition (request types, dependencies) separate from step @@ -783,107 +575,45 @@ Use Rust's type system to prevent errors at build-time: ### Request Aggregation and Validation -When processing multiple requests, use helper functions to ensure consistency: +When a FlowNode receives multiple requests, it often needs to ensure certain values are consistent across all requests while collecting others. The `same_across_all_reqs` helper function simplifies this pattern by validating that a value is identical across all requests. -```rust -fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { - let mut version = None; - let mut ensure_installed = Vec::new(); - - for req in requests { - match req { - Request::Version(v) => { - // Ensure all requests agree on the version - same_across_all_reqs("Version", &mut version, v)?; - } - Request::EnsureInstalled(v) => { - ensure_installed.push(v); - } - } - } - - let version = version.ok_or(anyhow::anyhow!("Missing required request: Version"))?; - - // ... emit steps using aggregated requests -} -``` +**Key concepts:** +- Iterate through all requests and separate them by type +- Use `same_across_all_reqs` to validate values that must be consistent +- Collect values that can have multiple instances (like output variables) +- Validate that required values were provided + +For a complete example, see the [`same_across_all_reqs` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/user_facing/fn.same_across_all_reqs.html). 
### Conditional Execution Based on Backend/Platform -```rust -// Only emit this step on Windows -if ctx.platform() == FlowPlatform::Windows { - ctx.emit_rust_step("windows-specific step", |ctx| { - move |rt| { - // Windows-specific logic - Ok(()) - } - }); -} - -// Different behavior per backend -match ctx.backend() { - FlowBackend::Local => { - // Check if tool is already installed - } - FlowBackend::Ado | FlowBackend::Github => { - // Always install the tool - } -} -``` +Nodes can query the current backend and platform to emit platform-specific or backend-specific steps. This allows nodes to adapt their behavior based on the execution environment. -### Working with Persistent Directories +**Key concepts:** +- Use `ctx.backend()` to check if running locally, on ADO, or on GitHub Actions +- Use `ctx.platform()` to check the operating system (Windows, Linux, macOS) +- Use `ctx.arch()` to check the architecture (x86_64, Aarch64) +- Emit different steps or use different tool configurations based on these values -Some nodes need to persist data between runs (e.g., caches). Use -`ctx.persistent_dir()`: +**When to use:** +- Installing platform-specific tools or dependencies +- Using different commands on Windows vs Unix systems +- Optimizing for local development vs CI environments -```rust -if let Some(cache_dir) = ctx.persistent_dir() { - // Have a persistent directory, can cache things - ctx.emit_rust_step("restore from cache", |ctx| { - let cache_dir = cache_dir.claim(ctx); - move |rt| { - let dir = rt.read(cache_dir); - // Restore from cache - Ok(()) - } - }); -} else { - // No persistent storage available, skip caching -} -``` +For more on backend and platform APIs, see the [`NodeCtx` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html). ### Using the flowey_request! Macro -The `flowey_request!` macro supports several formats: +The `flowey_request!` macro generates the Request type and associated boilerplate for a node. 
It supports three main formats to accommodate different node complexity levels. -```rust -// Enum with separate struct per variant (recommended for complex requests) -flowey_request! { - pub enum_struct Request { - Install { version: String, components: Vec }, - Check(pub WriteVar), - GetPath(pub WriteVar), - } -} -// This generates Request::Install(req::Install), Request::Check(req::Check), etc. - -// Simple enum (for simple requests) -flowey_request! { - pub enum Request { - Install { version: String }, - Check(WriteVar), - } -} +**Format options:** +- **`enum_struct`**: Recommended for complex requests. Creates an enum where each variant is a separate struct in a `req` module, providing better organization +- **`enum`**: Simple enum for straightforward request types +- **`struct`**: Single request type for nodes that only do one thing -// Struct (for nodes with a single request type) -flowey_request! { - pub struct Request { - pub input: ReadVar, - pub output: WriteVar, - } -} -``` +The macro automatically derives `Serialize`, `Deserialize`, and implements the `IntoRequest` trait. + +For complete syntax and examples, see the [`flowey_request!` macro documentation](https://docs.rs/flowey_core/latest/flowey_core/macro.flowey_request.html). --- diff --git a/flowey/flowey_core/src/node.rs b/flowey/flowey_core/src/node.rs index cc7c64b27b..78ff8b5bd2 100644 --- a/flowey/flowey_core/src/node.rs +++ b/flowey/flowey_core/src/node.rs @@ -67,6 +67,35 @@ pub mod user_facing { /// Helper method to streamline request validation in cases where a value is /// expected to be identical across all incoming requests. + /// + /// # Example: Request Aggregation Pattern + /// + /// When a node receives multiple requests, it often needs to ensure certain + /// values are consistent across all requests. 
This helper simplifies that pattern: + /// + /// ```rust,ignore + /// fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { + /// let mut version = None; + /// let mut ensure_installed = Vec::new(); + /// + /// for req in requests { + /// match req { + /// Request::Version(v) => { + /// // Ensure all requests agree on the version + /// same_across_all_reqs("Version", &mut version, v)?; + /// } + /// Request::EnsureInstalled(v) => { + /// ensure_installed.push(v); + /// } + /// } + /// } + /// + /// let version = version.ok_or(anyhow::anyhow!("Missing required request: Version"))?; + /// + /// // ... emit steps using aggregated requests + /// Ok(()) + /// } + /// ``` pub fn same_across_all_reqs( req_name: &str, var: &mut Option, @@ -2539,9 +2568,97 @@ macro_rules! new_flow_node_base { }; } -/// TODO: clearly verbalize what a `FlowNode` encompasses +/// A reusable unit of automation logic in flowey. +/// +/// FlowNodes process requests, emit steps, and can depend on other nodes. They are +/// the building blocks for creating complex automation workflows. +/// +/// # The Node/Request Pattern +/// +/// Every node has an associated **Request** type that defines what the node can do. +/// Nodes receive a vector of requests and process them together, allowing for +/// aggregation and conflict resolution. +/// +/// # Example: Basic FlowNode Implementation +/// +/// ```rust,ignore +/// use flowey_core::node::*; +/// +/// // Define the node +/// new_flow_node!(struct Node); +/// +/// // Define requests using the flowey_request! macro +/// flowey_request! 
{ +/// pub enum Request { +/// InstallRust(String), // Install specific version +/// EnsureInstalled(WriteVar), // Ensure it's installed +/// GetCargoHome(WriteVar), // Get CARGO_HOME path +/// } +/// } +/// +/// impl FlowNode for Node { +/// type Request = Request; +/// +/// fn imports(ctx: &mut ImportCtx<'_>) { +/// // Declare node dependencies +/// ctx.import::(); +/// } +/// +/// fn emit(requests: Vec, ctx: &mut NodeCtx<'_>) -> anyhow::Result<()> { +/// // 1. Aggregate and validate requests +/// let mut version = None; +/// let mut ensure_installed = Vec::new(); +/// let mut get_cargo_home = Vec::new(); +/// +/// for req in requests { +/// match req { +/// Request::InstallRust(v) => { +/// same_across_all_reqs("version", &mut version, v)?; +/// } +/// Request::EnsureInstalled(var) => ensure_installed.push(var), +/// Request::GetCargoHome(var) => get_cargo_home.push(var), +/// } +/// } +/// +/// let version = version.ok_or(anyhow::anyhow!("Version not specified"))?; +/// +/// // 2. Emit steps to do the work +/// ctx.emit_rust_step("install rust", |ctx| { +/// let ensure_installed = ensure_installed.claim(ctx); +/// let get_cargo_home = get_cargo_home.claim(ctx); +/// move |rt| { +/// // Install rust with the specified version +/// // Write to all the output variables +/// for var in ensure_installed { +/// rt.write(var, &()); +/// } +/// for var in get_cargo_home { +/// rt.write(var, &PathBuf::from("/path/to/cargo")); +/// } +/// Ok(()) +/// } +/// }); +/// +/// Ok(()) +/// } +/// } +/// ``` +/// +/// # When to Use FlowNode vs SimpleFlowNode +/// +/// **Use `FlowNode`** when you need to: +/// - Aggregate multiple requests and process them together +/// - Resolve conflicts between requests +/// - Perform complex request validation +/// +/// **Use [`SimpleFlowNode`]** when: +/// - Each request can be processed independently +/// - No aggregation logic is needed +/// - Simpler, less boilerplate pub trait FlowNode { - /// TODO: clearly verbalize what a Request 
encompasses + /// The request type that defines what operations this node can perform. + /// + /// Use the [`flowey_request!`] macro to define this type. type Request: Serialize + DeserializeOwned; /// A list of nodes that this node is capable of taking a dependency on. diff --git a/flowey/flowey_core/src/pipeline.rs b/flowey/flowey_core/src/pipeline.rs index 83dbd62765..265a692f2f 100644 --- a/flowey/flowey_core/src/pipeline.rs +++ b/flowey/flowey_core/src/pipeline.rs @@ -1269,6 +1269,92 @@ pub enum PipelineBackendHint { Github, } +/// Trait for types that can be converted into a [`Pipeline`]. +/// +/// This is the primary entry point for defining flowey pipelines. Implement this trait +/// to create a pipeline definition that can be executed locally or converted to CI YAML. +/// +/// # Example +/// +/// ```rust,no_run +/// use flowey_core::pipeline::{IntoPipeline, Pipeline, PipelineBackendHint}; +/// use flowey_core::node::{FlowPlatform, FlowPlatformLinuxDistro, FlowArch}; +/// +/// struct MyPipeline; +/// +/// impl IntoPipeline for MyPipeline { +/// fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result { +/// let mut pipeline = Pipeline::new(); +/// +/// // Define a job that runs on Linux x86_64 +/// let _job = pipeline +/// .new_job( +/// FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu), +/// FlowArch::X86_64, +/// "build" +/// ) +/// .finish(); +/// +/// Ok(pipeline) +/// } +/// } +/// ``` +/// +/// # Complex Example with Parameters and Artifacts +/// +/// ```rust,no_run +/// use flowey_core::pipeline::{IntoPipeline, Pipeline, PipelineBackendHint, ParameterKind}; +/// use flowey_core::node::{FlowPlatform, FlowPlatformLinuxDistro, FlowArch}; +/// +/// struct BuildPipeline; +/// +/// impl IntoPipeline for BuildPipeline { +/// fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result { +/// let mut pipeline = Pipeline::new(); +/// +/// // Define a runtime parameter +/// let enable_tests = pipeline.new_parameter_bool( 
+/// "enable_tests", +/// "Whether to run tests", +/// ParameterKind::Stable, +/// Some(true) // default value +/// ); +/// +/// // Create an artifact for passing data between jobs +/// let (publish_build, use_build) = pipeline.new_artifact("build-output"); +/// +/// // Job 1: Build +/// let build_job = pipeline +/// .new_job( +/// FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu), +/// FlowArch::X86_64, +/// "build" +/// ) +/// .with_timeout_in_minutes(30) +/// .dep_on(|ctx| { +/// let _build_dir = ctx.publish_artifact(publish_build); +/// // Add node dependencies here +/// }) +/// .finish(); +/// +/// // Job 2: Test (conditionally run based on parameter) +/// let _test_job = pipeline +/// .new_job( +/// FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu), +/// FlowArch::X86_64, +/// "test" +/// ) +/// .with_condition(enable_tests) +/// .dep_on(|ctx| { +/// let _build_dir = ctx.use_artifact(&use_build); +/// // Add node dependencies here +/// }) +/// .finish(); +/// +/// Ok(pipeline) +/// } +/// } +/// ``` pub trait IntoPipeline { fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result; } From e5f629bfbf1e2f776ca752591c62a54256db3cac Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Tue, 28 Oct 2025 14:17:53 -0700 Subject: [PATCH 03/15] re-order --- Guide/src/dev_guide/dev_tools/xflowey.md | 478 +++++++++++------------ 1 file changed, 238 insertions(+), 240 deletions(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index 0073e5d30d..588caf6653 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -34,14 +34,15 @@ working on OpenVMM automation. ## Table of Contents 1. [Core Concepts](#core-concepts) -2. [Pipelines](#pipelines) -3. [Artifacts](#artifacts) +2. [Emitting Steps](#emitting-steps) +3. [Runtime Services](#runtime-services) 4. [Flowey Nodes](#flowey-nodes) 5. [Variables: ReadVar and WriteVar](#variables-readvar-and-writevar) -6. 
[Emitting Steps](#emitting-steps) -7. [Runtime Services](#runtime-services) -8. [Node Design Philosophy](#node-design-philosophy) -9. [Common Patterns](#common-patterns) +6. [Node Design Philosophy](#node-design-philosophy) +7. [Common Patterns](#common-patterns) +8. [Artifacts](#artifacts) +9. [Pipelines](#pipelines) +10. [Additional Resources](#additional-resources) --- @@ -95,15 +96,13 @@ Always run `cargo xflowey regen` after modifying pipeline definitions to ensure This separation allows flowey to: - Validate the entire workflow before execution - Generate static YAML for CI systems (ADO, GitHub Actions) -- Optimize step ordering and parallelization - Catch dependency errors at build-time rather than runtime ### Backend Abstraction Flowey supports multiple execution backends: -- **Local**: Runs directly on your development machine via bash or direct - execution +- **Local**: Runs directly on your development machine - **ADO (Azure DevOps)**: Generates ADO Pipeline YAML - **GitHub Actions**: Generates GitHub Actions workflow YAML @@ -112,122 +111,153 @@ Nodes should be written to work across ALL backends whenever possible. Relying o backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that -works everywhere. +works everywhere. ``` --- -## Pipelines +## Emitting Steps -A **Pipeline** is the top-level construct that defines a complete automation -workflow. Pipelines consist of one or more **Jobs**, each of which runs a set -of **Nodes** to accomplish specific tasks. +Nodes emit **steps** - units of work that will be executed at runtime. Different +step types exist for different purposes. -For detailed examples of defining pipelines, see the [IntoPipeline trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.IntoPipeline.html). 
+### Rust Steps
 
-### Pipeline Jobs
+Rust steps execute Rust code at runtime and are the most common step type in flowey.
 
-Each `PipelineJob` represents a unit of work that:
-- Runs on a specific platform and architecture
-- Can depend on artifacts from other jobs
-- Can be conditionally executed based on parameters
-- Emits a sequence of steps that accomplish the job's goals
+**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar` that other steps can use as a dependency.
 
-Jobs are configured using a builder pattern:
+**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. Using minor steps also improves performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step.
 
-```rust
-let job = pipeline
-    .new_job(platform, arch, "my-job")
-    .with_timeout_in_minutes(60)
-    .with_condition(some_param)
-    .ado_set_pool("my-pool")
-    .gh_set_pool(GhRunner::UbuntuLatest)
-    .dep_on(|ctx| {
-        // Define what nodes this job depends on
-        some_node::Request { /* ... */ }
-    })
-    .finish();
-```
+**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable.
 
-### Pipeline Parameters
+For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html). 
-Parameters allow runtime configuration of pipelines: +### ADO Steps -```rust -// Define a boolean parameter -let use_cache = pipeline.new_parameter_bool( - "use_cache", - "Whether to use caching", - ParameterKind::Stable, - Some(true) // default value -); +**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. -// Use the parameter in a job -let job = pipeline.new_job(...) - .dep_on(|ctx| { - let use_cache = ctx.use_parameter(use_cache); - // use_cache is now a ReadVar - }) - .finish(); -``` +For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). -Parameter types: -- Boolean parameters -- String parameters with optional validation -- Numeric (i64) parameters with optional validation +### GitHub Steps -#### Stable vs Unstable Parameters +**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. -Every parameter in flowey must be declared as either **Stable** or **Unstable** using `ParameterKind`. This classification determines the parameter's visibility and API stability: +For GitHub step examples, see the [`GhStepBuilder` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/steps/github/struct.GhStepBuilder.html). -**Stable Parameters (`ParameterKind::Stable`)** +### Side Effect Steps -Stable parameters represent a **public, stable API** for the pipeline: +**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. 
-- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. -- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. -- **Use Cases**: - - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) - - Parameters intended for use by other teams or external automation - - Parameters documented as part of the pipeline's public interface +For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). -**Unstable Parameters (`ParameterKind::Unstable`)** +### StepCtx vs NodeCtx -Unstable parameters are for **internal use** and experimentation: +- **`NodeCtx`**: Used when emitting steps. Provides `emit_*` methods, `new_var()`, + `req()`, etc. + +- **`StepCtx`**: Used inside step closures. Provides access to `claim()` for + variables, and basic environment info (`backend()`, `platform()`). -- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. -- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. -- **Use Cases**: - - Experimental features or debugging flags - - Internal pipeline configuration that may change frequently - - Parameters for development/testing that shouldn't be used in production +--- +## Runtime Services -## Artifacts +Runtime services provide the API available during step execution (inside the +closures passed to `emit_rust_step`, etc.). -**Artifacts** are the mechanism for passing data between jobs in a pipeline. 
-When one job produces output that another job needs, that output is packaged as -an artifact. +### RustRuntimeServices -### Typed vs Untyped Artifacts +`RustRuntimeServices` is the primary runtime service available in Rust steps. It provides: -**Typed artifacts (preferred)** provide type-safe artifact handling by defining -a custom type that implements the `Artifact` trait. **Untyped artifacts** provide -simple directory-based artifacts for simpler cases. +**Variable Operations:** +- Reading and writing flowey variables +- Secret handling (automatic secret propagation for safety) +- Support for reading values of any type that implements `ReadVarValue` -For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.Artifact.html). +**Environment Queries:** +- Backend identification (Local, ADO, or GitHub) +- Platform detection (Windows, Linux, macOS) +- Architecture information (x86_64, Aarch64) -Key concepts: -- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure -- Use `#[serde(rename = "file.exe")]` to specify exact file names -- Typed artifacts ensure compile-time type safety when passing data between jobs -- Untyped artifacts are simpler but don't provide type guarantees +#### Secret Variables and CI Backend Integration -### How Artifacts Create Dependencies +Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. -When you use an artifact in a job, flowey automatically: -1. Creates a dependency from the consuming job to the producing job -2. Ensures the producing job runs first -3. 
Handles artifact upload/download between jobs (on CI backends)
+**How Secret Handling Works**
+
+When a variable is marked as secret, flowey ensures:
+- The value is not logged or printed in step output
+- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs
+- Secret status is automatically propagated to prevent leaks
+
+**Automatic Secret Propagation**
+
+To prevent accidental leaks, flowey uses conservative automatic secret propagation:
+
+```admonish warning
+If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values.
+```
+
+For example:
+
+```rust
+ctx.emit_rust_step("process token", |ctx| {
+    let secret_token = secret_token.claim(ctx);
+    let output_var = output_var.claim(ctx);
+    |rt| {
+        let token = rt.read(secret_token); // Reading a secret
+
+        // This write is AUTOMATICALLY marked as secret
+        // (even though we're just writing "done")
+        rt.write(output_var, &"done".to_string());
+
+        Ok(())
+    }
+});
+```
+
+If you need to write non-secret data after reading a secret, use `write_not_secret()`:
+
+```rust
+rt.write_not_secret(output_var, &"done".to_string());
+```
+
+**Best Practices for Secrets**
+
+1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML
+2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys
+3. 
**Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary + +### AdoStepServices + +`AdoStepServices` provides integration with Azure DevOps-specific features when emitting ADO YAML steps: + +**ADO Variable Bridge:** +- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars +- Convert flowey vars back into ADO variables for use in YAML +- Handle secret variables appropriately + +**Repository Resources:** +- Resolve repository IDs declared as pipeline resources +- Access repository information in ADO-specific steps + +### GhStepBuilder + +`GhStepBuilder` is a fluent builder for constructing GitHub Actions steps with: + +**Step Configuration:** +- Specifying the action to use (e.g., `actions/checkout@v4`) +- Adding input parameters via `.with()` +- Capturing step outputs into flowey variables +- Setting conditional execution based on variables + +**Dependency Management:** +- Declaring side-effect dependencies via `.run_after()` +- Ensuring steps run in the correct order + +**Permissions:** +- Declaring required GITHUB_TOKEN permissions +- Automatic permission aggregation at the job level --- @@ -381,152 +411,6 @@ For examples of using SideEffect, see the [`SideEffect` type documentation](http --- -## Emitting Steps - -Nodes emit **steps** - units of work that will be executed at runtime. Different -step types exist for different purposes. - -### Rust Steps - -Rust steps execute Rust code at runtime and are the most common step type in flowey. - -**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar` that other steps can use as a dependency. - -**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. 
Using minor steps also improve performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step. - -**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable. - -For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html). - -### ADO Steps - -**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. - -For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). - -### GitHub Steps - -**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. - -For GitHub step examples, see the [`GhStepBuilder` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/steps/github/struct.GhStepBuilder.html). - -### Side Effect Steps - -**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. - -For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). - -### StepCtx vs NodeCtx - -- **`NodeCtx`**: Used when emitting steps. Provides `emit_*` methods, `new_var()`, - `req()`, etc. - -- **`StepCtx`**: Used inside step closures. 
Provides access to `claim()` for - variables, and basic environment info (`backend()`, `platform()`). - ---- - -## Runtime Services - -Runtime services provide the API available during step execution (inside the -closures passed to `emit_rust_step`, etc.). - -### RustRuntimeServices - -`RustRuntimeServices` is the primary runtime service available in Rust steps. It provides: - -**Variable Operations:** -- Reading and writing flowey variables -- Secret handling (automatic secret propagation for safety) -- Support for reading values of any type that implements `ReadVarValue` - -**Environment Queries:** -- Backend identification (Local, ADO, or GitHub) -- Platform detection (Windows, Linux, macOS) -- Architecture information (x86_64, Aarch64) - -#### Secret Variables and CI Backend Integration - -Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. - -**How Secret Handling Works** - -When a variable is marked as secret, flowey ensures: -- The value is not logged or printed in step output -- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs -- Secret status is automatically propagated to prevent leaks - -**Automatic Secret Propagation** - -To prevent accidental leaks, flowey uses conservative automatic secret propagation: - -```admonish warning -If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. 
-``` - -For example: - -```rust -ctx.emit_rust_step("process token", |ctx| { - let secret_token = secret_token.claim(ctx); - let output_var = output_var.claim(ctx); - |rt| { - let token = rt.read(secret_token); // Reading a secret - - // This write is AUTOMATICALLY marked as secret - // (even though we're just writing "done") - rt.write(output_var, &"done".to_string()); - - Ok(()) - } -}); -``` - -If you need to write non-secret data after reading a secret, use `write_not_secret()`: - -```rust -rt.write_not_secret(output_var, &"done".to_string()); -``` - -**Best Practices for Secrets** - -1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML -2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys -5. **Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary - -### AdoStepServices - -`AdoStepServices` provides integration with Azure DevOps-specific features when emitting ADO YAML steps: - -**ADO Variable Bridge:** -- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars -- Convert flowey vars back into ADO variables for use in YAML -- Handle secret variables appropriately - -**Repository Resources:** -- Resolve repository IDs declared as pipeline resources -- Access repository information in ADO-specific steps - -### GhStepBuilder - -`GhStepBuilder` is a fluent builder for constructing GitHub Actions steps with: - -**Step Configuration:** -- Specifying the action to use (e.g., `actions/checkout@v4`) -- Adding input parameters via `.with()` -- Capturing step outputs into flowey variables -- Setting conditional execution based on variables - -**Dependency Management:** -- Declaring side-effect dependencies via `.run_after()` -- Ensuring steps run in the correct order - -**Permissions:** -- Declaring required GITHUB_TOKEN permissions -- Automatic permission aggregation at the job 
level - ---- - ## Node Design Philosophy Flowey nodes are designed around several key principles: @@ -617,6 +501,120 @@ For complete syntax and examples, see the [`flowey_request!` macro documentation --- +## Artifacts + +**Artifacts** are the mechanism for passing data between jobs in a pipeline. +When one job produces output that another job needs, that output is packaged as +an artifact. + +### Typed vs Untyped Artifacts + +**Typed artifacts (preferred)** provide type-safe artifact handling by defining +a custom type that implements the `Artifact` trait. **Untyped artifacts** provide +simple directory-based artifacts for simpler cases. + +For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.Artifact.html). + +Key concepts: +- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure +- Use `#[serde(rename = "file.exe")]` to specify exact file names +- Typed artifacts ensure compile-time type safety when passing data between jobs +- Untyped artifacts are simpler but don't provide type guarantees + +### How Artifacts Create Dependencies + +When you use an artifact in a job, flowey automatically: +1. Creates a dependency from the consuming job to the producing job +2. Ensures the producing job runs first +3. Handles artifact upload/download between jobs (on CI backends) + +--- + +## Pipelines + +A **Pipeline** is the top-level construct that defines a complete automation +workflow. Pipelines consist of one or more **Jobs**, each of which runs a set +of **Nodes** to accomplish specific tasks. + +For detailed examples of defining pipelines, see the [IntoPipeline trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.IntoPipeline.html). 
+ +### Pipeline Jobs + +Each `PipelineJob` represents a unit of work that: +- Runs on a specific platform and architecture +- Can depend on artifacts from other jobs +- Can be conditionally executed based on parameters +- Emits a sequence of steps that accomplish the job's goals + +Jobs are configured using a builder pattern: + +```rust +let job = pipeline + .new_job(platform, arch, "my-job") + .with_timeout_in_minutes(60) + .with_condition(some_param) + .ado_set_pool("my-pool") + .gh_set_pool(GhRunner::UbuntuLatest) + .dep_on(|ctx| { + // Define what nodes this job depends on + some_node::Request { /* ... */ } + }) + .finish(); +``` + +### Pipeline Parameters + +Parameters allow runtime configuration of pipelines: + +```rust +// Define a boolean parameter +let use_cache = pipeline.new_parameter_bool( + "use_cache", + "Whether to use caching", + ParameterKind::Stable, + Some(true) // default value +); + +// Use the parameter in a job +let job = pipeline.new_job(...) + .dep_on(|ctx| { + let use_cache = ctx.use_parameter(use_cache); + // use_cache is now a ReadVar + }) + .finish(); +``` + +Parameter types: +- Boolean parameters +- String parameters with optional validation +- Numeric (i64) parameters with optional validation + +#### Stable vs Unstable Parameters + +Every parameter in flowey must be declared as either **Stable** or **Unstable** using `ParameterKind`. This classification determines the parameter's visibility and API stability: + +**Stable Parameters (`ParameterKind::Stable`)** + +Stable parameters represent a **public, stable API** for the pipeline: + +- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. +- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. 
+- **Use Cases**: + - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) + - Parameters intended for use by other teams or external automation + - Parameters documented as part of the pipeline's public interface + +**Unstable Parameters (`ParameterKind::Unstable`)** + +Unstable parameters are for **internal use** and experimentation: + +- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. +- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. +- **Use Cases**: + - Experimental features or debugging flags + - Internal pipeline configuration that may change frequently + - Parameters for development/testing that shouldn't be used in production + ## Additional Resources - **Example nodes**: See `flowey/flowey_lib_common/src/` for many real-world examples From ae149ce64faea7f2acca39d1f5974266aafc4819 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 09:20:05 -0700 Subject: [PATCH 04/15] re-structure/address comments/fixup links --- Guide/src/dev_guide/dev_tools/xflowey.md | 466 ++++++++++++++++------- 1 file changed, 334 insertions(+), 132 deletions(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index 588caf6653..63decf901e 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -16,6 +16,46 @@ Some particularly notable pipelines: While `cargo xflowey` technically has the ability to run CI pipelines locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is currently broken and should not be relied upon. Use CI pipelines in their intended environments (Azure DevOps or GitHub Actions). ``` +## Why Flowey? 
+ +Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps Pipelines, GitHub Actions workflows) have several fundamental limitations that become increasingly problematic as projects grow in complexity: + +### The Problems with Traditional YAML Pipelines + +**Non-Local Reasoning and Global State** +- YAML pipelines heavily rely on global state and implicit dependencies (environment variables, file system state, installed tools) +- Understanding what a step does often requires mentally tracking state mutations across the entire pipeline +- Debugging requires reasoning about the entire pipeline context rather than isolated units of work +- Changes in one part of the pipeline can have unexpected effects in distant, seemingly unrelated parts + +**Maintainability Challenges** +- YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) +- No compile-time validation means errors only surface at runtime, often deep into a pipeline execution +- Refactoring is risky and error-prone without automated tools to catch breaking changes +- Code duplication is common because YAML lacks good abstraction mechanisms +- Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive + +**Platform Lock-In** +- Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) 
+- Moving between platforms requires complete rewrites of pipeline configuration +- Backend-specific features and syntax create vendor lock-in +- Multi-platform support means maintaining multiple, divergent YAML files + +**Local Development Gaps** +- Developers can't easily test pipeline changes before pushing to CI +- Reproducing CI failures locally is difficult or impossible +- The feedback loop is slow: push → wait for CI → debug → repeat + +### Flowey's Solution + +Flowey addresses these issues by treating automation as **first-class Rust code**: + +- **Type Safety**: Rust's type system catches errors at compile-time rather than runtime +- **Local Reasoning**: Dependencies are explicit through typed variables, not implicit through global state +- **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) +- **Reusability**: Nodes are composable building blocks that can be shared across pipelines +- **Local Execution**: The same pipeline definition can run locally or in CI + ## `xflowey` vs `xtask` In a nutshell: @@ -34,10 +74,10 @@ working on OpenVMM automation. ## Table of Contents 1. [Core Concepts](#core-concepts) -2. [Emitting Steps](#emitting-steps) -3. [Runtime Services](#runtime-services) -4. [Flowey Nodes](#flowey-nodes) -5. [Variables: ReadVar and WriteVar](#variables-readvar-and-writevar) +2. [Variables: ReadVar and WriteVar](#variables-readvar-and-writevar) +3. [Emitting Steps](#emitting-steps) +4. [Runtime Services](#runtime-services) +5. [Flowey Nodes](#flowey-nodes) 6. [Node Design Philosophy](#node-design-philosophy) 7. [Common Patterns](#common-patterns) 8. 
[Artifacts](#artifacts)
@@ -56,18 +96,18 @@ Flowey operates in two distinct phases:
   - Reads `.flowey.toml` to determine which pipelines to regenerate
   - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build`
   - Runs the flowey binary with `pipeline --out ` for each pipeline definition
-  - During this invocation, flowey constructs a directed acyclic graph (DAG) by:
-    - Instantiating all nodes defined in the pipeline
+  - During this invocation, flowey constructs a **directed acyclic graph (DAG)** - a graph structure that represents the execution order of work, where each node represents a unit of work and edges represent dependencies between them, by:
+    - Instantiating all nodes (reusable units of automation logic) defined in the pipeline
     - Processing their requests
     - Resolving dependencies between nodes via variables and artifacts
     - Determining the execution order
     - Performing flowey-specific validations (dependency resolution, type checking, etc.)
   - Generates YAML files for CI systems (ADO, GitHub Actions) at the paths specified in `.flowey.toml`
 
-2. **Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey `). Steps run in the order determined at build-time:
+2. **Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey `). Steps (units of work) run in the order determined at build-time:
    - Variables are read and written with actual values
    - Commands are executed
-   - Artifacts are published/consumed
+   - Artifacts (data packages passed between jobs) are published/consumed
    - Side effects occur
 
 ```admonish note
@@ -113,6 +153,147 @@ avoided unless absolutely necessary. Most automation logic should be
 backend-agnostic, using `emit_rust_step` for cross-platform Rust code that
 works everywhere. 
``` + +--- + +## Variables: ReadVar and WriteVar + +**ReadVar** and **WriteVar** are flowey's solution to the problem of declaring +variables at build-time that will hold values produced during pipeline runtime. + +### The Problem They Solve + +When constructing the pipeline graph, we don't yet know the values that will be +produced during execution (e.g., paths to built binaries, git commit hashes, +etc.). We need a way to: +1. Declare "this step will produce a value" +2. Declare "this step will consume that value" +3. Let flowey infer the execution order from these dependencies + +### Write-Once Semantics + +`WriteVar` can only be written to **once**. This is fundamental to flowey's +execution model: + +- Writing to a `WriteVar` consumes it (the type is not `Clone`) +- This ensures there's exactly one producer for each variable +- Flowey can use this to build a valid DAG (no cycles, no conflicts) + +### Claiming Variables + +Before a step can use a `ReadVar` or `WriteVar`, it must **claim** it. Claiming serves several purposes: +1. Registers that this step depends on (or produces) this variable +2. Converts `ReadVar` to `ReadVar` +3. Allows flowey to track variable usage for graph construction + +Variables can only be claimed inside step closures using the `claim()` method. + +**Example of the nested closure pattern:** + +```rust +// During node's emit() - this runs at BUILD-TIME +let input_var: ReadVar = /* ... 
let output_var: WriteVar<i32> = ctx.new_var();
+ - Write actual values to claimed `WriteVar`s + +This separation ensures flowey can: +- **Validate dependencies before execution**: All claims happen during graph construction, catching errors like missing dependencies or cycles at build-time +- **Determine execution order**: By analyzing claimed variables, flowey knows which steps depend on which others +- **Generate correct YAML**: The generated CI YAML reflects the dependency structure discovered during claiming +- **Catch type errors early**: The Rust type system prevents reading/writing unclaimed variables + +The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure). + +### ClaimedReadVar and ClaimedWriteVar + +These are type aliases for claimed variables: +- `ClaimedReadVar = ReadVar` +- `ClaimedWriteVar = WriteVar` + +Only claimed variables can be read/written at runtime. + +**Implementation Detail: Zero-Sized Types (ZSTs)** + +The claim state markers `VarClaimed` and `VarNotClaimed` are zero-sized types (ZSTs) - they exist purely at the type level and have no runtime representation or memory footprint. This is a pure type-level transformation that happens at compile time. + +This design is crucial because without this type-level transform, Rust couldn't statically verify that all variables used in a runtime block have been claimed by that block + +The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available. + +### Static Values vs Runtime Values + +Sometimes you know a value at build-time: + +```rust +// Create a ReadVar with a static value +let version = ReadVar::from_static("1.2.3".to_string()); + +// This is encoded directly in the pipeline, not computed at runtime +// WARNING: Never use this for secrets! 
This can be used as an escape hatch when a Request expects a value to be determined at runtime, but in a particular instance the value is already known at build-time.
Different step types exist for different purposes. +### NodeCtx vs StepCtx + +Before diving into step types, it's important to understand these two context types: + +- **`NodeCtx`**: Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. + +- **`StepCtx`**: Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). + +### Isolated Working Directories and Path Immutability + +```admonish warning title="Critical Constraint" +**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. + +However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: + +**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** +``` + +When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. If you need to modify files from a `ReadVar`, copy them to your step's working directory. + ### Rust Steps Rust steps execute Rust code at runtime and are the most common step type in flowey. @@ -130,33 +331,25 @@ Rust steps execute Rust code at runtime and are the most common step type in flo **`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable. -For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html). +For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html). ### ADO Steps **`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. 
Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. -For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). +For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). ### GitHub Steps **`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. -For GitHub step examples, see the [`GhStepBuilder` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/steps/github/struct.GhStepBuilder.html). +For GitHub step examples, see the [`GhStepBuilder` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html). ### Side Effect Steps **`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. -For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). - -### StepCtx vs NodeCtx - -- **`NodeCtx`**: Used when emitting steps. Provides `emit_*` methods, `new_var()`, - `req()`, etc. - -- **`StepCtx`**: Used inside step closures. Provides access to `claim()` for - variables, and basic environment info (`backend()`, `platform()`). +For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). 
--- @@ -275,23 +468,73 @@ Every node has an associated **Request** type that defines what operations the n - Request types define the node's API using `flowey_request!` macro - Requests often include `WriteVar` parameters for outputs -For complete examples, see the [`FlowNode` trait documentation](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html). +For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). ### FlowNode vs SimpleFlowNode -Flowey provides two node implementation patterns: +Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: + +**SimpleFlowNode** - for straightforward, function-like operations: +- Uses a **single struct Request** type +- Processes one request at a time independently +- Behaves like a "plain old function" that resolves its single request type +- Each invocation is isolated - no shared state or coordination between requests +- Simpler implementation with less boilerplate +- Ideal for straightforward operations like running a command or transforming data + +**Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. 
+ +**FlowNode** - for complex nodes requiring coordination and non-local configuration: +- Often uses an **enum Request** with multiple variants +- Receives all requests as a `Vec` and processes them together +- Can aggregate, optimize, and consolidate multiple requests into fewer steps +- Enables **non-local configuration** - critical for simplifying complex pipelines -**FlowNode** - for nodes that need to process multiple requests together: -- Receives all requests as a `Vec` -- Can aggregate common requirements across requests and consolidate them into a single step to reduce repeated work -- Can resolve conflicts between requests +**The Non-Local Configuration Pattern** -**SimpleFlowNode** - for nodes where each request is independent: -- Processes one request at a time -- Simpler implementation, less boilerplate -- Ideal for straightforward operations +The key advantage of FlowNode is its ability to accept configuration from different parts of the node graph without forcing intermediate nodes to be aware of that configuration. This is the "non-local" aspect: -For detailed comparisons and examples, see the [`FlowNode`](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://docs.rs/flowey_core/latest/flowey_core/node/trait.SimpleFlowNode.html) documentation. 
+Consider an "install Rust toolchain" node with an enum Request: + +```rust +enum Request { + SetVersion { version: String }, + GetToolchain { toolchain_path: WriteVar }, +} +``` + +**Without this pattern** (struct-only requests), you'd need to thread the Rust version through every intermediate node in the call graph: + +``` +Root Node (knows version: "1.75") + → Node A (must pass through version) + → Node B (must pass through version) + → Node C (must pass through version) + → Install Rust Node (finally uses version) +``` + +**With FlowNode's enum Request**, the root node can send `Request::SetVersion` once, while intermediate nodes that don't care about the version can simply send `Request::GetToolchain`: + +``` +Root Node → InstallRust::SetVersion("1.75") + → Node A + → Node B + → Node C → InstallRust::GetToolchain() +``` + +The Install Rust FlowNode receives both requests together, validates that exactly one `SetVersion` was provided, and fulfills all the `GetToolchain` requests with that configured version. The intermediate nodes (A, B, C) never needed to know about or pass through version information. + +This pattern: +- **Eliminates plumbing complexity** in large pipelines +- **Allows global configuration** to be set once at the top level +- **Keeps unrelated nodes decoupled** from configuration they don't need +- **Enables validation** that required configuration was provided (exactly one `SetVersion`) + +**Additional Benefits of FlowNode:** +- Optimize and consolidate multiple similar requests into fewer steps (e.g., installing a tool once for many consumers) +- Resolve conflicts or enforce consistency across requests + +For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) documentation. 
### Node Registration @@ -313,7 +556,7 @@ Flowey does not catch unused imports today as part of its build-time validation **Why declare imports?** Flowey needs to know the full set of potentially-used nodes at compilation time to properly resolve the dependency graph. -For more on node imports, see the [`FlowNode::imports` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html#tymethod.imports). +For more on node imports, see the [`FlowNode::imports` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.imports). ### The emit() Method @@ -324,90 +567,7 @@ The `emit()` method is where a node's actual logic lives. For `FlowNode`, it rec For `SimpleFlowNode`, the equivalent `process_request()` method processes one request at a time. -For complete implementation examples, see the [`FlowNode::emit` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/trait.FlowNode.html#tymethod.emit). - ---- - -## Variables: ReadVar and WriteVar - -**ReadVar** and **WriteVar** are flowey's solution to the problem of declaring -variables at build-time that will hold values produced during pipeline runtime. - -### The Problem They Solve - -When constructing the pipeline graph, we don't yet know the values that will be -produced during execution (e.g., paths to built binaries, git commit hashes, -etc.). We need a way to: -1. Declare "this step will produce a value" -2. Declare "this step will consume that value" -3. Let flowey infer the execution order from these dependencies - -### Write-Once Semantics - -`WriteVar` can only be written to **once**. 
This is fundamental to flowey's -execution model: - -- Writing to a `WriteVar` consumes it (the type is not `Clone`) -- This ensures there's exactly one producer for each variable -- Flowey can use this to build a valid DAG (no cycles, no conflicts) - -### Claiming Variables - -Before a step can use a `ReadVar` or `WriteVar`, it must **claim** it: - -Claiming serves several purposes: -1. Registers that this step depends on (or produces) this variable -2. Converts `ReadVar` to `ReadVar` -3. Allows flowey to track variable usage for graph construction - -### ClaimedReadVar and ClaimedWriteVar - -These are type aliases for claimed variables: -- `ClaimedReadVar = ReadVar` -- `ClaimedWriteVar = WriteVar` - -Only claimed variables can be read/written at runtime. - -### Static Values vs Runtime Values - -Sometimes you know a value at build-time: - -```rust -// Create a ReadVar with a static value -let version = ReadVar::from_static("1.2.3".to_string()); - -// This is encoded directly in the pipeline, not computed at runtime -// WARNING: Never use this for secrets! -``` - -This can be used as an escape hatch when you have Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. - -### Variable Operations - -`ReadVar` provides several useful operations for transforming and combining variables: - -**Transform operations:** -- **`map()`**: Apply a function to transform a `ReadVar` into a `ReadVar`. Useful for deriving new values from existing variables (e.g., extracting a filename from a path, converting to uppercase). - -**Combining operations:** -- **`zip()`**: Combine two ReadVars into a single `ReadVar<(T, U)>`. Useful when a step needs access to multiple values simultaneously. - -**Dependency operations:** -- **`into_side_effect()`**: Discard the value but keep the dependency. Converts `ReadVar` to `ReadVar`, useful when you only care that a step ran, not what it produced. 
-- **`depending_on()`**: Create a new ReadVar that has an explicit dependency on another variable. Ensures ordering without actually using the dependent value. - -For detailed examples of each operation, see the [`ReadVar` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.ReadVar.html). - -### The SideEffect Type - -`SideEffect` is an alias for `()` that represents a dependency without data. It's used when you need to express that one step must run before another, but the first step doesn't produce any value that the second step needs to consume. - -**Key concepts:** -- Represents "something happened" without carrying data -- Enables explicit dependency ordering between steps -- Commonly used for installation, initialization, or cleanup steps - -For examples of using SideEffect, see the [`SideEffect` type documentation](https://docs.rs/flowey_core/latest/flowey_core/node/type.SideEffect.html). +For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). --- @@ -467,7 +627,7 @@ When a FlowNode receives multiple requests, it often needs to ensure certain val - Collect values that can have multiple instances (like output variables) - Validate that required values were provided -For a complete example, see the [`same_across_all_reqs` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/user_facing/fn.same_across_all_reqs.html). +For a complete example, see the [`same_across_all_reqs` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/user_facing/fn.same_across_all_reqs.html). 
### Conditional Execution Based on Backend/Platform @@ -484,7 +644,7 @@ Nodes can query the current backend and platform to emit platform-specific or ba - Using different commands on Windows vs Unix systems - Optimizing for local development vs CI environments -For more on backend and platform APIs, see the [`NodeCtx` documentation](https://docs.rs/flowey_core/latest/flowey_core/node/struct.NodeCtx.html). +For more on backend and platform APIs, see the [`NodeCtx` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html). ### Using the flowey_request! Macro @@ -497,23 +657,54 @@ The `flowey_request!` macro generates the Request type and associated boilerplat The macro automatically derives `Serialize`, `Deserialize`, and implements the `IntoRequest` trait. -For complete syntax and examples, see the [`flowey_request!` macro documentation](https://docs.rs/flowey_core/latest/flowey_core/macro.flowey_request.html). +For complete syntax and examples, see the [`flowey_request!` macro documentation](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html). --- ## Artifacts -**Artifacts** are the mechanism for passing data between jobs in a pipeline. -When one job produces output that another job needs, that output is packaged as -an artifact. +**Artifacts** are first-class citizens in flowey, designed to abstract away the many footguns and complexities of CI system artifact handling. Flowey treats artifacts as typed data that flows between pipeline jobs, with automatic dependency management . 
+ +### The Problem with Raw CI Artifacts + +Traditional CI systems have numerous artifact-related footguns that can cause subtle, hard-to-debug failures: + +**Name Collision Issues** +- If you upload an artifact mid-way through a job, then a later step fails, re-running that job will fail when trying to upload the artifact again due to name collision with the previous run +- Artifact names must be globally unique within a pipeline run, requiring manual name management +- Different CI backends have different artifact naming rules and restrictions + +**Manual Dependency Management** +- Nothing prevents you from trying to download an artifact before the job that produces it has run +- Job ordering must be manually specified and kept in sync with artifact dependencies +- Mistakes only surface at runtime, often after significant CI time has been consumed + +### Flowey's Artifact Abstraction + +Flowey solves these problems by making artifacts a core part of the pipeline definition at build-time: ### Typed vs Untyped Artifacts -**Typed artifacts (preferred)** provide type-safe artifact handling by defining -a custom type that implements the `Artifact` trait. **Untyped artifacts** provide -simple directory-based artifacts for simpler cases. +**Typed artifacts (recommended)** provide type-safe artifact handling by defining +a custom type that implements the `Artifact` trait: + +```rust +#[derive(Serialize, Deserialize)] +struct MyArtifact { + #[serde(rename = "output.bin")] + binary: PathBuf, + #[serde(rename = "metadata.json")] + metadata: PathBuf, +} +``` + +**Untyped artifacts** provide simple directory-based artifacts for simpler cases: -For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.Artifact.html). 
+```rust +let artifact = pipeline.new_artifact("my-files"); +``` + +For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). Key concepts: - The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure @@ -521,12 +712,23 @@ Key concepts: - Typed artifacts ensure compile-time type safety when passing data between jobs - Untyped artifacts are simpler but don't provide type guarantees -### How Artifacts Create Dependencies +### How Flowey Manages Artifacts Under the Hood + +During the **pipeline resolution phase** (build-time), flowey: + +1. **Identifies artifact producers and consumers** by analyzing which jobs write to vs read from each artifact's `WriteVar`/`ReadVar` +2. **Constructs the job dependency graph** ensuring producers run before consumers +3. **Generates backend-specific upload/download steps** in the appropriate places: + - For ADO: Uses `PublishPipelineArtifact` and `DownloadPipelineArtifact` tasks + - For GitHub Actions: Uses `actions/upload-artifact` and `actions/download-artifact` + - For local execution: Uses filesystem copying +4. **Handles artifact naming automatically** to avoid collisions while keeping names human-readable +5. **Validates the artifact flow** to ensure all dependencies can be satisfied -When you use an artifact in a job, flowey automatically: -1. Creates a dependency from the consuming job to the producing job -2. Ensures the producing job runs first -3. 
Handles artifact upload/download between jobs (on CI backends) +At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: +- Producing jobs write artifact files to the path from `WriteVar` +- Flowey automatically uploads those files as an artifact +- Consuming jobs read the path from `ReadVar` where flowey has downloaded the artifact --- @@ -536,7 +738,7 @@ A **Pipeline** is the top-level construct that defines a complete automation workflow. Pipelines consist of one or more **Jobs**, each of which runs a set of **Nodes** to accomplish specific tasks. -For detailed examples of defining pipelines, see the [IntoPipeline trait documentation](https://docs.rs/flowey_core/latest/flowey_core/pipeline/trait.IntoPipeline.html). +For detailed examples of defining pipelines, see the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html). ### Pipeline Jobs From cda1a74b042c580658518c76a503642809021957 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 09:28:27 -0700 Subject: [PATCH 05/15] Add more links to rustdocs --- Guide/src/dev_guide/dev_tools/xflowey.md | 60 ++++++++++++------------ 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index 63decf901e..f08df24502 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -158,7 +158,7 @@ works everywhere. ## Variables: ReadVar and WriteVar -**ReadVar** and **WriteVar** are flowey's solution to the problem of declaring +[**`ReadVar`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) and [**`WriteVar`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html) are flowey's solution to the problem of declaring variables at build-time that will hold values produced during pipeline runtime. 
### The Problem They Solve @@ -181,7 +181,7 @@ execution model: ### Claiming Variables -Before a step can use a `ReadVar` or `WriteVar`, it must **claim** it. Claiming serves several purposes: +Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: 1. Registers that this step depends on (or produces) this variable 2. Converts `ReadVar` to `ReadVar` 3. Allows flowey to track variable usage for graph construction @@ -240,14 +240,14 @@ The type system enforces this separation: `claim()` requires `StepCtx` (only ava ### ClaimedReadVar and ClaimedWriteVar These are type aliases for claimed variables: -- `ClaimedReadVar = ReadVar` -- `ClaimedWriteVar = WriteVar` +- [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar` +- [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar` Only claimed variables can be read/written at runtime. **Implementation Detail: Zero-Sized Types (ZSTs)** -The claim state markers `VarClaimed` and `VarNotClaimed` are zero-sized types (ZSTs) - they exist purely at the type level and have no runtime representation or memory footprint. This is a pure type-level transformation that happens at compile time. +The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level and have no runtime representation or memory footprint. This is a pure type-level transformation that happens at compile time. 
This design is crucial because without this type-level transform, Rust couldn't statically verify that all variables used in a runtime block have been claimed by that block @@ -305,9 +305,9 @@ step types exist for different purposes. Before diving into step types, it's important to understand these two context types: -- **`NodeCtx`**: Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. +- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. -- **`StepCtx`**: Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). +- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). ### Isolated Working Directories and Path Immutability @@ -360,12 +360,12 @@ closures passed to `emit_rust_step`, etc.). ### RustRuntimeServices -`RustRuntimeServices` is the primary runtime service available in Rust steps. It provides: +[`RustRuntimeServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/rust/struct.RustRuntimeServices.html) is the primary runtime service available in Rust steps. 
It provides: **Variable Operations:** - Reading and writing flowey variables - Secret handling (automatic secret propagation for safety) -- Support for reading values of any type that implements `ReadVarValue` +- Support for reading values of any type that implements [`ReadVarValue`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.ReadVarValue.html) **Environment Queries:** - Backend identification (Local, ADO, or GitHub) @@ -423,7 +423,7 @@ rt.write_not_secret(output_var, &"done".to_string()); ### AdoStepServices -`AdoStepServices` provides integration with Azure DevOps-specific features when emitting ADO YAML steps: +[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: **ADO Variable Bridge:** - Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars @@ -436,7 +436,7 @@ rt.write_not_secret(output_var, &"done".to_string()); ### GhStepBuilder -`GhStepBuilder` is a fluent builder for constructing GitHub Actions steps with: +[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: **Step Configuration:** - Specifying the action to use (e.g., `actions/checkout@v4`) @@ -456,17 +456,17 @@ rt.write_not_secret(output_var, &"done".to_string()); ## Flowey Nodes -A **FlowNode** is a reusable unit of automation logic. Nodes process requests, +A [**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) is a reusable unit of automation logic. Nodes process requests, emit steps, and can depend on other nodes. ### The Node/Request Pattern -Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the `flowey_request!` macro and registered with `new_flow_node!` or `new_simple_flow_node!` macros. 
+Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. **Key concepts:** -- Each node is a struct registered with `new_flow_node!` or `new_simple_flow_node!` -- Request types define the node's API using `flowey_request!` macro -- Requests often include `WriteVar` parameters for outputs +- Each node is a struct registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) +- Request types define the node's API using [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro +- Requests often include [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html) parameters for outputs For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). @@ -474,7 +474,7 @@ For complete examples, see the [`FlowNode` trait documentation](https://openvmm. 
Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: -**SimpleFlowNode** - for straightforward, function-like operations: +[**`SimpleFlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) - for straightforward, function-like operations: - Uses a **single struct Request** type - Processes one request at a time independently - Behaves like a "plain old function" that resolves its single request type @@ -484,7 +484,7 @@ Flowey provides two node implementation patterns with a fundamental difference i **Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. -**FlowNode** - for complex nodes requiring coordination and non-local configuration: +[**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) - for complex nodes requiring coordination and non-local configuration: - Often uses an **enum Request** with multiple variants - Receives all requests as a `Vec` and processes them together - Can aggregate, optimize, and consolidate multiple requests into fewer steps @@ -539,9 +539,9 @@ For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/ ### Node Registration Nodes are automatically registered using macros that handle most of the boilerplate: -- `new_flow_node!(struct Node)` - registers a FlowNode -- `new_simple_flow_node!(struct Node)` - registers a SimpleFlowNode -- `flowey_request!` - defines the Request type and implements `IntoRequest` +- [`new_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) - registers a FlowNode +- [`new_simple_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) - registers a SimpleFlowNode +- [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) - defines the Request type and 
implements [`IntoRequest`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.IntoRequest.html) ### The imports() Method @@ -560,12 +560,12 @@ For more on node imports, see the [`FlowNode::imports` documentation](https://op ### The emit() Method -The `emit()` method is where a node's actual logic lives. For `FlowNode`, it receives all requests together and must: +The [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit) method is where a node's actual logic lives. For [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html), it receives all requests together and must: 1. Aggregate and validate requests (ensuring consistency where needed) 2. Emit steps to perform the work 3. Wire up dependencies between steps via variables -For `SimpleFlowNode`, the equivalent `process_request()` method processes one request at a time. +For [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html), the equivalent [`process_request()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html#tymethod.process_request) method processes one request at a time. For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). 
@@ -609,9 +609,9 @@ implementation (runtime logic): Use Rust's type system to prevent errors at build-time: - Typed artifacts ensure type-safe data passing -- `WriteVar` can only be written once (enforced by the type system) -- `ClaimVar` ensures variables are claimed before use -- Request validation happens during `emit()`, not at runtime +- [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html) can only be written once (enforced by the type system) +- Claimed variables ensure variables are claimed before use +- Request validation happens during [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit), not at runtime --- @@ -742,7 +742,7 @@ For detailed examples of defining pipelines, see the [IntoPipeline trait documen ### Pipeline Jobs -Each `PipelineJob` represents a unit of work that: +Each [`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) represents a unit of work that: - Runs on a specific platform and architecture - Can depend on artifacts from other jobs - Can be conditionally executed based on parameters @@ -793,9 +793,9 @@ Parameter types: #### Stable vs Unstable Parameters -Every parameter in flowey must be declared as either **Stable** or **Unstable** using `ParameterKind`. This classification determines the parameter's visibility and API stability: +Every parameter in flowey must be declared as either **Stable** or **Unstable** using [`ParameterKind`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html). 
This classification determines the parameter's visibility and API stability: -**Stable Parameters (`ParameterKind::Stable`)** +**Stable Parameters ([`ParameterKind::Stable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Stable))** Stable parameters represent a **public, stable API** for the pipeline: @@ -806,7 +806,7 @@ Stable parameters represent a **public, stable API** for the pipeline: - Parameters intended for use by other teams or external automation - Parameters documented as part of the pipeline's public interface -**Unstable Parameters (`ParameterKind::Unstable`)** +**Unstable Parameters ([`ParameterKind::Unstable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Unstable))** Unstable parameters are for **internal use** and experimentation: From fc3328176d9c8d1986d693641064d037e297a404 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 12:31:54 -0700 Subject: [PATCH 06/15] Content update --- .../dev_guide/dev_tools/images/Parameters.png | Bin 0 -> 65081 bytes Guide/src/dev_guide/dev_tools/xflowey.md | 486 ++++++++---------- 2 files changed, 212 insertions(+), 274 deletions(-) create mode 100644 Guide/src/dev_guide/dev_tools/images/Parameters.png diff --git a/Guide/src/dev_guide/dev_tools/images/Parameters.png b/Guide/src/dev_guide/dev_tools/images/Parameters.png new file mode 100644 index 0000000000000000000000000000000000000000..bb4eb5c4231d6cd69c684b32c700b83154f4a760 GIT binary patch literal 65081 zcmY(q1z1$w7cV@dba$6XgGhIm(%m5q(v1U1OTVOmv~+hjNC}8^cMV7n9rvLAzwh4j zJUqb6IcM*^di~Z~pH-A((NRcHKp+shyquI82n1IN0>OGBAp)QH23k=9zhGR|WF4Ys%n{1WV~CZ2&Y7WB@2ZABliPq?Q!F~_7 zuiJ32pD)!QYNWspB#Gmdc4DK?;m_Aznac|;aV)Bc+yXREaO+Xao3#_B5s{Kxh6?1Y zSXEZ{>X_O*hs&%#H@BV6$3k=f3LW7sK0RO4JZ&|q zwDoYZgB-U1Az=8Pgu91}9JE#odj2&cE39PS2i>sM!&};T??4<@6B??)-FYN$q^Gq1 zqxhCvTnf0*O?S9qTEjaEG_RQQiT%8y`D|nJ95`pTpJJqZ^RwSb^C6upOisy~XW`Uzg3dDwL 
zG|F!&+JuycSwt7rN(T=e`&Tp?J4C0!XOM(P`AXXogDMi-KP*R3dIB%3ZW;BBczXIz z&TCM57q#4JF5epMXTs&rjkMU|83}7^cy(*EUHcPOIj%qvuQMLP1f^#6?%*wZ$TVdVc6;u!&A1=?3@X^ z+$Cm<(888gQteev$!uZ;asI=)qN8gHp(3^$(r<&3+$BD^gYPvBdl|Fc5mW})r#E^s zJd0#VwTjU^jI8d^{W^BNb{!2#V6@KruQsolIyt6FEYhD}QxsYv7xdkK5|7qUOJd4d)toVsE+Oh{@S8*s{8GD+-UI+ zMfU7}`m>k+b~`%FeA{OA#r9{VmS}US=DBlQS*}awIFv8tL|9a;reu5i%~G?2<|*iL z*Ou`B#U!VMVwUiD#GBcUZ)}uzp}AB(f!=zyft7?iqT+$O1dy1h1PkQmX79OHM@r5& zZ=kiG!(nGm^!=C{$E`W2goN;@lTF}1BMySfwsBf64nyXyzhhYlG7XQIu(FDaJ7br< z)WX9{%5JK{;^)_`T0BhS_TBbrg4D%pX{xtY3@=W5qw%iCyngRC#8|Zk@r`+)WmzdUR?NcXkOm+08oq`o`0Lkq=41i^d108@--_g(RE(}vv4lrHo@!5O2ZRg= zJ)s%4x~ZbiX*70g64SIqK)rW6J6yp>Lz)G}s+}dBqE_AfRiCZ&t{j%*F0WSkoHyuV zF|j9_6%ZpLB2)|vqIeiTIBgB8g3)ONR^c-p$I>9dRVDB*QnBy}U}|3znVM1~MIuRS zc~Bh2!${|EGqBc+a}dQjv%HWFdCBK|$oFd!lX};U4Za(s>p{ge6}YyBl(={pU@fI1 z#YMuaBzC7nNLwKfd)R=2N3eB3sRtN%oJQDXUjKRsO;<^e-E{Y~Z?R_$o?MpY>iGj} zX(nxHs9y#95m;4uIEZKKvyj13J!*sf^3D}hT1sj+bYsKJ?e_wL@8!|-I8E+(^rNo> z+}8_l@nj3IOsSfbHAAkX1hzeAuQ^w4k7XDmACY1jJr!SH)CLZ7Oa@*^v$prBr)Z&3dXK#sN68!`qs@KL$qgozoUqhrdKeLCJ^R@xg}CfP}_3T5&x#U6B_ zfYs@9WDC2`1=c1|TsT|Ie9FC}N$GLjs6S0r+*KELv`h8i{3 zRPh{K$*3P~OXsA*t{phvPl~F5l|H$4HkAm~|h(S(O;yvX=X}?8ahb2yEt#8DiV>^qSVf)}VWa2g6787KaSu#c=ud;0U|a_X%3L)s)b) zGfpc>)~~VDZ!!4n@y9~-LE_@{7e}55K3A;^Xuo;MH}Nq@<>`d z_Rk_ZeqL=P(S>bQbG{lkmT8;$RiCEC8)b`WZjx{=z40_lI-WmvPj@pfQj+2EVZr3M z1AJ=+7%d#VR|Gs|i&~7RamTH_U%C<#*QBhSlH5>-A z6KIf`Nz&Ar_|W48RjZx>0!;22T4Nv*5gH9mj|3LBZRoOtZ1X6_kZG7T2W)(BHWeIN zU_j9x10NX_mr95Arvay4+yr;1pM-wzE8R<8LH$3)xwFQ(vvnm6*TlNofE6_y z7wQzdJHU6)ekm!p7(n{4H6tH=J@FGBmo&0q%Md}`mTzv2Ujr66Cjc!x7bH&*m*tMr z5aBXsk1xH>^un9-EHJ60P9B2q8$XZNJ2R@C1KKLE8{>!T2?o>(nPr>3Al6F=iX){h&g><&Gj* z#7iAqZb0Ov& zcjbhCek-PHLk&Wf$wLk#3T`{ZEvicHU9al3Oi?`o9mWTrxsVPIGgG~d$0>-;MamPG zGA}~zmVu*s3!G3&I+$8u3ihoWN0c~XqHcy^^hlW&5-b)zIeINWJjM`=SSS%5A`*=h z9~PYkh(jy@NjEvp_mBfV9Y5q{z6c2>I84V2JwNHfRpZE2#oDmT zgTW|n5mTvw@-~|B!HFzoZo8xLa_Ri$lzD24`lM{-dj+|5>ur=w+HK$?j+OT@zmY-)B> 
z87w{qu~sQMPZ%6VD;z;W_#D!k9*NQL&nU9BP1eC-Y1p=}X=#MW*4Nju-dSqM(ZB~` z=j51zq!JVLOlt+P@bH4G!kwgKqy?|u#Z$e-KnjF$(xF~(r5@sBQb09j6$&ZC)eQgz z>-|8&zz`QV{jbJ9Qye!SHd$wurE+BNZQHc!ET@~6J1L^UCuObWnV%g_iE8XQ(>Im1 z{%%PlO|Kr>hH;MZHR*Jp&Q6Rfd4w8Dk0^VB&OT~*(p2g-D#_e_a{K?K0A|8}72OHt zx@}tE8>Z=Jc}6*hS?oaTfc755AkdsWew+ngZ=SVSRd+z(B)=0M9bS?qRbw{VjLWXO zZjcleCk=eR1rJ`bMwhK?mo*l)LvSuQG+8VY!npdULHA}Yy4k|Jou>WjOf+}U22s5+;v@XXlO`IS#<6HRei&`gsW^< zf;}F^4C=s9>b;3Z(I`Zin&p_Y8a2SkwYI|s03H&?l_3K4T;-zxDLxOW!te(MW>2wr zm>}_hmvbN>w&3R@a(=Cf#GOtR;S|Hif`o*^$tO#Wh(%#5Q~SL-JRB*+8jCt9WnIb= zLqTB}1cQW%Dd{D}XO0wS0mCC7XwLqrybYeG^_)}CqZN+7@Qey+?7ygfsyAtkwjqaQ z?=%v%`TVg|$bGKPv(c*{IEFvVO#|FLNVOURB`BFP8Z*SzG)wfpgBPu=-Hj0kj$7Ir zlOaJi8EAvw^8B0`4=#0zeAGWYCb*?s6uX>xxhZuNXqD?xKdgzzx+nqGZ^O+&>E2+s zZyz@6Vl}e#3&`u^6~=`qdo0DzsRHmTc;e`)#otwn%v55b%ceoS)ZNw`Jw)bGDqehL z{Rj+$V}Z--Vm8g>`C6n@aJfiaG03t-{6qu^*tsC0C|J65^2-PmB59Z$Q-Z{#B-+kw zqn5536<{BpFWX)djxIn`!&Oj8 z2KsZs17qQ~OGMGQX=eKo(}QDtAh7@{zE@qTBJClnR;ekVQq=ZL)#*1~Bm$mC_X^qF znyHnxC|3k}sAs5g8A&1W@i=q#JXV3)qp$v?s=1-^$rf|JJrWa>)-}d#zEKJxiE~jk zy9G&YXZu@%^k5W4|XIw%;NW_`TsL#Kb z+&iD`QQy~ka{~&^1jmwX0Moxbw=dWwz<33RROOKC@iJ+*xg^5CW}#U(&E$YYx~ZxP z%fdqIx68j%t;{#Se-RiZW2KdN0i!t^tL&{2UeT$4<><~aeVoEId4mR9Hc)Ff*dK7ZOIu1XnHaJYV2w0DTQ;UG+iK(Kd0f?0qUHonMOWNT+>kr6LPrZmLF~1I>N;{(V z)tcfFOF0Eb>R-aL5p8eJ4Bzizl$59|Lt(TYD&GVtEUIg_E@`MA!Ws**+-z#^_P|rE1^^#uhxf&kGvPwNaO_m?dd$BAV)ok zm|I*kg?pJ!^Qg?gNZPaUOpSkRK=sJ0!oatj+>`h(nGK8TQdO3Jm5e4<_=wPcOF4nC zXz}%y7Fg7u#q%ABF(2SoZ*R6>cuP2Y5&na z7Dvlsw>aO)Lf%u(ebsaJRAw?9!cxrQ$xaKGizB7^R3cftWt8ngyxWQPzY_?7?Mhn- zD|d!@Ag*T9Pq$G?B^HKIL)OYM!FUH#cjnT^4tj*Yz}N(WYzAsZBlCdFuhpdD)~4!sJfr2@#MOtweNLkq6?|D@g2$w~rzP%0a$VWBwwwyGttqC-V?s&@4ah@azg&Mem+($~U z6qJtmYgl~0#=ZT=ayDo{hr*ZXY1K(f!y#T|Wc>qiU(1tMsK+7=_n0-=->j;Oj5PrN9D*Bi(GOMrE{;)!&gOs2 z?w(MXnfWsaB!xx3WQ}QP_}A?T#He^6=-lNCN2xhdx+@7WE0@jj!}?1_nieN(&Kw+KO_+L@3P?u-VdcPwWeHA@Zn8!m zdZ`!By6wJl-9Oz#)z&1QF#hfqs=j)@+!m_c6knbxOY=YLpr^?{q2Dldj#Oo&XDr-X 
z+R|}IMyxOsE%nb-ZAJS^nR+H`c@ikK!2jV`QsyUlk;cbA)4NPanY#rf}Pzw;!_(&3a_@vr9d z{w;|X^CJO@Clf9Q^>Kf=c<3bI7Z#*H%Vmz&+b>XDMIPb9|z0&1>5_>%ye(Ul8 zwjNJ_5}ITzht}8reKH&pEfRElV0lRI!&dMT!AV^A?-MljCAHtKSRl-UEr|ANypviO zre(%*YUqb_-l@uX;fs!x(fy)Nf!ua8X8SeUUpUrP!AY*ebJd$Cca+T4mZN$WKDVh> z7t>>{4}S5DN^YEa-TIUq5O$_>uR)2D>PJ7hTMHib#N+%{-x1m z-A?cHj`I37^wm4>U2+4Dd9jC*B8-duMZ(4yl90i0B#>D6e3V^9I{~ds%#^lpr9si1 z;xnZP5y3`N<}$h|+;HD2nM>4b{0bxt5A%j^p2H8BiHW_%LiUv}eAsU+$D|q^mZ8%{ z1i3V{rabA~7UD9rKsKDqD7gCny(|vok|*HVQd=YniGo1Y_&=S@MEQzfgX-)SG1%L# zX>Kommu4w)CmO4ON~cR0Yv+Ft3MCc)UvJp$Trih(j4m>wEjNffLAlnm>rF|?iiO+X zA9mY2F~v(s^jgliZsJ?=Tz!yx_ik!Dmt?NV6mAFnz^$~qBY~)*VGx|nz4hNFZ83zifN;sgjNe1*fpiw7`-8#$snJ(RX$ifLnrabrHV zpqD08iXhyL_3bt&rt_@V&l>fwONMc6%j97jws=e@{xu!{C5X`8jwI-{loc13X6&L> zD(G62ma;!mUN3oGpP1g?>}CNGaoXA5p0x-0U!^_kHr8^xTGQ+)JT4JP!slkKyq(B2 zALaJ0YaF)YJ z3W$O111C=_x$$^LWa=9$YY6ye+MrHBLG8%O%If__A8(cWf!4**GHISeD>=EKM{vK{ zCs1~eW$*r1vU`B4*GH|6s(N~@uK#8E?GPJy{&pV*Lp}6Z2%?%dF%0;x75e(V_u%2d z69-J&_OJJbo%UX>nhwRkOoJBREb{U`I0qih#gK5L%o@0|;-v9}09g^pgq3|@2=~>= z2)pQJx?_iGa;buv8jb@wH;SmFE62fgZ=k=$Bu5`&Cp0#82eKF z4_Er&rOk~^Q!XMZI0i<>Ze#!7-`gs8P46y047)t)06QakjT2PahBKPZ8#Zqr3hJVk z2SU2ia=Xu^R1tGik9+h&a4>un zIWLCeyN|UidY|tuen;-Vu{8PdMKCTdE*oAL=2`nOelKpOT4nVndae|we2`O6=-E{- z+z@>NW0mXGs|7NHz61qLlxQ;F?{;FEL8ff>riv3RbsZ^uu1!B)-e^2C!1`BHb&aSK z`6PWb>1K;vWv^A1a(|QW@idDs{eEoggbB!CK$wL~qnYP)ly|kvc&hhjPVY&7*?iOS z%k+WR)XCZB z5J&3*Wu$E7tPZc8`CXH0or_w#AJ$wO+uBCbg)tfTA|0UY_X9Cr8$*S0-)#3aYIRt= zPV-ZoClz^Vfz699O6piM$V9iGU&T;XW@#R?B>xPE@s;x8bD$Lde(e6)vE#<_o!5mb zm)*kDaf?dq_;b-&)$p03+bPDXuSDyv0IBYkjau`}Ogl@$1zXDOqInWP@%^BNQNpsM zmmNz8n>8bdxw5eoY3!HM{rz(-xVF(TmGE~dRcYYvQWg|xJ^a6HRv zYPnR|j;kF+LS9t`Z4GR#hn#%_{o)>mg%@i%L40wUF}SakyN;YZ_&sq!1|LuRn|?Pe za&Om9)W6lPb66$;sT2qzZsX(MLgbW`zW>gXJ&1$>#fn@KA!1UX1?k)nFt9TBX8AvU zjum|*QZH2Oza$srIzZ=1<@k6VE+AT=xj$1*x&bg(P*A^F#Mw{$)4(D(4-eo?qJAa` zpi_#FJy8f^tecuDq%N;_t$=+`x3!HomOWRyk2{}Y?gL>h0Gj(=9W@$p1}u&Znj9dcAYoj1V_2NZ1+U{Sv$rf`(LL%XJEIwbyaq?Eo(p+w@hq9^(Ii4P2^ 
zun$`1M+l>otTeL)XTT_6GYh9Q6qnzO+VjxP*?)h8d%y=>>G+F=TLS1=d za3YUl#NhRHgaBziKQ%Q>Je?^NGEMa6)?@Uw*VcMimEA1c=$$wo{Kfv{Y~IVW1-r`r z{1l@Sw6Cf%o|% z=Gzq30r1soY|)@xekR)U=B_U$mAOW;SW{mq157TUVr5lSe|)%FecvCkoo4B-ie-C& zcsT$#$cdg1uj6G;VZ`8tIvZS($7@>i-7V|k&CQL8Qf)^?UBw}gR=L6F-7J3$U0vNY zZyOt%FPz&SVvm-A?eER!6&4)UdNRrZd~-!f@7w6z4q-frm{H!inTwya0SL`VRj?{MOm zFGa+4h!y|260$!cj|<= z;A*qYQ1rQG*RUOTpbW)q2)1K>B}iIaEDDLlU#Xa|<06VH^SKuR-{peuE{DOO>x0V9 zDzknR#ngr5(R5QygyxfQq&mAoLs_tZ_ZeNs)1x5x^%~0W7Z``LiNw(|TsHC9j0cO2 zl{*Iehllnoh7ez=y_cO21>Vaqh@<6f5^{?rf#}e!VVbLpvuTDLOn;F?J>3Z1tts&y z$#BSZEqH_Mt8rEnb^A#OlJx>;i}AbJCS3A8=4%u7u8~FEoLAG+i|BlM)Hg2U12&+b zpm5ReE`wMFa60b4|8lbOt9@r35=#R!LD}+wJaW@h|K-VYr6iqNUL6@SFtPng?Cs(l6F`rkJIZ67+k~Q|x1# zhi@$T$jOZ)l0jwTWQ*2CQxo6lbIOn#n%x4E++FWb{3d7i_}guGaj@?0wsxtcJq7V6 zOKfawiuL#m+hmBw?X6B3yXYIRt$nnf`gssiT$# zzmDS(J^hTErz3iLhCjJoTpr%YP6{B+A~m~Dw@*7X(gK#!_;7uy_+8?D#RGFo#iWLQ zYHI2&4JKgqrR6_gzIdVP?qFAIIf_-*(xwzQ5#D;-L25Ts2IX&mTDc3?L@-gsGC{c% z-f%S>86Te|w80}F=*c1BU>>xVvNtGm>GU>p4Gz=xNI(q86!!5sEcAn{w6|IkClsGG zJ%^3lbe|A)5NBHm32nvgix5t8#>(dSSTu%W6?=N)u#Se4u}!T>uq%c#M`>x}h_FBP z4I**HMoF2O!{g-9{G?&R`3z8nn0WtdF3kiD6G|=%KW-(Rd|cAdyaw|~&3T7!M`_ZG zVT>*PcBt9N3@Z^wwkRUnUM%3!@dZ7OrlOKb$?>C~M6R(yF)1r_V@fRR^-LRWWJN_1 zI5{~@YUeNdQ6{Lgygz?Aj~2({hRFnytG$mJngJG0<*!P9?2SbzcxVh;SbcZfR)l^G z>&^M$YNfVqJkQ^}8>v-_zpcd#7|$es=BY+zCYAM5M+AF({Gt;V?|9eIGmDXvpFvOk zm7R~d&0Fuhbcl|c&e>xBvmFu@$D{t*pKV^PLp1fA%(1CJK#MJaZM-;R2HdxqQ@JcY zeucfx(T^}74jxOz-`-C9q3clJlObIWS+q#9u*kQlO4e1?(n7S;)rGPpxjHn|BJL!_ zihhqFU+50q*xYbh{LnbP-yabo0FhW!>F2WDa&DvB|IzLu;I;>PD;HkyF8*Vb17eq+ zQ4Fv@634bvpZWqy!W#-ch(VR`C3zdT1X~ULdH2s*EhZd97t$1Z4e!qQI?n+|mbM5X@W`7$3hEAc z`8HX@ZlQLU(t;d)d4I_u_S1lbWUtU=af!{tttj`wbK{?F(G(0%jy!H~$m^z=SDsrJ z*(mu&J0;f?raFd3#76C9D#;dEDL$qKH_4`esB#3D!P%m8jIRhyF@LZp(z;nw8zkM zxjZW*M>6dB2^lY)F9r8m{+A}pE$+h)QBi32^KA$i)0kY{^tcWkH^$F#9yql5`r1qA zB$MQeXpw|PGSIDT;c6QYXP9#eJ-*eS9){g)h%;pW(lnd&G9oZaEML?VrAdsWKNM@a8tyW0AZAkg9Rtd+$sDbLm=HvHP@nPcqNdY}FGR1P3 
z{~c8O>+p}ML>Mt32a4B!Nee`dT#D8xKqFF76Vbs;1@u;m%gv*eoRr62%v%}(EjN5; zG;O_RgiW&3?*WUDaM$_wRNKAKkE|!fj7+Rfn@Jfwd_qYO@J$%dBK+sZbIVbapN15L zb!BLP#6CwlX)aA;z7IgoALk88EHD6J;S+y0681Hlt1>0xu_t;?ck1jXBqnn|R`@)% zUp)f{u~#+o_A;#vzd5#ssh@8OHQ;2bO;;vN z+HpFJIi(fvddUIf>h*)>v`EwK*p1!6Ou0^r+hU4O>C-r7_?IUg52u$om+Qo?w?OJQXA zqL%CI?EIwvN9q@0yJWxQ=OcQ#Zb%(2H}bBf)Jew2rKawm=6!F@1(cS(<3uQ#RX_9$ zR5`%{H#k1hTm9aB68hZ7StO!?Z?H^ca*zs%tnWOYgLp&EO`zt>&9#}Vl|}@=N?X3) zm>y{xdq*Xb5|BjvM=N7%wDmTHtsCxYH7twZDfsJTVUj~PXFyLb&3&_wT>iW|>y0ngf7}Ohn~<5xio)zsqc`cH74pb#b@2EYtNm4cV>s9wA6f zOlGR=M@dIdEmT@B>=!j%nydaI`b4@MpV{EJ8tVLhz-&0Nu^LE<@bCz1Tc!o*kS(;B zW5`8g^53Rx>=$PB_&=VDWn}dF@+2lFZ$JT@D?wT4)2jayx$gXYn)rY@2yp7n-rjR{ z-fvcXj}4=U-;lgbo*F-w2RNyr{|Ql>7jf_Gb8 zyZ8D%*KD8R{khq@OfGr#*gpY*!8Ce>?Q@QFeRm|QqoY%wlCSWZni@+QJCLf2ii9fS z4~`dsg#eRF1p@ljN@IL+4BbIZ!ys`dHd>^ygxMwcaMo2s1D>ky-P4NXCb+n` z6P2s^w@i2e!}fdg*Tqw(@o%^dYNy;UQbw?I>`bA(P zSh|=i9eFaB7i+dbTKRq=H{^C+L*0+lJc+f&Xa~TKKX^m6+h$p>Y4!rJ3Y7fMrf6vJH7%zGWFNp@$VEnW%@M(`XN_OA%91A2A#<+X&$=&C z%MVw5kEfH2&$uGTcww$9Q!!m)%>DPxh9HvEQ7m{E#2}bJDlt5u#|vPP5~6A_2DmXrgM$hgYFEVc|g-p<_{~RwzMd3RNLERy`YUQ7H|fH zIZ(rdXDI=ics!5Z6`A*e&djMUxGe^;mI=eAjp=4;Yi^%39f|&KA!9WWacr3AfcCA{ zS(11TN_QHkwy-L=P{6cK!2NH>`QE)ohW*}u!ll(^fG}WA>-;NdVym39Fpjhwbhc=a z|C^u)hkRo3^?2i7j zIM-g(cy7(yTk25ITGNP~{qO60XDj(ws*=v*Fo9SBpb9q*J)^2Qps9_AnAL9W&jT>w zQswSC@OyHcM!@lF^z6;N*N^jM0PE`qzA!!8YJ}DnQMf^-kSW3U$d=J=S3B=0f=(6f{?B0j;rfLLWYcZm3tE_mRm_GXVo z-KvH_qjNP26~;+Ec>?Y2WHUOVg{CdQdjQaa`3nyiu_Nt{d&f^0)3q}*jNNkd`wL3H z@Pmq0rOrPS8^kF~OvE{+rbwu$vgtqwSxXw_Y9aV}56wxomxRZqCFNI`zL#zxeVdP# zXd{rk30<7@HaXsdttSguSqML71ZVNPh#83=3hO zi>|lqolyW(C0?kMaRKzJ4~8*U5Jdr4V92bIzoh6vo9oF>@kBhryrw3kyQ|-m;|?5S zNx$iGLkEm^rouFNJ*%MB-kMn~rTHH|Ksy#fw`cTabF?d0u%xiVFv)wKL>}OQuDY1l zd5!~3)lc6Kb&XAc`I#!sxr@lmS|E1__%f$M6fN_>A~2#?ZyU0;HG_nVJTVTaifFw4 zt2~R5WF(%{i`p^4&c`!U067;r>O2-H)2mkyZSCgMZb%4+7<;+s*G1Ly9Wzi*`08C` zaM!oh_rL}e9Y>NCL1siJ)3|H`*$i8N#^Z04d;z>GUgZ9dH-((5w)JK=XWL#6_1dqk z#_UIH{_2KCy-|i>w&J*hbxziLSSK2!{KqK2NdhyBuZCNthJH!hR#*J_JQgl)5Y;Su 
z-9QXw(6sHeHuYb7vk+&`Ftik#*RnAzqw^t6BhW6Oyvm`5MY8Glf}Od1eBysO*(B5o z@pN)WcNL2@fQrB5qeG6T%lSFO(Yn+CbYDL`90yrX6=7o>uExqItdw{Jv;*RTM(Tl& zgNus|R0VXCRdrMXelIq@?+&DlOHVH?emblj&zY1ar2bvmc`~E#XuFexc;I^QD<8F@ z{ZgP#K}kkV4hg8zj9N-6De)w;=|=pjQCo5y7v@NB_mma(J0!~VyJZAdhdE!ZM2ZKo z4V=CoI5`_(U*Ac83h79>02D(gd-48{knj8kBi7W$uNwjvtjDJpE+A;Jf@u%fCk6M z2jX9Lnw~0)!hjrFA3QEP>qKd2XkoNR?#IXlIUHAJDHabsM!9Fr*1A5)$;ky+bBM*$ zeHFz)QEQ%@nE9Mv-~rwzwaJh7ZbF7{B4R z)M@l6we@L1_fkQ+Sv(_PUaj7{4EQcwO>d4941d)cVhBtgz=)oZ=DdS~kMg?L% zpR!p(s4I^9?6w zu&lL_@7?8bZ)~LBl>JB~dq=PbP|-Nak@Q**+2uZf4S1F5g+u6h5UQS<`V1nPTUs)m zKMVD9z2r2S^gR@!37~mtKuv>$+cr3-H#7%;Vt`qJkRfPqTIW^b+goa& z-5#W(rxy4uKbMEPBBG0k=p@0W+J+(Yia0+mlrs=VQdlRoCy=&k&AI|$G&|VnmHkLZ(|E7ucph$p zqGSwwm))(&R*7=ELFuERv|*S)<gZG%N5GJ5B z^6q?YWyQ{6`oZ8#_UW;FY*qcCqsX|`{q!as*owBXD>oa&?Z30eE?ZVfpeGcl_?gB2 zK84;K*a<=_p^!HA<&rzkxbU%xQXlStP{VnSa2hG`U@8`sny{9VmgYb7AO-rx0CAuZ6~zQR zhQvS&IZPKc5L;DM6(+qL7D%9OZV;|FV^_Hkpas6M6mkl-App)}Fttx}(R!gq6fIu0 z_#uuAClK&UeSK>ik?cTinh}Fs;G6E_(5oy70Oc@6!SE1nK(Aq;rK5xY{Q0x>eD2Ez z`+2n1vCBG|JaF(Inh~ERyq;+1XUUW^(`t znSpiMK^aJuewOR=f&yOl1&*+HlF0;Nd$ghhF%y8yw0itD%gM%v$|nkq59S z7e|w%0O^6L4V;qUk#eALT~t)mwbFi9<#|le1qtC_OCs~Tr zfEWOBvd1X?C_;z46Ng0yj0{y8wR2U@?~JDBxgX4M5TQLI+De&1#bxaL{KVQA#DPD5 zst}ELOa0hpVkD%fKpD?d{iF+Yuwkbho>Y60nVF8{ph?H zvH+R`584JA#OaXN*RcV=LTfHBk@tlN51+v7#^otzfK^0<;<=vf{I9s#S{CQPP7ft|~{iO1ifeDawI^u#glfHq0JmBL;rMHsK&eb(X0QhEl zIOqKAl_rP)lLrF>kX8;TCjw;>03V8+{XH}?(i_dY5~ex=dQF6eP6_I+ZwA!@yFW35oV2cRjksYh$uKHG zP8ZgQ3-yk%MMVr;A7;=}jC>dYL^UomvjSPgr#BS67oB(I8F^JC<$bQHnLPKFoDNV@ zzr_K9I2i*2GI)@n=aEYCITXft5*iJ-;!s0({J_9Kr;9z+cj>qM&-K8mDGeazLjm-Q zfsOGyyXaGtvEL0sadGi;RqNS1jiya|ZEvUimzGAN;uHdjEg!-Qo@>%6Gw!6IYw0GUsQw$_P*!gP3=UkCRrwa63ECFU@dU|Pp(!9q0%&JEv zEKL4%ccK0xQyA4wt5PO1nf1ArJ2)tpn4Gk})*Uq5;@IwvhloLf1h}eafWc8YGYVMN z1lY9+a(%Wm>TV5w1IpoIE8)RxrL3(j6F|eC1&aE3czA2iNd6DvH&tGzW-pu4MlF|~ zy5OvrTexd#Yfnd?Mtfy}yaS-|;mQLc&{873`8^s~Jn{l=Ux7Km*BC)u?A<#w7Z(=} z-%FN{XQu<#&{^ZSjErdCM`40!QeGLE@2Ve99$$PB_9akLSFg62qzA^1w#;{_gxeul 
zh|{hFI5>eo;g03zH*ee3k>K!arzah33W`rfgGB<~?AJby`QPfgn|ueTdDt+drKN#T z38tX!fd_ck>1M1b4LdKPJkUtb$%(_NU#HQ*q0d6_EUwf9sKEZY#B;62jrJyLUr4D2 z176C=2;ep%23auRRH&}p*a+ARR4hzD0b?1hARs7sbfgfu*?JGKn+7^SsS3nGz>Fyt zz%O8y5s(%m-R}>6bvi8J5fOdA^532u3946_bi+OqMz63=#@rmQM?&(ybYBu;V&dVN zXMi^pgB&q$BD~%Aa!q{}@bLqb|M{(f)YKNTE!1#8*XWEsN9_63)YRdTk-ef%_d!Dm zD?Rr&=l@-5ljzacO98z(SgU*#9~SDwmBws7KHgY;5dpVAYO{f{-5%4%`IA{=f18=* z0!2~)iYA^cRP3E~-KF8?*7O$rJ}^MbLytH=ugm9m-!yQ^^~VDlnEZ)2nkdI4nols2BR!AJka9HEmo3j4tlQ;k zYgOun01Wx^jy@~pWkgKOcgS?*83Aj?R2c+`c@ z4Y4Yo^VT5tN9cCSaE*SWLqBEb6HNLj;#+G56jam`|MMM2!!j3F*J|5sN(NR&XFqJ) zg?GBf3JRnbkY!ZR$-uGy+6cdEB2=$j>qjOx>Wj%qATs*d?(1pMECxjOXytk@^Qy8z z=R4r?eH8Fom*2O?CM4;QkZ>EkAOOd=OGXG-GkF6X;qhv}aIVI?R&AwN{Z)Ren~zUR z0rULn<^ab*BzqMA!sfH607H(+?E0ZM=baN|LHw?k?*S+o@cZ)e@_Unom@1K1fueA1 zqyb;P=p-g5uH7AXXtsQ6EjMV!0hW)B-!PNnwG9Sx)fXW9r6z2^19Ol>TTj&FTc&#a zUZ7?e{8krCSML%k)CeHZ5?n-1_kGTao@V|#KT@MpjQ!-#U!sGOS#)!Sls2!wr+GW% zGMJ4*xSxT-#LbVu$?g-=B7jA7BqS#~Bwyrr0Eqmm2-uX9|7tzMv%os@9EjuA51Ngs z+1ZsDOW6R*-@0M!fDPx%AG}* z)(d54U<44*6awJTgd6~O$wZIP=$zUqXUh#yOQ#|BAC%rrvAmeQ+}hn;2bv7I%U`eF zuRc+a;@|4k*?$J|!_wrE<^_AZ?3oJPAobKjpyD}E5WUOI#op1xoSU1gYM>GPLBA1V zb+>&`n_Cf{8xGX6+MZTghfk~Z%Ab?)K|Y`V!`^rJWBK-fUt}e-lsziSmR0shl1j*y zEoAS#w+NMtB#Mmek-b7j$VJ9wk7VzV&GR|y`@Mh9^Lp;*KX|`xS}H_TcCYygGAw@8<*vYUXdyH@)bp0b@@{=imbrO< zY3?EC5!p1b--F1BCmxg+g0-BMUpb=lqVXPY4y zdmu;Tefnxdft~^eQ(~qF#a*KXSJPU3LARH5wr#VlEJ$Pn>!?tLP=NoOnP~+5gcd&p zku0RAr@znrL;>UsZDtBFij|f1OBB7@31LBe{F^hOO;`~uILbXe`$JHp?}BjSmE@{T zYm^D=h?BtyRS|uuE{$m{guWTs>}_w}nndoKXHHpb&siJwCEZ5piDpaHu)8 za%Kkj(id;uoB@V51_#ZfRLp4%_!dJ!?ADf(i9S4sk*{7|Puvr97P*R_qv2UE))+-U zqO7hR4&uMlp@QB#yGGaF^wjBk!rMW^V%QvhexDuXN4$hVuxkc=?Pxl2Y^}JSTyy?! 
z(YLu`v{T`eRlBU-a(5RtGBS!~y!|pJh8%VfXuaf|b;m5&MO%k{P#P7v9bsiuWD!)m zdM2mM`9u;1ygL-aLXB0s%ISECSzV_svl?IC7!F-3bz0Umxo54gJW)O|T{=5{1tnzv zD@aaL($1j%Y+|57L!$dJf}lVz2{>KBnj`9t7Ix?=d8{Z9Be)$!%0COHEcOioIekbcTk4H zn?*AW*h=C{k3{d>A`UF?%t9yZ6D{hSa20?ictBl?(5jSp2#_iS%3`~y9>)yYAis_2 zwI-;gVm&rC>l520GnqukVAA@*18R9<8lKRUMFr@Gq?F?2Qem zYR7@tA1}|Q%t}OS`~?a( z`fJUg;4}X0*RlTj)JX%a?foyFYrcNe{ANEwXhRQbGxEl4njmXotC^LDCvvLo74EFQs$FUo`hj6y}!VximHPcJ2+>-3^kWK0E! ze7|U(C4^;!^a8nnEK*0siGykcm@<*})HGAy&o$|Uk}r#uwYG!)88MO@a# zBLf0%1_g(TRwq|Gt&JlSvv9~NvG*GGpOsN{klpTQ)()dA;r`yZg(q==nlNQGS#_n%a`{A;oVhvM&89}XndcW7bp4s-1Nybqn;!=^O&uC* z#wO+$q}JT*CEmIJoaO5@rDN%LJ~NKw#38E|<7mmKcs(E78{_U!MrIyEa^oiL2z_GfHUXRwUq4aw3nEJe4RH(L(UUF+RIW)eqhSL5^ zha4J03g(fMlLmBmB>kY#oB~J>o}xQ<9l8`PVsT>Fs4^Ajc8`i=ZGX1C0l+#25pKM9 zU6Xh+RLlY}N6^q$9J^8w# zNW&>DEuF>66Pkw=Y2}+yCI|$#Tr{0OYdDY~#jUsIx^&4dFhnIKSy>H#QZBjAbm*dJbnCKI1x@=|T|ngrE)VKkjw`dx3o+MZHNap6#l<8by; z6PM%EtD%Cr^EB)O(3!ccBysWB1P9~pP9Bk7vsu*Ke$OuC{pZM9MuVGviQTf?Xx(jm-8twZY2G^2;E%knH0#Y*4jc$ zD>jgxf)waSMrzuX4lK!ThxT5(Q}}ldmUsZ760jVnOKTnV&nc+9tzWCIU2a2MGPF!r zsgF?g@P@BhY%ANoUmXc?yD(-w#Nx^FCt7zFVF;~zu^|_$_XtBt4-yGD$ zi8EcF_V;0b*lwdI7j*ybX|VNHxK;(DQ8%RO$}$(2+IaI0gw`|&AByPA>50&}XmY;4 zb2odul54!r$vd4T9ah3yw{N*6EvZg;z4gS`a4lV5x4T0U`km_wV*vIyQo>MD#+$OTxKI_?@6BkuQ%<`Lstb)SUIj%- zKfquSNc6-H3t7muw5Xw-${JYJFJB!qI8EIAbh>q8-?HFGxxR0IlC3)C;&+Quz>0%{ zf|wdwQRXO`zC9yA{-nWFvg{%n$Gu0alIfE#oT)>0E6ix-Ns#kb%-%1Q0ilatw8qMP z+ofSaOFNM{ft#EcK3Pua2}6WK7J;ZM&h=l`3cLHF-5=!7)n{5HS;BKDkH**sMb~Hg zXaCvgzdsQeIG9Kn7E%T>M#z)BKk3FToT_&ZK9jn|Z7pF)6(u*HO*EL(+_1q8 zT>3GSsZ+0)naL<+X!zdo(;$Pm*Ref;ssbiHZkF!)#911Im&L6~dUWenP#vqmix=b- zyGC>CkZ-sDTUOIe=x)c|BHQ{+og8TYsfcca=liGULilyNNvSViRM)&1%9&Ki09WX@ z*sgdL0TUi9MqTriAGfwnv2t((vItPrdTK=d(4+@_7?5CF&B*h?EPwN6FAfpy_4|_5 zUC^efQE%{OCC=GnNm=P|0y2@s69TV<)~h9>v%2l$<9~L`Vqb5ag`fW%6iPvy9Zx`s zK06A==y;ug4ZpbiW$ zxJF}8K2oDsoMDN{Cm zuSBKT>6NjiJKxP~70co=+h1cM?|(}J#z^;kwXLjNy%IIz@&zk0ojC&W&1HVKpD#|& z_vb+LAJki7Tis3bLphW<(w~crsoUfIfL=0c{Y?+;BO9Cb`n++pC(b0`{MZFGzu0d- 
zqx;CVlaaABQt5crWyL`cF9Tp{h7NEm(vga9U27S&4}FN&Ul0d2wY-gZ6=^cw`ClCy z1&?f$+dED9SL@~&z{-Umz{IptAYj5QwarrnB{arr?wxd?-PraGCf2qR+{R%3y}K*t*F4za=d#aVroD+QFWrgRwL@ zQsi<8iZlf}CONH_%JLo|t?l5T<|k_miA!~@w&Xk2R%y)qPk{M#?3)e12qUc27ptN7 z6n3J!3jw}YDo-vzS^A{wBrk7&SLb2Q>O`-U+Ri2=>iBR|5rhqw0skqVkQg?u2?WH@ zpfN~p1E!F9JNm<)t;0-H+((d7Q2#K88oTT9ldSxgKh8p%131}yfAtN9*Vm-^Q}t5n zyn#Bh|Mm0qVa1advmNt?dlO7tD?9`Neo(Z6ZAQQ4*=v4YrIhjL z#S*K(o>$ggy5p- zRas?a@&|9T_2OB}VZL7!6{Ushsp9wW(t0yx$u+nAfgDP^J(fXt@><`^Uq0q4;u32-ySNuToPtEiL6>;4w-(sGk6bDc2&aLf zm9w$oj^lomt)F`rpjI;CORH4}4Bs|?9LxIdl1 zw~W4DPCMAf0IiRJ8i%&W=%ty9FtP-pO;JGbgWJFW&0y^&?leyZN_+D2=Oe?z9cH`+ zrL2dlaD#(`3xh(8pvy3trPLRSKI!1OT;%QuP3@!lJ9%6PaIcH`v<$1yxc8d(Ou{n; zaXgpMQzPXV?;KqPXpI!i&#(1;RGInRl^BUsatP`X5P*GTRQeJI22=>4Aj1Hnt0k&h z!`+7A^*Za#n>TCpNEWQDmJlT2#S4FU8U-x+vRY0F;u_6$y&bGG7KI%Jx}=RSF^X&{ z(yn9Pq_Azhh*2rwC9-H5qh%8seAbiJYS03h@5G5j6CzH^QGcZC%l%PpXU+WGXu-Ed zuVWj{dWMcT9vP?*eoRmEx{TJg7wdcRo*Ij06go3^ay_5{@;V-x>*7w&w-%%#0#!)U z%L$*RN45{P2(TPyJ8v&4e(bUoBE*Kby;B^d3&a>;XcvVKU9r|@0G~_XH5NJEo3n)) z$^f|LsNad6G?Itw;v|d`AM`(r(@Xjg71aM`k30NL`)iY92Xj=jJW)>yFZ{cr#RT>) zxTp_0BA=~D{Fw+?OXAp^)pIrd*;FzN=A!7KDv(i9B11kfb)b@}W*7iD#V`+?5BydJP0dFA0Fw?nGwF52(2F{hO2GliRww8lih^ z6gIpr>Vqw}lxmQ5C|2q;f9zCY zz%V;S-zAcAl(2etJy$smTz0X9Q_qK>q@0NM*I#|1e zBqwxI)Zm{nU2k^}0Yb@H_pMCZFLa!HaQJEWDu8;@WU_N+A!-fHm{HY!TB?Mwo z2G~wL53P^dJ+59`Z#Y<+`h0xUKA{SbYXIwytp&Q=!)a0=4;1PLsnr;vl*~HrXTG|4 zqF1%%{!BZ!ieuDcTdkP6PO@sRpc_av2IOI(d4zI(yeco@!1Qn zA^8Q)?~qE2kgyS0V=A4eVOfURInU--`IdW}HM2!(_N&PdA7p<4Qs#g}0uT*}>UxL- zc+2HoCs@hWe{sRo2Z?n5B6CNLnrDC03i-z+X*n>X~3Vsk^FZQh_fZqUS0o{wY?_ zuVTRQH%8=A_D}J97R6?M7cZc`NS!OVA-K@-PejOE=&rJ-kQ52{!hM%toG#}U%Odam=zz(2``tfR zeB5J(B+!_BOI#-?Sns*DltKFG@3(Fp7pK-gqb87#s}0o<_x8B$cl7FC?e8oXtw+c6 zI-KE4v`TKWf{aOjFNgcS^Go{L+_d)f#Y#lp3STdLo!_$L5EYE~$-hMG9Uv*+Y<%4o zCP-ND&)b+iX}sQ*vCRF{1KL}y$i#KuvE=nOKQ^0kWT2cWY7cpd%>OUwu78w8JUxStKKH+1sOqx)Pl=_3ew`i`MKbF{%?q4}|c20(2x%yOT z2I)T^e6EiF^=ZrV|9-yovSz@m_MhSQ&#}>rW&S$nee&YL0pceSC 
zyD`1bPWYc+N&|&E0Hjg$qIkf~pzMU`+S=Nr%*{Vy3OoKYdOAQNp4AxxaS?2pnQV^# znf3hau#T~?uW}h?K{K!Nd+`lDfehU_j`E?4&Bu|6bL0epVI% zFz)+C^@wcBi7{W@%|y=V@9q87tZ&-c(a}qZ%M5jPmd!ucbSe8O#F>N{ZC}b_4F^Q) z6pG;goH&YEv5!wLI%vIwriItI2MBa(!ZD6gDZ{4L~w~Hl|Y&T{vQB4CPFIy zS(5+E9{7(}@$pTjCDIhK={gj$1W5KU64Vl&%BRJ7At*Fdv}2p}pQ)qD2r6;VjQ(^D zfBIDUNE@9{Bfg)CRCJZEkz#*waX!TSK}8n@#; zN`PrIQ_>4X>5#Az7}uho<@!aBzn=9zkNOPcn*G}D0U0?%6tFsA>moKqC#Dw+7(AmF zB}r?2CjGpJ00mj;XBggIlUA9loJ(naeo+Scpyw~_DHy%Cz@8DmQV!fa0n8?`y<>CK+$G<^zMUferAqiNSJ?=-!%Kk|ZVFP&x zcN7&dJU>Kh)UWcUaDXvvy#>rTpZ$&zv-FkwZaAB>-uYB-lp|MPeG(cOJnfdZwX!2-BDzF=&8Kr%a#NW z=VcC#dny^wWXK~ApvvrKl1Avyb)l2~i}I+Sj=z&I-;4PWVG+YzDTrdG`!Ra0vTsG$ zOs?|tZ+ws7F+wN%3OJ+B`zQS|&_NwU3t>}O(v>R`b7~Z_IJmg*v`|n|UIqo@S2}atdoK){YdOD&|a~91E%KFCALh^;p&EOcFkYTHoSFPeMSA$|BFPJMR_6 z#1Er8{R-u-YvUDQ8|e{Yk*$=lg2nVZDc$EJ;SxlNWkNrWB=|tK5D}da8Bq9!BqVq! z&5n*2vCf3|;yIgLC%hxRe}DMRp{`7BrK4arHP+IA-L|y_BN6>?%Fv@vemw?$;O~o% z51r7+{oMJQU#v_vFXLbueY7L#2j%Yh+n72AQ6D-*@jV>y!EdQS42$Fj4un$O4hz`q zKbYI!cH94W6=-mT;yBsn0WR(o4DmIv4Pd=`^=i_P4PF~{+N=jH>NCh@E?#PS z(zL&wAhNy5jy6Wj;Hi!&B_oBXz$fd&)`Q+3Xa#Ed0TI{i+)aH9KhlNk9HY(=Ahhq!PIfPCu)dq&_d%*7!pT8YviLaMPAN?ty~V z>ORr2a|f5(QRLIRcQ82xM_MJOf({Q-)K^pfRsK3YP%(C3K zdRm_k)>NJ~1+fjx!RK${@vED#DzIe^Na)wsFQ3CEVG!&u(-j4|3N`GY@I6cSN>d2% zfi{Rr_}3K$;YD}f@87@MtxrnNAetW-I|1j>eO(W>vuD|>15+RuR*gf0eP@kck^%6n z5b#QW-oE$HP?W_9u%O>EbSDZ1H*n!PP z4Hfn~m&W^pD{E4*&rouG0VTBGh;2OxdF{lLeHP{Eggq#fLR;R5$r$Tezzl%GsPoY1 zN`CFE1yQBLwJVCrLCZ;W=~0kOcEnw)S~B{2FeMUDoJAVDd#G-a%2Ev3004|ZAtA8Q zYqfxa|J7g##+{!4X!N-jdz11x* zc(+uq;{!9!n{RgxM2W$%A!bu$&WzaDfdVn^o{3L`2%)=U&%nym7o#|tm{ZW0S3Vk$ zQ$Q2)$aaqs9O+_@>9)slhvOlt3F4kqS0jkvuz^}A8|Liy-uY8`HP5&K%2?z)qK;WW z#kL*|D5p7D_FB9c#0D||gpTEhOQy)D1&)<>W-l;Dt=u(50?F><{L`AKp4XrIqSrlD z>U}`s2B_-#zz6yEy+FHCf_#7`6vh%Yi;Qrui2Wh}pD|uVm-Ab3%L{c0!YZe=QQe=+ zRRAl}sFZ}+-3`fQpU)RUtWNOGxVMHQ!#rqT)|*HtI4(2ezqt>R@KC;b30TfIwnr-w zTXr#51SL-kKr-)YQg435AkuE&8!LUU{5IR1RcLF^7i;CKvo|Jk+KzPXb*1KJV`~B6 
zF?LTTR5zGFW62XSg&#VQT;S2m?Zo#(qIHhiup@fyFI+_sSwu&k5D@UH3IyHjLii$Jib^?J^G&$1W~tz=hB$=Mo7M0NO*$@! z9d^qkZfci<$NDrVM9y=lOF4K;ftF}~qP`w8xiWjc$92Wf|^*TA) zNi@mffXg}KvtAbkbq2$M8s#YQ>C@6nuLjzW7{onTxlD0!_jbLT9ZZskC4ElfKD>% z(QxbT8aO^k9lP8kQNml~Y@&ejA`LNKP{_UwZw}YC zd%DG=>$U?y8fiTQF<*yL-LCAI0b#}JQhInAHT=ms&0Jd5mAY(bWw6e=ny0U1Vk z;?MR`>f8{jQQhP}S|i4(UqlxzE`r7AyY-^(ZseWfO|izs@aSkvlIO^CA&YBn-LFSe z;I1`&jky|o0K%a5z4CqTrcA|@mJs5l*eY-vT&v*5Rfz|cxmTH+4G3;Q&U-dEdy)~( z?@_o*2sEG1Aw+5Dd2O(KY1Js$ept`5$aukvji+F_MNe%$cfs;c+n#kD83b+h5X5|X z3U5mzqXVB9R#hEH6>@L3y|*xA1W5(J&)R`tzI|c+#bs8u4Te`V;rpX0P?=ecR4Guu zzabz%^phoKM&O+&Ipvej|~=A@gPwUXf_ zfApSshe&vPyH5YvBC5!Al%`5AJ`E3lXfr}jLP2@{6beGD1moq{phuEp^bPV{t;=a& zD3ZJCw$kN({q4fh5HvxcAjZNN$iL-wh{DNT<{1f)#OV%EJA~><)$~B=|jh& zYHHdYHU1z+G}s{+kF&k3EoOtTjNW?q(s4@{1!o%Eb;ZDFOB)*c^YH+Yw1S=@e}vvT zwi+$O!Y9q(*g1DL(P(|LA(6WYQ0!ivk@F~M#-9@QhkaFeBe>~#I%)qoJR?Zu=sM1e z2XS@e;o)y3pP;0mOA+Um{@DSKHY6Vn8I9#?@z-;egDF}nrX@1=0GtS=RV}=KmKtvZ zx4f(HdPus$Rx`>E7`Xh?XS9a2px`KGS%-+h9-XSk^UvSkd8peyr{kEhV+Q+fd%7od z*ho4o5sgXtNRfN5Y4oPj4VzKF&d&BtlWE{#V2KxWUL1yY6sa2^%#7iM$ArX89c%ct zF#lRzOx~l=&Q6u~BLEFxU1Lp=%+PUMaSnSLskWLT0-bbe;=TA!AQy|7@S%nH zT-EkbxY7t9r4a^itAyqmf2|}u?#j0}1ZjxCkCwxSQU-U$Y$$d)GAqYFQM+K$EpJJG z21VsYLnveFqw>|nDyTXkBJy)5Pu*t_u;stjpkEm-Z=D92l39v(m$6dD@hQA_;G1jr z29;kp;hT&o$$RmK8A&o6sQICis}lRW#z@X@`U8mNMPBtsI7hycc6vm3#QG9XJ*)Gd z-id|~=E_4Q%`lLe0YR*=YrcWjEOw%v^*x}ssoA*w%|snh@xn^~pkx_zQDA&gyt{M} z1xsmoD}y&tM_j?@m+2jaj~DoIcaKPG_-BxCVH1Llp6mg#B$)G%5cMJbC|zf>RI4|G zz~M-<@Z>XAN?y3Wk1+m->#|ZDf7m4tr!{|4$$f&3IIdRY#hiv5AhgET?Xak|r%d4y z4z4iIX;l@-#?f*7CYW%t;)-15?(dPcLaQN%n!VXaVa~b|@-DjfcA_PHFg&ZhI}^Uq zu{rAOb6tceIHxKU04t;BHD80FU*|y(72TcYNlHbGheGmcz||54lTNGrI)Xl=mPAjL zCesNOGQxZaCW4G(9EjvPdv=nL`2adWV0L;wRaGgZTqmCJ+Vq7?%-&Z}JR6=q?a#uD zb4AV0p`7Zvs+or`Oo|y^58K!M*_C2ZdeBE+0bLRjM@W55-cGjz*=xy4v=-_(FbytM z)WmqVWN%-B5J}S#>Mj?hIb$%!or1;oz0K2X1+QshPVEY^fnN>^m29kcHct_vAe3nG zjqNdMZ)Sf@^WTM0g12VioL1b%k2C1$YTJ6*u7%#aRM~dKT>`MzA{w_1&&oI;4yIu} 
zGS~~BY5UgE`)t_KRR{@0235rm$f#)r*JzcOFX!BrzC9g0!svs4Oj+A#s;W=XMvvjT)uEG;MW)qIL? zzvJ8wWquFQK-DMvXH+t3B9W-eP*RK)DxFlqjS<>hW}lNI*Jxus!;*`eeQe0DF;tvL zbS6mXmi);(SC+X%MD38yiJ((p((qd*(t`tuv>be_;~$?cX0FCg0LaRZv*YfWo?shr zBT|W*KKm~243TqgPK`L;aHs<^u$v)7KpausI)$T*f}j=zUV?UUiq5UiKni%qTgqRM z>X}cO;vxa|EoD_3&uYmh-AsO5|3Gw3-2Vl?{U5;AV*S71ba>J6?f>K%&`hN}{CVEO z!c_n3PyXK+@c-i_|NjsE8P7}q&)c)818ep1!34hjQeN7hY+u?W!=?~J(4+L^J#szb z%r>4IwhTl@gN7soBO>6kEOdC>dN)_AN3DVm07Gvh<%>FaiDope7vkE7hCvZA*>j71+z6#z*;q=!2Q1^t;A;5r_QR>+_9H zpp4#~!RRAt@m@O+d<+rIb%e0-zGaznb}QB7Oc2|%uGt^9oFB`fyma}J0U3wu5S*yV z&UY3Ln3VPMsxcjM)PhLa_1$xdlS}T9QI$KiKeTNDd+=b5$r%VqYR^^A)hoTLpDwKh z!dR#fPRqyH&v#2h9GQ-O@M2r#@8Nf6dLiI8ys*%&4Joj002JU?a#)G*C=cK=ytj&4 zyEAR||6PIsVt{&$9^z?V#J(jCWnQ2Zof{K6yiHVZFQm;}2Don@I^Wf{R+wBvM*_s~ zI!~$jGnUM5up;FwSNY+YQ#`bOMJLBMKzEQ^aMO8S$HTfiI8!C_LB)J(7a0lwNkb%t zuBPPZ&o#{1yRGz=^LXs6LfxCeC>AEi;6qRjd-rPP0r#JnUGtv=V%tB@4VL>dF1u9w zoCnSki~8Cm-?<4)+EN2a z#0FbdFo+x6i3gRe?}1B)?7{`SKvqc1fA}x}47d1e^*h`Qo`?2Z%RMUfS(ku{Id`?r zKM(Onnb({&;J15RIH{qE@ZCR+=xVqjCNZd@Oc(@B9C19YfO9HAsq4B2Y8JN5xMhx^cTS@Xk(Ptt!7nTyb0N^)xlfp$c~;|V;?hmAFRtvQAENSwWDN$;=0?G`_icX* z-M=u}Td~pp zx}tL8vV8td7z(ntnIt6{LH7O>LWSL}a?0n5pv-cUhR4SPt0xqK^_i(I7K5g$Oxldc`kCd-QJ+Km7sbR7yu_bR$A<&4lNSX9D6mnX%;2{pg{;12kcfY5UD7+L z5_Vf>Y8mCl`}#Ex((9EBOAx0tgrQ`&mQ~{Za*u!UVycK|b!XlWEB@klL7{}|gSCUT z+PR`U3n+NMoFYBMzi7%IWlGnK%zCiMecN1%Jg&_9${V;;@?(5sx+hbS5p>fsC?(TU zBXBtEx^EcWPq>Co`>~U1d_5kaSV!4QeC zpa{+o%RcOg+rB>es`+fVhMqTZJXHH+{nuLsEP{N;9M0(Z8XJ9XRpo(#j;HIsk*fmGs<0Hg0JZe(oq${6lDgz@`%wnn%9EYIw7d*elp zg9U;8=P{03w%^x9|=pgw-NvS#saMxA5 zG1WzNuYJGw;`wtF3baW1%B0Vi^6ME8S?A4|EA5t?gO6`Q9{>6;jsm0I^{IVX0b~Kv zarnwYz(;ct)xMVmZeI!ppU2p6kAtph5Ckq&Mn^tv0DLv<#+AB-9nCAu%#&X;lW&e( zw~j5szb`HQWFns)8SVnKv4Z4U-&zl>-81f_q-RvK#Ulwptc&FiNSH^FY8&%^2{{Tl=uGcmk_M#wNfK?J3!3Jq{#~2vBmwC4qro~RG zD?Z@VAOGeUcbj(ZOp#=C-*ZR#eVP3R@>$kz`+ZegAnxF7RaG^qvZ42a9Ll<;USe<-oib}%h1KRjJ4NW*is<9f% zD+makh&@gg32v6;X@OfgH4RA<{##CSJh>=@?Rs%z?1Mf93jE1-d*?Mi8(cGJY9ZDv 
zeth~nSRG0vNf9SijuTqOSz>K#bgc)x1a38QxG0c+H-Me!y*`nh_bi~q4BbcTffhdb z16v8=TkDta$jkdr?r#qt4eJAn{=mfKy}QvRno6GpD6*@bQ^RqbKDc~L9QtfYqq<>J z%dELhS6$Atme+bOn6tVp4`PLm`TwRLzC1{gd+0C$Brjrao+2kl%)nro6zC8?T)&Jw za*t6E8`5wtS~3KPGy`<9IAZPYE;7Cf$v~F|9|tgnw^Melj#Z}tg9JHFh$qr+3j1?E zuFZ0VCpm<@!cK=s3joKaJ=j@;uq4rS^n`C31pv5p{$O89J#vNFVi!U3G7oe5E{6R!i1YZ&^TK3MN7S>?PIr4)e+ZkNYx znnk1Tdx=0*zPFvY2cZZMWWCY2i8h7^H{?fsPP}n{{Fv?r!VM~B&6`}Bbyn2iJAU}^ zp`Coa58}Rs@(pAkc{tL(n|WC)e=m(#F&2$tQH9Nd|l=mk;4HB7%2pd0k-nW{l z%Tpg&i~#-g($9gecdP^ie6I^VcWi$C{E5oP41yk${KAD0mN@5M+%Y-gNI!5@5T-o`p%t1{1@)!g4~Vv4#t|&66eju4=)F9RwU2`Cm4s zuXfkUxdtW3kUbXo;k0W0ZRtCs?^-A%ml}Z$M{j}@gtQ9h5>t&^qhOe%ube|-jaVQ3fN)60SG-eq;r zU$55lGT$X^(68X`;c8Yoq!)XyVj@8p4F|9Ep(UGI8NGv$2?S{Y$ao?Yky%F;D|<>z z|4~ivT7)!3k$0;L!Uwf`s*>SAt_v=_R%!#{95o!dKIf@w-It|2KfG-OW{MSf3L(9> zy=KuXf8b|AB+OQLQ0QU*iN+ka$8r<_%=cvC{i($3)`VDo%<_Y^mhr!|Ab_@y41`e1 zb}pN^IAQs#bwGKmRbhda6kZdS9Cnj{96dD^C?M=4%4@2=z1a~=_WE%eNgRT+W$wB= zsS30_t2_nK)^!foCM~qgV_srcbj9w^#HWaNDGr0C+bX~3DOjy$dvNa;z|%lBP1EqJ z?M2hBNB(cIR-jtwL}!;%0h)ae7`>n6&kjh?L;1m^^g38M3$!W$m4*rgHx~I zUGECxL5v=FoZ>ScFd+@jTHTSlN*SCGVl`Z$yvN4ETZ9WE2LFR2)d`i!l=PJS&vV3i z>I}>lj8(^~zrGE;bI-k{#|kr$9}&7%-&=D5u^~ZhGDIb6TP2lvT6tS9r7WG?^6`yYc<&AU33cD1oFi7NwHYanpoZBh`|MDW(|~RdzoM$F z%pUi&IQRon9;yhodO>udkjrv~L%lo{PAiL9s|SXVnWQo{$rp{EjZMsk2*md_JQ(I}TZ-lYeaZl#CA7$!&pf9s}41~`ja zKB~NKKWby0t4tq9?%if0@DsAy+KJ!!=pb1dh(DC`y1T|h?QkSiMb8%CZfP){OnO9D zNrv`AH@@&&8$8Vd2uw#tM?nsRM^yehoTknV7RD(uZVr z&6_uqI{FRX9I|n8Ho~X~nov{6wo2vIl{rHQ%iSBQj&_CL32`(!a!S%^;0Y>ssMDLi z(sh9wHShYBg<%8%X+AmB{`U5+3|$JvJ2H^5vsQVLV`&E@-ng6brMjcl1NQ3KrH|#C zu)<29Z-_a3;Co&*an+-lg+*vl3l6)d6rh>3srME@G94X( z-I3%vrUocpj#Tm#RprpK3!SLXeSM;we(j+Mx%9FWSFqylcJx;*mzkj@C90{biRlCg zB56f^JKNfX!jUs#3LbLxi<|n_{N8kMH00W>XN1;{GT_Xmnksq;FDf`Ahn>OG3EO3P z^y8=1Qs7PCkSH9KfNgRZ6XqSib9zRe*s3}~#qD5mTDtrTFZU-IzKM`6GM?|klYdzg za=~`d@zD*d!FO9sPcU`j$keLy#{XoaJM_q)L)}b6yFwewvSZbbRabW#alM%LW?T}z zieGH;Id8Qptw*w0Dfz?Lp97v^@Rd{pCC&g1G#`prg?p{-DJw^hy 
zN?n7gemIbT7_!KMrd!{R5fG%bep`<`kDsGsVub@iKvsIwRW07|=$)i{K6eai zeaW`oL#)i_m&*c;R@@QZ2iY@}MnA)*kRVlviH|;;-*i*PemW8ngKq50ie;7ae_6JA z@%#2z#QTeIddpq`4pU<7LN`}(VkgV|8E34Ah0BkIloYgXosebZgqJgK14NqtXE@*pts!~M`zBL*ut=t!8prUeX zUMeYQ1&`6jXbF0N-GGPPl?1#y3hSwtMyA{~AC{x8Q;C;8f;M8TY+ z;htHnZ-yGJ2hs%?LC+LO#_7q9W{<)L#A9&};)ovZJ9?+i6Z#NYJP zUF-4tEM;P24gk2P){3vf8a8A_LOr7e67CXi?9Td^8jML-E>DXYBrBF!OrAxqe1+Dt z->`%mgdnPQNgf^^MuY3*r9RR{WgPDDeBtSD-iX2(gzvTS`-;?cRCB#fJK5P)Qu3b6fE6nhGDOL2q%2d^b7ZaReJ)@qmz+eNxvfi^Xu*KI>{o zNN9*svN)gXxgCCU({J`dhfUDF&v?HEv_Zq4J}y_iO-YHa&$r84ij~kW%I;JrCJ`F|2v3V@q-1$#>%eeY11jR<-B(|Y5a6)8AX69AxPfz^u zLx>u{8MwaQCTBOhwV<78oh706Kzo=CVyGI>1#GvZrKAj)F2VYuWI#4LHr5Tp1K>6t zT4IL|!lR?$<71<`-ik+j{wyUxO5n#zu)gtI8m{RRw66$D0g6|u%|T;O8j?enF&p~w zCk2y^sJrO_fe%9c{G>qT3PIOX(88+^Ia5)BdoK+he zKX;1p=7Oe;>;0wr*g;l;${xHijIG`215b zCQd|aIG&!KLLwp%xRv|_lTTk$TRSy37YgRxL*dU`b)f6%vOsOA~lEPSKT}|PWqc<{>G3)009pCNu&>R_=lDoCO zloTq2h(0_ap|0It%xV7l_t?sOM}R+2A(+K6GmF z2X^0vk6)!)kxFLQ`sOBobn*%Tz_hj}K9KU#W;Dj}@mQNh3z-|hw*(I4JxbG1rC{*= zA@4M=B((`xbLFw3??r0X4s%h+zG_14WSZZQtSNmHF*3T-JrHuP& z=Y}(g!JL1M-N3{GJ7kWTAig18jW`pd<4y*0Eo!$-C8w6&%NOKRVD zeF?-SWZ$r-$u7~+b>~^$I9#gqokA|J5VW90#&Q)E6|Aj0E|ss*f*|3CA_u4@*kYda zSx^y$XwMP!FKs8>u;u=aR(GLQ%TXn;p?-!hCl&@$mu6#Q1{ugfo}6CME%0x@rR*17 zeglt$Dh#6P5T+>NA}Nvo8_OA0rUf%JoEM;<2l0FzaEJkbK^N9u4AvDPsWf&S?ZWms z0ZX|&rUTVEE8BoiA-P?lzgLh9SGZAZ7mx0)g?{*8%_>O6M2OwT-tVl~f$eB{0MFFa z%uH}X0@at_k-@$trey5C#$TpnLf0WaU0b6%6*=##O-wLiV4MMV9I6{OV*4#yvQW#u zy&SAxu&k1qpDEB_ksh(jc<;jBMFkc7zhqFd~t?E_(}Q zOI9|Sm%X?9IQ4#izTf-b@BRDT_aFDK$LI0*c!#U&I?vbndL75}c#g2a$jFr~EgOx4 z#79%$9$8;b!KOcxt%I;X!b2^INg%kP>F4G9xvuL^GTs-Dw0UGhimSAo+|)V2lDczD zgKr}FwtCcZIlL&Jt*>#75CrB67(X$A=2S3P58a$J(jyH10Rjd2<{vxWX=tcZR-ZM} z^E?f|#x?)zGF<(fiUaba?a>_D`CnI3%CEL|tb;26^6u+6d8?I;MuxNyzZX4Ekqq7C zpm}Ve@v}_}=|2R(i#zn&H=L9~v|J?c*Mm&%#~8u|#h`N#bnjh4N8t|P6_~`1Uy@zv z`a^;dq+97p>zL8;{I)smYgZCR)N86j0Cg#zIRBCI`rZ=OTTfk_~;m&5r zot*FZ9(+$D9apW!=chvqAQyP%^8J-AStf{Cb6y?UU!E*q_O3au+z|K}_Ys8>DJ@kK 
zN72DvKO7AFnOW9H89n&K#4^CduBfcIfI<`m5d>cP8Js;AP`1wYvapXubw&G~!v3WC z6}67Pus+i^RSM|W_O+8wTVr;u&Y01$D6@k8E8b)Nxx;N_P=HaHW85BG8_hMEkw2$h zp>AY+(3;8hEhoGQR;o4ze*rGnHhK8ii$QAh^O*A=*G-S8-trfQ0`+B=D@TZ<@Sg zsb`Tx{awtozME85ccCKS2a6}DC#253>puB7v+Wn<$Pr>Ra8wqC%bUvjS-+zoHRYbN z6zOTyDP=@)$>+8~6P$z}eeZOb47TFA*O8G)Tog!x7MK}NjiQGm!ozWwrdNJv>$q2J z?O~ujcz5@~-CUl(a^w7`BO@b7`N>GTiw^~}@f)ly)xf^;B9Uj1Rua3?K1i4i*BYKl zS(NnaQJ?YM z0*nF>b_NgdT^l^?g@XPO`j)x#{UgWye|Uxe`z;%}ASx^St#Ru%?ChDL^U5lz7Jb&e z(kCC_jxtd_#__C0AF=&Rdp%h2uI_?0iR%xgg95AbBOdGA7!}Ir&!6u+S<+5vKd`B8 zkQ^)mV_nvpGqYxuqJKY~c*OXoD9nfrAs%pcBpL4-9T6ZwFi;VUmLtN(YcX7W|&2E8VUK&&*uk|Y4epCoZN z0$>nx7s3IsQAzHylSf$CT=Os8HWUlZx=w+w@s9EDYYuZmXW-{fxQVW=w4zRgO=+?+ z8h;tL%w>H*UwjM~pfkzjKGZRpnx$u3ybmc+iM&^8As7xToKfVIsVTL?(yxIeU9b+r zLU|wT;=#R7 ziU43sN=llz_GJ%`&Cj0&%kcw(T`ETu_zYb`)qr(>8FKUC_QyA_XeylE-Q}Ype3YTG zvbtKtBUA9D4TS=>a&!J6-d6JG&p762x+TV?p5E*~1GyJbVA|PPHk(Zjb6)bk<1n2c zS0H0dg4S(l(|9k;#7#4iusy>ebQ^wl7!7hDn!5(DQu_5TYvIPo6FkQ)I?Q_SGm>VI!GiWB*5Vkk~%3yJe`?0E?@5LS>kwc=1lNh_xZTG8xxqEV^f`z z4b-7qTD$vOEz^A++#Xx zuwZB-0tT^7UmvIPnT??;Hm9pGgr9CWFVxhNi2MIw^1`pwjrh1=c`zEvJJvw0ck{`U zFPrm!HxobZ-Ca{8a=@(;ahd<>?2~mi8@_n1n*+uz1i61^60+2i&-;vT;=aC2i@Nac zff~cjFjgqiBTep3@BMud3y;Ii_NSNXOU_-bKBoDCT*D2HQnn;LR6L;S> zP&qIsJTEciR^a2hF=jK}4&JrL0HYlWzj7mWhm3|%Z=0IxQ4b2(azZy@^A7yB>3zv$ zo3Uc!XA6}H)4|Mb`n%%6quzRF;*u|oxSF22ZZi;ah^Lfye3Rp7pooacTP&sV`pIgZ zPp*Qz?9E;ymr9~j*x0MJc*jzfmU`f~ZaKIWvB@D62JQG#Kno1 zZO=a69!>BE5xG%w)K?hZ`my{#zIQL>7RU(ry$)PphTLlC4%S=s;Ma2#m}syKy#!?( zh+Aunuhn(85JX6b(owKIC`_{#1n)ZPtJO7^l4XFXfCYDk_01UoCupv@@I7h&_*5qV zd6Ls~vjrige1wrOM@U`&5h?L!#)^f-?dS|7$}_Ep&T@Z->0rD17(LSZu$zMg2e6== z>g!a1HOMF{^Jc2Ap6B4;ASEUBd9hrDCo3!KvDp(dJ9LE}HZ-I8s4MIoT>hbG8^(4e7Kp2_Ovg`n3}kh{Jy zPx%7*vAm|nG1z{=<^~p3?IAXA|<6~4TH!61`~M1 z$?lg(Zg-o0Bn!e4hrkNFR{IY5cLJxIA{hK+lAni$5&*|UaJn@f8rVX!i=ba6LanSm z^}lZQo84rvm~H%15aYKd(5t}1$lh}G>eW2+o_ol(hUtV~#rLtB&HRY2giRqr%~D!E@Y58UdR*p=@&{aGLxFK3|2qVn-X{(kX0X7ksn(Q1|osmP+Rn7 z=tlbt=;p!9`P8zBqmdW@egw4w-UkE>h#-QV}$!~{3 
zueT5{EiJ#D(F056;?=7yWi+WBA8}DG?mad&NjIbv6)zIvV$;*p^V!zi41Jz{w0W=& zvA@E~9MRH$14EPcknjsiFA>j{d>}bnc3jC+?u!EntV=8)ruN^c-^w!N8Fub z#(VL1*7>8INd?Q%Jc{zj$16YY^G*V(;CowJUN#ck1?6QMjV|U2_}4-{qpxOGV`8lz zpXH8i4ZZd#L|_yW63f7Qa#~6kS!^BtvsgUZxTfbV4RhjaPEMV34sZe*_M-QI;eI_r@(*&_BAve~*HNFY>0tUgaW}k^l zeUWgvP8B=_RWS$_<=$Uz8Dwq;VglpRGon-0(^r9%b@!~R)C;JDF>s1*ir1} z#GpJlpg=u-)|VYcnn*4UMjK#$2NiglG6y)K!FClDbW{^TW-B=I27f+;B}ZEbCR0IP-_flIeAxHkG^JD9~aaH*8v-&$$_KQH)n@h(;i z>SBk#9?y1F-T>vTdF4MwN<9>?uO&g92cGG9p6-#3CvOxcVr>5!+x%~6cATwTz@NwM z??2VW@M3qGhd75VoJsbJeQIpZ79EI@z|5b`u$7|8*4$vbx$?)g@ z!@lyrx}L94FtAGD8h@ucozInQ{#v{A)>MA;>E`MHbH}C2kRj%>UAfH<0bhS~N_;l1 znr5oyxjKVr0SK-m@A$}o2J4eXZnx8U5u;-L{%N;FEr_2@#w%e}0C7)eyLSW}Ueg z0T;P6bgn*~Oz+jQ1YYaIHbA?ZiX6$fP&`B;=@HSyc;2tCF+R6%){1JD6ivG#o0S9$Xw;~wM zqwyHMx3&6wsOj2pI6nwF=jajm*Z}jEn`)3&*o+iG@DrtO19ky7OO}|a)~5O}5SiX) zLQn+Eim;)>zCq`5%_mI>^c=JI@8wz#Ya&#vs)z5@e_^5gJ{9&h;rhgrk-4n>VEfb~ zs!lyVG*IBO-sY`U5qMgX%g5W^8x$5ufey)=6&2c~7s|GF z3!f&`3^w7dW~e6J$&m%WWUj4!0LWLn6(q-pN(*}8rBX7)<$IVWA=w8jQ@0)@hULAE zL?+-Dk0B8f5<=aSlEUiE+$DflxPnnT15XX;qM3zR`VA$4ciCI!<*3q;Qbe`&3mcEm zq`9g25XOu?;56Xk>V7-9i+$!dLi98K(IMzH`}N^w-w{Hm?2?sDepf9C^v%`LW3Pbk zFbtL5&R*?TF&|9m5ZljGGToeaf$8VP9U%)`sO)MVg@~7L11V2hZSx>|>LWh93&?gj z!??t_14fvjPi4lm8T?9$gIGP|h)y+ghloT{jJ)mO@Dl)_?;Z#{ zAJKz_EB8o&+V2>+n=&22_}>~fWhwOZ`UeI%n=H4NhIgQPH$T))Ulfl6xMK&1h#)32 z?!wQYckaRM>7=ijH_;xI-6J z-(6pP!+DgIzPZ!8I+CU|II{su#SXr|H|p#i)}aslV6^eN&qEGHlO=AJo0kzmC3plW z;#z?7JRD9iJ8I5Pm-=u=v5=Zz=z+pZEXx&n(p-d)l4$UU2cNN z{J8EuZ?$jdbt~|6eGQlTr;_;4&=A?Lq|_w#2(R_jt&y6=g$FK=7YO}g{rE0$hU>=L$XVSg=dj*Kt+WZ zsKRQcFh1}V;`3%EP}|;43_CkSV3h#%c#Zdu(Q(z`_tw@z7Y0;kO)?hP)_ihEF$u$qJqAws}dBeV6331nI81!%~criLbgf2VYME{ zF5T@V+!FTWOx4OjNa12($$YG<)l)`ygUD^`rH`(jZ$$;cqp!-a9Hk%$ltrTs47Q>~ z#V;|hv|jBiqnCIGe&jvBu2uIl93s?IC0ujxUWvEAnjCK@>`9B&%(rA@r_- zq#syVbLN!lY14mj1t&{sKqpM0b|TQV;j* z=&0@Wn>ZazS~+^LBYb>(W!okk#POgRs{PS*Mq^+Ai3unuAb7vCTieux2f>jABiclL z&*ULeDG=$G7BB?(HI16S!?3OUXM8yCv^ezQ&!6TFpWn)rfpwwOU7Q?e;ney5qg@6n 
zRkMW!WXSGcWK9H!4+y%j)be;MD=HA(V-f^DH1l&_;Ry`2nBrew-KvG8DXfg`ySEh- zz+xK<{7p(+@=$bP3`4`>q!0`^RH6M*+~A^d3el5#84cH0k=2l&s4xlcbS`t>H1=&1<(Yb zg_E!d!2A0)jD&^;>|tE?sIq-3-N*=>#8d85qkPwfbhNceX-M%+sZ9x8_3<40Pp`K%53ENM<6u4Eox&O(;_I{eOIaEPFdeHvfa$DY-2hLKvKP%%INBl+2n+rLi z)C8TSk4;_N+-{NLfD?v>$B0TF)cCM}xH)W?-Pv9uQY3v1-Vv~$-9(;ZvLOK)xTV70 zM-};Pkf`iB)ORd_6h81wiHL~sy<@osoCioD4s2}CZZrk|+F`dkZaS+SC`Z61vQtEq znVlWK?fq^tNa$X%%;kG;BtR?^@ERsydKtUBJF{c8`wTfcrKJP#&k#!iS`Cp6zJ-O4 z9dW;2yd8LfLzE6I%GGnn@LCguNI=j5lV;>0fDFP@cv-5urw)L!gD`O*^Q;Cbs9f!< zr3?*Efgp;MsrqcWbLxdFSHQmaOzx^xEs-IWZ^7M!f=$awuDF>ZQxV zIdSja#*WjG*ug=!0&Ewj(mk>4pFInN`MuY^l?ZqQL8>RYEf{}8N=QxHFoW*}_UA~9 zPYGz+V3zV0g!3mqL_zT?_LWa!%>^W%5;hn#?%VnFs@xklZh+^e$5qqoh-*`*mFxZv zWGdha+a2KWXqP;ik%$Nj3&_aegi%*6FvK6k37od*JxHN12MXDWStsh04~Rrio5J$F zdOR7`xENk}m5!!?0py1Qz%k$wKR=ec)3TK9U3@VyF?}1GRDTxOy~MWqjaD``ZmnhO zfTh9@^@3^MOpSHwUvz@N0XoB!AOj1?A+CEB5n6cX=#fyHniiT3ke7;xxa?;My^f9b zfz1MY+UF-{x_~X$@g}v$YQqDlBJ{yd1KCQ(#z4m4?bL{TwTq`ebT14vb7Zm44IGgaflEW1YDShnkBvV zbzMKsT)cSEwo~+1zgJ(j4)k!(1^low)SC3Rg5IU~jZH6si;8yZo*~s+9vwqFYr%~T z|BQ_DMOOQGGRdnU`#+PVu&=FdHl#;aPa;V4k%u~-Tx4gy$&`EA{7~9ad{J>heLjSp zZXO>&GAGVQ5qmm>7zG!f<&8fmUr^Im(qvxJU}g1ULGs+UKP1O~)kk{-u#B2r)y0iV zffF=5jE%^ee_dfhE<| zoi-5hO!VQx!osO&%`j;K?{5{Z>$y&gnq~{v0N(dajbR%S7em~p9=u!QN7gFj=D_K7 z7HYyM+UBT^OPK9?5bq#26{1sI-gS&CfM^x&y7xe}fn#a>VTxh~jIrNx?b3Ej9&Y4c zTs1#Z1>!HAYiVh-NId)+e6VDQA9sPJTT$^~_w@zf!yHabpzwFVmttj(ErER|dDN!8<6CH zZx(WvV0A{%ThoF6;e!X?khw=<_r)~nSJIX9FgX%+UOFX}Mo(Mpww(&dIx}=t0A%Cf z;M~mUL1GmVq6=Ve-*PlHES$e;!DI%>y_JyA)p+~_+Cgf@C$5S3umY|u*s30e~!Jgo08$`1)%(LeBS`Uv^}Ow3RPfh*s;mvA>o0d(p3r> zqtht%OQ-OFACRRw+23U(nB4+N6QElT)Xb`y_tgCGr0soe!@y}IgtR<0AKmmDYEav} z6=r$|Q5@|lOx4iVJjQUmfo{1LVzr~@H%a}QE!eXg(H|*b#2L+9FL}mG@V%ks;78cfu)kG*1x}RCt`pR-Lf8@WsI%jI0sdYG% z9g#>S)oFH3*lWTiY*KG{Grk38q-m>FlCZFvIh$chqn&J0P*Or;GBrgijpf@@ss^^d z-KpJNZz1}EdHEJ2YOM9$`~9!Ex%pbwJ^UpD+3wy`Q&)z6+MLrHWkm$Nh%X3&+O!^T zY2A@lIZD%ZByt3HXGKLNQj*sPSsY)SgvMR1f*#6uVOJW 
zGQtzGvbL6=t(&1#S=COKkbz5BP6so&0BT-MUR)o#?u$CGMj~3onJn7n+fhE>bt}!q zVqh%7iCuintg|M4x@R3u-NtrelPQzvzSy3LKR@2E^Kw>y@ln^*G`I8o9rnm&b**nu zH>-sK8)JV8Mf@H+zc@w+R8#n|-mlDo`77xCJO=}-4EKAaaV1SjYn5eBI^~y#`#6Z! zAGEY*p?vus>EGH)2+c`v26o?Cypz3OWt{j+nMoBQ{{YT~J`Btp!E_!YCYhnVT&k_4 zWN63K(+-FI#TGPE!tcA0XLiwILFpYlZ0LZKi0OsOPEBg@IoOjJ3#WtWq2l4H6uVX z`8k&yRV9}-l*$AeZw&-=k6ec`?RTvmEB7-hGKbt0D>uW!q=80tL5>yCIU=0C4vF<^ zjS=UAZU=GXc;>)4Q`Tel;yJ2I*Vv%cOX8O3G$Qy!9_uH-fXS=?j5bm z%wq9;Mcc>S(w<(PAMNddKa9Z&ye26i)cjXg*~9l660qZBiG3-sH);8hRSp;sbgr;g zVe;u1)~Dj=?mTfPbdHNhHcPdyBs?=DG=qTU&6z#&N=HSaqzg6e?IUjd`i!ynQk0Tm z)CZi(h5MnG#1k3TMm$?xQRP0%jczeUv}DZ>J3HN_qoF<%KC0yHGPZF!cVR8|@Ig%CaA)C*$v*r5n$kInMIx`p3n2 zhGCNCTYB0kt$r7wb%8(VR#QKW4av}V?`M2e`t$5gbb34HF*f!oz|@d(yS25|LPB0> z5axE!ynujIa~Ufs`wu+ENYgCe-flK{y|s*nPvP;z^ip75U8t}}$+YA5J|xR5lBIUI zKDp82EIz)^n>TM7@;{7gw!h#gce;EDhQ%YgBeMfnPc1Di!4OXRUNbsC&4zcG4to@J zGuPE&3_M#*(m%F*HTUG;7R#s>Xxc1E1*}wh%XIn zj#Q3dFTlwbUfs(6o=qVhVJCl5`r0XUoeGQssHPC0-TJ^qY^8>@M$C5}_#Wd>?f}H9 z@sbtbBoZYeTc>ZMwxl)s?={FP{E|%Vv&36>60g{ z#0!!r#COm%%!rziG5lzI7!P1D7}rxJ|LJXl$u2VCP*6}Hq!UxuSi%hAVB@JU4r+~? zcf8<0rV#@h&Dr%}a0sK|S|EpHmkKryG@RZhp)p$(A%_>N%F6?&O4wngQ)O`y%Csqy zDFnANGn+zp@af^5AK+BlC0la){P}ZYQqn2#?HGq1AeWthgrw<%O{L>NM`a6mb=Fnv zuJao;nmTUfrU9j%_>a!_+x3C*5Q`|wh5KtAh5{Km;SJa%gr!q6Ym3U8lj$EU+Nipn zSs&DZ5wIb^%2}aYL*a)ieWU3~NzJPanmFiO;i^lr5~oI4W0vYYJY9N%x1oj2Sgm04 za$$KdRDCEZK~C0{m2#F4KUP^V!lK6`N5J^IQcr8nw21CSPW$R+6>O#NK3QA+Pbm;H zi1JTHF2YOY$6kcsY~#n|f&#&c@5+jNZb8Pu?Zf)*QK0^+7MXt+&@8e%Gs^m~vs}Rj z)J@MN`1xJ*zmsmv4p5kf;SLtt$m&{K$JvKJfocQhUBIlG{f*{ic9*^g*vIy;HxmH4 zObF%+lsM2Ud>12?j?ec3x(y8ApU;kXB8KsBfaO1PqqNUZxND1laBwg-({~OiJU2m% z1C<6a9+qdb6rsRs}*B62*a8A*hv9}?Yz!K zSh>F;Wt^is+n2)x<|ZLu+TLFjKYS@rY^IZ5q@(UaCLb;z93Bqlwc_B56behBzCIZX zH1)Zbb4n(+oDQ)f~1~^^XCW>%_6fG@nq4G|NX*=! 
zl$B*GB+mL+Q24oo#`|!y&t%3aD|=qh7&65{%01i7hAV!_E$P@ZdSBf%{+Vf4k~BL< zx}|5?P!-5+Z+snK{{W?7QW!{?4jdj$HS~< z*xq5miP2rV38zt4fhi6njQ|cHDYu;DolJ84&Fzbxn3yp0@sa8R5%@e+AXJg0lnh~@ zJd?txX4#5z6G7XHu5tzvXD_*M2WC_5;X{?O(z>C#)E?2Bq<0`yVysW^?Kq^_ace4r zg!p9kG}#m`b?(m2^{1zxSy+!*NCtQ?NL02l>)FzfMr-WRm7{?&N;voc_PPU3+FdXR zMBv$N%IfhB$)iWTqvg)LRlr@h9F6w}Txn!P0>NK_7*bW~o;SXH-~L9ndfq8K;)S3K z_$yXFal%CznsfO2AH(j61m8hDI!n(mjv4&sq0|!r!Q^Phr?@zfa%rFT-%*-Lq z5I;aVK)u*Ht6}Ji8uS>jEAiZPlD|$TXiW&4!G#~>TT26e;L)4j7)2FR!2R?*@}_|- z1jZ8qpr}F~B!>YdVDt_e4qJ%BInCSh)}WSTbaG+90ft`5aukv~ySq~7a2AFO@Cdwi zYRm3rA$dbUXM)0;8PJN(G&>`c3yglXK2IN~PHc~=UEt-Vw$9E*Oj0VuDqkgdV`Zpj zXRgMl@xsrXCZN(^ju&UoVGoK>xOC0>7mcfHDYWb}=SA^br!4RP4zwCPdM-a>+hCv6 z3{Sf)^e;d?fjvXFTSWb`5E~v?!bm~RvWMq~n!!Ol6I^WM6J7n{t!Z@cbIZH^Hle$6 z@_}9ISb#YqWmJ3oji3Z@p);8hdLju0^?AwTgR97dS~jkIq5>$BZGsOel;iXsOUAOC z0g_xXPr6EvsV%hQKzN5SZ4Ib4jpgcqu$QS<%~;X^=RhH_`$1Y&3$*{HR5Fk(B6eh? zq$pDuR8LRq_UyW5Y^&*bXCT%PAg@wA(w~{dhc(ncIB@YIKNjFe=n+DJt5^9gda0um zy4}RZ#Kl1{h!@R1(NTT$4L)@ym;Ew&IF;yI9>0X_J}TYX@t^T6RDT(&WEpC0Hs zu@RINNZMLCx74Iyawf?nAE=tq@Nqz|?*{7eLlTJ%(xqx|)X}af=d{;)nB%HA0;Amo zMvSoCgs?>HwI5}%!NJyK14C=+s3=c$E|6YwJa;E z){f`Zhh&QN$Qm2tz~_D)6B7y}4h`yV6>}JQeWYiAu}|BWo1d$@YrV06Zcn0~GbfZ< zFsZmj>aRH?%L+}?8+FIZjMA{4iQj$medU_GLZg?N%Pb0k7ccH6gFk? 
zGrAk@H$Dp6)s+QjzX5I=50y58Qe}lMaUj*&{u|bdT&>TdjDa~Vc6>mvV)6SNFx1uP zjh;gX&3J1xHn!Qi{#nn3hcRVDoWsGFOY<{wjnfHpfySdeU=?ehspy$I$Ttv4BC$gP z5Cu%;uJo07@BJt|??iNgOs71(ngaHD7-ImB{Lu?$x@(pw0DEz7UpZ1g#Cywo+*1a$ zr{P=0g@d+Xswx?ZK^askhxKIvZ-Zy$Q+v%i!PNzXpUAT}+MduccF*BM7OX;W)5r$F zp4fL~BR##lkuQ+}JFq{IK*J^ex*9BO;~!!|pIOw1zO!f$_Or+d7|Z#}MWP0V&RPga zMViFEjA2>?hce{Pl6-ODRL@@RUq*VcCDFfeRR1rDOa6-l^uI{0@_$|s<{1AA^(f>g z)VBYhzaU$Bk9nG)d0G#8Sou)~$8nCpsM=SRM;Sba34b4>*(Ver{(DI&LlcG`e!%yh zL8lCo&9~SoWIo8JH=h%Ou88qD6qw3Tvpig!P>e$4}{ne&uww8 z{VBBv3HKqp1e%(?3TWZ;0^n|YayW_QTw3H zk&p=E>ix%rdvG%TJ%=nPraPWI$Z-^<$2fAnltBp1z`}UkOAC^Ly{1js?k8{q*FeX{rn_#2a?hgjbrqhEj3lRr+!9Gp=G<^vNZT@t9`V3%6VmfNDrKq`Wz=hb zAJApgm}v7r&&ADEDog1TNDGm!7+^on!LB(6m%TaDPBvCtz&_wq0VyytWYK%e4UWwo zXxa2DVC#B`(SwxXMqqtI!)f#Ad#>$l3Lk4CL74lrTgJo_b7(KJnlZ_5HLo0M0@Q0H zmrq&w9JGI*(8YKtn&xIi%!Rh-H5|xK5n3pLhKcMpRMQan6xD~f?h3>6mvCy2g8~@v z$TFpqs-AhPhIE=iM;;Sp;7(jkMaVW8sy>YCJ!51AM} zW-nf|>_l;_OXPK%0YSniFQ+oxG{L>y91|n9zEBV=F?n?(Y>t4$*?Gz?J>DKB^{k#N zSVxD`A9__VmmvgwecBt$8k!Qn>OH<(a>@s!;|~wtPk_HesX^0Jn9xy}LaKsnf`liR zE>r9N_xfQ51^_T(Vq)_DCbecW)n{njy^9ZQnCShK6gQx%;GzJv2rMrzCueNo*8?k#@a5xI`*hc!1hSMn->B>4wUY{dP$8LBQ*J$aovPZIRzO2qbu z?1M3*dLle&=EH42Y6A`q=sMy~o>6Q1Ks9@zK4?O@Zv6nfJJ%7TI#tEhBAj5y2zYd3 z;&W`$K3*!jL&MdYF{%DsJ3sQZ>?fZMVb~0l-Aimj)bLZFNRWili0R!MDztAiqM~Dr zpdoYFj8-E(KS&L~+QnPMD{(Y?>Cc~O4ve5#KWU|#w8k%VFSsw9CEzvejVb6XcfKY= z$P@^Dk%oWwbQ^YOwx>IgatwCYrf6)BJxh+Oj&`gXY}h%g&(RHwivfAec}fEH?b|n4 zW1pHOT7R@PuyubitU25^*ho&c)jJkOm?YpA;Uj{xjiCd z1Oo^wUtAh2RvYL;J{|lG)^v6q!o00$)$wX+Zdn}+8C|Kt_5)MXOD_5=7b3g) zX=~{9>+9+eZGp*PZ?=Oru(uxR>IHOxQ0th1gpkpHygqQ^d95Gv;XxO5^XC>zRFt}t z;)yUL31hDi)c5L@DuM;_o6IX;%jr$|h4_b#dUI|Q5QMI@@V2b{p-|B8N(!Lrlb_#o z11#L(gHBrlf-3QV5TpJ} zuiS}+t+XO(=Y~K8-nxY*By6b{Ib6|aG;rNo$a($N=?Wjg^s9IvvOv}tA#fSt ze+g}|&!Lkx5l7z(VD)+!wU6rL&6d<|uWq(so%bP@AsEN`Rit#i8A)4+ zSNFjLz^VI77@|t0m+<&*C~{Kp=p?vvRLp~V#BQRzzrvN^y3L3$Ag~kTK}><=AIkH? 
zyNv5b&mT2HUzNl)ShUF0`r$^g%%iW7un{k0a!plTzz0=16-Jp2Y=fxVX+LSVs%+){1NxfGItmCUj=WySSx#1;-cS~klpJ@xDf(=9v zna;#cx4XlRNmt|xppa&3MXm@p%Ex! z!dNMI{>)s0V;oWS!C7lkc@_XfV6qc|ikMAe_YZ}b@PriN?2FZg4Ui$nz^=ZgWR(2eo@s--oa_A!AQ#J0()e#qO2h|PCDPasD z$Oha-Y&p1zfYAOJ7q^UTVq${NV4@s|E1CYk(#e7f?4Q{#^z$zap<4_|FKb2WJ@T1k zlh^K9JSbi7?a6ufDU%S`4C#4`zpbC9D)hPYT?SsHT~2i%WWI5Jyx~1!XlxM8&Yb9q zY7FznUVae!D9iuve(nzsPH$7E0b(FiLeAV_`mxD0!8Vt?H9xI5y5cEDg^TxI2%V1Vv;jf9(kD{*R z3KFY3D#H?o8x)~dI$@FIl>+LNNT!1k?BeE@q7L)u2xJ{(b$9J5L~;4xCt2#}I!aA|kZ{Qwrjn=Kzg~Ht9=7SPLdGw;r{O>A$4gOnWv84?2^V zFzw=?^P{)J*05uPV%on;j4qEIAI3_x5K)!Je>O)xwH_$W{Ktbft9vCde?q-VNU-5g z&jYHj%*!-d|6dL#rq1)%G}`=I_k8k1|Sb4u51mvYu2 z{QLKkUu!NfcO41K%)H;%Au1w@xX{;>Ii683aAK(1Kf-gE`*|AJrq?N7QE}~Hm2WR7 zmbItfAsc6?Uu^(yTzkE89xT`(s2eLKh`F^2{biE#exJ47bD)FKY1DqZyu8b1Po8?r zt6&gOPqr10oNCo^nw(VAAk^$BQ8cPJ`i()pDQMAW(DvK$uJ*Ce7jIF9O1Dkl+uT`S z`Z(D6NGVvWLLD*Wxej2-5~3Fd7;Bf%7v4Lc^3mwDlG%Lwjgw})3k@1;->DXF>3jD= z1ghCk{ZFrTiUp8dSmbqgkvPQ5LZQ@(nqc$^!f&&#iNMLWz zX!s>q`~j5eKrA%c-(m^%R5>hFOPG+wje7fgKJv|**B>J-^N3<|E};v~mfKhF;sR_+ zv61q&8})P%Br>)>?X|T@@>UUQOe#5Ffgo*b?}&B*J7{LE1G|ghsbi)1-`x5WPXc3s$ zS~s5D&;$iV?f14HXmw|hHn|+4^X}e#P!n+A5vmvrM**@={+T6fjTb-9dpH_O<(MN4 zCZDh#ZY)UJqNE!dsK$RkQVsmax+Rpi!UyP38oy3L|B^6ZBf@v{EeklK+>X4A0J6Tu zk*(bB0xWWYTyoKR{YakrZGm@?&i$bAEXL|Dn9?PF^M59_{MUDc;(vqh$p684nEyxa zIRAw=p25Z=e05n4GRa_a@Cr-r?S*$NfZyBM+}e%Rfg{F)jsqiApu8Ae=^B~AuM5;wF3lmos%sYc`! 
zFjgP+y1|s%{WJ5g!2okv#&GkBo0{`^>*?hPBnSy%AaBei7|yga^f72-fvX8AB+;Jy z6*wEa>@OB~wT_Ou{f$-wyN{*$mAzk&6~Oo`1;%+@WwU`AFx5jc(cw(D(m5%`?%Waj z4g$aAq@+j6>MoD{ETT6t6pG5~!EjMPASiw7_U+aA3?uNN)CY+o@~4bo2tAI5^P$** zF9k8=hcA0W=cu6IZNfX=B?a5yni9Lo)!!C5S~<_8LAprTdt8YH62e+vCTdOUQ5E>q zxla~FEk`O&x5OL=uB_N%R8EQ(%XWAx*r!fGE`^_9*(`1Vho5k+x8#FJ{{!&X8p^Fr8iHwQvW$4h zfc>=ot_>&TIG8HRJ8(L}I$5sTXMB6O#`&cq)CHVM7Qd)f5yFHAtvrXYVIl2ZuuTOJzMW;q))O4)dp4UvKVCykFT&ush_WQcSmErIgUPgXV}P$o+T`Y(w+GXXdWvDVWH zb0W@@#QFIt^?tf7I|S+Sbk{BMW6Kw;L7i81GBCK&_Z|T-@REWNPo;c=8}2J1T-+gnWBc5`_#j<&&QiCZ?s~%Yj%Bh|_{w zrJG*g%U=aGF}l-(NaYU1Ua)%aFo7W8mvHvQi&-l*0HUp}`QAGREMUYsX&lI0dC*vg zHlktK#?UN;J3)Wsc9T ztMNz42+1S$Fd*~}B(-l=q^4IZSlC0S6A?yB^y?Y@_VQ>DWI53RdrBYNF5j?RglMwr z9QEhYp}*+*4?O-~fic3UiT2#vZCz@CkhE^fW)=J-O7Ap$1Yn!Es$B)P{T$x~@} zfKiFM@Ed zRor-@^GR!kIP{L9-@a`$XxUU_$H|-Oqy$1gf+z=3Am?~wZMQ-`0y|m3Z$j|`6dMV#H!tpkU8 zLJ@Aa_KaSGi%s*M0MRAVtz6b5|3U)Go1kp1>cGxWSh;?2UJt)YPE>bVB3%@ z%{5z61l~sP{(gw%7!)SjbVFL^CS%rp&j|u2%5-!sNrmOA#XL?U|^`(2Tz3|1!2N~QBx-y;$EB=Oa z@5nu;O1L0hSD80ADrQdCib?AvsG^cK@iBZcqlMB#X;levy+&U z8Fwkok5;&bJwx9+4IW{af+EzTG;V;cXK$8H=tD3Q1&AK?2p$ml+6dWpi=DW`>9b0S z^z`%)z7Z|$OdgK`0~jdzPDGN|9B7t1%!L5^TwN_=JRwU9e0{N$18O@TVN@V|CYJmz z-0+tG8limpbiEk>YY%|OPg}`X@_3H|*;*l1fqj2CM~>npy~s~P_a3yxC?n7G7617O za7PLbz6*{$`hbBVoBtM*FbV->{nlQ6svnzizVgTk4ig|h6Vkc3pilrK-~Ao`nUF=* zc9mIQq6&N)HD0RT0CW*SkuQh94!PO^Z;9VX^ox9)FWRDyS+aT!t@724%UQ_zJK$52 z;edjXgUe)7e5Jd=wRee%8Du~u2VP-OnijRUw>L6cH=FM}=ezr{ztWThG8+6q&thz1 zdiy%wH^@hth*r}@kKC!W98hu{iIcFG(NOgyE#lU?u9Zo$PI3hlsGu{;5o`LqDAyVD z_7n=X893*1_RxCmz%YCb7G}LUdJ)X)5j@aXlt(&W>99jY@IA=cZcQ85cOy78l^TJi z5aU_IA0nwi@{@Z6V>9&%C8yVIL&GFT^m(uwx&#KJlTDRKNL15jxn;0Y?T)@M1tyxn z;^^%$cRc8~yG*}(eM?2N2w}f1kJ1bHc-UhygMx0jZ7qC_+TXc$3I*1!2FFJ&-mgdU zU9Wz5uuJpdy0y=b0~cVu!&fzbelQs}M@8hB*tgZdh01eYW<+9icRKKx#^Q=c>pykZdvUYak@uHV$Js;UZpxsdXU7LkODiDRJ#K(e=7>5Q~u z%{L7pbHf`%4>-9$(p5Mu#NHC> zgWAqz?pXKR&y@R!RVIx1j<4vvkBj>VWyIHCSlsBen{TNDZ5$6*#_0{4?|E zV{qp}teBDH@_Jk%6bkVwo(AN8NoZ7Et<(U9FsfPFFCd4;WZ=ug^PqDPnW_S>skP-6 
z2vAIX)@J4xNYsV4FyTY&DPh}De8M+2R;vtM0mAd+3z`^vMG8d`77BXXJW!XKYgO7o z`wQ8#jT2?PIyaoh>jDUTM24>OE$?&gnhr|d6ME|G?Ch}6uXrV6#6*~r^YOccBB*jw zFdIM{vUqU#1|EiI_UkB=W)}8=MQOMa>Q&5#&<@ED&tbBmNzDml#yqD*I*_Yra_Q|% zMhJh<9k`W^E)McuZ*GE0S~fJXx%U}tZ~zr?WHqP&D>xUVV>E=(Sl@wo>3jPY3I$x- zd$HRjM=U0PTYWF7{^&7#yDB2n@H5c5< z`M-L*@^GlvJw8elQclT|NNKU;suY(~C;Mr!#mrbjl8V7(U+Q?!LW)jBC0Al18B2sP zrYuQ@ENLtuREAJOriHklU#IRlf1E$>z5X$tXJ&rS^N!y;-}kcua>#w4W)S{4QU*4$ z0(bv6CXx1@xLd&r+^){Yj@O>|8*3B!aiq!X)6iW}HM}ART1n(gaB1b&Xv99a z`OTG5U5c_QQ*3?POQ86xiHZPjbO%y{w6(QYKDa>wCS~a6cDvd?w_?MI6t25sqCK5{ zd3yRdQU-bsbo5n4#%Tm>z)?%FyQ~Mp$+GO0l$7{34GxJv+Iim3*X9F%jFK}vf_x(4 z0Pu>k<$Ys+#0Qu&+zSfqT84KY9eTZUxEdjik69KnzYCPNbUl5JTS{B!pX3}XyXe&> z?g1>JFj#!x!RpZQpwrJEGEEfVX(ZccSHF*^)}Vr1b&O|E=g#lrD%5q<%Ais;H0JC3 zhfB=Ych~!ii&MoCAaabj6rAYl>Iz@x)k&4G&msL05uA687!k_Y^v3lsB#l>tA#i6) zYb=^W*<#wtE&FrP4pPU}4ULUqaB|cG2?T)~=7pf4H@67wxNUBzLG48Z5@$)Zb&k2z zC?-}>_wRQN^2BhD(oA>?G2;dFf2qkF#?gp~zP-^lOjHD(ecS!3B+t@=9(VJmQd*ri z{D#;K7o%PwJp78CWZ;_7y{+`78$L<{!}v1z{n8~DcCnDlsj+MN!I##CHeD6gQrRqR zVPhlnK5Naob*gO(sP_PpVxE&}xS4!gEFkGKqervYVG9m~D+a^TY z`(?Gdeu7m2RTgHpzw~r%lOfGa51V66_wgy6&(`FtIa#gGj$yMw&2Wn_*~YbQFEn15 z&3~%HJrx;QRomPQC&~R~pPMZi8s3$HfZIjWc%GdQ0-{qTmp!Aif6}HTI5_C-WSe^# zf9T%xoHvD+1N$PLPTDLL)YhR0bv*BiQjqXU=cEj(%9yT=i`y6ch2Zg0b!Bi@jG)S} z8ef3|zEbs2yA}?HxPerpy%f~T*m4BAP3}Jz4LvFF9T0!bBM0pE4-7Q9qV51W!@1N{ zePm`VCmkJsxIo%;E#b9ymqh()`TcmCy^T4csO1Xf=?XkvmOOIw+hw_R$ELd4fFp7*aLi39h& z_#p>f52@&NIWmoK^;RZ{Vr3x%X+miF_;D6@RJqVDTTYlpgc8cH)6$|QY~|p%SkG1( z5*aL9;fqCDWnc3?L>SkrQeaZ6EGR-GA_P@_#Wp)-h;ws&x>M}Z^r2}`i?C?(>R#NM zljJi?*=25S0%vXc@opM|LsKl78-xyI%C^^|m5?Yr9+@GQuEI+}f1EML>kU_3b;`+;odPI_8jwAdWrB(#0&6!Ei4Lw#C;21F!D& zTy6`JhbZ&}D{;Bf&dQnJrtTaIjNZ#bP7g{kfQ6j2ON+>*FF^~32=TOASETrFyU$|t zU8SZbh7k;DNChhC>LAmqpV5C+r#NTD54TJ)A05eyDSO53bs zFCQ*IeaT!LyYcS&%+_p3rWVZdp1^u}8V(NW?H<~WL1RX^m7P0_cK3bsnM&C;`AVgB z6Qr}vs7x*J>Xv1vrhbblF^G)iM3oVz`o2nq_%HAmJ52HK+`AjeU?4bjb#sn`uHW0I zlHXe|QoI?N$}!Pn6Xs|0db*xW%{qoG0L&4I6X;Q2Un2~UO}S2TLSk|mBb|VboGT*c 
zWn^T8A=vbJfv0Gp-9pG@3=R4EOiFg&n>T0RDeetdG|)IyFa7h+;}VEIDt#1enfNkw zmEVX>M|KC!XnjdKDOLjw#E_A^eIt=ra08}j&Qfbt=#!ne4a391<^nqg8XTIC5jlq> zMWte?W#KO=?-f1`B#TpXTPHUEb4}9qZL6D9Zc0{GKJTlDlz)U~Y7i165@-*3UtT@M zxUc~+-^gkrIuYlSWVQH42M;RY+0JMwA#^e*fU)Me1!XmUtgO_Btwvh@-8;m&ZI6*v zEOylpS{O<~EJXYL-&pzT1W8!mv&cyA~!tBdaPD*=MXbN_Zn=3_%|CE`waA)bTQQ?y(&VBppF& zuoP4@I6FO$9vdQ2t73XJx9@vN#xq-X$d+X@#TE%P%0O?J*B`Yc1Ywm}!t%8AmRL6P ziBWB>30rFSlNAJU4fK_`kn|S!rLqaPY&j?z0rW?<6PHq-mLQfQy~^9y-)MC3NZIsw z9nrZ#G=dOFW7(eJv_jC}gw!Pe$y~dR&W;p=DL&Bw@;#ORDoeB`*N*HWk~#dFdD=}S z)d4nOI&_*ypamPQ}?^iVIr0bMO zV#q|I+U|*mJgKFa|4%>`GOAD0Oo*eIJTUL6VV?(JmS{`wl*K7^+l1 zwzM=p>)6Z2ctkj6nJ=k|TK`z|$ldwzAB+^v2|^f(ZJ)6Y<)4GLWYxxvdL3G8cj|jN z%3t2#5S{1Zlr3?NM5Lnyxl9LT%}&u_f47c^-Ffk7-krt6vop)-=M>I=BeZk_K1gH! zvW!)(ubBj+_R~*Gh@uao!g-nh_JpkqThh%cQmP^?-8I(#qK{D$)Yz{G6~T)gTF%R2 zJt{;SXPJ*?;-9qWzr~!hg**Qie%v zFN^-b$jr;jl`b{0?;+Yn+`FALX3I4uCWvBdB}f}d6QSGj=uqfP$tDL&2VQ+b3{Fa^ z8O4uZZe%*0Hw9ko$NXFFCI0KFZ+hCy>C`pgWMnSZ0zh`&LqIfeie}RX;Ve6|BS0)~7yKNXByj4&@#q#7z z5p3?~p1RsFnvs+Wx8eG_%u?~QsYjY)+P{aK-7E7l-=%MDP2kdNRa8VeFR2KpeV3pv z9|lqnYuw4ZXGOn!IFqMd+J|4^_zQH+k^c}S(XmunE4$y)d=tQ3VB;Xh(V|L-*we-d&3btl0YD2Lr+WZnHewIrWqa%P$J zAIa``w0m|PF=gWarNfZXJm2M%gt3dK){=ASoY}e0&GBeE4-yiXBc|Iyg literal 0 HcmV?d00001 diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index f08df24502..5cc40a541e 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -12,6 +12,15 @@ Some particularly notable pipelines: - `cargo xflowey build-igvm` - primarily dev-tool used to build OpenHCL IGVM files locally - `cargo xflowey restore-packages` - restores external packages needed to compile and run OpenVMM / OpenHCL +## `xflowey` vs `xtask` + +In a nutshell: + +- `cargo xtask`: implements novel, standalone tools/utilities +- `cargo xflowey`: orchestrates invoking a sequence of tools/utilities, without + doing any non-trivial data processing itself + + ```admonish warning While 
`cargo xflowey` technically has the ability to run CI pipelines locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is currently broken and should not be relied upon. Use CI pipelines in their intended environments (Azure DevOps or GitHub Actions). ``` @@ -30,15 +39,13 @@ Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps P **Maintainability Challenges** - YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) -- No compile-time validation means errors only surface at runtime, often deep into a pipeline execution +- No compile-time validation means errors only surface at runtime - Refactoring is risky and error-prone without automated tools to catch breaking changes - Code duplication is common because YAML lacks good abstraction mechanisms - Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive **Platform Lock-In** - Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) 
-- Moving between platforms requires complete rewrites of pipeline configuration -- Backend-specific features and syntax create vendor lock-in - Multi-platform support means maintaining multiple, divergent YAML files **Local Development Gaps** @@ -55,15 +62,6 @@ Flowey addresses these issues by treating automation as **first-class Rust code* - **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) - **Reusability**: Nodes are composable building blocks that can be shared across pipelines - **Local Execution**: The same pipeline definition can run locally or in CI - -## `xflowey` vs `xtask` - -In a nutshell: - -- `cargo xtask`: implements novel, standalone tools/utilities -- `cargo xflowey`: orchestrates invoking a sequence of tools/utilities, without - doing any non-trivial data processing itself - --- # Flowey Developer Guide @@ -73,22 +71,82 @@ working on OpenVMM automation. ## Table of Contents -1. [Core Concepts](#core-concepts) -2. [Variables: ReadVar and WriteVar](#variables-readvar-and-writevar) -3. [Emitting Steps](#emitting-steps) +1. [Flowey Fundamentals](#flowey-fundamentals) +2. [Two-Phase Execution Model](#two-phase-execution-model) +3. [Steps](#steps) 4. [Runtime Services](#runtime-services) -5. [Flowey Nodes](#flowey-nodes) -6. [Node Design Philosophy](#node-design-philosophy) -7. [Common Patterns](#common-patterns) -8. [Artifacts](#artifacts) -9. [Pipelines](#pipelines) -10. [Additional Resources](#additional-resources) +5. [Variables](#variables) +6. [Flowey Nodes](#flowey-nodes) +7. [Node Design Philosophy](#node-design-philosophy) +8. [Common Patterns](#common-patterns) +9. [Artifacts](#artifacts) +10. [Pipelines](#pipelines) --- -## Core Concepts +## Flowey Fundamentals + +Before diving into how flowey works, let's establish the key building blocks that form the foundation of flowey's automation model. These concepts are flowey's Rust-based abstractions for common CI/CD workflow primitives. 
+ +### The Automation Workflow Model + +In traditional CI/CD systems, workflows are defined using YAML with implicit dependencies and global state. Flowey takes a fundamentally different approach: **automation workflows are modeled as a directed acyclic graph (DAG) of typed, composable Rust components**. Each component has explicit inputs and outputs, and dependencies are tracked through the type system. + +### Core Building Blocks + +Flowey's model consists of a hierarchy of components: + +**[Pipelines](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html)** are the top-level construct that defines a complete automation workflow. A pipeline specifies what work needs to be done and how it should be organized. Pipelines can target different execution backends (local machine, Azure DevOps, GitHub Actions) and generate appropriate configuration for each. + +**[Jobs](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html)** represent units of work that run on a specific platform (Windows, Linux, macOS) and architecture (x86_64, Aarch64). Jobs can run in parallel when they don't depend on each other, or sequentially when one job's output is needed by another. Each job is isolated and runs in its own environment. + +**[Nodes](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html)** are reusable units of automation logic that perform specific tasks (e.g., "install Rust toolchain", "run cargo build", "publish test results"). Nodes are invoked by jobs and emit one or more steps to accomplish their purpose. Nodes can depend on other nodes, forming a composable ecosystem of automation building blocks. + +**Steps** are the individual units of work that execute at runtime. A step might run a shell command, execute Rust code, or interact with the CI backend. Steps are emitted by nodes during the build-time phase and executed in dependency order during runtime. 
+ +### Connecting the Pieces + +These building blocks are connected through three key mechanisms: + +**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar<T>` represents a promise to produce a value of type `T` at runtime, while a `ReadVar<T>` represents a dependency on that value. Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar`, and a "test" step would read from the corresponding `ReadVar`. + +**[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. + +**[Side Effects](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.SideEffect.html)** represent dependencies without data. Sometimes step B needs to run after step A, but A doesn't produce any data that B consumes (e.g., "install dependencies" must happen before "run tests", even though the test step doesn't directly use the installation output). Side effects are represented as `ReadVar<SideEffect>` and establish ordering constraints in the DAG without transferring actual values. 
-### Two-Phase Execution Model +### Putting It Together + +Here's how these pieces relate: + +``` +Pipeline + ├─ Job 1 (Linux x86_64) + │ ├─ Node A (install Rust) + │ │ └─ Step: Run rustup install + │ │ └─ Produces: WriteVar (installation complete) + │ └─ Node B (build project) + │ └─ Step: Run cargo build + │ └─ Consumes: ReadVar (installation complete) + │ └─ Produces: WriteVar (binary path) → Artifact + │ + └─ Job 2 (Windows x86_64) + └─ Node C (run tests) + └─ Step: Run binary with test inputs + └─ Consumes: ReadVar (binary path) ← Artifact + └─ Produces: WriteVar (test results) +``` + +In this example: +- The **Pipeline** defines two jobs that run on different platforms +- **Job 1** installs Rust and builds the project, with step dependencies expressed through variables +- **Job 2** runs tests using the binary from Job 1, with the binary transferred via an artifact +- **Variables** create dependencies within a job (build depends on install) +- **Artifacts** create dependencies between jobs (Job 2 depends on Job 1's output) +- **Side Effects** represent the "Rust is installed" state without carrying data + +--- + +## Two-Phase Execution Model Flowey operates in two distinct phases: @@ -96,7 +154,7 @@ Flowey operates in two distinct phases: - Reads `.flowey.toml` to determine which pipelines to regenerate - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build` - Runs the flowey binary with `pipeline --out ` for each pipeline definition - - During this invocation, flowey constructs a **directed acyclic graph (DAG)** - a graph structure that represents the execution order of work, where each node represents a unit of work and edges represent dependencies between them. 
By: + - During this invocation, flowey constructs a **directed acyclic graph (DAG)** by: - Instantiating all nodes (reusable units of automation logic) defined in the pipeline - Processing their requests - Resolving dependencies between nodes via variables and artifacts @@ -108,11 +166,9 @@ Flowey operates in two distinct phases: - Variables are read and written with actual values - Commands are executed - Artifacts (data packages passed between jobs) are published/consumed - - Side effects occur + - Side effects (dependencies) are resolved ```admonish note -**Understanding the Workflow:** - The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: ```toml [[pipeline.flowey_hvlite.github]] @@ -131,12 +187,7 @@ When you run `cargo xflowey regen`: - `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. Always run `cargo xflowey regen` after modifying pipeline definitions to ensure the generated YAML files reflect your changes. -``` -This separation allows flowey to: -- Validate the entire workflow before execution -- Generate static YAML for CI systems (ADO, GitHub Actions) -- Catch dependency errors at build-time rather than runtime ### Backend Abstraction @@ -147,180 +198,16 @@ Flowey supports multiple execution backends: - **GitHub Actions**: Generates GitHub Actions workflow YAML ```admonish warning: -Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting -backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be -avoided unless absolutely necessary. Most automation logic should be -backend-agnostic, using `emit_rust_step` for cross-platform Rust code that -works everywhere. +Nodes should be written to work across ALL backends whenever possible. 
Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. ``` --- -## Variables: ReadVar and WriteVar - -[**`ReadVar`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) and [**`WriteVar`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html) are flowey's solution to the problem of declaring -variables at build-time that will hold values produced during pipeline runtime. - -### The Problem They Solve - -When constructing the pipeline graph, we don't yet know the values that will be -produced during execution (e.g., paths to built binaries, git commit hashes, -etc.). We need a way to: -1. Declare "this step will produce a value" -2. Declare "this step will consume that value" -3. Let flowey infer the execution order from these dependencies - -### Write-Once Semantics - -`WriteVar` can only be written to **once**. This is fundamental to flowey's -execution model: - -- Writing to a `WriteVar` consumes it (the type is not `Clone`) -- This ensures there's exactly one producer for each variable -- Flowey can use this to build a valid DAG (no cycles, no conflicts) - -### Claiming Variables - -Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: -1. Registers that this step depends on (or produces) this variable -2. Converts `ReadVar<T>` to `ReadVar<T, VarClaimed>` -3. Allows flowey to track variable usage for graph construction - -Variables can only be claimed inside step closures using the `claim()` method. 
- -**Example of the nested closure pattern:** - -```rust -// During node's emit() - this runs at BUILD-TIME -let input_var: ReadVar = /* ... */; -let output_var: WriteVar = ctx.new_var(); - -ctx.emit_rust_step("process data", |ctx| { - // OUTER CLOSURE: Runs at build-time during graph construction - // This is where you claim variables to establish dependencies - let input_var = input_var.claim(ctx); - let output_var = output_var.claim(ctx); - - // Return the INNER CLOSURE which will run at runtime - |rt| { - // INNER CLOSURE: Runs at RUNTIME during pipeline execution - // This is where you actually read/write variable values - let input = rt.read(input_var); - let result = input.len() as i32; - rt.write(output_var, &result); - - Ok(()) - } -}); -``` - -**Why the nested closure dance?** - -The nested closure pattern is fundamental to flowey's two-phase execution model: - -1. **Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to: - - Claim variables, which registers dependencies in the graph - - Determine what this step depends on (reads) and produces (writes) - - Allow flowey to validate the dependency graph and determine execution order - - The outer closure returns the inner closure for later execution - -2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to: - - Read actual values from claimed `ReadVar`s - - Perform the real work (computations, running commands, etc.) 
- - Write actual values to claimed `WriteVar`s - -This separation ensures flowey can: -- **Validate dependencies before execution**: All claims happen during graph construction, catching errors like missing dependencies or cycles at build-time -- **Determine execution order**: By analyzing claimed variables, flowey knows which steps depend on which others -- **Generate correct YAML**: The generated CI YAML reflects the dependency structure discovered during claiming -- **Catch type errors early**: The Rust type system prevents reading/writing unclaimed variables - -The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure). - -### ClaimedReadVar and ClaimedWriteVar - -These are type aliases for claimed variables: -- [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar` -- [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar` - -Only claimed variables can be read/written at runtime. - -**Implementation Detail: Zero-Sized Types (ZSTs)** +## Steps -The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level and have no runtime representation or memory footprint. This is a pure type-level transformation that happens at compile time. 
- -This design is crucial because without this type-level transform, Rust couldn't statically verify that all variables used in a runtime block have been claimed by that block - -The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available. - -### Static Values vs Runtime Values - -Sometimes you know a value at build-time: - -```rust -// Create a ReadVar with a static value -let version = ReadVar::from_static("1.2.3".to_string()); - -// This is encoded directly in the pipeline, not computed at runtime -// WARNING: Never use this for secrets! -``` - -This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. - -### Variable Operations - -`ReadVar` provides several useful operations for transforming and combining variables: - -**Transform operations:** -- **`map()`**: Apply a function to transform a `ReadVar` into a `ReadVar`. Useful for deriving new values from existing variables (e.g., extracting a filename from a path, converting to uppercase). - -**Combining operations:** -- **`zip()`**: Combine two ReadVars into a single `ReadVar<(T, U)>`. Useful when a step needs access to multiple values simultaneously. - -**Dependency operations:** -- **`into_side_effect()`**: Discard the value but keep the dependency. Converts `ReadVar` to `ReadVar`, useful when you only care that a step ran, not what it produced. -- **`depending_on()`**: Create a new ReadVar that has an explicit dependency on another variable. Ensures ordering without actually using the dependent value. - -For detailed examples of each operation, see the [`ReadVar` documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html). - -### The SideEffect Type - -`SideEffect` is an alias for `()` that represents a dependency without data. 
It's used when you need to express that one step must run before another, but the first step doesn't produce any value that the second step needs to consume. - -**Key concepts:** -- Represents "something happened" without carrying data -- Enables explicit dependency ordering between steps -- Commonly used for installation, initialization, or cleanup steps - -For examples of using SideEffect, see the [`SideEffect` type documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.SideEffect.html). - ---- - -## Emitting Steps - -Nodes emit **steps** - units of work that will be executed at runtime. Different +**Steps** are units of work that will be executed at runtime. Different step types exist for different purposes. -### NodeCtx vs StepCtx - -Before diving into step types, it's important to understand these two context types: - -- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. - -- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). - -### Isolated Working Directories and Path Immutability - -```admonish warning title="Critical Constraint" -**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. - -However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: - -**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** -``` - -When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. 
If you need to modify files from a `ReadVar`, copy them to your step's working directory. - ### Rust Steps Rust steps execute Rust code at runtime and are the most common step type in flowey. @@ -351,6 +238,18 @@ For GitHub step examples, see the [`GhStepBuilder` documentation](https://openvm For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). +### Isolated Working Directories and Path Immutability + +```admonish warning title="Critical Constraint" +**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. + +However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: + +**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** +``` + +When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. If you need to modify files from a `ReadVar`, copy them to your step's working directory. + --- ## Runtime Services @@ -454,20 +353,115 @@ rt.write_not_secret(output_var, &"done".to_string()); --- +## Variables + +### Claiming Variables + +Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: +1. Registers that this step depends on (or produces) this variable +2. Converts `ReadVar` to `ReadVar` +3. Allows flowey to track variable usage for graph construction + +Variables can only be claimed inside step closures using the `claim()` method. 
+
+**Nested closure pattern and related contexts:**
+
+```rust
+// Inside a SimpleFlowNode's process_request() method
+fn process_request(&self, request: Self::Request, ctx: &mut NodeCtx<'_>) {
+    // Assume a single Request provided an input ReadVar<String> and output WriteVar<i32>
+    let input_var: ReadVar<String> = /* from one of the requests */;
+    let output_var: WriteVar<i32> = /* from one of the requests */;
+
+    // Declare a step (still build-time). This adds a node to the DAG.
+    ctx.emit_rust_step("compute length", |step| {
+        // step : StepCtx (outer closure, build-time)
+        // Claim dependencies so the graph knows: this step READS input_var, WRITES output_var.
+        let input_var = input_var.claim(step);
+        let output_var = output_var.claim(step);
+
+        // Return the runtime closure.
+        move |rt| {
+            // rt : RustRuntimeServices (runtime phase)
+            let input = rt.read(input_var); // consume value
+            let len = input.len() as i32;
+            rt.write(output_var, &len); // fulfill promise
+            Ok(())
+        }
+    });
+}
+```
+
+**Why the nested closure dance?**
+
+The nested closure pattern is fundamental to flowey's two-phase execution model:
+
+1. **Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to:
+   - Claim variables, which registers dependencies in the graph
+   - Determine what this step depends on (reads) and produces (writes)
+   - Allow flowey to validate the dependency graph and determine execution order
+   - The outer closure returns the inner closure for later execution
+
+2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to:
+   - Read actual values from claimed `ReadVar`s
+   - Perform the real work (computations, running commands, etc.)
+   - Write actual values to claimed `WriteVar`s
+
+- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc.
+
+- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`).
+
+The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure).
+
+### ClaimedReadVar and ClaimedWriteVar
+
+These are type aliases for claimed variables:
+- [`ClaimedReadVar<T>`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar<T, VarClaimed>`
+- [`ClaimedWriteVar<T>`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar<T, VarClaimed>`
+
+Only claimed variables can be read/written at runtime.
+
+**Implementation Detail: Zero-Sized Types (ZSTs)**
+
+The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. This allows Rust to statically verify that all variables used in a runtime block have been claimed by that block.
+
+The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available.
+
+### Static Values vs Runtime Values
+
+Sometimes you know a value at build-time:
+
+```rust
+// Create a ReadVar with a static value
+let version = ReadVar::from_static("1.2.3".to_string());
+
+// This is encoded directly in the pipeline, not computed at runtime
+// WARNING: Never use this for secrets!
+```
+
+This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time.
+ +### Variable Operations + +`ReadVar` provides operations for transforming and combining variables: + +- **`map()`**: Transform a `ReadVar` into a `ReadVar` +- **`zip()`**: Combine two ReadVars into `ReadVar<(T, U)>` +- **`into_side_effect()`**: Convert `ReadVar` to `ReadVar` when you only care about ordering, not the value +- **`depending_on()`**: Create a new ReadVar with an explicit dependency + +For detailed examples, see the [`ReadVar` documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html). + +--- + ## Flowey Nodes -A [**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) is a reusable unit of automation logic. Nodes process requests, -emit steps, and can depend on other nodes. +At a conceptual level, a Flowey node is analogous to a strongly typed function: you "invoke" it by submitting one or more Request values (its parameters), and it responds by emitting steps that perform work and produce outputs (values written to `WriteVar`s, published artifacts, or side-effect dependencies). ### The Node/Request Pattern Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. 
-**Key concepts:** -- Each node is a struct registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) -- Request types define the node's API using [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro -- Requests often include [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html) parameters for outputs - For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). ### FlowNode vs SimpleFlowNode @@ -604,15 +598,6 @@ implementation (runtime logic): - **Node definition**: What the node does, what it depends on - **Step implementation**: How it does it -### 5. Type Safety - -Use Rust's type system to prevent errors at build-time: - -- Typed artifacts ensure type-safe data passing -- [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html) can only be written once (enforced by the type system) -- Claimed variables ensure variables are claimed before use -- Request validation happens during [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit), not at runtime - --- ## Common Patterns @@ -646,42 +631,9 @@ Nodes can query the current backend and platform to emit platform-specific or ba For more on backend and platform APIs, see the [`NodeCtx` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html). -### Using the flowey_request! Macro - -The `flowey_request!` macro generates the Request type and associated boilerplate for a node. It supports three main formats to accommodate different node complexity levels. - -**Format options:** -- **`enum_struct`**: Recommended for complex requests. 
Creates an enum where each variant is a separate struct in a `req` module, providing better organization -- **`enum`**: Simple enum for straightforward request types -- **`struct`**: Single request type for nodes that only do one thing - -The macro automatically derives `Serialize`, `Deserialize`, and implements the `IntoRequest` trait. - -For complete syntax and examples, see the [`flowey_request!` macro documentation](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html). - ---- - ## Artifacts -**Artifacts** are first-class citizens in flowey, designed to abstract away the many footguns and complexities of CI system artifact handling. Flowey treats artifacts as typed data that flows between pipeline jobs, with automatic dependency management . - -### The Problem with Raw CI Artifacts - -Traditional CI systems have numerous artifact-related footguns that can cause subtle, hard-to-debug failures: - -**Name Collision Issues** -- If you upload an artifact mid-way through a job, then a later step fails, re-running that job will fail when trying to upload the artifact again due to name collision with the previous run -- Artifact names must be globally unique within a pipeline run, requiring manual name management -- Different CI backends have different artifact naming rules and restrictions - -**Manual Dependency Management** -- Nothing prevents you from trying to download an artifact before the job that produces it has run -- Job ordering must be manually specified and kept in sync with artifact dependencies -- Mistakes only surface at runtime, often after significant CI time has been consumed - -### Flowey's Artifact Abstraction - -Flowey solves these problems by making artifacts a core part of the pipeline definition at build-time: +Artifacts enable typed data transfer between jobs with automatic dependency management, abstracting away CI system complexities like name collisions and manual job ordering. 
### Typed vs Untyped Artifacts @@ -734,21 +686,11 @@ At **runtime**, the artifact `ReadVar` and `WriteVar` work jus ## Pipelines -A **Pipeline** is the top-level construct that defines a complete automation -workflow. Pipelines consist of one or more **Jobs**, each of which runs a set -of **Nodes** to accomplish specific tasks. - -For detailed examples of defining pipelines, see the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html). +Pipelines define complete automation workflows consisting of jobs that run nodes. See the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html) for detailed examples. ### Pipeline Jobs -Each [`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) represents a unit of work that: -- Runs on a specific platform and architecture -- Can depend on artifacts from other jobs -- Can be conditionally executed based on parameters -- Emits a sequence of steps that accomplish the job's goals - -Jobs are configured using a builder pattern: +[`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) instances are configured using a builder pattern: ```rust let job = pipeline @@ -766,22 +708,24 @@ let job = pipeline ### Pipeline Parameters -Parameters allow runtime configuration of pipelines: +Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). +Azure DevOps parameter UI + ```rust // Define a boolean parameter -let use_cache = pipeline.new_parameter_bool( - "use_cache", - "Whether to use caching", +let verbose = pipeline.new_parameter_bool( + "verbose", + "Run with verbose output", ParameterKind::Stable, - Some(true) // default value + Some(false) // default value ); // Use the parameter in a job let job = pipeline.new_job(...) 
.dep_on(|ctx| { - let use_cache = ctx.use_parameter(use_cache); - // use_cache is now a ReadVar + let verbose = ctx.use_parameter(verbose); + // verbose is now a ReadVar }) .finish(); ``` @@ -816,9 +760,3 @@ Unstable parameters are for **internal use** and experimentation: - Experimental features or debugging flags - Internal pipeline configuration that may change frequently - Parameters for development/testing that shouldn't be used in production - -## Additional Resources - -- **Example nodes**: See `flowey/flowey_lib_common/src/` for many real-world examples -- **Pipeline examples**: See `flowey/flowey_hvlite/src/pipelines/` for complete pipelines -- **Core types**: Defined in `flowey/flowey_core/src/` From cd6c9d9962da61b0ce10dfc64ebb7d05a0d55d2a Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 13:31:07 -0700 Subject: [PATCH 07/15] Feedback + doc build fixes --- Guide/src/dev_guide/dev_tools/xflowey.md | 31 ++++++++++++++++-------- flowey/flowey_core/src/node.rs | 2 +- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index 5cc40a541e..7973359cc9 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -108,7 +108,7 @@ Flowey's model consists of a hierarchy of components: These building blocks are connected through three key mechanisms: -**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar` represents a promise to produce a value of type `T` at runtime, while a `ReadVar` represents a dependency on that value. Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar`, and a "test" step would read from the corresponding `ReadVar`. 
+**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar` represents a promise to produce a value of type `T` at runtime, while a `ReadVar` represents a dependency on that value. Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar`, and a "test" step would read from the corresponding `ReadVar`. This echoes Rust’s “shared XOR mutable” ownership rule: a value has either one writer or multiple readers, never both concurrently. **[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. @@ -198,9 +198,11 @@ Flowey supports multiple execution backends: - **GitHub Actions**: Generates GitHub Actions workflow YAML ```admonish warning: -Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. +Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. 
Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. Writing cross-platform flowey code enables locally testing pipelines which can be invaluable when iterating over CI changes. ``` +If a node only supports certain backends, it should immediately fast‑fail with a clear error (“ not supported on ”) instead of silently proceeding. That failure signals it’s time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. + --- ## Steps @@ -648,21 +650,37 @@ struct MyArtifact { #[serde(rename = "metadata.json")] metadata: PathBuf, } + +let (pub_artifact, use_artifact) = pipeline.new_typed_artifact("my-files"); ``` **Untyped artifacts** provide simple directory-based artifacts for simpler cases: ```rust -let artifact = pipeline.new_artifact("my-files"); +let (pub_artifact, use_artifact) = pipeline.new_artifact("my-files"); ``` For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). +Both `pipeline.new_typed_artifact("name")` and `pipeline.new_artifact("name")` return a tuple of handles: `(pub_artifact, use_artifact)`. When defining a job you convert them with the job context: +```rust +// In a producing job: +let artifact_out = ctx.publish_artifact(pub_artifact); +// artifact_out : WriteVar (typed) +// or WriteVar for untyped + +// In a consuming job: +let artifact_in = ctx.use_artifact(use_artifact); +// artifact_in : ReadVar (typed) +// or ReadVar for untyped +``` +After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). 
Key concepts: - The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure - Use `#[serde(rename = "file.exe")]` to specify exact file names - Typed artifacts ensure compile-time type safety when passing data between jobs - Untyped artifacts are simpler but don't provide type guarantees +- Tuple handles must be lifted with `ctx.publish_artifact(...)` / `ctx.use_artifact(...)` to become flowey variables ### How Flowey Manages Artifacts Under the Hood @@ -674,8 +692,6 @@ During the **pipeline resolution phase** (build-time), flowey: - For ADO: Uses `PublishPipelineArtifact` and `DownloadPipelineArtifact` tasks - For GitHub Actions: Uses `actions/upload-artifact` and `actions/download-artifact` - For local execution: Uses filesystem copying -4. **Handles artifact naming automatically** to avoid collisions while keeping names human-readable -5. **Validates the artifact flow** to ensure all dependencies can be satisfied At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: - Producing jobs write artifact files to the path from `WriteVar` @@ -730,11 +746,6 @@ let job = pipeline.new_job(...) .finish(); ``` -Parameter types: -- Boolean parameters -- String parameters with optional validation -- Numeric (i64) parameters with optional validation - #### Stable vs Unstable Parameters Every parameter in flowey must be declared as either **Stable** or **Unstable** using [`ParameterKind`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html). This classification determines the parameter's visibility and API stability: diff --git a/flowey/flowey_core/src/node.rs b/flowey/flowey_core/src/node.rs index 78ff8b5bd2..87e91bb0c3 100644 --- a/flowey/flowey_core/src/node.rs +++ b/flowey/flowey_core/src/node.rs @@ -2658,7 +2658,7 @@ macro_rules! new_flow_node_base { pub trait FlowNode { /// The request type that defines what operations this node can perform. 
/// - /// Use the [`flowey_request!`] macro to define this type. + /// Use the [`crate::flowey_request!`] macro to define this type. type Request: Serialize + DeserializeOwned; /// A list of nodes that this node is capable of taking a dependency on. From 2fdbb96e038dacd86c5b1425a4caaf26aa93d3be Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 14:13:45 -0700 Subject: [PATCH 08/15] Test the content out with the guidebook --- Guide/src/SUMMARY.md | 1 + .../src/dev_guide/dev_tools/flowey/flowey.md | 739 +++++++++++++++++ .../{ => flowey}/images/Parameters.png | Bin Guide/src/dev_guide/dev_tools/xflowey.md | 751 +----------------- 4 files changed, 742 insertions(+), 749 deletions(-) create mode 100644 Guide/src/dev_guide/dev_tools/flowey/flowey.md rename Guide/src/dev_guide/dev_tools/{ => flowey}/images/Parameters.png (100%) diff --git a/Guide/src/SUMMARY.md b/Guide/src/SUMMARY.md index 71f85b232e..109daf4a1a 100644 --- a/Guide/src/SUMMARY.md +++ b/Guide/src/SUMMARY.md @@ -37,6 +37,7 @@ - [Developer Tools / Utilities](./dev_guide/dev_tools.md) - [`cargo xtask`](./dev_guide/dev_tools/xtask.md) - [`cargo xflowey`](./dev_guide/dev_tools/xflowey.md) + - [`flowey`](./dev_guide/dev_tools/flowey/flowey.md) - [VmgsTool](./dev_guide/dev_tools/vmgstool.md) - [update-rootfs.py]() - [igvmfilegen]() diff --git a/Guide/src/dev_guide/dev_tools/flowey/flowey.md b/Guide/src/dev_guide/dev_tools/flowey/flowey.md new file mode 100644 index 0000000000..0634ddca49 --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/flowey.md @@ -0,0 +1,739 @@ +## Why Flowey? 
+ +Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps Pipelines, GitHub Actions workflows) have several fundamental limitations that become increasingly problematic as projects grow in complexity: + +### The Problems with Traditional YAML Pipelines + +**Non-Local Reasoning and Global State** +- YAML pipelines heavily rely on global state and implicit dependencies (environment variables, file system state, installed tools) +- Understanding what a step does often requires mentally tracking state mutations across the entire pipeline +- Debugging requires reasoning about the entire pipeline context rather than isolated units of work +- Changes in one part of the pipeline can have unexpected effects in distant, seemingly unrelated parts + +**Maintainability Challenges** +- YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) +- No compile-time validation means errors only surface at runtime +- Refactoring is risky and error-prone without automated tools to catch breaking changes +- Code duplication is common because YAML lacks good abstraction mechanisms +- Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive + +**Platform Lock-In** +- Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) 
+- Multi-platform support means maintaining multiple, divergent YAML files + +**Local Development Gaps** +- Developers can't easily test pipeline changes before pushing to CI +- Reproducing CI failures locally is difficult or impossible +- The feedback loop is slow: push → wait for CI → debug → repeat + +### Flowey's Solution + +Flowey addresses these issues by treating automation as **first-class Rust code**: + +- **Type Safety**: Rust's type system catches errors at compile-time rather than runtime +- **Local Reasoning**: Dependencies are explicit through typed variables, not implicit through global state +- **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) +- **Reusability**: Nodes are composable building blocks that can be shared across pipelines +- **Local Execution**: The same pipeline definition can run locally or in CI +--- + +## Table of Contents + +1. [Flowey Fundamentals](#flowey-fundamentals) +2. [Two-Phase Execution Model](#two-phase-execution-model) +3. [Steps](#steps) +4. [Runtime Services](#runtime-services) +5. [Variables](#variables) +6. [Flowey Nodes](#flowey-nodes) +7. [Node Design Philosophy](#node-design-philosophy) +8. [Common Patterns](#common-patterns) +9. [Artifacts](#artifacts) +10. [Pipelines](#pipelines) + +--- + +## Flowey Fundamentals + +Before diving into how flowey works, let's establish the key building blocks that form the foundation of flowey's automation model. These concepts are flowey's Rust-based abstractions for common CI/CD workflow primitives. + +### The Automation Workflow Model + +In traditional CI/CD systems, workflows are defined using YAML with implicit dependencies and global state. Flowey takes a fundamentally different approach: **automation workflows are modeled as a directed acyclic graph (DAG) of typed, composable Rust components**. Each component has explicit inputs and outputs, and dependencies are tracked through the type system. 
+ +### Core Building Blocks + +Flowey's model consists of a hierarchy of components: + +**[Pipelines](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html)** are the top-level construct that defines a complete automation workflow. A pipeline specifies what work needs to be done and how it should be organized. Pipelines can target different execution backends (local machine, Azure DevOps, GitHub Actions) and generate appropriate configuration for each. + +**[Jobs](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html)** represent units of work that run on a specific platform (Windows, Linux, macOS) and architecture (x86_64, Aarch64). Jobs can run in parallel when they don't depend on each other, or sequentially when one job's output is needed by another. Each job is isolated and runs in its own environment. + +**[Nodes](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html)** are reusable units of automation logic that perform specific tasks (e.g., "install Rust toolchain", "run cargo build", "publish test results"). Nodes are invoked by jobs and emit one or more steps to accomplish their purpose. Nodes can depend on other nodes, forming a composable ecosystem of automation building blocks. + +**Steps** are the individual units of work that execute at runtime. A step might run a shell command, execute Rust code, or interact with the CI backend. Steps are emitted by nodes during the build-time phase and executed in dependency order during runtime. + +### Connecting the Pieces + +These building blocks are connected through three key mechanisms: + +**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar<T>` represents a promise to produce a value of type `T` at runtime, while a `ReadVar<T>` represents a dependency on that value.
Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar<PathBuf>`, and a "test" step would read from the corresponding `ReadVar<PathBuf>`. This echoes Rust’s “shared XOR mutable” ownership rule: a value has either one writer or multiple readers, never both concurrently. + +**[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. + +**[Side Effects](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.SideEffect.html)** represent dependencies without data. Sometimes step B needs to run after step A, but A doesn't produce any data that B consumes (e.g., "install dependencies" must happen before "run tests", even though the test step doesn't directly use the installation output). Side effects are represented as `ReadVar<SideEffect>` and establish ordering constraints in the DAG without transferring actual values.
+ +### Putting It Together + +Here's how these pieces relate: + +``` +Pipeline + ├─ Job 1 (Linux x86_64) + │ ├─ Node A (install Rust) + │ │ └─ Step: Run rustup install + │ │ └─ Produces: WriteVar (installation complete) + │ └─ Node B (build project) + │ └─ Step: Run cargo build + │ └─ Consumes: ReadVar (installation complete) + │ └─ Produces: WriteVar (binary path) → Artifact + │ + └─ Job 2 (Windows x86_64) + └─ Node C (run tests) + └─ Step: Run binary with test inputs + └─ Consumes: ReadVar (binary path) ← Artifact + └─ Produces: WriteVar (test results) +``` + +In this example: +- The **Pipeline** defines two jobs that run on different platforms +- **Job 1** installs Rust and builds the project, with step dependencies expressed through variables +- **Job 2** runs tests using the binary from Job 1, with the binary transferred via an artifact +- **Variables** create dependencies within a job (build depends on install) +- **Artifacts** create dependencies between jobs (Job 2 depends on Job 1's output) +- **Side Effects** represent the "Rust is installed" state without carrying data + +--- + +## Two-Phase Execution Model + +Flowey operates in two distinct phases: + +1. **Build-Time (Resolution Phase)**: When you run `cargo xflowey regen`, flowey: + - Reads `.flowey.toml` to determine which pipelines to regenerate + - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build` + - Runs the flowey binary with `pipeline --out ` for each pipeline definition + - During this invocation, flowey constructs a **directed acyclic graph (DAG)** by: + - Instantiating all nodes (reusable units of automation logic) defined in the pipeline + - Processing their requests + - Resolving dependencies between nodes via variables and artifacts + - Determining the execution order + - Performing flowey-specific validations (dependency resolution, type checking, etc.) + - Generates YAML files for CI systems (ADO, GitHub Actions) at the paths specified in `.flowey.toml` + +2. 
**Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey `). Steps (units of work) run in the order determined at build-time: + - Variables are read and written with actual values + - Commands are executed + - Artifacts (data packages passed between jobs) are published/consumed + - Side effects (dependencies) are resolved + + +The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: +```toml +[[pipeline.flowey_hvlite.github]] +file = ".github/workflows/openvmm-pr.yaml" +cmd = ["ci", "checkin-gates", "--config=pr"] +``` + +When you run `cargo xflowey regen`: +1. It reads `.flowey.toml` +2. Builds the `flowey-hvlite` binary +3. Runs `flowey-hvlite pipeline github --out .github/workflows/openvmm-pr.yaml ci checkin-gates --config=pr` +4. This generates/updates the YAML file with the resolved pipeline + +**Key Distinction:** +- `cargo build -p flowey-hvlite` - Only compiles the flowey code to verify it builds successfully. **Does not** construct the DAG or generate YAML files. +- `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. + +Always run `cargo xflowey regen` after modifying pipeline definitions to ensure the generated YAML files reflect your changes. + +### Backend Abstraction + +Flowey supports multiple execution backends: + +- **Local**: Runs directly on your development machine +- **ADO (Azure DevOps)**: Generates ADO Pipeline YAML +- **GitHub Actions**: Generates GitHub Actions workflow YAML + +```admonish warning +Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. 
Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. Writing cross-platform flowey code enables locally testing pipelines which can be invaluable when iterating over CI changes. +``` + +If a node only supports certain backends, it should immediately fast‑fail with a clear error (“<operation> not supported on <backend>”) instead of silently proceeding. That failure signals it’s time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. + +--- + +## Steps + +**Steps** are units of work that will be executed at runtime. Different +step types exist for different purposes. + +### Rust Steps + +Rust steps execute Rust code at runtime and are the most common step type in flowey. + +**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar<SideEffect>` that other steps can use as a dependency. + +**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. Using minor steps also improves performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step. + +**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable. + +For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html). + +### ADO Steps + +**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML.
Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. + +For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). + +### GitHub Steps + +**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. + +For GitHub step examples, see the [`GhStepBuilder` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html). + +### Side Effect Steps + +**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. + +For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). + +### Isolated Working Directories and Path Immutability + +```admonish warning title="Critical Constraint" +**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. + +However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: + +**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** +``` + +When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. If you need to modify files from a `ReadVar`, copy them to your step's working directory. 
+ +--- + +## Runtime Services + +Runtime services provide the API available during step execution (inside the +closures passed to `emit_rust_step`, etc.). + +### RustRuntimeServices + +[`RustRuntimeServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/rust/struct.RustRuntimeServices.html) is the primary runtime service available in Rust steps. It provides: + +**Variable Operations:** +- Reading and writing flowey variables +- Secret handling (automatic secret propagation for safety) +- Support for reading values of any type that implements [`ReadVarValue`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.ReadVarValue.html) + +**Environment Queries:** +- Backend identification (Local, ADO, or GitHub) +- Platform detection (Windows, Linux, macOS) +- Architecture information (x86_64, Aarch64) + +#### Secret Variables and CI Backend Integration + +Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. + +**How Secret Handling Works** + +When a variable is marked as secret, flowey ensures: +- The value is not logged or printed in step output +- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs +- Secret status is automatically propagated to prevent leaks + +**Automatic Secret Propagation** + +To prevent accidental leaks, flowey uses conservative automatic secret propagation: + +```admonish warning +If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. 
+``` + +For example: + +```rust +ctx.emit_rust_step("process token", |ctx| { + let secret_token = secret_token.claim(ctx); + let output_var = output_var.claim(ctx); + |rt| { + let token = rt.read(secret_token); // Reading a secret + + // This write is AUTOMATICALLY marked as secret + // (even though we're just writing "done") + rt.write(output_var, &"done".to_string()); + + Ok(()) + } +}); +``` + +If you need to write non-secret data after reading a secret, use `write_not_secret()`: + +```rust +rt.write_not_secret(output_var, &"done".to_string()); +``` + +**Best Practices for Secrets** + +1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML +2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys +3. **Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary + +### AdoStepServices + +[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: + +**ADO Variable Bridge:** +- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars +- Convert flowey vars back into ADO variables for use in YAML +- Handle secret variables appropriately + +**Repository Resources:** +- Resolve repository IDs declared as pipeline resources +- Access repository information in ADO-specific steps + +### GhStepBuilder + +[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: + +**Step Configuration:** +- Specifying the action to use (e.g., `actions/checkout@v4`) +- Adding input parameters via `.with()` +- Capturing step outputs into flowey variables +- Setting conditional execution based on variables + +**Dependency Management:** +- Declaring side-effect 
dependencies via `.run_after()` +- Ensuring steps run in the correct order + +**Permissions:** +- Declaring required GITHUB_TOKEN permissions +- Automatic permission aggregation at the job level + +--- + +## Variables +### Claiming Variables + +Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: +1. Registers that this step depends on (or produces) this variable +2. Converts `ReadVar<T, VarNotClaimed>` to `ReadVar<T, VarClaimed>` +3. Allows flowey to track variable usage for graph construction + +Variables can only be claimed inside step closures using the `claim()` method. + +**Nested closure pattern and related contexts:** + +```rust +// Inside a SimpleFlowNode's process_request() method +fn process_request(&self, request: Self::Request, ctx: &mut NodeCtx<'_>) { + // Assume a single Request provided an input ReadVar and output WriteVar + let input_var: ReadVar<String> = /* from one of the requests */; + let output_var: WriteVar<i32> = /* from one of the requests */; + + // Declare a step (still build-time). This adds a node to the DAG. + ctx.emit_rust_step("compute length", |step| { + // step : StepCtx (outer closure, build-time) + // Claim dependencies so the graph knows: this step READS input_var, WRITES output_var. + let input_var = input_var.claim(step); + let output_var = output_var.claim(step); + + // Return the runtime closure. + move |rt| { + // rt : RustRuntimeServices (runtime phase) + let input = rt.read(input_var); // consume value + let len = input.len() as i32; + rt.write(output_var, &len); // fulfill promise + Ok(()) + } + }); +} +``` + +**Why the nested closure dance?** + +The nested closure pattern is fundamental to flowey's two-phase execution model: + +1. 
**Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to: + - Claim variables, which registers dependencies in the graph + - Determine what this step depends on (reads) and produces (writes) + - Allow flowey to validate the dependency graph and determine execution order + - The outer closure returns the inner closure for later execution + +2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to: + - Read actual values from claimed `ReadVar`s + - Perform the real work (computations, running commands, etc.) + - Write actual values to claimed `WriteVar`s + +- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. + +- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). + +The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure). + +### ClaimedReadVar and ClaimedWriteVar + +These are type aliases for claimed variables: +- [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar<T, VarClaimed>` +- [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar<T, VarClaimed>` + +Only claimed variables can be read/written at runtime.
+ +**Implementation Detail: Zero-Sized Types (ZSTs)** + +The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. This allows Rust to statically verify that all variables used in a runtime block have been claimed by that block. + +The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available. + +### Static Values vs Runtime Values + +Sometimes you know a value at build-time: + +```rust +// Create a ReadVar with a static value +let version = ReadVar::from_static("1.2.3".to_string()); + +// This is encoded directly in the pipeline, not computed at runtime +// WARNING: Never use this for secrets! +``` + +This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. + +### Variable Operations + +`ReadVar` provides operations for transforming and combining variables: + +- **`map()`**: Transform a `ReadVar<T>` into a `ReadVar<U>` +- **`zip()`**: Combine two ReadVars into `ReadVar<(T, U)>` +- **`into_side_effect()`**: Convert `ReadVar<T>` to `ReadVar<SideEffect>` when you only care about ordering, not the value +- **`depending_on()`**: Create a new ReadVar with an explicit dependency + +For detailed examples, see the [`ReadVar` documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html).
+ +--- + +## Flowey Nodes + +At a conceptual level, a Flowey node is analogous to a strongly typed function: you "invoke" it by submitting one or more Request values (its parameters), and it responds by emitting steps that perform work and produce outputs (values written to `WriteVar`s, published artifacts, or side-effect dependencies). + +### The Node/Request Pattern + +Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. + +For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). + +### FlowNode vs SimpleFlowNode + +Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: + +[**`SimpleFlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) - for straightforward, function-like operations: +- Uses a **single struct Request** type +- Processes one request at a time independently +- Behaves like a "plain old function" that resolves its single request type +- Each invocation is isolated - no shared state or coordination between requests +- Simpler implementation with less boilerplate +- Ideal for straightforward operations like running a command or transforming data + +**Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. 
+ +[**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) - for complex nodes requiring coordination and non-local configuration: +- Often uses an **enum Request** with multiple variants +- Receives all requests as a `Vec<Request>` and processes them together +- Can aggregate, optimize, and consolidate multiple requests into fewer steps +- Enables **non-local configuration** - critical for simplifying complex pipelines + +**The Non-Local Configuration Pattern** + +The key advantage of FlowNode is its ability to accept configuration from different parts of the node graph without forcing intermediate nodes to be aware of that configuration. This is the "non-local" aspect: + +Consider an "install Rust toolchain" node with an enum Request: + +```rust +enum Request { + SetVersion { version: String }, + GetToolchain { toolchain_path: WriteVar<PathBuf> }, +} +``` + +**Without this pattern** (struct-only requests), you'd need to thread the Rust version through every intermediate node in the call graph: + +``` +Root Node (knows version: "1.75") + → Node A (must pass through version) + → Node B (must pass through version) + → Node C (must pass through version) + → Install Rust Node (finally uses version) +``` + +**With FlowNode's enum Request**, the root node can send `Request::SetVersion` once, while intermediate nodes that don't care about the version can simply send `Request::GetToolchain`: + +``` +Root Node → InstallRust::SetVersion("1.75") + → Node A + → Node B + → Node C → InstallRust::GetToolchain() +``` + +The Install Rust FlowNode receives both requests together, validates that exactly one `SetVersion` was provided, and fulfills all the `GetToolchain` requests with that configured version. The intermediate nodes (A, B, C) never needed to know about or pass through version information.
+ +This pattern: +- **Eliminates plumbing complexity** in large pipelines +- **Allows global configuration** to be set once at the top level +- **Keeps unrelated nodes decoupled** from configuration they don't need +- **Enables validation** that required configuration was provided (exactly one `SetVersion`) + +**Additional Benefits of FlowNode:** +- Optimize and consolidate multiple similar requests into fewer steps (e.g., installing a tool once for many consumers) +- Resolve conflicts or enforce consistency across requests + +For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) documentation. + +### Node Registration + +Nodes are automatically registered using macros that handle most of the boilerplate: +- [`new_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) - registers a FlowNode +- [`new_simple_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) - registers a SimpleFlowNode +- [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) - defines the Request type and implements [`IntoRequest`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.IntoRequest.html) + +### The imports() Method + +The `imports()` method declares which other nodes this node might depend on. This enables flowey to: +- Validate that all dependencies are available +- Build the complete dependency graph +- Catch missing dependencies at build-time + +```admonish warning +Flowey does not catch unused imports today as part of its build-time validation step. +``` + +**Why declare imports?** Flowey needs to know the full set of potentially-used nodes at compilation time to properly resolve the dependency graph. 
+ +For more on node imports, see the [`FlowNode::imports` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.imports). + +### The emit() Method + +The [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit) method is where a node's actual logic lives. For [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html), it receives all requests together and must: +1. Aggregate and validate requests (ensuring consistency where needed) +2. Emit steps to perform the work +3. Wire up dependencies between steps via variables + +For [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html), the equivalent [`process_request()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html#tymethod.process_request) method processes one request at a time. + +For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). + +--- + +## Node Design Philosophy + +Flowey nodes are designed around several key principles: + +### 1. Composability + +Nodes should be reusable building blocks that can be combined to build complex +workflows. Each node should have a single, well-defined responsibility. + +❌ **Bad**: A node that "builds and tests the project" +✅ **Good**: Separate nodes for "build project" and "run tests" + +### 2. Explicit Dependencies + +Dependencies between steps should be explicit through variables, not implicit +through side effects. + +❌ **Bad**: Assuming a tool is already installed +✅ **Good**: Taking a `ReadVar` that proves installation happened + +### 3. Backend Abstraction + +Nodes should work across all backends when possible. Backend-specific behavior +should be isolated and documented. + +### 4. 
Separation of Concerns + +Keep node definition (request types, dependencies) separate from step +implementation (runtime logic): + +- **Node definition**: What the node does, what it depends on +- **Step implementation**: How it does it + +--- + +## Common Patterns + +### Request Aggregation and Validation + +When a FlowNode receives multiple requests, it often needs to ensure certain values are consistent across all requests while collecting others. The `same_across_all_reqs` helper function simplifies this pattern by validating that a value is identical across all requests. + +**Key concepts:** +- Iterate through all requests and separate them by type +- Use `same_across_all_reqs` to validate values that must be consistent +- Collect values that can have multiple instances (like output variables) +- Validate that required values were provided + +For a complete example, see the [`same_across_all_reqs` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/user_facing/fn.same_across_all_reqs.html). + +### Conditional Execution Based on Backend/Platform + +Nodes can query the current backend and platform to emit platform-specific or backend-specific steps. This allows nodes to adapt their behavior based on the execution environment. + +**Key concepts:** +- Use `ctx.backend()` to check if running locally, on ADO, or on GitHub Actions +- Use `ctx.platform()` to check the operating system (Windows, Linux, macOS) +- Use `ctx.arch()` to check the architecture (x86_64, Aarch64) +- Emit different steps or use different tool configurations based on these values + +**When to use:** +- Installing platform-specific tools or dependencies +- Using different commands on Windows vs Unix systems +- Optimizing for local development vs CI environments + +For more on backend and platform APIs, see the [`NodeCtx` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html). 
+ +## Artifacts + +Artifacts enable typed data transfer between jobs with automatic dependency management, abstracting away CI system complexities like name collisions and manual job ordering. + +### Typed vs Untyped Artifacts + +**Typed artifacts (recommended)** provide type-safe artifact handling by defining +a custom type that implements the `Artifact` trait: + +```rust +#[derive(Serialize, Deserialize)] +struct MyArtifact { + #[serde(rename = "output.bin")] + binary: PathBuf, + #[serde(rename = "metadata.json")] + metadata: PathBuf, +} + +let (pub_artifact, use_artifact) = pipeline.new_typed_artifact("my-files"); +``` + +**Untyped artifacts** provide simple directory-based artifacts for simpler cases: + +```rust +let (pub_artifact, use_artifact) = pipeline.new_artifact("my-files"); +``` + +For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). + +Both `pipeline.new_typed_artifact("name")` and `pipeline.new_artifact("name")` return a tuple of handles: `(pub_artifact, use_artifact)`. When defining a job you convert them with the job context: +```rust +// In a producing job: +let artifact_out = ctx.publish_artifact(pub_artifact); +// artifact_out : WriteVar (typed) +// or WriteVar for untyped + +// In a consuming job: +let artifact_in = ctx.use_artifact(use_artifact); +// artifact_in : ReadVar (typed) +// or ReadVar for untyped +``` +After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). 
+Key concepts: +- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure +- Use `#[serde(rename = "file.exe")]` to specify exact file names +- Typed artifacts ensure compile-time type safety when passing data between jobs +- Untyped artifacts are simpler but don't provide type guarantees +- Tuple handles must be lifted with `ctx.publish_artifact(...)` / `ctx.use_artifact(...)` to become flowey variables + +### How Flowey Manages Artifacts Under the Hood + +During the **pipeline resolution phase** (build-time), flowey: + +1. **Identifies artifact producers and consumers** by analyzing which jobs write to vs read from each artifact's `WriteVar`/`ReadVar` +2. **Constructs the job dependency graph** ensuring producers run before consumers +3. **Generates backend-specific upload/download steps** in the appropriate places: + - For ADO: Uses `PublishPipelineArtifact` and `DownloadPipelineArtifact` tasks + - For GitHub Actions: Uses `actions/upload-artifact` and `actions/download-artifact` + - For local execution: Uses filesystem copying + +At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: +- Producing jobs write artifact files to the path from `WriteVar` +- Flowey automatically uploads those files as an artifact +- Consuming jobs read the path from `ReadVar` where flowey has downloaded the artifact + +--- + +## Pipelines + +Pipelines define complete automation workflows consisting of jobs that run nodes. See the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html) for detailed examples. 
+ +### Pipeline Jobs + +[`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) instances are configured using a builder pattern: + +```rust +let job = pipeline + .new_job(platform, arch, "my-job") + .with_timeout_in_minutes(60) + .with_condition(some_param) + .ado_set_pool("my-pool") + .gh_set_pool(GhRunner::UbuntuLatest) + .dep_on(|ctx| { + // Define what nodes this job depends on + some_node::Request { /* ... */ } + }) + .finish(); +``` + +### Pipeline Parameters + +Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). +Azure DevOps parameter UI + + +```rust +// Define a boolean parameter +let verbose = pipeline.new_parameter_bool( + "verbose", + "Run with verbose output", + ParameterKind::Stable, + Some(false) // default value +); + +// Use the parameter in a job +let job = pipeline.new_job(...) + .dep_on(|ctx| { + let verbose = ctx.use_parameter(verbose); + // verbose is now a ReadVar + }) + .finish(); +``` + +#### Stable vs Unstable Parameters + +Every parameter in flowey must be declared as either **Stable** or **Unstable** using [`ParameterKind`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html). This classification determines the parameter's visibility and API stability: + +**Stable Parameters ([`ParameterKind::Stable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Stable))** + +Stable parameters represent a **public, stable API** for the pipeline: + +- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. +- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. 
+- **Use Cases**: + - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) + - Parameters intended for use by other teams or external automation + - Parameters documented as part of the pipeline's public interface + +**Unstable Parameters ([`ParameterKind::Unstable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Unstable))** + +Unstable parameters are for **internal use** and experimentation: + +- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. +- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. +- **Use Cases**: + - Experimental features or debugging flags + - Internal pipeline configuration that may change frequently + - Parameters for development/testing that shouldn't be used in production diff --git a/Guide/src/dev_guide/dev_tools/images/Parameters.png b/Guide/src/dev_guide/dev_tools/flowey/images/Parameters.png similarity index 100% rename from Guide/src/dev_guide/dev_tools/images/Parameters.png rename to Guide/src/dev_guide/dev_tools/flowey/images/Parameters.png diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index 7973359cc9..e18775b2f0 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -1,7 +1,7 @@ # cargo xflowey To implement various developer workflows (both locally, as well as in CI), the -OpenVMM project relies on `flowey`: a custom, in-house Rust library/framework +OpenVMM project relies on [`flowey`](./flowey/flowey.md): a custom, in-house Rust library/framework for writing maintainable, cross-platform automation. 
`cargo xflowey` is a cargo alias that makes it easy for developers to run @@ -23,751 +23,4 @@ In a nutshell: ```admonish warning While `cargo xflowey` technically has the ability to run CI pipelines locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is currently broken and should not be relied upon. Use CI pipelines in their intended environments (Azure DevOps or GitHub Actions). -``` - -## Why Flowey? - -Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps Pipelines, GitHub Actions workflows) have several fundamental limitations that become increasingly problematic as projects grow in complexity: - -### The Problems with Traditional YAML Pipelines - -**Non-Local Reasoning and Global State** -- YAML pipelines heavily rely on global state and implicit dependencies (environment variables, file system state, installed tools) -- Understanding what a step does often requires mentally tracking state mutations across the entire pipeline -- Debugging requires reasoning about the entire pipeline context rather than isolated units of work -- Changes in one part of the pipeline can have unexpected effects in distant, seemingly unrelated parts - -**Maintainability Challenges** -- YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) -- No compile-time validation means errors only surface at runtime -- Refactoring is risky and error-prone without automated tools to catch breaking changes -- Code duplication is common because YAML lacks good abstraction mechanisms -- Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive - -**Platform Lock-In** -- Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) 
-- Multi-platform support means maintaining multiple, divergent YAML files - -**Local Development Gaps** -- Developers can't easily test pipeline changes before pushing to CI -- Reproducing CI failures locally is difficult or impossible -- The feedback loop is slow: push → wait for CI → debug → repeat - -### Flowey's Solution - -Flowey addresses these issues by treating automation as **first-class Rust code**: - -- **Type Safety**: Rust's type system catches errors at compile-time rather than runtime -- **Local Reasoning**: Dependencies are explicit through typed variables, not implicit through global state -- **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) -- **Reusability**: Nodes are composable building blocks that can be shared across pipelines -- **Local Execution**: The same pipeline definition can run locally or in CI ---- - -# Flowey Developer Guide - -This guide explains the core concepts and architecture of flowey for developers -working on OpenVMM automation. - -## Table of Contents - -1. [Flowey Fundamentals](#flowey-fundamentals) -2. [Two-Phase Execution Model](#two-phase-execution-model) -3. [Steps](#steps) -4. [Runtime Services](#runtime-services) -5. [Variables](#variables) -6. [Flowey Nodes](#flowey-nodes) -7. [Node Design Philosophy](#node-design-philosophy) -8. [Common Patterns](#common-patterns) -9. [Artifacts](#artifacts) -10. [Pipelines](#pipelines) - ---- - -## Flowey Fundamentals - -Before diving into how flowey works, let's establish the key building blocks that form the foundation of flowey's automation model. These concepts are flowey's Rust-based abstractions for common CI/CD workflow primitives. - -### The Automation Workflow Model - -In traditional CI/CD systems, workflows are defined using YAML with implicit dependencies and global state. 
Flowey takes a fundamentally different approach: **automation workflows are modeled as a directed acyclic graph (DAG) of typed, composable Rust components**. Each component has explicit inputs and outputs, and dependencies are tracked through the type system. - -### Core Building Blocks - -Flowey's model consists of a hierarchy of components: - -**[Pipelines](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html)** are the top-level construct that defines a complete automation workflow. A pipeline specifies what work needs to be done and how it should be organized. Pipelines can target different execution backends (local machine, Azure DevOps, GitHub Actions) and generate appropriate configuration for each. - -**[Jobs](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html)** represent units of work that run on a specific platform (Windows, Linux, macOS) and architecture (x86_64, Aarch64). Jobs can run in parallel when they don't depend on each other, or sequentially when one job's output is needed by another. Each job is isolated and runs in its own environment. - -**[Nodes](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html)** are reusable units of automation logic that perform specific tasks (e.g., "install Rust toolchain", "run cargo build", "publish test results"). Nodes are invoked by jobs and emit one or more steps to accomplish their purpose. Nodes can depend on other nodes, forming a composable ecosystem of automation building blocks. - -**Steps** are the individual units of work that execute at runtime. A step might run a shell command, execute Rust code, or interact with the CI backend. Steps are emitted by nodes during the build-time phase and executed in dependency order during runtime. 
- -### Connecting the Pieces - -These building blocks are connected through three key mechanisms: - -**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar` represents a promise to produce a value of type `T` at runtime, while a `ReadVar` represents a dependency on that value. Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar`, and a "test" step would read from the corresponding `ReadVar`. This echoes Rust’s “shared XOR mutable” ownership rule: a value has either one writer or multiple readers, never both concurrently. - -**[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. - -**[Side Effects](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.SideEffect.html)** represent dependencies without data. Sometimes step B needs to run after step A, but A doesn't produce any data that B consumes (e.g., "install dependencies" must happen before "run tests", even though the test step doesn't directly use the installation output). Side effects are represented as `ReadVar` and establish ordering constraints in the DAG without transferring actual values. 
- -### Putting It Together - -Here's how these pieces relate: - -``` -Pipeline - ├─ Job 1 (Linux x86_64) - │ ├─ Node A (install Rust) - │ │ └─ Step: Run rustup install - │ │ └─ Produces: WriteVar (installation complete) - │ └─ Node B (build project) - │ └─ Step: Run cargo build - │ └─ Consumes: ReadVar (installation complete) - │ └─ Produces: WriteVar (binary path) → Artifact - │ - └─ Job 2 (Windows x86_64) - └─ Node C (run tests) - └─ Step: Run binary with test inputs - └─ Consumes: ReadVar (binary path) ← Artifact - └─ Produces: WriteVar (test results) -``` - -In this example: -- The **Pipeline** defines two jobs that run on different platforms -- **Job 1** installs Rust and builds the project, with step dependencies expressed through variables -- **Job 2** runs tests using the binary from Job 1, with the binary transferred via an artifact -- **Variables** create dependencies within a job (build depends on install) -- **Artifacts** create dependencies between jobs (Job 2 depends on Job 1's output) -- **Side Effects** represent the "Rust is installed" state without carrying data - ---- - -## Two-Phase Execution Model - -Flowey operates in two distinct phases: - -1. **Build-Time (Resolution Phase)**: When you run `cargo xflowey regen`, flowey: - - Reads `.flowey.toml` to determine which pipelines to regenerate - - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build` - - Runs the flowey binary with `pipeline --out ` for each pipeline definition - - During this invocation, flowey constructs a **directed acyclic graph (DAG)** by: - - Instantiating all nodes (reusable units of automation logic) defined in the pipeline - - Processing their requests - - Resolving dependencies between nodes via variables and artifacts - - Determining the execution order - - Performing flowey-specific validations (dependency resolution, type checking, etc.) - - Generates YAML files for CI systems (ADO, GitHub Actions) at the paths specified in `.flowey.toml` - -2. 
**Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey `). Steps (units of work) run in the order determined at build-time: - - Variables are read and written with actual values - - Commands are executed - - Artifacts (data packages passed between jobs) are published/consumed - - Side effects (dependencies) are resolved - -```admonish note -The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: -```toml -[[pipeline.flowey_hvlite.github]] -file = ".github/workflows/openvmm-pr.yaml" -cmd = ["ci", "checkin-gates", "--config=pr"] -``` - -When you run `cargo xflowey regen`: -1. It reads `.flowey.toml` -2. Builds the `flowey-hvlite` binary -3. Runs `flowey-hvlite pipeline github --out .github/workflows/openvmm-pr.yaml ci checkin-gates --config=pr` -4. This generates/updates the YAML file with the resolved pipeline - -**Key Distinction:** -- `cargo build -p flowey-hvlite` - Only compiles the flowey code to verify it builds successfully. **Does not** construct the DAG or generate YAML files. -- `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. - -Always run `cargo xflowey regen` after modifying pipeline definitions to ensure the generated YAML files reflect your changes. - - -### Backend Abstraction - -Flowey supports multiple execution backends: - -- **Local**: Runs directly on your development machine -- **ADO (Azure DevOps)**: Generates ADO Pipeline YAML -- **GitHub Actions**: Generates GitHub Actions workflow YAML - -```admonish warning: -Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. 
Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. Writing cross-platform flowey code enables locally testing pipelines which can be invaluable when iterating over CI changes. -``` - -If a node only supports certain backends, it should immediately fast‑fail with a clear error (“ not supported on ”) instead of silently proceeding. That failure signals it’s time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. - ---- - -## Steps - -**Steps** are units of work that will be executed at runtime. Different -step types exist for different purposes. - -### Rust Steps - -Rust steps execute Rust code at runtime and are the most common step type in flowey. - -**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar` that other steps can use as a dependency. - -**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. Using minor steps also improve performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step. - -**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable. - -For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html). - -### ADO Steps - -**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. 
Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. - -For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). - -### GitHub Steps - -**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. - -For GitHub step examples, see the [`GhStepBuilder` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html). - -### Side Effect Steps - -**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. - -For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). - -### Isolated Working Directories and Path Immutability - -```admonish warning title="Critical Constraint" -**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. - -However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: - -**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** -``` - -When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. If you need to modify files from a `ReadVar`, copy them to your step's working directory. 
- ---- - -## Runtime Services - -Runtime services provide the API available during step execution (inside the -closures passed to `emit_rust_step`, etc.). - -### RustRuntimeServices - -[`RustRuntimeServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/rust/struct.RustRuntimeServices.html) is the primary runtime service available in Rust steps. It provides: - -**Variable Operations:** -- Reading and writing flowey variables -- Secret handling (automatic secret propagation for safety) -- Support for reading values of any type that implements [`ReadVarValue`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.ReadVarValue.html) - -**Environment Queries:** -- Backend identification (Local, ADO, or GitHub) -- Platform detection (Windows, Linux, macOS) -- Architecture information (x86_64, Aarch64) - -#### Secret Variables and CI Backend Integration - -Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. - -**How Secret Handling Works** - -When a variable is marked as secret, flowey ensures: -- The value is not logged or printed in step output -- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs -- Secret status is automatically propagated to prevent leaks - -**Automatic Secret Propagation** - -To prevent accidental leaks, flowey uses conservative automatic secret propagation: - -```admonish warning -If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. 
-``` - -For example: - -```rust -ctx.emit_rust_step("process token", |ctx| { - let secret_token = secret_token.claim(ctx); - let output_var = output_var.claim(ctx); - |rt| { - let token = rt.read(secret_token); // Reading a secret - - // This write is AUTOMATICALLY marked as secret - // (even though we're just writing "done") - rt.write(output_var, &"done".to_string()); - - Ok(()) - } -}); -``` - -If you need to write non-secret data after reading a secret, use `write_not_secret()`: - -```rust -rt.write_not_secret(output_var, &"done".to_string()); -``` - -**Best Practices for Secrets** - -1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML -2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys -5. **Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary - -### AdoStepServices - -[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: - -**ADO Variable Bridge:** -- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars -- Convert flowey vars back into ADO variables for use in YAML -- Handle secret variables appropriately - -**Repository Resources:** -- Resolve repository IDs declared as pipeline resources -- Access repository information in ADO-specific steps - -### GhStepBuilder - -[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: - -**Step Configuration:** -- Specifying the action to use (e.g., `actions/checkout@v4`) -- Adding input parameters via `.with()` -- Capturing step outputs into flowey variables -- Setting conditional execution based on variables - -**Dependency Management:** -- Declaring side-effect 
dependencies via `.run_after()` -- Ensuring steps run in the correct order - -**Permissions:** -- Declaring required GITHUB_TOKEN permissions -- Automatic permission aggregation at the job level - ---- - -## Variables - -### Claiming Variables - -Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: -1. Registers that this step depends on (or produces) this variable -2. Converts `ReadVar` to `ReadVar` -3. Allows flowey to track variable usage for graph construction - -Variables can only be claimed inside step closures using the `claim()` method. - -**Nested closure pattern and related contexts:** - -```rust -// Inside a SimpleFlowNode's process_request() method -fn process_request(&self, request: Self::Request, ctx: &mut NodeCtx<'_>) { - // Assume a single Request provided an input ReadVar and output WriteVar - let input_var: ReadVar = /* from one of the requests */; - let output_var: WriteVar = /* from one of the request */ - - // Declare a step (still build-time). This adds a node to the DAG. - ctx.emit_rust_step("compute length", |step| { - // step : StepCtx (outer closure, build-time) - // Claim dependencies so the graph knows: this step READS input_var, WRITES output_var. - let input_var = input_var.claim(step); - let output_var = output_var.claim(step); - - // Return the runtime closure. - move |rt| { - // rt : RustRuntimeServices (runtime phase) - let input = rt.read(input_var); // consume value - let len = input.len() as i32; - rt.write(output_var, &len); // fulfill promise - Ok(()) - } - }); -} -``` - -**Why the nested closure dance?** - -The nested closure pattern is fundamental to flowey's two-phase execution model: - -1. 
**Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to: - - Claim variables, which registers dependencies in the graph - - Determine what this step depends on (reads) and produces (writes) - - Allow flowey to validate the dependency graph and determine execution order - - The outer closure returns the inner closure for later execution - -2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to: - - Read actual values from claimed `ReadVar`s - - Perform the real work (computations, running commands, etc.) - - Write actual values to claimed `WriteVar`s - -- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. - -- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). - -The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure). - -### ClaimedReadVar and ClaimedWriteVar - -These are type aliases for claimed variables: -- [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar` -- [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar` - -Only claimed variables can be read/written at runtime. 
- -**Implementation Detail: Zero-Sized Types (ZSTs)** - -The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. It allows Rust to statically verify that all variables used in a runtime block have been claimed by that block. - -The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available. - -### Static Values vs Runtime Values - -Sometimes you know a value at build-time: - -```rust -// Create a ReadVar with a static value -let version = ReadVar::from_static("1.2.3".to_string()); - -// This is encoded directly in the pipeline, not computed at runtime -// WARNING: Never use this for secrets! -``` - -This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. - -### Variable Operations - -`ReadVar` provides operations for transforming and combining variables: - -- **`map()`**: Transform a `ReadVar` into a `ReadVar` -- **`zip()`**: Combine two ReadVars into `ReadVar<(T, U)>` -- **`into_side_effect()`**: Convert `ReadVar` to `ReadVar` when you only care about ordering, not the value -- **`depending_on()`**: Create a new ReadVar with an explicit dependency - -For detailed examples, see the [`ReadVar` documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html). 
- ---- - -## Flowey Nodes - -At a conceptual level, a Flowey node is analogous to a strongly typed function: you "invoke" it by submitting one or more Request values (its parameters), and it responds by emitting steps that perform work and produce outputs (values written to `WriteVar`s, published artifacts, or side-effect dependencies). - -### The Node/Request Pattern - -Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. - -For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). - -### FlowNode vs SimpleFlowNode - -Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: - -[**`SimpleFlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) - for straightforward, function-like operations: -- Uses a **single struct Request** type -- Processes one request at a time independently -- Behaves like a "plain old function" that resolves its single request type -- Each invocation is isolated - no shared state or coordination between requests -- Simpler implementation with less boilerplate -- Ideal for straightforward operations like running a command or transforming data - -**Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. 
- -[**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) - for complex nodes requiring coordination and non-local configuration: -- Often uses an **enum Request** with multiple variants -- Receives all requests as a `Vec` and processes them together -- Can aggregate, optimize, and consolidate multiple requests into fewer steps -- Enables **non-local configuration** - critical for simplifying complex pipelines - -**The Non-Local Configuration Pattern** - -The key advantage of FlowNode is its ability to accept configuration from different parts of the node graph without forcing intermediate nodes to be aware of that configuration. This is the "non-local" aspect: - -Consider an "install Rust toolchain" node with an enum Request: - -```rust -enum Request { - SetVersion { version: String }, - GetToolchain { toolchain_path: WriteVar }, -} -``` - -**Without this pattern** (struct-only requests), you'd need to thread the Rust version through every intermediate node in the call graph: - -``` -Root Node (knows version: "1.75") - → Node A (must pass through version) - → Node B (must pass through version) - → Node C (must pass through version) - → Install Rust Node (finally uses version) -``` - -**With FlowNode's enum Request**, the root node can send `Request::SetVersion` once, while intermediate nodes that don't care about the version can simply send `Request::GetToolchain`: - -``` -Root Node → InstallRust::SetVersion("1.75") - → Node A - → Node B - → Node C → InstallRust::GetToolchain() -``` - -The Install Rust FlowNode receives both requests together, validates that exactly one `SetVersion` was provided, and fulfills all the `GetToolchain` requests with that configured version. The intermediate nodes (A, B, C) never needed to know about or pass through version information. 
- -This pattern: -- **Eliminates plumbing complexity** in large pipelines -- **Allows global configuration** to be set once at the top level -- **Keeps unrelated nodes decoupled** from configuration they don't need -- **Enables validation** that required configuration was provided (exactly one `SetVersion`) - -**Additional Benefits of FlowNode:** -- Optimize and consolidate multiple similar requests into fewer steps (e.g., installing a tool once for many consumers) -- Resolve conflicts or enforce consistency across requests - -For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) documentation. - -### Node Registration - -Nodes are automatically registered using macros that handle most of the boilerplate: -- [`new_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) - registers a FlowNode -- [`new_simple_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) - registers a SimpleFlowNode -- [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) - defines the Request type and implements [`IntoRequest`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.IntoRequest.html) - -### The imports() Method - -The `imports()` method declares which other nodes this node might depend on. This enables flowey to: -- Validate that all dependencies are available -- Build the complete dependency graph -- Catch missing dependencies at build-time - -```admonish warning -Flowey does not catch unused imports today as part of its build-time validation step. -``` - -**Why declare imports?** Flowey needs to know the full set of potentially-used nodes at compilation time to properly resolve the dependency graph. 
- -For more on node imports, see the [`FlowNode::imports` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.imports). - -### The emit() Method - -The [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit) method is where a node's actual logic lives. For [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html), it receives all requests together and must: -1. Aggregate and validate requests (ensuring consistency where needed) -2. Emit steps to perform the work -3. Wire up dependencies between steps via variables - -For [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html), the equivalent [`process_request()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html#tymethod.process_request) method processes one request at a time. - -For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). - ---- - -## Node Design Philosophy - -Flowey nodes are designed around several key principles: - -### 1. Composability - -Nodes should be reusable building blocks that can be combined to build complex -workflows. Each node should have a single, well-defined responsibility. - -❌ **Bad**: A node that "builds and tests the project" -✅ **Good**: Separate nodes for "build project" and "run tests" - -### 2. Explicit Dependencies - -Dependencies between steps should be explicit through variables, not implicit -through side effects. - -❌ **Bad**: Assuming a tool is already installed -✅ **Good**: Taking a `ReadVar` that proves installation happened - -### 3. Backend Abstraction - -Nodes should work across all backends when possible. Backend-specific behavior -should be isolated and documented. - -### 4. 
Separation of Concerns - -Keep node definition (request types, dependencies) separate from step -implementation (runtime logic): - -- **Node definition**: What the node does, what it depends on -- **Step implementation**: How it does it - ---- - -## Common Patterns - -### Request Aggregation and Validation - -When a FlowNode receives multiple requests, it often needs to ensure certain values are consistent across all requests while collecting others. The `same_across_all_reqs` helper function simplifies this pattern by validating that a value is identical across all requests. - -**Key concepts:** -- Iterate through all requests and separate them by type -- Use `same_across_all_reqs` to validate values that must be consistent -- Collect values that can have multiple instances (like output variables) -- Validate that required values were provided - -For a complete example, see the [`same_across_all_reqs` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/user_facing/fn.same_across_all_reqs.html). - -### Conditional Execution Based on Backend/Platform - -Nodes can query the current backend and platform to emit platform-specific or backend-specific steps. This allows nodes to adapt their behavior based on the execution environment. - -**Key concepts:** -- Use `ctx.backend()` to check if running locally, on ADO, or on GitHub Actions -- Use `ctx.platform()` to check the operating system (Windows, Linux, macOS) -- Use `ctx.arch()` to check the architecture (x86_64, Aarch64) -- Emit different steps or use different tool configurations based on these values - -**When to use:** -- Installing platform-specific tools or dependencies -- Using different commands on Windows vs Unix systems -- Optimizing for local development vs CI environments - -For more on backend and platform APIs, see the [`NodeCtx` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html). 
- -## Artifacts - -Artifacts enable typed data transfer between jobs with automatic dependency management, abstracting away CI system complexities like name collisions and manual job ordering. - -### Typed vs Untyped Artifacts - -**Typed artifacts (recommended)** provide type-safe artifact handling by defining -a custom type that implements the `Artifact` trait: - -```rust -#[derive(Serialize, Deserialize)] -struct MyArtifact { - #[serde(rename = "output.bin")] - binary: PathBuf, - #[serde(rename = "metadata.json")] - metadata: PathBuf, -} - -let (pub_artifact, use_artifact) = pipeline.new_typed_artifact("my-files"); -``` - -**Untyped artifacts** provide simple directory-based artifacts for simpler cases: - -```rust -let (pub_artifact, use_artifact) = pipeline.new_artifact("my-files"); -``` - -For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). - -Both `pipeline.new_typed_artifact("name")` and `pipeline.new_artifact("name")` return a tuple of handles: `(pub_artifact, use_artifact)`. When defining a job you convert them with the job context: -```rust -// In a producing job: -let artifact_out = ctx.publish_artifact(pub_artifact); -// artifact_out : WriteVar<MyArtifact> (typed) -// or WriteVar<PathBuf> for untyped - -// In a consuming job: -let artifact_in = ctx.use_artifact(use_artifact); -// artifact_in : ReadVar<MyArtifact> (typed) -// or ReadVar<PathBuf> for untyped -``` -After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). 
-Key concepts: -- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure -- Use `#[serde(rename = "file.exe")]` to specify exact file names -- Typed artifacts ensure compile-time type safety when passing data between jobs -- Untyped artifacts are simpler but don't provide type guarantees -- Tuple handles must be lifted with `ctx.publish_artifact(...)` / `ctx.use_artifact(...)` to become flowey variables - -### How Flowey Manages Artifacts Under the Hood - -During the **pipeline resolution phase** (build-time), flowey: - -1. **Identifies artifact producers and consumers** by analyzing which jobs write to vs read from each artifact's `WriteVar`/`ReadVar` -2. **Constructs the job dependency graph** ensuring producers run before consumers -3. **Generates backend-specific upload/download steps** in the appropriate places: - - For ADO: Uses `PublishPipelineArtifact` and `DownloadPipelineArtifact` tasks - - For GitHub Actions: Uses `actions/upload-artifact` and `actions/download-artifact` - - For local execution: Uses filesystem copying - -At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: -- Producing jobs write artifact files to the path from `WriteVar` -- Flowey automatically uploads those files as an artifact -- Consuming jobs read the path from `ReadVar` where flowey has downloaded the artifact - ---- - -## Pipelines - -Pipelines define complete automation workflows consisting of jobs that run nodes. See the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html) for detailed examples. 
- -### Pipeline Jobs - -[`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) instances are configured using a builder pattern: - -```rust -let job = pipeline - .new_job(platform, arch, "my-job") - .with_timeout_in_minutes(60) - .with_condition(some_param) - .ado_set_pool("my-pool") - .gh_set_pool(GhRunner::UbuntuLatest) - .dep_on(|ctx| { - // Define what nodes this job depends on - some_node::Request { /* ... */ } - }) - .finish(); -``` - -### Pipeline Parameters - -Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). -Azure DevOps parameter UI - - -```rust -// Define a boolean parameter -let verbose = pipeline.new_parameter_bool( - "verbose", - "Run with verbose output", - ParameterKind::Stable, - Some(false) // default value -); - -// Use the parameter in a job -let job = pipeline.new_job(...) - .dep_on(|ctx| { - let verbose = ctx.use_parameter(verbose); - // verbose is now a ReadVar - }) - .finish(); -``` - -#### Stable vs Unstable Parameters - -Every parameter in flowey must be declared as either **Stable** or **Unstable** using [`ParameterKind`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html). This classification determines the parameter's visibility and API stability: - -**Stable Parameters ([`ParameterKind::Stable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Stable))** - -Stable parameters represent a **public, stable API** for the pipeline: - -- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. -- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. 
-- **Use Cases**: - - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) - - Parameters intended for use by other teams or external automation - - Parameters documented as part of the pipeline's public interface - -**Unstable Parameters ([`ParameterKind::Unstable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Unstable))** - -Unstable parameters are for **internal use** and experimentation: - -- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. -- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. -- **Use Cases**: - - Experimental features or debugging flags - - Internal pipeline configuration that may change frequently - - Parameters for development/testing that shouldn't be used in production +``` \ No newline at end of file From f6de9c8dcd687e020d2aa21ad3c9a6b015da1663 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 14:56:00 -0700 Subject: [PATCH 09/15] Update example to be more accurate to usage --- flowey/flowey_core/src/pipeline.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/flowey/flowey_core/src/pipeline.rs b/flowey/flowey_core/src/pipeline.rs index 265a692f2f..e71cd843cc 100644 --- a/flowey/flowey_core/src/pipeline.rs +++ b/flowey/flowey_core/src/pipeline.rs @@ -1302,7 +1302,7 @@ pub enum PipelineBackendHint { /// /// # Complex Example with Parameters and Artifacts /// -/// ```rust,no_run +/// ```rust,ignore /// use flowey_core::pipeline::{IntoPipeline, Pipeline, PipelineBackendHint, ParameterKind}; /// use flowey_core::node::{FlowPlatform, FlowPlatformLinuxDistro, FlowArch}; /// @@ -1331,9 +1331,8 @@ pub enum PipelineBackendHint { /// "build" /// ) /// .with_timeout_in_minutes(30) -/// 
.dep_on(|ctx| { -/// let _build_dir = ctx.publish_artifact(publish_build); -/// // Add node dependencies here +/// .dep_on(|ctx| flowey_lib_hvlite::_jobs::example_node::Request { +/// output_dir: ctx.publish_artifact(publish_build), /// }) /// .finish(); /// @@ -1345,9 +1344,8 @@ pub enum PipelineBackendHint { /// "test" /// ) /// .with_condition(enable_tests) -/// .dep_on(|ctx| { -/// let _build_dir = ctx.use_artifact(&use_build); -/// // Add node dependencies here +/// .dep_on(|ctx| flowey_lib_hvlite::_jobs::example_node2::Request { +/// input_dir: ctx.use_artifact(&use_build), /// }) /// .finish(); /// From baeeb7995f3242c6f6aaea6bbb220120146e64e5 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 29 Oct 2025 15:03:54 -0700 Subject: [PATCH 10/15] fmt fix --- Guide/src/dev_guide/dev_tools/xflowey.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index e18775b2f0..a1522cec8b 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -23,4 +23,4 @@ In a nutshell: ```admonish warning While `cargo xflowey` technically has the ability to run CI pipelines locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is currently broken and should not be relied upon. Use CI pipelines in their intended environments (Azure DevOps or GitHub Actions). 
-``` \ No newline at end of file +``` From ae55b698d00ec91b9e2ce06b1b0f77958780132b Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Thu, 30 Oct 2025 14:53:59 -0700 Subject: [PATCH 11/15] Re-organize guidebook --- Guide/src/SUMMARY.md | 8 +- Guide/src/dev_guide/dev_tools/flowey.md | 41 + .../dev_guide/dev_tools/flowey/artifacts.md | 64 ++ .../src/dev_guide/dev_tools/flowey/flowey.md | 739 ------------------ .../dev_tools/flowey/flowey_fundamentals.md | 115 +++ Guide/src/dev_guide/dev_tools/flowey/nodes.md | 172 ++++ .../dev_guide/dev_tools/flowey/pipelines.md | 71 ++ Guide/src/dev_guide/dev_tools/flowey/steps.md | 145 ++++ .../dev_guide/dev_tools/flowey/variables.md | 100 +++ 9 files changed, 715 insertions(+), 740 deletions(-) create mode 100644 Guide/src/dev_guide/dev_tools/flowey.md create mode 100644 Guide/src/dev_guide/dev_tools/flowey/artifacts.md delete mode 100644 Guide/src/dev_guide/dev_tools/flowey/flowey.md create mode 100644 Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md create mode 100644 Guide/src/dev_guide/dev_tools/flowey/nodes.md create mode 100644 Guide/src/dev_guide/dev_tools/flowey/pipelines.md create mode 100644 Guide/src/dev_guide/dev_tools/flowey/steps.md create mode 100644 Guide/src/dev_guide/dev_tools/flowey/variables.md diff --git a/Guide/src/SUMMARY.md b/Guide/src/SUMMARY.md index 109daf4a1a..a9fc85f4fe 100644 --- a/Guide/src/SUMMARY.md +++ b/Guide/src/SUMMARY.md @@ -35,9 +35,15 @@ - [Running Fuzzers](./dev_guide/tests/fuzzing/running.md) - [Writing Fuzzers](./dev_guide/tests/fuzzing/writing.md) - [Developer Tools / Utilities](./dev_guide/dev_tools.md) + - [`flowey`](./dev_guide/dev_tools/flowey.md) + - [`Flowey Fundamentals`](./dev_guide/dev_tools/flowey/flowey_fundamentals.md) + - [`Steps`](./dev_guide/dev_tools/flowey/steps.md) + - [`Variables`](./dev_guide/dev_tools/flowey/variables.md) + - [`Nodes`](./dev_guide/dev_tools/flowey/nodes.md) + - [`Artifacts`](./dev_guide/dev_tools/flowey/artifacts.md) + - 
[`Pipelines`](./dev_guide/dev_tools/flowey/pipelines.md) - [`cargo xtask`](./dev_guide/dev_tools/xtask.md) - [`cargo xflowey`](./dev_guide/dev_tools/xflowey.md) - - [`flowey`](./dev_guide/dev_tools/flowey/flowey.md) - [VmgsTool](./dev_guide/dev_tools/vmgstool.md) - [update-rootfs.py]() - [igvmfilegen]() diff --git a/Guide/src/dev_guide/dev_tools/flowey.md b/Guide/src/dev_guide/dev_tools/flowey.md new file mode 100644 index 0000000000..5b741ef986 --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey.md @@ -0,0 +1,41 @@ +# Flowey + +Flowey is an in-house, custom Rust library for writing maintainable, cross-platform automation. It enables developers to define CI/CD pipelines and local workflows as type-safe Rust code that can generate backend-specific YAML (Azure DevOps, GitHub Actions) or execute directly on a local machine. Rather than writing automation logic in YAML with implicit dependencies, flowey treats automation as first-class Rust code with explicit, typed dependencies tracked through a directed acyclic graph (DAG). + +## Why Flowey? + +Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps Pipelines, GitHub Actions workflows) have several fundamental limitations that become increasingly problematic as projects grow in complexity: + +### The Problems with Traditional YAML Pipelines + +**Non-Local Reasoning and Global State** +- YAML pipelines heavily rely on global state and implicit dependencies (environment variables, file system state, installed tools) +- Understanding what a step does often requires mentally tracking state mutations across the entire pipeline +- Debugging requires reasoning about the entire pipeline context rather than isolated units of work +- Changes in one part of the pipeline can have unexpected effects in distant, seemingly unrelated parts + +**Maintainability Challenges** +- YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) 
+- No compile-time validation means errors only surface at runtime +- Refactoring is risky and error-prone without automated tools to catch breaking changes +- Code duplication is common because YAML lacks good abstraction mechanisms +- Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive + +**Platform Lock-In** +- Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) +- Multi-platform support means maintaining multiple, divergent YAML files + +**Local Development Gaps** +- Developers can't easily test pipeline changes before pushing to CI +- Reproducing CI failures locally is difficult or impossible +- The feedback loop is slow: push → wait for CI → debug → repeat + +### Flowey's Solution + +Flowey addresses these issues by treating automation as **first-class Rust code**: + +- **Type Safety**: Rust's type system catches errors at compile-time rather than runtime +- **Local Reasoning**: Dependencies are explicit through typed variables, not implicit through global state +- **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) +- **Reusability**: Nodes are composable building blocks that can be shared across pipelines +- **Local Execution**: The same pipeline definition can run locally or in CI diff --git a/Guide/src/dev_guide/dev_tools/flowey/artifacts.md b/Guide/src/dev_guide/dev_tools/flowey/artifacts.md new file mode 100644 index 0000000000..e3df4682ba --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/artifacts.md @@ -0,0 +1,64 @@ +# Artifacts + +Artifacts enable typed data transfer between jobs with automatic dependency management, abstracting away CI system complexities like name collisions and manual job ordering. 
+ +### Typed vs Untyped Artifacts + +**Typed artifacts (recommended)** provide type-safe artifact handling by defining +a custom type that implements the `Artifact` trait: + +```rust +#[derive(Serialize, Deserialize)] +struct MyArtifact { + #[serde(rename = "output.bin")] + binary: PathBuf, + #[serde(rename = "metadata.json")] + metadata: PathBuf, +} + +let (pub_artifact, use_artifact) = pipeline.new_typed_artifact("my-files"); +``` + +**Untyped artifacts** provide simple directory-based artifacts for simpler cases: + +```rust +let (pub_artifact, use_artifact) = pipeline.new_artifact("my-files"); +``` + +For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). + +Both `pipeline.new_typed_artifact("name")` and `pipeline.new_artifact("name")` return a tuple of handles: `(pub_artifact, use_artifact)`. When defining a job you convert them with the job context: +```rust +// In a producing job: +let artifact_out = ctx.publish_artifact(pub_artifact); +// artifact_out : WriteVar (typed) +// or WriteVar for untyped + +// In a consuming job: +let artifact_in = ctx.use_artifact(use_artifact); +// artifact_in : ReadVar (typed) +// or ReadVar for untyped +``` +After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). +Key concepts: +- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure +- Use `#[serde(rename = "file.exe")]` to specify exact file names +- Typed artifacts ensure compile-time type safety when passing data between jobs +- Untyped artifacts are simpler but don't provide type guarantees +- Tuple handles must be lifted with `ctx.publish_artifact(...)` / `ctx.use_artifact(...)` to become flowey variables + +### How Flowey Manages Artifacts Under the Hood + +During the **pipeline resolution phase** (build-time), flowey: + +1. 
**Identifies artifact producers and consumers** by analyzing which jobs write to vs read from each artifact's `WriteVar`/`ReadVar` +2. **Constructs the job dependency graph** ensuring producers run before consumers +3. **Generates backend-specific upload/download steps** in the appropriate places: + - For ADO: Uses `PublishPipelineArtifact` and `DownloadPipelineArtifact` tasks + - For GitHub Actions: Uses `actions/upload-artifact` and `actions/download-artifact` + - For local execution: Uses filesystem copying + +At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: +- Producing jobs write artifact files to the path from `WriteVar` +- Flowey automatically uploads those files as an artifact +- Consuming jobs read the path from `ReadVar` where flowey has downloaded the artifact diff --git a/Guide/src/dev_guide/dev_tools/flowey/flowey.md b/Guide/src/dev_guide/dev_tools/flowey/flowey.md deleted file mode 100644 index 0634ddca49..0000000000 --- a/Guide/src/dev_guide/dev_tools/flowey/flowey.md +++ /dev/null @@ -1,739 +0,0 @@ -## Why Flowey? 
- -Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps Pipelines, GitHub Actions workflows) have several fundamental limitations that become increasingly problematic as projects grow in complexity: - -### The Problems with Traditional YAML Pipelines - -**Non-Local Reasoning and Global State** -- YAML pipelines heavily rely on global state and implicit dependencies (environment variables, file system state, installed tools) -- Understanding what a step does often requires mentally tracking state mutations across the entire pipeline -- Debugging requires reasoning about the entire pipeline context rather than isolated units of work -- Changes in one part of the pipeline can have unexpected effects in distant, seemingly unrelated parts - -**Maintainability Challenges** -- YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) -- No compile-time validation means errors only surface at runtime -- Refactoring is risky and error-prone without automated tools to catch breaking changes -- Code duplication is common because YAML lacks good abstraction mechanisms -- Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive - -**Platform Lock-In** -- Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) 
-- Multi-platform support means maintaining multiple, divergent YAML files - -**Local Development Gaps** -- Developers can't easily test pipeline changes before pushing to CI -- Reproducing CI failures locally is difficult or impossible -- The feedback loop is slow: push → wait for CI → debug → repeat - -### Flowey's Solution - -Flowey addresses these issues by treating automation as **first-class Rust code**: - -- **Type Safety**: Rust's type system catches errors at compile-time rather than runtime -- **Local Reasoning**: Dependencies are explicit through typed variables, not implicit through global state -- **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) -- **Reusability**: Nodes are composable building blocks that can be shared across pipelines -- **Local Execution**: The same pipeline definition can run locally or in CI ---- - -## Table of Contents - -1. [Flowey Fundamentals](#flowey-fundamentals) -2. [Two-Phase Execution Model](#two-phase-execution-model) -3. [Steps](#steps) -4. [Runtime Services](#runtime-services) -5. [Variables](#variables) -6. [Flowey Nodes](#flowey-nodes) -7. [Node Design Philosophy](#node-design-philosophy) -8. [Common Patterns](#common-patterns) -9. [Artifacts](#artifacts) -10. [Pipelines](#pipelines) - ---- - -## Flowey Fundamentals - -Before diving into how flowey works, let's establish the key building blocks that form the foundation of flowey's automation model. These concepts are flowey's Rust-based abstractions for common CI/CD workflow primitives. - -### The Automation Workflow Model - -In traditional CI/CD systems, workflows are defined using YAML with implicit dependencies and global state. Flowey takes a fundamentally different approach: **automation workflows are modeled as a directed acyclic graph (DAG) of typed, composable Rust components**. Each component has explicit inputs and outputs, and dependencies are tracked through the type system. 
- -### Core Building Blocks - -Flowey's model consists of a hierarchy of components: - -**[Pipelines](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html)** are the top-level construct that defines a complete automation workflow. A pipeline specifies what work needs to be done and how it should be organized. Pipelines can target different execution backends (local machine, Azure DevOps, GitHub Actions) and generate appropriate configuration for each. - -**[Jobs](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html)** represent units of work that run on a specific platform (Windows, Linux, macOS) and architecture (x86_64, Aarch64). Jobs can run in parallel when they don't depend on each other, or sequentially when one job's output is needed by another. Each job is isolated and runs in its own environment. - -**[Nodes](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html)** are reusable units of automation logic that perform specific tasks (e.g., "install Rust toolchain", "run cargo build", "publish test results"). Nodes are invoked by jobs and emit one or more steps to accomplish their purpose. Nodes can depend on other nodes, forming a composable ecosystem of automation building blocks. - -**Steps** are the individual units of work that execute at runtime. A step might run a shell command, execute Rust code, or interact with the CI backend. Steps are emitted by nodes during the build-time phase and executed in dependency order during runtime. - -### Connecting the Pieces - -These building blocks are connected through three key mechanisms: - -**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar` represents a promise to produce a value of type `T` at runtime, while a `ReadVar` represents a dependency on that value. 
Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar<PathBuf>`, and a "test" step would read from the corresponding `ReadVar<PathBuf>`. This echoes Rust’s “shared XOR mutable” ownership rule: a value has either one writer or multiple readers, never both concurrently. - -**[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. - -**[Side Effects](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.SideEffect.html)** represent dependencies without data. Sometimes step B needs to run after step A, but A doesn't produce any data that B consumes (e.g., "install dependencies" must happen before "run tests", even though the test step doesn't directly use the installation output). Side effects are represented as `ReadVar<SideEffect>` and establish ordering constraints in the DAG without transferring actual values. 
- -### Putting It Together - -Here's how these pieces relate: - -``` -Pipeline - ├─ Job 1 (Linux x86_64) - │ ├─ Node A (install Rust) - │ │ └─ Step: Run rustup install - │ │ └─ Produces: WriteVar (installation complete) - │ └─ Node B (build project) - │ └─ Step: Run cargo build - │ └─ Consumes: ReadVar (installation complete) - │ └─ Produces: WriteVar (binary path) → Artifact - │ - └─ Job 2 (Windows x86_64) - └─ Node C (run tests) - └─ Step: Run binary with test inputs - └─ Consumes: ReadVar (binary path) ← Artifact - └─ Produces: WriteVar (test results) -``` - -In this example: -- The **Pipeline** defines two jobs that run on different platforms -- **Job 1** installs Rust and builds the project, with step dependencies expressed through variables -- **Job 2** runs tests using the binary from Job 1, with the binary transferred via an artifact -- **Variables** create dependencies within a job (build depends on install) -- **Artifacts** create dependencies between jobs (Job 2 depends on Job 1's output) -- **Side Effects** represent the "Rust is installed" state without carrying data - ---- - -## Two-Phase Execution Model - -Flowey operates in two distinct phases: - -1. **Build-Time (Resolution Phase)**: When you run `cargo xflowey regen`, flowey: - - Reads `.flowey.toml` to determine which pipelines to regenerate - - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build` - - Runs the flowey binary with `pipeline --out ` for each pipeline definition - - During this invocation, flowey constructs a **directed acyclic graph (DAG)** by: - - Instantiating all nodes (reusable units of automation logic) defined in the pipeline - - Processing their requests - - Resolving dependencies between nodes via variables and artifacts - - Determining the execution order - - Performing flowey-specific validations (dependency resolution, type checking, etc.) - - Generates YAML files for CI systems (ADO, GitHub Actions) at the paths specified in `.flowey.toml` - -2. 
**Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey <pipeline>`). Steps (units of work) run in the order determined at build-time: - - Variables are read and written with actual values - - Commands are executed - - Artifacts (data packages passed between jobs) are published/consumed - - Side effects (dependencies) are resolved - - -The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: -```toml -[[pipeline.flowey_hvlite.github]] -file = ".github/workflows/openvmm-pr.yaml" -cmd = ["ci", "checkin-gates", "--config=pr"] -``` - -When you run `cargo xflowey regen`: -1. It reads `.flowey.toml` -2. Builds the `flowey-hvlite` binary -3. Runs `flowey-hvlite pipeline github --out .github/workflows/openvmm-pr.yaml ci checkin-gates --config=pr` -4. This generates/updates the YAML file with the resolved pipeline - -**Key Distinction:** -- `cargo build -p flowey-hvlite` - Only compiles the flowey code to verify it builds successfully. **Does not** construct the DAG or generate YAML files. -- `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. - -Always run `cargo xflowey regen` after modifying pipeline definitions to ensure the generated YAML files reflect your changes. - -### Backend Abstraction - -Flowey supports multiple execution backends: - -- **Local**: Runs directly on your development machine -- **ADO (Azure DevOps)**: Generates ADO Pipeline YAML -- **GitHub Actions**: Generates GitHub Actions workflow YAML - -```admonish warning -Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. 
Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. Writing cross-platform flowey code enables locally testing pipelines which can be invaluable when iterating over CI changes. -``` - -If a node only supports certain backends, it should immediately fast‑fail with a clear error (“<feature> not supported on <backend>”) instead of silently proceeding. That failure signals it’s time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. - ---- - -## Steps - -**Steps** are units of work that will be executed at runtime. Different -step types exist for different purposes. - -### Rust Steps - -Rust steps execute Rust code at runtime and are the most common step type in flowey. - -**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar<SideEffect>` that other steps can use as a dependency. - -**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. Using minor steps also improves performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step. - -**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable. - -For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html). - -### ADO Steps - -**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. 
Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. - -For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). - -### GitHub Steps - -**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. - -For GitHub step examples, see the [`GhStepBuilder` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html). - -### Side Effect Steps - -**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. - -For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). - -### Isolated Working Directories and Path Immutability - -```admonish warning title="Critical Constraint" -**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. - -However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: - -**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** -``` - -When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. If you need to modify files from a `ReadVar`, copy them to your step's working directory. 
- ---- - -## Runtime Services - -Runtime services provide the API available during step execution (inside the -closures passed to `emit_rust_step`, etc.). - -### RustRuntimeServices - -[`RustRuntimeServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/rust/struct.RustRuntimeServices.html) is the primary runtime service available in Rust steps. It provides: - -**Variable Operations:** -- Reading and writing flowey variables -- Secret handling (automatic secret propagation for safety) -- Support for reading values of any type that implements [`ReadVarValue`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.ReadVarValue.html) - -**Environment Queries:** -- Backend identification (Local, ADO, or GitHub) -- Platform detection (Windows, Linux, macOS) -- Architecture information (x86_64, Aarch64) - -#### Secret Variables and CI Backend Integration - -Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. - -**How Secret Handling Works** - -When a variable is marked as secret, flowey ensures: -- The value is not logged or printed in step output -- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs -- Secret status is automatically propagated to prevent leaks - -**Automatic Secret Propagation** - -To prevent accidental leaks, flowey uses conservative automatic secret propagation: - -```admonish warning -If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. 
-``` - -For example: - -```rust -ctx.emit_rust_step("process token", |ctx| { - let secret_token = secret_token.claim(ctx); - let output_var = output_var.claim(ctx); - |rt| { - let token = rt.read(secret_token); // Reading a secret - - // This write is AUTOMATICALLY marked as secret - // (even though we're just writing "done") - rt.write(output_var, &"done".to_string()); - - Ok(()) - } -}); -``` - -If you need to write non-secret data after reading a secret, use `write_not_secret()`: - -```rust -rt.write_not_secret(output_var, &"done".to_string()); -``` - -**Best Practices for Secrets** - -1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML -2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys -3. **Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary - -### AdoStepServices - -[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: - -**ADO Variable Bridge:** -- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars -- Convert flowey vars back into ADO variables for use in YAML -- Handle secret variables appropriately - -**Repository Resources:** -- Resolve repository IDs declared as pipeline resources -- Access repository information in ADO-specific steps - -### GhStepBuilder - -[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: - -**Step Configuration:** -- Specifying the action to use (e.g., `actions/checkout@v4`) -- Adding input parameters via `.with()` -- Capturing step outputs into flowey variables -- Setting conditional execution based on variables - -**Dependency Management:** -- Declaring side-effect 
dependencies via `.run_after()` -- Ensuring steps run in the correct order - -**Permissions:** -- Declaring required GITHUB_TOKEN permissions -- Automatic permission aggregation at the job level - ---- - -## Variables - -### Claiming Variables - -Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: -1. Registers that this step depends on (or produces) this variable -2. Converts `ReadVar<T, VarNotClaimed>` to `ReadVar<T, VarClaimed>` (and likewise for `WriteVar`) -3. Allows flowey to track variable usage for graph construction - -Variables can only be claimed inside step closures using the `claim()` method. - -**Nested closure pattern and related contexts:** - -```rust -// Inside a SimpleFlowNode's process_request() method -fn process_request(&self, request: Self::Request, ctx: &mut NodeCtx<'_>) { - // Assume a single Request provided an input ReadVar<String> and output WriteVar<i32> - let input_var: ReadVar<String> = /* from one of the requests */; - let output_var: WriteVar<i32> = /* from one of the requests */; - - // Declare a step (still build-time). This adds a node to the DAG. - ctx.emit_rust_step("compute length", |step| { - // step : StepCtx (outer closure, build-time) - // Claim dependencies so the graph knows: this step READS input_var, WRITES output_var. - let input_var = input_var.claim(step); - let output_var = output_var.claim(step); - - // Return the runtime closure. - move |rt| { - // rt : RustRuntimeServices (runtime phase) - let input = rt.read(input_var); // consume value - let len = input.len() as i32; - rt.write(output_var, &len); // fulfill promise - Ok(()) - } - }); -} -``` - -**Why the nested closure dance?** - -The nested closure pattern is fundamental to flowey's two-phase execution model: - -1. 
**Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to: - - Claim variables, which registers dependencies in the graph - - Determine what this step depends on (reads) and produces (writes) - - Allow flowey to validate the dependency graph and determine execution order - - The outer closure returns the inner closure for later execution - -2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to: - - Read actual values from claimed `ReadVar`s - - Perform the real work (computations, running commands, etc.) - - Write actual values to claimed `WriteVar`s - -- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc. - -- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside the outer step closure (during the build-time phase). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`). - -The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure). - -### ClaimedReadVar and ClaimedWriteVar - -These are type aliases for claimed variables: -- [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar<T, VarClaimed>` -- [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar<T, VarClaimed>` - -Only claimed variables can be read/written at runtime. 
- -**Implementation Detail: Zero-Sized Types (ZSTs)** - -The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. This allows Rust to statically verify that all variables used in a runtime block have been claimed by that block. - -The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available. - -### Static Values vs Runtime Values - -Sometimes you know a value at build-time: - -```rust -// Create a ReadVar with a static value -let version = ReadVar::from_static("1.2.3".to_string()); - -// This is encoded directly in the pipeline, not computed at runtime -// WARNING: Never use this for secrets! -``` - -This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. - -### Variable Operations - -`ReadVar` provides operations for transforming and combining variables: - -- **`map()`**: Transform a `ReadVar<T>` into a `ReadVar<U>` -- **`zip()`**: Combine two ReadVars into `ReadVar<(T, U)>` -- **`into_side_effect()`**: Convert `ReadVar<T>` to `ReadVar<SideEffect>` when you only care about ordering, not the value -- **`depending_on()`**: Create a new ReadVar with an explicit dependency - -For detailed examples, see the [`ReadVar` documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html). 
- ---- - -## Flowey Nodes - -At a conceptual level, a Flowey node is analogous to a strongly typed function: you "invoke" it by submitting one or more Request values (its parameters), and it responds by emitting steps that perform work and produce outputs (values written to `WriteVar`s, published artifacts, or side-effect dependencies). - -### The Node/Request Pattern - -Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. - -For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). - -### FlowNode vs SimpleFlowNode - -Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: - -[**`SimpleFlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) - for straightforward, function-like operations: -- Uses a **single struct Request** type -- Processes one request at a time independently -- Behaves like a "plain old function" that resolves its single request type -- Each invocation is isolated - no shared state or coordination between requests -- Simpler implementation with less boilerplate -- Ideal for straightforward operations like running a command or transforming data - -**Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. 
- -[**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) - for complex nodes requiring coordination and non-local configuration: -- Often uses an **enum Request** with multiple variants -- Receives all requests as a `Vec` and processes them together -- Can aggregate, optimize, and consolidate multiple requests into fewer steps -- Enables **non-local configuration** - critical for simplifying complex pipelines - -**The Non-Local Configuration Pattern** - -The key advantage of FlowNode is its ability to accept configuration from different parts of the node graph without forcing intermediate nodes to be aware of that configuration. This is the "non-local" aspect: - -Consider an "install Rust toolchain" node with an enum Request: - -```rust -enum Request { - SetVersion { version: String }, - GetToolchain { toolchain_path: WriteVar }, -} -``` - -**Without this pattern** (struct-only requests), you'd need to thread the Rust version through every intermediate node in the call graph: - -``` -Root Node (knows version: "1.75") - → Node A (must pass through version) - → Node B (must pass through version) - → Node C (must pass through version) - → Install Rust Node (finally uses version) -``` - -**With FlowNode's enum Request**, the root node can send `Request::SetVersion` once, while intermediate nodes that don't care about the version can simply send `Request::GetToolchain`: - -``` -Root Node → InstallRust::SetVersion("1.75") - → Node A - → Node B - → Node C → InstallRust::GetToolchain() -``` - -The Install Rust FlowNode receives both requests together, validates that exactly one `SetVersion` was provided, and fulfills all the `GetToolchain` requests with that configured version. The intermediate nodes (A, B, C) never needed to know about or pass through version information. 
- -This pattern: -- **Eliminates plumbing complexity** in large pipelines -- **Allows global configuration** to be set once at the top level -- **Keeps unrelated nodes decoupled** from configuration they don't need -- **Enables validation** that required configuration was provided (exactly one `SetVersion`) - -**Additional Benefits of FlowNode:** -- Optimize and consolidate multiple similar requests into fewer steps (e.g., installing a tool once for many consumers) -- Resolve conflicts or enforce consistency across requests - -For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) documentation. - -### Node Registration - -Nodes are automatically registered using macros that handle most of the boilerplate: -- [`new_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) - registers a FlowNode -- [`new_simple_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) - registers a SimpleFlowNode -- [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) - defines the Request type and implements [`IntoRequest`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.IntoRequest.html) - -### The imports() Method - -The `imports()` method declares which other nodes this node might depend on. This enables flowey to: -- Validate that all dependencies are available -- Build the complete dependency graph -- Catch missing dependencies at build-time - -```admonish warning -Flowey does not catch unused imports today as part of its build-time validation step. -``` - -**Why declare imports?** Flowey needs to know the full set of potentially-used nodes at compilation time to properly resolve the dependency graph. 
- -For more on node imports, see the [`FlowNode::imports` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.imports). - -### The emit() Method - -The [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit) method is where a node's actual logic lives. For [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html), it receives all requests together and must: -1. Aggregate and validate requests (ensuring consistency where needed) -2. Emit steps to perform the work -3. Wire up dependencies between steps via variables - -For [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html), the equivalent [`process_request()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html#tymethod.process_request) method processes one request at a time. - -For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). - ---- - -## Node Design Philosophy - -Flowey nodes are designed around several key principles: - -### 1. Composability - -Nodes should be reusable building blocks that can be combined to build complex -workflows. Each node should have a single, well-defined responsibility. - -❌ **Bad**: A node that "builds and tests the project" -✅ **Good**: Separate nodes for "build project" and "run tests" - -### 2. Explicit Dependencies - -Dependencies between steps should be explicit through variables, not implicit -through side effects. - -❌ **Bad**: Assuming a tool is already installed -✅ **Good**: Taking a `ReadVar` that proves installation happened - -### 3. Backend Abstraction - -Nodes should work across all backends when possible. Backend-specific behavior -should be isolated and documented. - -### 4. 
Separation of Concerns - -Keep node definition (request types, dependencies) separate from step -implementation (runtime logic): - -- **Node definition**: What the node does, what it depends on -- **Step implementation**: How it does it - ---- - -## Common Patterns - -### Request Aggregation and Validation - -When a FlowNode receives multiple requests, it often needs to ensure certain values are consistent across all requests while collecting others. The `same_across_all_reqs` helper function simplifies this pattern by validating that a value is identical across all requests. - -**Key concepts:** -- Iterate through all requests and separate them by type -- Use `same_across_all_reqs` to validate values that must be consistent -- Collect values that can have multiple instances (like output variables) -- Validate that required values were provided - -For a complete example, see the [`same_across_all_reqs` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/user_facing/fn.same_across_all_reqs.html). - -### Conditional Execution Based on Backend/Platform - -Nodes can query the current backend and platform to emit platform-specific or backend-specific steps. This allows nodes to adapt their behavior based on the execution environment. - -**Key concepts:** -- Use `ctx.backend()` to check if running locally, on ADO, or on GitHub Actions -- Use `ctx.platform()` to check the operating system (Windows, Linux, macOS) -- Use `ctx.arch()` to check the architecture (x86_64, Aarch64) -- Emit different steps or use different tool configurations based on these values - -**When to use:** -- Installing platform-specific tools or dependencies -- Using different commands on Windows vs Unix systems -- Optimizing for local development vs CI environments - -For more on backend and platform APIs, see the [`NodeCtx` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html). 
- -## Artifacts - -Artifacts enable typed data transfer between jobs with automatic dependency management, abstracting away CI system complexities like name collisions and manual job ordering. - -### Typed vs Untyped Artifacts - -**Typed artifacts (recommended)** provide type-safe artifact handling by defining -a custom type that implements the `Artifact` trait: - -```rust -#[derive(Serialize, Deserialize)] -struct MyArtifact { - #[serde(rename = "output.bin")] - binary: PathBuf, - #[serde(rename = "metadata.json")] - metadata: PathBuf, -} - -let (pub_artifact, use_artifact) = pipeline.new_typed_artifact("my-files"); -``` - -**Untyped artifacts** provide simple directory-based artifacts for simpler cases: - -```rust -let (pub_artifact, use_artifact) = pipeline.new_artifact("my-files"); -``` - -For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). - -Both `pipeline.new_typed_artifact("name")` and `pipeline.new_artifact("name")` return a tuple of handles: `(pub_artifact, use_artifact)`. When defining a job you convert them with the job context: -```rust -// In a producing job: -let artifact_out = ctx.publish_artifact(pub_artifact); -// artifact_out : WriteVar (typed) -// or WriteVar for untyped - -// In a consuming job: -let artifact_in = ctx.use_artifact(use_artifact); -// artifact_in : ReadVar (typed) -// or ReadVar for untyped -``` -After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). 
-Key concepts: -- The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure -- Use `#[serde(rename = "file.exe")]` to specify exact file names -- Typed artifacts ensure compile-time type safety when passing data between jobs -- Untyped artifacts are simpler but don't provide type guarantees -- Tuple handles must be lifted with `ctx.publish_artifact(...)` / `ctx.use_artifact(...)` to become flowey variables - -### How Flowey Manages Artifacts Under the Hood - -During the **pipeline resolution phase** (build-time), flowey: - -1. **Identifies artifact producers and consumers** by analyzing which jobs write to vs read from each artifact's `WriteVar`/`ReadVar` -2. **Constructs the job dependency graph** ensuring producers run before consumers -3. **Generates backend-specific upload/download steps** in the appropriate places: - - For ADO: Uses `PublishPipelineArtifact` and `DownloadPipelineArtifact` tasks - - For GitHub Actions: Uses `actions/upload-artifact` and `actions/download-artifact` - - For local execution: Uses filesystem copying - -At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: -- Producing jobs write artifact files to the path from `WriteVar` -- Flowey automatically uploads those files as an artifact -- Consuming jobs read the path from `ReadVar` where flowey has downloaded the artifact - ---- - -## Pipelines - -Pipelines define complete automation workflows consisting of jobs that run nodes. See the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html) for detailed examples. 
- -### Pipeline Jobs - -[`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) instances are configured using a builder pattern: - -```rust -let job = pipeline - .new_job(platform, arch, "my-job") - .with_timeout_in_minutes(60) - .with_condition(some_param) - .ado_set_pool("my-pool") - .gh_set_pool(GhRunner::UbuntuLatest) - .dep_on(|ctx| { - // Define what nodes this job depends on - some_node::Request { /* ... */ } - }) - .finish(); -``` - -### Pipeline Parameters - -Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). -Azure DevOps parameter UI - - -```rust -// Define a boolean parameter -let verbose = pipeline.new_parameter_bool( - "verbose", - "Run with verbose output", - ParameterKind::Stable, - Some(false) // default value -); - -// Use the parameter in a job -let job = pipeline.new_job(...) - .dep_on(|ctx| { - let verbose = ctx.use_parameter(verbose); - // verbose is now a ReadVar - }) - .finish(); -``` - -#### Stable vs Unstable Parameters - -Every parameter in flowey must be declared as either **Stable** or **Unstable** using [`ParameterKind`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html). This classification determines the parameter's visibility and API stability: - -**Stable Parameters ([`ParameterKind::Stable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Stable))** - -Stable parameters represent a **public, stable API** for the pipeline: - -- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. -- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. 
-- **Use Cases**: - - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) - - Parameters intended for use by other teams or external automation - - Parameters documented as part of the pipeline's public interface - -**Unstable Parameters ([`ParameterKind::Unstable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Unstable))** - -Unstable parameters are for **internal use** and experimentation: - -- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. -- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. -- **Use Cases**: - - Experimental features or debugging flags - - Internal pipeline configuration that may change frequently - - Parameters for development/testing that shouldn't be used in production diff --git a/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md b/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md new file mode 100644 index 0000000000..ee4a1a5b87 --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md @@ -0,0 +1,115 @@ +# Flowey Fundamentals + +Before diving into how flowey works, let's establish the key building blocks that form the foundation of flowey's automation model. These concepts are flowey's Rust-based abstractions for common CI/CD workflow primitives. + +## The Automation Workflow Model + +In traditional CI/CD systems, workflows are defined using YAML with implicit dependencies and global state. Flowey takes a fundamentally different approach: **automation workflows are modeled as a directed acyclic graph (DAG) of typed, composable Rust components**. Each component has explicit inputs and outputs, and dependencies are tracked through the type system. 
+ +### Core Building Blocks + +Flowey's model consists of a hierarchy of components: + +**[Pipelines](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html)** are the top-level construct that defines a complete automation workflow. A pipeline specifies what work needs to be done and how it should be organized. Pipelines can target different execution backends (local machine, Azure DevOps, GitHub Actions) and generate appropriate configuration for each. + +**[Jobs](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html)** represent units of work that run on a specific platform (Windows, Linux, macOS) and architecture (x86_64, Aarch64). Jobs can run in parallel when they don't depend on each other, or sequentially when one job's output is needed by another. Each job is isolated and runs in its own environment. + +**[Nodes](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html)** are reusable units of automation logic that perform specific tasks (e.g., "install Rust toolchain", "run cargo build", "publish test results"). Nodes are invoked by jobs and emit one or more steps to accomplish their purpose. Nodes can depend on other nodes, forming a composable ecosystem of automation building blocks. + +**Steps** are the individual units of work that execute at runtime. A step might run a shell command, execute Rust code, or interact with the CI backend. Steps are emitted by nodes during the build-time phase and executed in dependency order during runtime. + +### Connecting the Pieces + +These building blocks are connected through three key mechanisms: + +**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar<T>` represents a promise to produce a value of type `T` at runtime, while a `ReadVar<T>` represents a dependency on that value. 
Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar<PathBuf>`, and a "test" step would read from the corresponding `ReadVar<PathBuf>`. This echoes Rust’s “shared XOR mutable” ownership rule: a value has either one writer or multiple readers, never both concurrently. + +**[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. + +**[Side Effects](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.SideEffect.html)** represent dependencies without data. Sometimes step B needs to run after step A, but A doesn't produce any data that B consumes (e.g., "install dependencies" must happen before "run tests", even though the test step doesn't directly use the installation output). Side effects are represented as `ReadVar<SideEffect>` and establish ordering constraints in the DAG without transferring actual values. 
+ +### Putting It Together + +Here's how these pieces relate: + +``` +Pipeline + ├─ Job 1 (Linux x86_64) + │ ├─ Node A (install Rust) + │ │ └─ Step: Run rustup install + │ │ └─ Produces: WriteVar (installation complete) + │ └─ Node B (build project) + │ └─ Step: Run cargo build + │ └─ Consumes: ReadVar (installation complete) + │ └─ Produces: WriteVar (binary path) → Artifact + │ + └─ Job 2 (Windows x86_64) + └─ Node C (run tests) + └─ Step: Run binary with test inputs + └─ Consumes: ReadVar (binary path) ← Artifact + └─ Produces: WriteVar (test results) +``` + +In this example: +- The **Pipeline** defines two jobs that run on different platforms +- **Job 1** installs Rust and builds the project, with step dependencies expressed through variables +- **Job 2** runs tests using the binary from Job 1, with the binary transferred via an artifact +- **Variables** create dependencies within a job (build depends on install) +- **Artifacts** create dependencies between jobs (Job 2 depends on Job 1's output) +- **Side Effects** represent the "Rust is installed" state without carrying data + +## Two-Phase Execution Model + +Flowey operates in two distinct phases: + +1. **Build-Time (Resolution Phase)**: When you run `cargo xflowey regen`, flowey: + - Reads `.flowey.toml` to determine which pipelines to regenerate + - Builds the flowey binary (e.g., `flowey-hvlite`) via `cargo build` + - Runs the flowey binary with `pipeline <backend> --out <path>` for each pipeline definition + - During this invocation, flowey constructs a **directed acyclic graph (DAG)** by: + - Instantiating all nodes (reusable units of automation logic) defined in the pipeline + - Processing their requests + - Resolving dependencies between nodes via variables and artifacts + - Determining the execution order + - Performing flowey-specific validations (dependency resolution, type checking, etc.) + - Generates YAML files for CI systems (ADO, GitHub Actions) at the paths specified in `.flowey.toml` + +2. 
**Runtime (Execution Phase)**: The generated YAML is executed by the CI system (or locally via `cargo xflowey <pipeline>`). Steps (units of work) run in the order determined at build-time: + - Variables are read and written with actual values + - Commands are executed + - Artifacts (data packages passed between jobs) are published/consumed + - Side effects (dependencies) are resolved + + +The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: +```toml +[[pipeline.flowey_hvlite.github]] +file = ".github/workflows/openvmm-pr.yaml" +cmd = ["ci", "checkin-gates", "--config=pr"] +``` + +When you run `cargo xflowey regen`: +1. It reads `.flowey.toml` +2. Builds the `flowey-hvlite` binary +3. Runs `flowey-hvlite pipeline github --out .github/workflows/openvmm-pr.yaml ci checkin-gates --config=pr` +4. This generates/updates the YAML file with the resolved pipeline + +**Key Distinction:** +- `cargo build -p flowey-hvlite` - Only compiles the flowey code to verify it builds successfully. **Does not** construct the DAG or generate YAML files. +- `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. + +Always run `cargo xflowey regen` after modifying pipeline definitions to ensure the generated YAML files reflect your changes. + +### Backend Abstraction + +Flowey supports multiple execution backends: + +- **Local**: Runs directly on your development machine +- **ADO (Azure DevOps)**: Generates ADO Pipeline YAML +- **GitHub Actions**: Generates GitHub Actions workflow YAML + +```admonish warning +Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. 
Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. Writing cross-platform flowey code enables locally testing pipelines which can be invaluable when iterating over CI changes. +``` + +If a node only supports certain backends, it should immediately fast‑fail with a clear error (“`<node>` not supported on `<backend>`”) instead of silently proceeding. That failure signals it’s time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. diff --git a/Guide/src/dev_guide/dev_tools/flowey/nodes.md b/Guide/src/dev_guide/dev_tools/flowey/nodes.md new file mode 100644 index 0000000000..be1e1ba48f --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/nodes.md @@ -0,0 +1,172 @@ +# Nodes + +At a conceptual level, a Flowey node is analogous to a strongly typed function: you "invoke" it by submitting one or more Request values (its parameters), and it responds by emitting steps that perform work and produce outputs (values written to `WriteVar`s, published artifacts, or side-effect dependencies). +### The Node/Request Pattern +Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. + +For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). 
+ +### FlowNode vs SimpleFlowNode + +Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: + +[**`SimpleFlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) - for straightforward, function-like operations: +- Uses a **single struct Request** type +- Processes one request at a time independently +- Behaves like a "plain old function" that resolves its single request type +- Each invocation is isolated - no shared state or coordination between requests +- Simpler implementation with less boilerplate +- Ideal for straightforward operations like running a command or transforming data + +**Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. + +[**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) - for complex nodes requiring coordination and non-local configuration: +- Often uses an **enum Request** with multiple variants +- Receives all requests as a `Vec<Request>` and processes them together +- Can aggregate, optimize, and consolidate multiple requests into fewer steps +- Enables **non-local configuration** - critical for simplifying complex pipelines + +**The Non-Local Configuration Pattern** + +The key advantage of FlowNode is its ability to accept configuration from different parts of the node graph without forcing intermediate nodes to be aware of that configuration. 
This is the "non-local" aspect: + +Consider an "install Rust toolchain" node with an enum Request: + +```rust +enum Request { + SetVersion { version: String }, + GetToolchain { toolchain_path: WriteVar<PathBuf> }, +} +``` + +**Without this pattern** (struct-only requests), you'd need to thread the Rust version through every intermediate node in the call graph: + +``` +Root Node (knows version: "1.75") + → Node A (must pass through version) + → Node B (must pass through version) + → Node C (must pass through version) + → Install Rust Node (finally uses version) +``` + +**With FlowNode's enum Request**, the root node can send `Request::SetVersion` once, while intermediate nodes that don't care about the version can simply send `Request::GetToolchain`: + +``` +Root Node → InstallRust::SetVersion("1.75") + → Node A + → Node B + → Node C → InstallRust::GetToolchain() +``` + +The Install Rust FlowNode receives both requests together, validates that exactly one `SetVersion` was provided, and fulfills all the `GetToolchain` requests with that configured version. The intermediate nodes (A, B, C) never needed to know about or pass through version information. + +This pattern: +- **Eliminates plumbing complexity** in large pipelines +- **Allows global configuration** to be set once at the top level +- **Keeps unrelated nodes decoupled** from configuration they don't need +- **Enables validation** that required configuration was provided (exactly one `SetVersion`) + +**Additional Benefits of FlowNode:** +- Optimize and consolidate multiple similar requests into fewer steps (e.g., installing a tool once for many consumers) +- Resolve conflicts or enforce consistency across requests + +For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) documentation. 
+ +### Node Registration + +Nodes are automatically registered using macros that handle most of the boilerplate: +- [`new_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) - registers a FlowNode +- [`new_simple_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) - registers a SimpleFlowNode +- [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) - defines the Request type and implements [`IntoRequest`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.IntoRequest.html) + +### The imports() Method + +The `imports()` method declares which other nodes this node might depend on. This enables flowey to: +- Validate that all dependencies are available +- Build the complete dependency graph +- Catch missing dependencies at build-time + +```admonish warning +Flowey does not catch unused imports today as part of its build-time validation step. +``` + +**Why declare imports?** Flowey needs to know the full set of potentially-used nodes at compilation time to properly resolve the dependency graph. + +For more on node imports, see the [`FlowNode::imports` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.imports). + +### The emit() Method + +The [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit) method is where a node's actual logic lives. For [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html), it receives all requests together and must: +1. Aggregate and validate requests (ensuring consistency where needed) +2. Emit steps to perform the work +3. 
Wire up dependencies between steps via variables + +For [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html), the equivalent [`process_request()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html#tymethod.process_request) method processes one request at a time. + +For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). + + +## Node Design Philosophy + +Flowey nodes are designed around several key principles: + +### 1. Composability + +Nodes should be reusable building blocks that can be combined to build complex +workflows. Each node should have a single, well-defined responsibility. + +❌ **Bad**: A node that "builds and tests the project" +✅ **Good**: Separate nodes for "build project" and "run tests" + +### 2. Explicit Dependencies + +Dependencies between steps should be explicit through variables, not implicit +through side effects. + +❌ **Bad**: Assuming a tool is already installed +✅ **Good**: Taking a `ReadVar` that proves installation happened + +### 3. Backend Abstraction + +Nodes should work across all backends when possible. Backend-specific behavior +should be isolated and documented. + +### 4. Separation of Concerns + +Keep node definition (request types, dependencies) separate from step +implementation (runtime logic): + +- **Node definition**: What the node does, what it depends on +- **Step implementation**: How it does it + + +## Common Patterns + +### Request Aggregation and Validation + +When a FlowNode receives multiple requests, it often needs to ensure certain values are consistent across all requests while collecting others. The `same_across_all_reqs` helper function simplifies this pattern by validating that a value is identical across all requests. 
+ +**Key concepts:** +- Iterate through all requests and separate them by type +- Use `same_across_all_reqs` to validate values that must be consistent +- Collect values that can have multiple instances (like output variables) +- Validate that required values were provided + +For a complete example, see the [`same_across_all_reqs` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/user_facing/fn.same_across_all_reqs.html). + +### Conditional Execution Based on Backend/Platform + +Nodes can query the current backend and platform to emit platform-specific or backend-specific steps. This allows nodes to adapt their behavior based on the execution environment. + +**Key concepts:** +- Use `ctx.backend()` to check if running locally, on ADO, or on GitHub Actions +- Use `ctx.platform()` to check the operating system (Windows, Linux, macOS) +- Use `ctx.arch()` to check the architecture (x86_64, Aarch64) +- Emit different steps or use different tool configurations based on these values + +**When to use:** +- Installing platform-specific tools or dependencies +- Using different commands on Windows vs Unix systems +- Optimizing for local development vs CI environments + +For more on backend and platform APIs, see the [`NodeCtx` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html). diff --git a/Guide/src/dev_guide/dev_tools/flowey/pipelines.md b/Guide/src/dev_guide/dev_tools/flowey/pipelines.md new file mode 100644 index 0000000000..4b68fab10f --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/pipelines.md @@ -0,0 +1,71 @@ +# Pipelines + +Pipelines define complete automation workflows consisting of jobs that run nodes. See the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html) for detailed examples. 
+ +### Pipeline Jobs + +[`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) instances are configured using a builder pattern: + +```rust +let job = pipeline + .new_job(platform, arch, "my-job") + .with_timeout_in_minutes(60) + .with_condition(some_param) + .ado_set_pool("my-pool") + .gh_set_pool(GhRunner::UbuntuLatest) + .dep_on(|ctx| { + // Define what nodes this job depends on + some_node::Request { /* ... */ } + }) + .finish(); +``` + +### Pipeline Parameters + +Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). +Azure DevOps parameter UI + + +```rust +// Define a boolean parameter +let verbose = pipeline.new_parameter_bool( + "verbose", + "Run with verbose output", + ParameterKind::Stable, + Some(false) // default value +); + +// Use the parameter in a job +let job = pipeline.new_job(...) + .dep_on(|ctx| { + let verbose = ctx.use_parameter(verbose); + // verbose is now a ReadVar + }) + .finish(); +``` + +#### Stable vs Unstable Parameters + +Every parameter in flowey must be declared as either **Stable** or **Unstable** using [`ParameterKind`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html). This classification determines the parameter's visibility and API stability: + +**Stable Parameters ([`ParameterKind::Stable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Stable))** + +Stable parameters represent a **public, stable API** for the pipeline: + +- **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. +- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. 
+- **Use Cases**: + - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) + - Parameters intended for use by other teams or external automation + - Parameters documented as part of the pipeline's public interface + +**Unstable Parameters ([`ParameterKind::Unstable`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/enum.ParameterKind.html#variant.Unstable))** + +Unstable parameters are for **internal use** and experimentation: + +- **Internal Only**: The parameter name is prefixed with `__unstable_` in the generated YAML (e.g., `__unstable_debug_mode`), signaling that it's not part of the stable API. +- **No Stability Guarantee**: Unstable parameters can be renamed, removed, or have their behavior changed without notice. External consumers should not depend on them. +- **Use Cases**: + - Experimental features or debugging flags + - Internal pipeline configuration that may change frequently + - Parameters for development/testing that shouldn't be used in production diff --git a/Guide/src/dev_guide/dev_tools/flowey/steps.md b/Guide/src/dev_guide/dev_tools/flowey/steps.md new file mode 100644 index 0000000000..025cabe09c --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/steps.md @@ -0,0 +1,145 @@ +# Steps + +**Steps** are units of work that will be executed at runtime. Different +step types exist for different purposes. + +### Rust Steps + +Rust steps execute Rust code at runtime and are the most common step type in flowey. + +**`emit_rust_step`**: The primary method for emitting steps that run Rust code. Steps can claim variables, read inputs, perform work, and write outputs. Returns an optional `ReadVar` that other steps can use as a dependency. + +**`emit_minor_rust_step`**: Similar to `emit_rust_step` but for steps that cannot fail (no `Result` return) and don't need visibility in CI logs. Used for simple transformations and glue logic. 
Using minor steps also improves performance, since there is a slight cost to starting and ending a 'step' in GitHub and ADO. During the build stage, minor steps that are adjacent to each other will get merged into one giant CI step. + +**`emit_rust_stepv`**: Convenience method that combines creating a new variable and emitting a step in one call. The step's return value is automatically written to the new variable. + +For detailed examples of Rust steps, see the [`NodeCtx` emit methods documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html). + +### ADO Steps + +**`emit_ado_step`**: Emits a step that generates Azure DevOps Pipeline YAML. Takes a closure that returns a YAML string snippet which is interpolated into the generated pipeline. + +For ADO step examples, see the [`NodeCtx::emit_ado_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_ado_step). + +### GitHub Steps + +**`emit_gh_step`**: Creates a GitHub Actions step using the fluent `GhStepBuilder` API. Supports specifying the action, parameters, outputs, dependencies, and permissions. Returns a builder that must be finalized with `.finish(ctx)`. + +For GitHub step examples, see the [`GhStepBuilder` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html). + +### Side Effect Steps + +**`emit_side_effect_step`**: Creates a dependency relationship without executing code. Useful for aggregating multiple side effect dependencies into a single side effect. More efficient than emitting an empty Rust step. + +For side effect step examples, see the [`NodeCtx::emit_side_effect_step` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/struct.NodeCtx.html#method.emit_side_effect_step). 
+ +### Isolated Working Directories and Path Immutability + +```admonish warning title="Critical Constraint" +**Each step gets its own fresh local working directory.** This avoids the "single global working directory dumping ground" common in bash + YAML systems. + +However, while flowey variables enforce sharing XOR mutability at the type-system level, **developers must manually enforce this at the filesystem level**: + +**Steps must NEVER modify the contents of paths referenced by `ReadVar`.** +``` + +When you write a path to `WriteVar`, you're creating an immutable contract. Other steps reading that path must treat it as read-only. If you need to modify files from a `ReadVar`, copy them to your step's working directory. + +## Runtime Services + +Runtime services provide the API available during step execution (inside the +closures passed to `emit_rust_step`, etc.). + +### RustRuntimeServices + +[`RustRuntimeServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/rust/struct.RustRuntimeServices.html) is the primary runtime service available in Rust steps. It provides: + +**Variable Operations:** +- Reading and writing flowey variables +- Secret handling (automatic secret propagation for safety) +- Support for reading values of any type that implements [`ReadVarValue`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.ReadVarValue.html) + +**Environment Queries:** +- Backend identification (Local, ADO, or GitHub) +- Platform detection (Windows, Linux, macOS) +- Architecture information (x86_64, Aarch64) + +#### Secret Variables and CI Backend Integration + +Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. 
+ +**How Secret Handling Works** + +When a variable is marked as secret, flowey ensures: +- The value is not logged or printed in step output +- CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs +- Secret status is automatically propagated to prevent leaks + +**Automatic Secret Propagation** + +To prevent accidental leaks, flowey uses conservative automatic secret propagation: + +```admonish warning +If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. +``` + +For example: + +```rust +ctx.emit_rust_step("process token", |ctx| { + let secret_token = secret_token.claim(ctx); + let output_var = output_var.claim(ctx); + |rt| { + let token = rt.read(secret_token); // Reading a secret + + // This write is AUTOMATICALLY marked as secret + // (even though we're just writing "done") + rt.write(output_var, &"done".to_string()); + + Ok(()) + } +}); +``` + +If you need to write non-secret data after reading a secret, use `write_not_secret()`: + +```rust +rt.write_not_secret(output_var, &"done".to_string()); +``` + +**Best Practices for Secrets** + +1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML +2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys +3. 
**Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary + +### AdoStepServices + +[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: + +**ADO Variable Bridge:** +- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars +- Convert flowey vars back into ADO variables for use in YAML +- Handle secret variables appropriately + +**Repository Resources:** +- Resolve repository IDs declared as pipeline resources +- Access repository information in ADO-specific steps + +### GhStepBuilder + +[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: + +**Step Configuration:** +- Specifying the action to use (e.g., `actions/checkout@v4`) +- Adding input parameters via `.with()` +- Capturing step outputs into flowey variables +- Setting conditional execution based on variables + +**Dependency Management:** +- Declaring side-effect dependencies via `.run_after()` +- Ensuring steps run in the correct order + +**Permissions:** +- Declaring required GITHUB_TOKEN permissions +- Automatic permission aggregation at the job level diff --git a/Guide/src/dev_guide/dev_tools/flowey/variables.md b/Guide/src/dev_guide/dev_tools/flowey/variables.md new file mode 100644 index 0000000000..67a333bb69 --- /dev/null +++ b/Guide/src/dev_guide/dev_tools/flowey/variables.md @@ -0,0 +1,100 @@ +# Variables + +Variables are flowey's mechanism for creating typed data dependencies between steps. When a node emits steps, it uses ReadVar and WriteVar to declare what data each step consumes and produces. This creates explicit edges in the dependency graph: if step B reads from a variable that step A writes to, flowey ensures step A executes before step B. 
+ +## Claiming Variables + +Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: +1. Registers that this step depends on (or produces) this variable +2. Converts `ReadVar<T, VarNotClaimed>` to `ReadVar<T, VarClaimed>` +3. Allows flowey to track variable usage for graph construction + +Variables can only be claimed inside step closures using the `claim()` method. + +**Nested closure pattern and related contexts:** + +```rust +// Inside a SimpleFlowNode's process_request() method +fn process_request(&self, request: Self::Request, ctx: &mut NodeCtx<'_>) { + // Assume a single Request provided an input ReadVar and output WriteVar + let input_var: ReadVar<String> = /* from one of the requests */; + let output_var: WriteVar<i32> = /* from one of the requests */; + + // Declare a step (still build-time). This adds a node to the DAG. + ctx.emit_rust_step("compute length", |step| { + // step : StepCtx (outer closure, build-time) + // Claim dependencies so the graph knows: this step READS input_var, WRITES output_var. + let input_var = input_var.claim(step); + let output_var = output_var.claim(step); + + // Return the runtime closure. + move |rt| { + // rt : RustRuntimeServices (runtime phase) + let input = rt.read(input_var); // consume value + let len = input.len() as i32; + rt.write(output_var, &len); // fulfill promise + Ok(()) + } + }); +} +``` + +**Why the nested closure dance?** + +The nested closure pattern is fundamental to flowey's two-phase execution model: + +1. 
**Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to:
+   - Claim variables, which registers dependencies in the graph
+   - Determine what this step depends on (reads) and produces (writes)
+   - Allow flowey to validate the dependency graph and determine execution order
+   - The outer closure returns the inner closure for later execution
+
+2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to:
+   - Read actual values from claimed `ReadVar`s
+   - Perform the real work (computations, running commands, etc.)
+   - Write actual values to claimed `WriteVar`s
+
+- [**`NodeCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.NodeCtx.html): Used when emitting steps (during the build-time phase). Provides `emit_*` methods, `new_var()`, `req()`, etc.
+
+- [**`StepCtx`**](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.StepCtx.html): Used inside step closures (during runtime execution). Provides access to `claim()` for variables, and basic environment info (`backend()`, `platform()`).
+
+The type system enforces this separation: `claim()` requires `StepCtx` (only available in the outer closure), while `read()`/`write()` require `RustRuntimeServices` (only available in the inner closure).
+
+## ClaimedReadVar and ClaimedWriteVar
+
+These are type aliases for claimed variables:
+- [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar<T, VarClaimed>`
+- [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar<T, VarClaimed>`
+
+Only claimed variables can be read/written at runtime. 
+
+**Implementation Detail: Zero-Sized Types (ZSTs)**
+
+The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. This allows Rust to statically verify that all variables used in a runtime block have been claimed by that block.
+
+The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available.
+
+## Static Values vs Runtime Values
+
+Sometimes you know a value at build-time:
+
+```rust
+// Create a ReadVar with a static value
+let version = ReadVar::from_static("1.2.3".to_string());
+
+// This is encoded directly in the pipeline, not computed at runtime
+// WARNING: Never use this for secrets!
+```
+
+This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance the value is already known at build-time.
+
+## Variable Operations
+
+`ReadVar` provides operations for transforming and combining variables:
+
+- **`map()`**: Transform a `ReadVar<T>` into a `ReadVar<U>`
+- **`zip()`**: Combine two ReadVars into `ReadVar<(T, U)>`
+- **`into_side_effect()`**: Convert `ReadVar<T>` to `ReadVar<SideEffect>` when you only care about ordering, not the value
+- **`depending_on()`**: Create a new ReadVar with an explicit dependency
+
+For detailed examples, see the [`ReadVar` documentation](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html). 
From fad335e2e884133b890580b6807932c4fac7654f Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Thu, 30 Oct 2025 15:02:59 -0700 Subject: [PATCH 12/15] Add gh issue --- Guide/src/dev_guide/dev_tools/xflowey.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Guide/src/dev_guide/dev_tools/xflowey.md b/Guide/src/dev_guide/dev_tools/xflowey.md index a1522cec8b..f38dfcc6c9 100644 --- a/Guide/src/dev_guide/dev_tools/xflowey.md +++ b/Guide/src/dev_guide/dev_tools/xflowey.md @@ -22,5 +22,5 @@ In a nutshell: ```admonish warning -While `cargo xflowey` technically has the ability to run CI pipelines locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is currently broken and should not be relied upon. Use CI pipelines in their intended environments (Azure DevOps or GitHub Actions). +While `cargo xflowey` technically has the ability to run CI pipelines locally (e.g., `cargo xflowey ci checkin-gates`), this functionality is currently broken and should not be relied upon. Use CI pipelines in their intended environments (Azure DevOps or GitHub Actions). [`GitHub issue tracking this`](https://github.com/microsoft/openvmm/issues/2322) ``` From 7f7a6a6157c23cb6024e866eb551dd6db60a90e1 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Mon, 3 Nov 2025 10:29:30 -0800 Subject: [PATCH 13/15] Add section describing flowey's layout --- Guide/src/dev_guide/dev_tools/flowey.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/Guide/src/dev_guide/dev_tools/flowey.md b/Guide/src/dev_guide/dev_tools/flowey.md index 5b741ef986..0915ca001a 100644 --- a/Guide/src/dev_guide/dev_tools/flowey.md +++ b/Guide/src/dev_guide/dev_tools/flowey.md @@ -2,6 +2,8 @@ Flowey is an in-house, custom Rust library for writing maintainable, cross-platform automation. It enables developers to define CI/CD pipelines and local workflows as type-safe Rust code that can generate backend-specific YAML (Azure DevOps, GitHub Actions) or execute directly on a local machine. 
Rather than writing automation logic in YAML with implicit dependencies, flowey treats automation as first-class Rust code with explicit, typed dependencies tracked through a directed acyclic graph (DAG). + + ## Why Flowey? Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps Pipelines, GitHub Actions workflows) have several fundamental limitations that become increasingly problematic as projects grow in complexity: @@ -39,3 +41,15 @@ Flowey addresses these issues by treating automation as **first-class Rust code* - **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) - **Reusability**: Nodes are composable building blocks that can be shared across pipelines - **Local Execution**: The same pipeline definition can run locally or in CI + +## Flowey's Directory Structure + +Flowey is architected as a standalone tool with a layered crate structure that separates project-agnostic core functionality from project-specific implementations: + +- **`flowey_core`**: Provides the core types and traits shared between user-facing and internal Flowey code, such as the essential abstractions for nodes and pipelines. +- **`flowey`**: Thin wrapper around `flowey_core` that exposes the public API for defining nodes and pipelines. +- **`flowey_cli`**: Command-line interface for running flowey - handles YAML generation, local execution, and pipeline orchestration. +- **`schema_ado_yaml`**: Rust types for Azure DevOps YAML schemas used during pipeline generation. +- **`flowey_lib_common`**: Ecosystem-wide reusable nodes (installing Rust, running Cargo, downloading tools, etc.) that could be useful across projects outside of OpenVMM. +- **`flowey_lib_hvlite`**: OpenVMM-specific nodes and workflows that build on the common library primitives. +- **`flowey_hvlite`**: The OpenVMM pipeline definitions that compose nodes from the libraries above into complete CI/CD workflows. 
From bb866452e018949a2b347a52e4735062c7f126b5 Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Wed, 5 Nov 2025 14:26:03 -0800 Subject: [PATCH 14/15] Update Guide/src/dev_guide/dev_tools/flowey/steps.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- Guide/src/dev_guide/dev_tools/flowey/steps.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Guide/src/dev_guide/dev_tools/flowey/steps.md b/Guide/src/dev_guide/dev_tools/flowey/steps.md index 025cabe09c..66969c7950 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/steps.md +++ b/Guide/src/dev_guide/dev_tools/flowey/steps.md @@ -111,7 +111,7 @@ rt.write_not_secret(output_var, &"done".to_string()); 1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML 2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys -5. **Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary +3. 
**Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary ### AdoStepServices From 68439d8804db540d91a18fa886de5b49291d4a1b Mon Sep 17 00:00:00 2001 From: Daman Mulye Date: Mon, 10 Nov 2025 10:21:52 -0800 Subject: [PATCH 15/15] Fix markdown lint issues and address feedback --- Guide/src/dev_guide/dev_tools/flowey.md | 17 ++-- .../dev_guide/dev_tools/flowey/artifacts.md | 12 ++- .../dev_tools/flowey/flowey_fundamentals.md | 19 ++-- Guide/src/dev_guide/dev_tools/flowey/nodes.md | 30 ++++--- .../dev_guide/dev_tools/flowey/pipelines.md | 8 +- Guide/src/dev_guide/dev_tools/flowey/steps.md | 86 +++++++++++-------- .../dev_guide/dev_tools/flowey/variables.md | 17 ++-- flowey/flowey_core/src/node.rs | 1 - 8 files changed, 110 insertions(+), 80 deletions(-) diff --git a/Guide/src/dev_guide/dev_tools/flowey.md b/Guide/src/dev_guide/dev_tools/flowey.md index 0915ca001a..bbbf4bf487 100644 --- a/Guide/src/dev_guide/dev_tools/flowey.md +++ b/Guide/src/dev_guide/dev_tools/flowey.md @@ -1,8 +1,6 @@ # Flowey -Flowey is an in-house, custom Rust library for writing maintainable, cross-platform automation. It enables developers to define CI/CD pipelines and local workflows as type-safe Rust code that can generate backend-specific YAML (Azure DevOps, GitHub Actions) or execute directly on a local machine. Rather than writing automation logic in YAML with implicit dependencies, flowey treats automation as first-class Rust code with explicit, typed dependencies tracked through a directed acyclic graph (DAG). - - +Flowey is an in-house, Rust library for writing maintainable, cross-platform automation. It enables developers to define CI/CD pipelines and local workflows as type-safe Rust code that can generate backend-specific YAML (Azure DevOps, GitHub Actions) or execute directly on a local machine. 
Rather than writing automation logic in YAML with implicit dependencies, flowey treats automation as first-class Rust code with explicit, typed dependencies tracked through a directed acyclic graph (DAG). ## Why Flowey? @@ -10,24 +8,28 @@ Traditional CI/CD pipelines using YAML-based configuration (e.g., Azure DevOps P ### The Problems with Traditional YAML Pipelines -**Non-Local Reasoning and Global State** +#### Non-Local Reasoning and Global State + - YAML pipelines heavily rely on global state and implicit dependencies (environment variables, file system state, installed tools) - Understanding what a step does often requires mentally tracking state mutations across the entire pipeline - Debugging requires reasoning about the entire pipeline context rather than isolated units of work - Changes in one part of the pipeline can have unexpected effects in distant, seemingly unrelated parts -**Maintainability Challenges** +#### Maintainability Challenges + - YAML lacks type safety, making it easy to introduce subtle bugs (typos in variable names, incorrect data types, etc.) - No compile-time validation means errors only surface at runtime - Refactoring is risky and error-prone without automated tools to catch breaking changes - Code duplication is common because YAML lacks good abstraction mechanisms - Testing pipeline logic requires actually running the pipeline, making iteration slow and expensive -**Platform Lock-In** +#### Platform Lock-In + - Pipelines are tightly coupled to their specific CI backend (ADO, GitHub Actions, etc.) 
- Multi-platform support means maintaining multiple, divergent YAML files -**Local Development Gaps** +#### Local Development Gaps + - Developers can't easily test pipeline changes before pushing to CI - Reproducing CI failures locally is difficult or impossible - The feedback loop is slow: push → wait for CI → debug → repeat @@ -40,7 +42,6 @@ Flowey addresses these issues by treating automation as **first-class Rust code* - **Local Reasoning**: Dependencies are explicit through typed variables, not implicit through global state - **Portability**: Write once, generate YAML for any backend (ADO, GitHub Actions, or run locally) - **Reusability**: Nodes are composable building blocks that can be shared across pipelines -- **Local Execution**: The same pipeline definition can run locally or in CI ## Flowey's Directory Structure diff --git a/Guide/src/dev_guide/dev_tools/flowey/artifacts.md b/Guide/src/dev_guide/dev_tools/flowey/artifacts.md index e3df4682ba..9f0c96dfe2 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/artifacts.md +++ b/Guide/src/dev_guide/dev_tools/flowey/artifacts.md @@ -2,7 +2,7 @@ Artifacts enable typed data transfer between jobs with automatic dependency management, abstracting away CI system complexities like name collisions and manual job ordering. -### Typed vs Untyped Artifacts +## Typed vs Untyped Artifacts **Typed artifacts (recommended)** provide type-safe artifact handling by defining a custom type that implements the `Artifact` trait: @@ -16,6 +16,8 @@ struct MyArtifact { metadata: PathBuf, } +impl Artifact for MyArtifact {} + let (pub_artifact, use_artifact) = pipeline.new_typed_artifact("my-files"); ``` @@ -28,6 +30,7 @@ let (pub_artifact, use_artifact) = pipeline.new_artifact("my-files"); For detailed examples of defining and using artifacts, see the [Artifact trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html). 
Both `pipeline.new_typed_artifact("name")` and `pipeline.new_artifact("name")` return a tuple of handles: `(pub_artifact, use_artifact)`. When defining a job you convert them with the job context: + ```rust // In a producing job: let artifact_out = ctx.publish_artifact(pub_artifact); @@ -39,15 +42,17 @@ let artifact_in = ctx.use_artifact(use_artifact); // artifact_in : ReadVar (typed) // or ReadVar for untyped ``` -After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). + +After conversion, you treat the returned `WriteVar` / `ReadVar` like any other flowey variable (claim them in steps, write/read values). Key concepts: + - The `Artifact` trait works by serializing your type to JSON in a format that reflects a directory structure - Use `#[serde(rename = "file.exe")]` to specify exact file names - Typed artifacts ensure compile-time type safety when passing data between jobs - Untyped artifacts are simpler but don't provide type guarantees - Tuple handles must be lifted with `ctx.publish_artifact(...)` / `ctx.use_artifact(...)` to become flowey variables -### How Flowey Manages Artifacts Under the Hood +## How Flowey Manages Artifacts Under the Hood During the **pipeline resolution phase** (build-time), flowey: @@ -59,6 +64,7 @@ During the **pipeline resolution phase** (build-time), flowey: - For local execution: Uses filesystem copying At **runtime**, the artifact `ReadVar` and `WriteVar` work just like any other flowey variable: + - Producing jobs write artifact files to the path from `WriteVar` - Flowey automatically uploads those files as an artifact - Consuming jobs read the path from `ReadVar` where flowey has downloaded the artifact diff --git a/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md b/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md index ee4a1a5b87..b6db5dc84d 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md +++ 
b/Guide/src/dev_guide/dev_tools/flowey/flowey_fundamentals.md @@ -22,7 +22,7 @@ Flowey's model consists of a hierarchy of components: These building blocks are connected through three key mechanisms: -**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar` represents a promise to produce a value of type `T` at runtime, while a `ReadVar` represents a dependency on that value. Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar`, and a "test" step would read from the corresponding `ReadVar`. This echoes Rust’s “shared XOR mutable” ownership rule: a value has either one writer or multiple readers, never both concurrently. +**[Variables (`ReadVar`/`WriteVar`)](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html)** enable data flow between steps. A `WriteVar` represents a promise to produce a value of type `T` at runtime, while a `ReadVar` represents a dependency on that value. Variables enforce write-once semantics (each value has exactly one producer) and create explicit dependencies in the DAG. For example, a "build" step might write a binary path to a `WriteVar`, and a "test" step would read from the corresponding `ReadVar`. This echoes Rust's "shared XOR mutable" ownership rule: a value has either one writer or multiple readers, never both concurrently. **[Artifacts](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.Artifact.html)** enable data transfer between jobs. Since jobs may run on different machines or at different times, artifacts package up files (like compiled binaries, test results, or build outputs) for transfer. 
Flowey automatically handles uploading artifacts at the end of producing jobs and downloading them at the start of consuming jobs, abstracting away backend-specific artifact APIs. @@ -30,9 +30,9 @@ These building blocks are connected through three key mechanisms: ### Putting It Together -Here's how these pieces relate: +Here's an example of how these pieces relate: -``` +```txt Pipeline ├─ Job 1 (Linux x86_64) │ ├─ Node A (install Rust) @@ -51,6 +51,7 @@ Pipeline ``` In this example: + - The **Pipeline** defines two jobs that run on different platforms - **Job 1** installs Rust and builds the project, with step dependencies expressed through variables - **Job 2** runs tests using the binary from Job 1, with the binary transferred via an artifact @@ -80,8 +81,8 @@ Flowey operates in two distinct phases: - Artifacts (data packages passed between jobs) are published/consumed - Side effects (dependencies) are resolved - The `.flowey.toml` file at the repo root defines which pipelines to generate and where. For example: + ```toml [[pipeline.flowey_hvlite.github]] file = ".github/workflows/openvmm-pr.yaml" @@ -89,12 +90,14 @@ cmd = ["ci", "checkin-gates", "--config=pr"] ``` When you run `cargo xflowey regen`: -1. It reads `.flowey.toml` + +1. It reads `.flowey.toml` 2. Builds the `flowey-hvlite` binary 3. Runs `flowey-hvlite pipeline github --out .github/workflows/openvmm-pr.yaml ci checkin-gates --config=pr` 4. This generates/updates the YAML file with the resolved pipeline **Key Distinction:** + - `cargo build -p flowey-hvlite` - Only compiles the flowey code to verify it builds successfully. **Does not** construct the DAG or generate YAML files. - `cargo xflowey regen` - Compiles the code **and** runs the full build-time resolution to construct the DAG, validate the pipeline, and regenerate all YAML files defined in `.flowey.toml`. 
@@ -104,12 +107,12 @@ Always run `cargo xflowey regen` after modifying pipeline definitions to ensure Flowey supports multiple execution backends: -- **Local**: Runs directly on your development machine +- **Local**: Runs directly on your development machine - **ADO (Azure DevOps)**: Generates ADO Pipeline YAML - **GitHub Actions**: Generates GitHub Actions workflow YAML -```admonish warning +```admonish warning Nodes should be written to work across ALL backends whenever possible. Relying on `ctx.backend()` to query the backend or manually emitting backend-specific steps (via `emit_ado_step` or `emit_gh_step`) should be avoided unless absolutely necessary. Most automation logic should be backend-agnostic, using `emit_rust_step` for cross-platform Rust code that works everywhere. Writing cross-platform flowey code enables locally testing pipelines which can be invaluable when iterating over CI changes. ``` -If a node only supports certain backends, it should immediately fast‑fail with a clear error (“ not supported on ”) instead of silently proceeding. That failure signals it’s time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. +If a node only supports certain backends, it should immediately fast‑fail with a clear error ("`` not supported on ``") instead of silently proceeding. That failure signals it's time either to add the missing backend support or introduce a multi‑platform abstraction/meta‑node that delegates to platform‑specific nodes. 
diff --git a/Guide/src/dev_guide/dev_tools/flowey/nodes.md b/Guide/src/dev_guide/dev_tools/flowey/nodes.md index be1e1ba48f..0de6785b26 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/nodes.md +++ b/Guide/src/dev_guide/dev_tools/flowey/nodes.md @@ -1,16 +1,19 @@ # Nodes At a conceptual level, a Flowey node is analogous to a strongly typed function: you "invoke" it by submitting one or more Request values (its parameters), and it responds by emitting steps that perform work and produce outputs (values written to `WriteVar`s, published artifacts, or side-effect dependencies). -### The Node/Request Pattern + +## The Node/Request Pattern + Every node has an associated **Request** type that defines what operations the node can perform. Requests are defined using the [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) macro and registered with [`new_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) or [`new_simple_flow_node!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) macros. For complete examples, see the [`FlowNode` trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html). -### FlowNode vs SimpleFlowNode +## FlowNode vs SimpleFlowNode Flowey provides two node implementation patterns with a fundamental difference in their Request structure and complexity: [**`SimpleFlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) - for straightforward, function-like operations: + - Uses a **single struct Request** type - Processes one request at a time independently - Behaves like a "plain old function" that resolves its single request type @@ -21,12 +24,13 @@ Flowey provides two node implementation patterns with a fundamental difference i **Example use case**: A node that runs `cargo build` - each request is independent and just needs to know what to build. 
[**`FlowNode`**](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) - for complex nodes requiring coordination and non-local configuration: + - Often uses an **enum Request** with multiple variants - Receives all requests as a `Vec` and processes them together - Can aggregate, optimize, and consolidate multiple requests into fewer steps - Enables **non-local configuration** - critical for simplifying complex pipelines -**The Non-Local Configuration Pattern** +### The Non-Local Configuration Pattern The key advantage of FlowNode is its ability to accept configuration from different parts of the node graph without forcing intermediate nodes to be aware of that configuration. This is the "non-local" aspect: @@ -41,7 +45,7 @@ enum Request { **Without this pattern** (struct-only requests), you'd need to thread the Rust version through every intermediate node in the call graph: -``` +```txt Root Node (knows version: "1.75") → Node A (must pass through version) → Node B (must pass through version) @@ -51,7 +55,7 @@ Root Node (knows version: "1.75") **With FlowNode's enum Request**, the root node can send `Request::SetVersion` once, while intermediate nodes that don't care about the version can simply send `Request::GetToolchain`: -``` +```txt Root Node → InstallRust::SetVersion("1.75") → Node A → Node B @@ -61,27 +65,31 @@ Root Node → InstallRust::SetVersion("1.75") The Install Rust FlowNode receives both requests together, validates that exactly one `SetVersion` was provided, and fulfills all the `GetToolchain` requests with that configured version. The intermediate nodes (A, B, C) never needed to know about or pass through version information. 
This pattern: + - **Eliminates plumbing complexity** in large pipelines - **Allows global configuration** to be set once at the top level - **Keeps unrelated nodes decoupled** from configuration they don't need - **Enables validation** that required configuration was provided (exactly one `SetVersion`) **Additional Benefits of FlowNode:** + - Optimize and consolidate multiple similar requests into fewer steps (e.g., installing a tool once for many consumers) - Resolve conflicts or enforce consistency across requests For detailed comparisons and examples, see the [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html) and [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.SimpleFlowNode.html) documentation. -### Node Registration +## Node Registration Nodes are automatically registered using macros that handle most of the boilerplate: + - [`new_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_flow_node.html) - registers a FlowNode - [`new_simple_flow_node!(struct Node)`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.new_simple_flow_node.html) - registers a SimpleFlowNode - [`flowey_request!`](https://openvmm.dev/rustdoc/linux/flowey_core/macro.flowey_request.html) - defines the Request type and implements [`IntoRequest`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.IntoRequest.html) -### The imports() Method +## The imports() Method The `imports()` method declares which other nodes this node might depend on. This enables flowey to: + - Validate that all dependencies are available - Build the complete dependency graph - Catch missing dependencies at build-time @@ -94,9 +102,10 @@ Flowey does not catch unused imports today as part of its build-time validation For more on node imports, see the [`FlowNode::imports` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.imports). 
-### The emit() Method +## The emit() Method The [`emit()`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit) method is where a node's actual logic lives. For [`FlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html), it receives all requests together and must: + 1. Aggregate and validate requests (ensuring consistency where needed) 2. Emit steps to perform the work 3. Wire up dependencies between steps via variables @@ -105,7 +114,6 @@ For [`SimpleFlowNode`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait. For complete implementation examples, see the [`FlowNode::emit` documentation](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.FlowNode.html#tymethod.emit). - ## Node Design Philosophy Flowey nodes are designed around several key principles: @@ -139,7 +147,6 @@ implementation (runtime logic): - **Node definition**: What the node does, what it depends on - **Step implementation**: How it does it - ## Common Patterns ### Request Aggregation and Validation @@ -147,6 +154,7 @@ implementation (runtime logic): When a FlowNode receives multiple requests, it often needs to ensure certain values are consistent across all requests while collecting others. The `same_across_all_reqs` helper function simplifies this pattern by validating that a value is identical across all requests. **Key concepts:** + - Iterate through all requests and separate them by type - Use `same_across_all_reqs` to validate values that must be consistent - Collect values that can have multiple instances (like output variables) @@ -159,12 +167,14 @@ For a complete example, see the [`same_across_all_reqs` documentation](https://o Nodes can query the current backend and platform to emit platform-specific or backend-specific steps. This allows nodes to adapt their behavior based on the execution environment. 
**Key concepts:** + - Use `ctx.backend()` to check if running locally, on ADO, or on GitHub Actions - Use `ctx.platform()` to check the operating system (Windows, Linux, macOS) - Use `ctx.arch()` to check the architecture (x86_64, Aarch64) - Emit different steps or use different tool configurations based on these values **When to use:** + - Installing platform-specific tools or dependencies - Using different commands on Windows vs Unix systems - Optimizing for local development vs CI environments diff --git a/Guide/src/dev_guide/dev_tools/flowey/pipelines.md b/Guide/src/dev_guide/dev_tools/flowey/pipelines.md index 4b68fab10f..ea7f9400cc 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/pipelines.md +++ b/Guide/src/dev_guide/dev_tools/flowey/pipelines.md @@ -2,7 +2,7 @@ Pipelines define complete automation workflows consisting of jobs that run nodes. See the [IntoPipeline trait documentation](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/trait.IntoPipeline.html) for detailed examples. -### Pipeline Jobs +## Pipeline Jobs [`PipelineJob`](https://openvmm.dev/rustdoc/linux/flowey_core/pipeline/struct.PipelineJob.html) instances are configured using a builder pattern: @@ -22,9 +22,9 @@ let job = pipeline ### Pipeline Parameters -Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). -Azure DevOps parameter UI +Parameters allow runtime configuration of pipelines. In Azure DevOps, parameters appear as editable fields in the Run pipeline UI (name, description, default). +![Azure DevOps parameter UI](images/Parameters.png) ```rust // Define a boolean parameter @@ -54,7 +54,7 @@ Stable parameters represent a **public, stable API** for the pipeline: - **External Visibility**: The parameter name is exposed as-is in the generated CI YAML, making it callable by external pipelines and users. 
- **API Contract**: Once a parameter is marked stable, its name and behavior should be maintained for backward compatibility. Removing or renaming a stable parameter is a breaking change. -- **Use Cases**: +- **Use Cases**: - Parameters that control major pipeline behavior (e.g., `enable_tests`, `build_configuration`) - Parameters intended for use by other teams or external automation - Parameters documented as part of the pipeline's public interface diff --git a/Guide/src/dev_guide/dev_tools/flowey/steps.md b/Guide/src/dev_guide/dev_tools/flowey/steps.md index 66969c7950..2d2e89f313 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/steps.md +++ b/Guide/src/dev_guide/dev_tools/flowey/steps.md @@ -3,6 +3,8 @@ **Steps** are units of work that will be executed at runtime. Different step types exist for different purposes. +## Types of Steps + ### Rust Steps Rust steps execute Rust code at runtime and are the most common step type in flowey. @@ -54,32 +56,71 @@ closures passed to `emit_rust_step`, etc.). [`RustRuntimeServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/rust/struct.RustRuntimeServices.html) is the primary runtime service available in Rust steps. 
It provides: -**Variable Operations:** +#### Variable Operations + - Reading and writing flowey variables - Secret handling (automatic secret propagation for safety) - Support for reading values of any type that implements [`ReadVarValue`](https://openvmm.dev/rustdoc/linux/flowey_core/node/trait.ReadVarValue.html) -**Environment Queries:** +#### Environment Queries + - Backend identification (Local, ADO, or GitHub) - Platform detection (Windows, Linux, macOS) - Architecture information (x86_64, Aarch64) -#### Secret Variables and CI Backend Integration +### AdoStepServices + +[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: + +**ADO Variable Bridge:** + +- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars +- Convert flowey vars back into ADO variables for use in YAML +- Handle secret variables appropriately + +**Repository Resources:** + +- Resolve repository IDs declared as pipeline resources +- Access repository information in ADO-specific steps + +### GhStepBuilder + +[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: + +**Step Configuration:** + +- Specifying the action to use (e.g., `actions/checkout@v4`) +- Adding input parameters via `.with()` +- Capturing step outputs into flowey variables +- Setting conditional execution based on variables + +**Dependency Management:** + +- Declaring side-effect dependencies via `.run_after()` +- Ensuring steps run in the correct order + +**Permissions:** + +- Declaring required GITHUB_TOKEN permissions +- Automatic permission aggregation at the job level + +## Secret Variables and CI Backend Integration Flowey provides built-in support for handling sensitive data like API keys, tokens, and credentials through **secret variables**. 
Secret variables are treated specially to prevent accidental exposure in logs and CI outputs. -**How Secret Handling Works** +### How Secret Handling Works When a variable is marked as secret, flowey ensures: + - The value is not logged or printed in step output - CI backends (ADO, GitHub Actions) are instructed to mask the value in their logs - Secret status is automatically propagated to prevent leaks -**Automatic Secret Propagation** +### Automatic Secret Propagation To prevent accidental leaks, flowey uses conservative automatic secret propagation: -```admonish warning +```admonish warning If a step reads a secret value, **all subsequent writes from that step are automatically marked as secret** by default. This prevents accidentally leaking secrets through derived values. ``` @@ -107,39 +148,8 @@ If you need to write non-secret data after reading a secret, use `write_not_secr rt.write_not_secret(output_var, &"done".to_string()); ``` -**Best Practices for Secrets** +### Best Practices for Secrets 1. **Never use `ReadVar::from_static()` for secrets** - static values are encoded in plain text in the generated YAML 2. **Always use `write_secret()`** when writing sensitive data like tokens, passwords, or keys 3. 
**Minimize secret lifetime** - read secrets as late as possible and don't pass them through more variables than necessary - -### AdoStepServices - -[`AdoStepServices`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/ado/struct.AdoStepServices.html) provides integration with Azure DevOps-specific features when emitting ADO YAML steps: - -**ADO Variable Bridge:** -- Convert ADO runtime variables (like `BUILD.SOURCEBRANCH`) into flowey vars -- Convert flowey vars back into ADO variables for use in YAML -- Handle secret variables appropriately - -**Repository Resources:** -- Resolve repository IDs declared as pipeline resources -- Access repository information in ADO-specific steps - -### GhStepBuilder - -[`GhStepBuilder`](https://openvmm.dev/rustdoc/linux/flowey_core/node/steps/github/struct.GhStepBuilder.html) is a fluent builder for constructing GitHub Actions steps with: - -**Step Configuration:** -- Specifying the action to use (e.g., `actions/checkout@v4`) -- Adding input parameters via `.with()` -- Capturing step outputs into flowey variables -- Setting conditional execution based on variables - -**Dependency Management:** -- Declaring side-effect dependencies via `.run_after()` -- Ensuring steps run in the correct order - -**Permissions:** -- Declaring required GITHUB_TOKEN permissions -- Automatic permission aggregation at the job level diff --git a/Guide/src/dev_guide/dev_tools/flowey/variables.md b/Guide/src/dev_guide/dev_tools/flowey/variables.md index 67a333bb69..d342496d66 100644 --- a/Guide/src/dev_guide/dev_tools/flowey/variables.md +++ b/Guide/src/dev_guide/dev_tools/flowey/variables.md @@ -1,10 +1,11 @@ # Variables -Variables are flowey's mechanism for creating typed data dependencies between steps. When a node emits steps, it uses ReadVar and WriteVar to declare what data each step consumes and produces. 
This creates explicit edges in the dependency graph: if step B reads from a variable that step A writes to, flowey ensures step A executes before step B. +Variables are flowey's mechanism for creating typed data dependencies between steps. When a node emits steps, it uses `ReadVar` and `WriteVar` to declare what data each step consumes and produces. This creates explicit edges in the dependency graph: if step B reads from a variable that step A writes to, flowey ensures step A executes before step B. ## Claiming Variables Before a step can use a [`ReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.ReadVar.html) or [`WriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/struct.WriteVar.html), it must **claim** it. Claiming serves several purposes: + 1. Registers that this step depends on (or produces) this variable 2. Converts `ReadVar` to `ReadVar` 3. Allows flowey to track variable usage for graph construction @@ -18,7 +19,7 @@ Variables can only be claimed inside step closures using the `claim()` method. fn process_request(&self, request: Self::Request, ctx: &mut NodeCtx<'_>) { // Assume a single Request provided an input ReadVar and output WriteVar let input_var: ReadVar = /* from one of the requests */; - let output_var: WriteVar = /* from one of the request */ + let output_var: WriteVar = /* from one of the requests */; // Declare a step (still build-time). This adds a node to the DAG. ctx.emit_rust_step("compute length", |step| { @@ -46,9 +47,8 @@ The nested closure pattern is fundamental to flowey's two-phase execution model: 1. 
**Build-Time (Outer Closure)**: When flowey constructs the DAG, the outer closure runs to: - Claim variables, which registers dependencies in the graph - Determine what this step depends on (reads) and produces (writes) - - Allow flowey to validate the dependency graph and determine execution order - - The outer closure returns the inner closure for later execution - + - Allow flowey to determine execution order + - Returns an inner closure that gets invoked during the job's runtime 2. **Runtime (Inner Closure)**: When the pipeline actually executes, the inner closure runs to: - Read actual values from claimed `ReadVar`s - Perform the real work (computations, running commands, etc.) @@ -63,14 +63,15 @@ The type system enforces this separation: `claim()` requires `StepCtx` (only ava ## ClaimedReadVar and ClaimedWriteVar These are type aliases for claimed variables: + - [`ClaimedReadVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedReadVar.html) = `ReadVar` - [`ClaimedWriteVar`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/type.ClaimedWriteVar.html) = `WriteVar` Only claimed variables can be read/written at runtime. -**Implementation Detail: Zero-Sized Types (ZSTs)** +### Implementation Detail: Zero-Sized Types (ZSTs) -The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. It allows Rust to statically verify that all variables used in a runtime block have been claimed by that block. +The claim state markers [`VarClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarClaimed.html) and [`VarNotClaimed`](https://openvmm.dev/rustdoc/linux/flowey/node/prelude/enum.VarNotClaimed.html) are zero-sized types (ZSTs) - they exist purely at the type level. 
It allows Rust to statically verify that all variables used in a runtime block have been claimed by that block. The type system ensures that `claim()` is the only way to convert from `VarNotClaimed` to `VarClaimed`, and this conversion can only happen within the outer closure where `StepCtx` is available. @@ -86,7 +87,7 @@ let version = ReadVar::from_static("1.2.3".to_string()); // WARNING: Never use this for secrets! ``` -This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value is known at build-time. +This can be used as an escape hatch when you have a Request (that expects a value to be determined at runtime), but in a given instance you know the value at build-time. ## Variable Operations diff --git a/flowey/flowey_core/src/node.rs b/flowey/flowey_core/src/node.rs index 87e91bb0c3..41607c2de0 100644 --- a/flowey/flowey_core/src/node.rs +++ b/flowey/flowey_core/src/node.rs @@ -2654,7 +2654,6 @@ macro_rules! new_flow_node_base { /// **Use [`SimpleFlowNode`]** when: /// - Each request can be processed independently /// - No aggregation logic is needed -/// - Simpler, less boilerplate pub trait FlowNode { /// The request type that defines what operations this node can perform. ///