diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..9fae104 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,14 @@ +# EditorConfig is awesome: http://EditorConfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[{*.md,*.py,*.rs}] +indent_size = 4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d9f0d22..ebd1bee 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -63,12 +63,14 @@ jobs: if: ${{ !matrix.cross }} run: | RUSTFLAGS='-C target-feature=+crt-static' \ - cargo build --release -p liyi --target ${{ matrix.target }} + cargo build --release -p liyi-cli --target ${{ matrix.target }} + ls -alF target/${{ matrix.target }}/release/ - name: Build release binary (cross) if: matrix.cross run: | - cross build --release -p liyi --target ${{ matrix.target }} + cross build --release -p liyi-cli --target ${{ matrix.target }} + ls -alF target/${{ matrix.target }}/release/ - name: Strip binary run: | @@ -85,12 +87,12 @@ jobs: cp target/${{ matrix.target }}/release/liyi dist/ cp LICENSE-*.txt dist/ cp README.md dist/ - tar -C dist -czf ../liyi-${{ github.ref_name }}-${{ matrix.target }}.tar.gz . + tar -C dist -czf ./liyi-${{ github.ref_name }}-${{ matrix.target }}.tar.gz . 
- name: Upload artifact uses: actions/upload-artifact@v7 with: - name: liyi-${{ matrix.target }} + name: dist-artifact-${{ matrix.target }} path: liyi-${{ github.ref_name }}-${{ matrix.target }}.tar.gz release: @@ -104,10 +106,13 @@ jobs: uses: actions/download-artifact@v8 with: path: artifacts - pattern: liyi-* + pattern: dist-artifact-* merge-multiple: true + - run: ls -alF artifacts + - name: Create Release + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: softprops/action-gh-release@v2 with: files: artifacts/*.tar.gz diff --git a/.github/workflows/release.yml.liyi.jsonc b/.github/workflows/release.yml.liyi.jsonc index c6d2720..09f3ed5 100644 --- a/.github/workflows/release.yml.liyi.jsonc +++ b/.github/workflows/release.yml.liyi.jsonc @@ -6,7 +6,7 @@ { "item": "on", "reviewed": true, - "intent": "=trivial", + "intent": "Trigger this workflow only for pushed Git tags. Ordinary branch pushes and pull requests must not run the release pipeline or publish release assets.", "source_span": [ 5, 8 @@ -18,7 +18,7 @@ { "item": "permissions", "reviewed": true, - "intent": "Grant the workflow GITHUB_TOKEN write access to repository contents so the release job can create and publish a GitHub release. Must not request broader repository permissions than contents: write.", + "intent": "Grant the workflow token write access to repository contents so the release job can create or update a GitHub release and attach assets. Must not request broader repository permissions than contents: write.", "source_span": [ 10, 11 @@ -30,26 +30,144 @@ { "item": "jobs::build", "reviewed": true, - "intent": "Build one statically linked release tarball per configured musl target. 
Must install the appropriate Rust target and build tool, use native cargo for non-cross targets and cross for cross targets, strip the produced binary when possible, package the binary together with the license files and README, and upload a per-target artifact for the release job.", + "intent": "Build one release tarball per configured musl target and publish each tarball as an artifact for the downstream release job. The job must choose native or cross compilation per matrix entry, produce the `liyi` CLI binary from package `liyi-cli`, and package the binary together with licenses and README.", "source_span": [ 17, - 94 + 96 ], "tree_path": "key.jobs::key.build", - "source_hash": "sha256:d91d63ed004a32d04cdcd4e2aa7f081a1590bc53b732a8cbbe511d9c6b677e87", + "source_hash": "sha256:71f4a0f6d299fbf42ee8ba8eb3da5a4c46a157deb550e9c7cced6e9f55e4e537", "source_anchor": " build:" }, + { + "item": "build::strategy", + "reviewed": true, + "intent": "Enumerate the supported musl release targets and annotate each target with the architecture name and whether it requires cross compilation. The `cross` flag and `arch` value must stay aligned with the later install, build, and strip steps.", + "source_span": [ + 20, + 34 + ], + "tree_path": "key.jobs::key.build::key.strategy", + "source_hash": "sha256:e23b623cd0cbd9ee9e51e3eef73fa8b0a33f988486d8d89c1541032def51daab", + "source_anchor": " strategy:" + }, + { + "item": "build::install-native-musl-tools", + "reviewed": true, + "intent": "For native builds only, install the host musl toolchain packages before compilation so cargo can link the static musl binary without cross. 
This step must not run for matrix entries that use cross.", + "source_span": [ + 44, + 48 + ], + "tree_path": "key.jobs::key.build::key.steps[2]::key.run", + "source_hash": "sha256:53d3f3776031cd2a1de9c69d47a04405a586d455b4816654453563aee662c0b4", + "source_anchor": " - name: Install native musl tools" + }, + { + "item": "build::install-cross", + "reviewed": true, + "intent": "For cross-compiled targets only, install `cross` from the upstream Git repository instead of the pinned marketplace installer because the packaged action version is too old for the loongarch64 and riscv64 musl targets used in this matrix. This step must not run for native builds.", + "source_span": [ + 50, + 56 + ], + "tree_path": "key.jobs::key.build::key.steps[3]::key.run", + "source_hash": "sha256:f8258de5e2a775f28b6837a034c23142c802e840a645aae82feea016592e14c9", + "source_anchor": " - name: Install cross" + }, + { + "item": "build::build-native-binary", + "reviewed": true, + "intent": "For non-cross targets, build package `liyi-cli` in release mode for the selected target with `crt-static` enabled so the produced `liyi` binary is statically linked. The post-build directory listing acts as a sanity check that the expected release artifact exists at `target/<target>/release/`.", + "source_span": [ + 62, + 67 + ], + "tree_path": "key.jobs::key.build::key.steps[5]::key.run", + "source_hash": "sha256:ad6da2aaa86f294126892da476be8e7255abd010ab6a7f1d3e1ac696cbab923e", + "source_anchor": " - name: Build release binary (native)" + }, + { + "item": "build::build-cross-binary", + "reviewed": true, + "intent": "For cross targets, build package `liyi-cli` in release mode through `cross` for the selected target triple.
The post-build directory listing must verify that the expected target release directory was populated before later packaging steps run.", + "source_span": [ + 69, + 73 + ], + "tree_path": "key.jobs::key.build::key.steps[6]::key.run", + "source_hash": "sha256:f0516755de82a5ca1b26380cc0157a046918dc7a99005c5e2a8954ecfef9084d", + "source_anchor": " - name: Build release binary (cross)" + }, + { + "item": "build::strip-binary", + "reviewed": true, + "intent": "Attempt to strip the produced `liyi` binary after compilation to reduce release artifact size. Native builds must use the host `strip`; cross builds must try the target-specific musl strip tool but tolerate its absence so packaging can still proceed.", + "source_span": [ + 75, + 82 + ], + "tree_path": "key.jobs::key.build::key.steps[7]::key.run", + "source_hash": "sha256:3e21374960929ccbaed4d37fc5b285dc0b4b77cf5c75bddfda431871b20374d1", + "source_anchor": " - name: Strip binary" + }, + { + "item": "build::package-binary", + "reviewed": true, + "intent": "Assemble the release tarball for the current target by copying the built `liyi` binary, the repository license files, and `README.md` into `dist/`, then creating `liyi-<tag>-<target>.tar.gz` in the workspace root. The tarball name must stay stable because the upload and release steps consume that exact filename pattern.", + "source_span": [ + 84, + 90 + ], + "tree_path": "key.jobs::key.build::key.steps[8]::key.run", + "source_hash": "sha256:3e07ffc6ef6e4854235d2668749ade3536be6eece60e8c0f592244f701e506d5", + "source_anchor": " - name: Package binary" + }, + { + "item": "build::upload-artifact", + "reviewed": true, + "intent": "Upload the packaged tarball as a GitHub Actions artifact named `dist-artifact-<target>`.
The artifact naming convention must remain consistent with the release job's download pattern so every target tarball is collected for publication.", + "source_span": [ + 92, + 96 + ], + "tree_path": "key.jobs::key.build::key.steps[9]::key.with", + "source_hash": "sha256:b53367295c966328cd1ed89adb45574227d5a1b28a929e9442f7fd7e86d994a5", + "source_anchor": " - name: Upload artifact" + }, { "item": "jobs::release", "reviewed": true, - "intent": "After all build-matrix artifacts are available, download every packaged tarball and publish them on the GitHub release for the pushed tag. Must attach all artifacts, generate release notes, and authenticate with the repository GITHUB_TOKEN.", + "intent": "Wait for all build-matrix artifacts, gather the packaged tarballs into one directory, and publish them on the GitHub release corresponding to the pushed tag. The job must not try to publish before the build job succeeds.", "source_span": [ - 96, - 118 + 98, + 123 ], "tree_path": "key.jobs::key.release", - "source_hash": "sha256:bfc7fd802dbded85b5ee694007432677d288376e5f46f0c74a39335ce123c1d5", + "source_hash": "sha256:1c505f0c2dda23075de3f2b6265714e95dad26cf4b7840c3ce3fe070bc540eda", "source_anchor": " release:" + }, + { + "item": "release::download-artifacts", + "reviewed": true, + "intent": "Download every target tarball artifact emitted by the build matrix into the local `artifacts/` directory. 
The `dist-artifact-*` pattern and `merge-multiple: true` setting must gather all per-target uploads into one flat directory so the release step can attach them with a single glob.", + "source_span": [ + 105, + 110 + ], + "source_hash": "sha256:5cb97468bcd3e1686454133b658486b7359407a75637bd46c408dc622c757ea8", + "source_anchor": " - name: Download all artifacts" + }, + { + "item": "release::create-release", + "reviewed": true, + "intent": "Create or update the GitHub release only when the event is a tag push, then attach every `artifacts/*.tar.gz` asset, keep the release non-draft and non-prerelease, generate release notes, and authenticate with `GITHUB_TOKEN`. The explicit `if` guard must prevent accidental release publication from any future non-tag trigger added to this workflow.", + "source_span": [ + 114, + 123 + ], + "source_hash": "sha256:216ea36572f4dc28e5641bc4cc64b64e769140ce0517f9cd84a7483b914891c4", + "source_anchor": " - name: Create Release" } ] } diff --git a/AGENTS.md b/AGENTS.md index 3a28360..6de129d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -39,6 +39,7 @@ Follow Conventional Commits: `(): ` - Types: `feat`, `fix`, `docs`, `refactor`, `build`, `ci`, `style`. Do **not** use `chore`. - Scopes: `linter`, `design`, `template`, `docs`, `policy`, `meta`. - One logical change per commit — do not combine unrelated changes. +- For bug-fix or debugging commits, include a short rationale and root cause analysis in the commit body so reviewers can see why the change was necessary and what failure mode it corrects. - AIGC trailers required: `Original prompt:`, blank line, Markdown block quote of user prompts verbatim, blank line, `AI-assisted-by: Your model identity (Name of client used to interact with you)`. - Do **not** add `Signed-off-by` on behalf of the user. 
diff --git a/crates/liyi-cli/Cargo.toml b/crates/liyi-cli/Cargo.toml index ade89c3..cf873ff 100644 --- a/crates/liyi-cli/Cargo.toml +++ b/crates/liyi-cli/Cargo.toml @@ -22,10 +22,10 @@ ratatui = "0.29" serde_json = "1.0" similar = "2.7.0" syntect = { version = "5", default-features = false, features = [ - "default-syntaxes", - "default-themes", - "parsing", - "regex-fancy", + "default-syntaxes", + "default-themes", + "parsing", + "regex-fancy", ] } liyi = { path = "../liyi" } diff --git a/crates/liyi/src/tree_path/lang_json.rs b/crates/liyi/src/tree_path/lang_json.rs index fb4d6a9..b6809cb 100644 --- a/crates/liyi/src/tree_path/lang_json.rs +++ b/crates/liyi/src/tree_path/lang_json.rs @@ -140,6 +140,25 @@ mod tests { assert_eq!(path, "key.nested::key.deep::key.value"); } + #[test] + fn compute_json_indexed_array_element() { + let sample = r#"{ + "specs": [ + { + "item": "foo", + "intent": "do foo" + }, + { + "item": "bar", + "intent": "do bar" + } + ] +}"#; + let span = resolve_tree_path(sample, "key.specs[1]::key.item", Language::Json).unwrap(); + let path = compute_tree_path(sample, span, Language::Json); + assert_eq!(path, "key.specs[1]::key.item"); + } + #[test] fn roundtrip_json_top_level() { let span = resolve_tree_path(SAMPLE_JSON, "key.name", Language::Json).unwrap(); @@ -158,6 +177,27 @@ mod tests { assert_eq!(re_resolved, span); } + #[test] + fn roundtrip_json_indexed_array_element() { + let sample = r#"{ + "specs": [ + { + "item": "foo", + "intent": "do foo" + }, + { + "item": "bar", + "intent": "do bar" + } + ] +}"#; + let span = resolve_tree_path(sample, "key.specs[1]::key.item", Language::Json).unwrap(); + let path = compute_tree_path(sample, span, Language::Json); + assert_eq!(path, "key.specs[1]::key.item"); + let re_resolved = resolve_tree_path(sample, &path, Language::Json).unwrap(); + assert_eq!(re_resolved, span); + } + #[test] fn detect_json_extension() { assert_eq!( diff --git a/crates/liyi/src/tree_path/lang_yaml.rs 
b/crates/liyi/src/tree_path/lang_yaml.rs index b0d819e..94f53b6 100644 --- a/crates/liyi/src/tree_path/lang_yaml.rs +++ b/crates/liyi/src/tree_path/lang_yaml.rs @@ -197,6 +197,18 @@ metadata: assert_eq!(path, "key.jobs::key.build::key.\"runs-on\""); } + #[test] + fn compute_yaml_indexed_sequence_item() { + let span = resolve_tree_path( + SAMPLE_YAML, + "key.jobs::key.build::key.steps[1]::key.name", + Language::Yaml, + ) + .unwrap(); + let path = compute_tree_path(SAMPLE_YAML, span, Language::Yaml); + assert_eq!(path, "key.jobs::key.build::key.steps[1]::key.name"); + } + #[test] fn roundtrip_yaml_top_level() { let span = resolve_tree_path(SAMPLE_YAML, "key.name", Language::Yaml).unwrap(); @@ -215,6 +227,20 @@ metadata: assert_eq!(re_resolved, span); } + #[test] + fn roundtrip_yaml_indexed_sequence_item() { + let span = resolve_tree_path( + SAMPLE_YAML, + "key.jobs::key.build::key.steps[2]::key.run", + Language::Yaml, + ) + .unwrap(); + let path = compute_tree_path(SAMPLE_YAML, span, Language::Yaml); + assert_eq!(path, "key.jobs::key.build::key.steps[2]::key.run"); + let re_resolved = resolve_tree_path(SAMPLE_YAML, &path, Language::Yaml).unwrap(); + assert_eq!(re_resolved, span); + } + #[test] fn detect_yaml_extensions() { assert_eq!( diff --git a/crates/liyi/src/tree_path/mod.rs b/crates/liyi/src/tree_path/mod.rs index 4258175..b50ddb7 100644 --- a/crates/liyi/src/tree_path/mod.rs +++ b/crates/liyi/src/tree_path/mod.rs @@ -685,7 +685,10 @@ fn resolve_indexed_child<'a>( let mut body = config.find_body(node)?; // Walk through transparent wrapper nodes to reach the actual array // container (e.g., YAML block_node → block_sequence). 
- while config.transparent_kinds.contains(&body.kind()) { + while config.transparent_kinds.contains(&body.kind()) + && body.kind() != "array" + && body.kind() != "block_sequence" + { let mut cursor = body.walk(); let named: Vec<Node<'a>> = body .children(&mut cursor) .filter(|c| c.is_named()) .collect(); @@ -1029,6 +1032,57 @@ fn build_path_to_node(config: &LanguageConfig, root: &Node, target: &Node, sourc } } +fn format_segment(short: &str, name: &str, index: Option<usize>) -> String { + let mut segment = format!("{short}.{}", parser::serialize_name(name)); + if let Some(idx) = index { + segment.push('['); + segment.push_str(&idx.to_string()); + segment.push(']'); + } + segment +} + +fn sequence_children<'a>(config: &LanguageConfig, node: &Node<'a>) -> Option<Vec<Node<'a>>> { + let mut body = config.find_body(node)?; + while config.transparent_kinds.contains(&body.kind()) + && body.kind() != "array" + && body.kind() != "block_sequence" + { + let mut cursor = body.walk(); + let named: Vec<Node<'a>> = body + .children(&mut cursor) + .filter(|c| c.is_named()) + .collect(); + if named.len() == 1 { + body = named[0]; + } else { + break; + } + } + + if body.kind() != "array" && body.kind() != "block_sequence" { + return None; + } + + let mut cursor = body.walk(); + Some( + body.children(&mut cursor) + .filter(|c| c.is_named()) + .collect(), + ) +} + +fn is_descendant_of(target: &Node, ancestor: &Node) -> bool { + let mut current = Some(*target); + while let Some(node) = current { + if node.id() == ancestor.id() { + return true; + } + current = node.parent(); + } + false +} + /// Recursively find `target` in the tree and collect path segments.
fn collect_path( config: &LanguageConfig, @@ -1043,19 +1097,41 @@ fn collect_path( config.kind_to_shorthand(node.kind()), config.node_name(node, source), ) { - segments.push(format!("{short}.{}", parser::serialize_name(&name))); + segments.push(format_segment(short, &name, None)); return true; } return false; } + if is_item_node(config, node) + && let Some(children) = sequence_children(config, node) + { + for (idx, child) in children.into_iter().enumerate() { + if !is_descendant_of(target, &child) { + continue; + } + + let mut nested_segments = Vec::new(); + if collect_path(config, &child, target, source, &mut nested_segments) { + if let (Some(short), Some(name)) = ( + config.kind_to_shorthand(node.kind()), + config.node_name(node, source), + ) { + segments.push(format_segment(short, &name, Some(idx))); + } + segments.extend(nested_segments); + return true; + } + } + } + // Check children let mut cursor = node.walk(); for child in node.children(&mut cursor) { - let child_start = child.start_position().row; - let child_end = child.end_position().row; - let target_start = target.start_position().row; - let target_end = target.end_position().row; + let child_start = child.start_byte(); + let child_end = child.end_byte(); + let target_start = target.start_byte(); + let target_end = target.end_byte(); // Only descend into nodes that contain the target if child_start <= target_start @@ -1069,7 +1145,7 @@ fn collect_path( config.node_name(node, source), ) { - segments.insert(0, format!("{short}.{}", parser::serialize_name(&name))); + segments.insert(0, format_segment(short, &name, None)); } return true; } @@ -1801,20 +1877,7 @@ jobs: #[test] fn compute_injection_roundtrip() { - // Roundtrip test using a simpler fixture without array indexing, - // since YAML array indexes are not yet produced by build_path_to_node. 
- let yaml = r#"name: CI -on: push -jobs: - build: - runs-on: ubuntu-latest - run: | - setup() { - echo "hello" - } - setup -"#; - let lines: Vec<&str> = yaml.lines().collect(); + let lines: Vec<&str> = SAMPLE_GHA_FUNC.lines().collect(); let setup_line = lines .iter() .position(|l| l.contains("setup()")) @@ -1829,7 +1892,7 @@ jobs: .expect("should find closing brace"); let computed_path = compute_tree_path_injected( - yaml, + SAMPLE_GHA_FUNC, [setup_line, closing_line], Language::Yaml, Path::new(".github/workflows/ci.yml"), @@ -1839,12 +1902,16 @@ jobs: !computed_path.is_empty(), "computed path should not be empty" ); + assert!( + computed_path.contains("key.steps[0]::key.run"), + "computed path should preserve the sequence index, got: {computed_path}" + ); assert!( computed_path.contains("//bash"), "computed path should contain //bash, got: {computed_path}" ); - let resolved_span = resolve_tree_path(yaml, &computed_path, Language::Yaml); + let resolved_span = resolve_tree_path(SAMPLE_GHA_FUNC, &computed_path, Language::Yaml); assert!( resolved_span.is_some(), "resolve should succeed for computed path: {computed_path}" diff --git a/crates/liyi/src/tree_path/mod.rs.liyi.jsonc b/crates/liyi/src/tree_path/mod.rs.liyi.jsonc index 94aa2d7..b5e8807 100644 --- a/crates/liyi/src/tree_path/mod.rs.liyi.jsonc +++ b/crates/liyi/src/tree_path/mod.rs.liyi.jsonc @@ -114,8 +114,8 @@ "reviewed": true, "intent": "Find subsequent path segments inside an item's body. Delegates to config.find_body to locate the body node, then to resolve_segments for recursive matching. 
Accepts segments as &[PathPair] (kind, name, optional index) triples.", "source_span": [ - 714, - 722 + 717, + 725 ], "tree_path": "fn.resolve_in_body", "source_hash": "sha256:aa2789a394092d4abffef26e10ea466447c79cd3733487aac172732344b82524", @@ -126,8 +126,8 @@ "reviewed": true, "intent": "=doc", "source_span": [ - 729, - 750 + 732, + 753 ], "tree_path": "fn.compute_tree_path", "source_hash": "sha256:d26c58f5d1e8e905754b00690744e6d567e93ce7ebfc0e72ac679822f116e4f2", @@ -138,23 +138,59 @@ "reviewed": true, "intent": "Find the widest item-bearing tree-sitter node whose start and end rows both fall within [target_start, target_end]. Must handle the attribute-sibling pattern where Rust attributes (#[derive(...)]) are siblings of the item node — the sidecar span can start before the item node. Prefer the outermost (widest) item when multiple items fall within the range.", "source_span": [ - 968, - 1014 + 971, + 1017 ], "tree_path": "fn.find_item_in_range", "source_hash": "sha256:104839ea12f7f9fc30654e2c74b4e164ed7cbf4ac1adaa2ae3bf7d8b9a1c9798", "source_anchor": "fn find_item_in_range<'a>(" }, + { + "item": "format_segment", + "reviewed": true, + "intent": "Serialize a single kind.name tree_path segment and append `[N]` when an indexed data-file array element is being represented. Uses parser::serialize_name so complex names remain correctly quoted.", + "source_span": [ + 1035, + 1043 + ], + "tree_path": "fn.format_segment", + "source_hash": "sha256:ea3b655d90180b5b97265cf5f34c5c9b4a032ce6081967fe9b6d91b6a9e073dd", + "source_anchor": "fn format_segment(short: &str, name: &str, index: Option<usize>) -> String {" + }, + { + "item": "sequence_children", + "reviewed": true, + "intent": "Return the named children of a sequence-valued body for a data-file item.
Descends through transparent wrappers but must stop when it reaches the actual `array` or `block_sequence` container so single-element sequences do not lose their indexed identity during tree_path reconstruction.", + "source_span": [ + 1045, + 1073 + ], + "tree_path": "fn.sequence_children", + "source_hash": "sha256:40e811a6c866a086eeb6f9354a0690a31df7ce950f147ada06c6d4ad6e8fb2b6", + "source_anchor": "fn sequence_children<'a>(config: &LanguageConfig, node: &Node<'a>) -> Option<Vec<Node<'a>>> {" + }, + { + "item": "is_descendant_of", + "reviewed": true, + "intent": "Return true when the target node is nested anywhere under the given ancestor node by walking the parent chain upward and comparing node identity.", + "source_span": [ + 1075, + 1084 + ], + "tree_path": "fn.is_descendant_of", + "source_hash": "sha256:b45614ba69222cc723a6b7efccc5b01bb48cd35b1ba8ab011aa35fbfcc9e5e3a", + "source_anchor": "fn is_descendant_of(target: &Node, ancestor: &Node) -> bool {" + }, { "item": "collect_path", "reviewed": true, - "intent": "Recursively walk from root to target node, collecting kind.name path segments. Uses parser::serialize_name to quote names with special characters. At the target node, push its segment and return true. During descent, only enter children that spatially contain the target. When a child's subtree contains the target, prepend the current node's segment if it is an item node. Return false if the target cannot be found.", + "intent": "Recursively walk from root to target node and build the canonical tree_path. At the target node, emit its segment. For sequence-valued data-file items, detect which child element actually contains the target, recurse through that element, and emit the parent segment with `[N]` so indexed paths like `key.steps[0]` are preserved. Outside indexed sequences, descend only through children whose byte range contains the target and prepend item segments on the way back out.
Return false if the target cannot be found.", "source_span": [ - 1033, - 1079 + 1087, + 1155 ], "tree_path": "fn.collect_path", - "source_hash": "sha256:d6db330999717bbea6d21f24455ff2720da3393a219f0c4119e481535e67c729", + "source_hash": "sha256:db3a5e263b5e500487e639bc671a1f341ecbe5272ce5905f24162e3bdf196fe5", "source_anchor": "fn collect_path(", "related": { "exhaustive-inclusion": "sha256:5b729afeeb3162d6f07f6bd6f6a88e348cb57a06ddb0886e69df61438206bcf6", @@ -195,8 +231,8 @@ "reviewed": true, "intent": "Repo-path-aware variant of compute_tree_path. Detects active injection profiles for the given path; when the target span falls inside an injection zone, emits the //lang marker and computes the inner-language path. Falls back to standard compute_tree_path when no injection profile matches or the span is not inside an injection zone.", "source_span": [ - 763, - 798 + 766, + 801 ], "tree_path": "fn.compute_tree_path_injected", "source_hash": "sha256:a60ecfabe2d1845f23196261a7935a0807555643c09f7f24d75d2378d762c2f4", @@ -211,8 +247,8 @@ "reviewed": true, "intent": "Find the nearest block_mapping_pair whose value contains the target span and whose key name matches an active injection rule (including ancestor-key verification). Extract content, sub-parse with the inner language, compute the inner tree_path, and return the composite host_path//lang::inner_path string.", "source_span": [ - 806, - 871 + 809, + 874 ], "tree_path": "fn.find_injection_zone", "source_hash": "sha256:c896283836e5d46d34e5007513a093a3145adc804b41e8d71a7ac9fd6edb1b57", @@ -223,8 +259,8 @@ "reviewed": true, "intent": "Walk the YAML AST to find the innermost block_mapping_pair node whose value range contains [target_start, target_end]. 
Prefers the smallest (innermost) match.", "source_span": [ - 875, - 921 + 878, + 924 ], "tree_path": "fn.find_injection_candidate", "source_hash": "sha256:92702300ecffefd8bda8775a7316cac494bef3aa96dfd3cb2ede7ad6608950ca", @@ -235,8 +271,8 @@ "reviewed": true, "intent": "=trivial", "source_span": [ - 924, - 933 + 927, + 936 ], "tree_path": "fn.find_ancestor_pair", "source_hash": "sha256:9e0cd419679148068d82d1ac708400f44bc2e0e5c8a1227c01340b3ec634a286", @@ -247,8 +283,8 @@ "reviewed": true, "intent": "Map a Language enum variant to its canonical injection marker string. Inverse of language_from_name (using canonical names only, no aliases).", "source_span": [ - 936, - 960 + 939, + 963 ], "tree_path": "fn.language_to_name", "source_hash": "sha256:968d9ec759ea341d48fe8d1768bd373ff064aeed9ab4c4e77b61c287279018b8", diff --git a/docs/aigc-policy.en.md b/docs/aigc-policy.en.md index ea0b79c..dc15958 100644 --- a/docs/aigc-policy.en.md +++ b/docs/aigc-policy.en.md @@ -22,11 +22,11 @@ This project enforces strict separation and labeling of human-produced and AI-pr All AI agents contributing to this project must comply with these requirements; the humans directing these agents must understand and help the AI agents comply. - Ideally, the content of each Git commit in this project should be either entirely written by a human or entirely written by an AI. - - If you intend to modify part of an AIGC output and submit it as a single commit, please note: this will cause the entire commit content to be considered your own creation. + - If you intend to modify part of an AIGC output and submit it as a single commit, please note: this will cause the entire commit content to be considered your own creation. - All commit content must be reviewed by a human submitter. - - The human must append a `Signed-off-by` tag i.e. DCO to the trailer section of the commit message. 
- - Under current legal understanding and practice, the human who signs off on a commit may be considered responsible for the entire content of that commit. - - As a natural person, if you cannot fully understand and accept certain content (i.e., you would not have produced it yourself), then you are advised not to submit it. + - The human must append a `Signed-off-by` tag i.e. DCO to the trailer section of the commit message. + - Under current legal understanding and practice, the human who signs off on a commit may be considered responsible for the entire content of that commit. + - As a natural person, if you cannot fully understand and accept certain content (i.e., you would not have produced it yourself), then you are advised not to submit it. - If a human deems it necessary to modify AI-generated content, given the legal implications described above, the AI should first complete its commit, and then the human can make their modifications in a separate commit. - If a human considers the vast majority of an AIGC output to need reworking, the human may either let the AI complete its commit first or discard the current round of AIGC entirely. @@ -67,7 +67,7 @@ AI agents should record the original prompt that triggered the work at the end o - If multiple rounds of user interaction are involved, output each round's prompt as a separate paragraph. - The paragraph should be written in the same natural language as the rest of the commit message. - - If the user's prompt was in a different language, do not translate it. + - If the user's prompt was in a different language, do not translate it. - If the original prompt contains the user's sensitive information, alert the user and/or redact it: replace sensitive content with `[redacted]` or `[略]`. 
For English commit messages, use the following format: diff --git a/docs/aigc-policy.zh.md b/docs/aigc-policy.zh.md index 38cac3c..df459a2 100644 --- a/docs/aigc-policy.zh.md +++ b/docs/aigc-policy.zh.md @@ -22,11 +22,11 @@ 所有 AI 智能体在给本项目贡献时,必须遵守这些要求;指挥这些智能体的人类必须理解并帮助 AI 智能体遵守这些要求。 - 本项目的每个 Git 提交的内容,理想情况下,要么完全由人类撰写,要么完全由 AI 撰写。 - - 如您意欲修改一部分 AIGC 并将其作为一个整体提交,请注意:这将使整个提交内容被视作您的创作。 + - 如您意欲修改一部分 AIGC 并将其作为一个整体提交,请注意:这将使整个提交内容被视作您的创作。 - 任何提交的全部内容必须由人类提交者复核。 - - 该人类须在提交说明的最后(trailer)部分附加 `Signed-off-by` 标签,即 DCO。 - - 按照现行的法律理解与实践,为某提交标注了 DCO 信息的人类,可能会被认为对该提交的全部内容负责。 - - 您作为自然人,如果对一些内容,您自身无法完全理解并接受(如果交给您自己做,您也会这么做),那么就建议您不要提交这些内容。 + - 该人类须在提交说明的最后(trailer)部分附加 `Signed-off-by` 标签,即 DCO。 + - 按照现行的法律理解与实践,为某提交标注了 DCO 信息的人类,可能会被认为对该提交的全部内容负责。 + - 您作为自然人,如果对一些内容,您自身无法完全理解并接受(如果交给您自己做,您也会这么做),那么就建议您不要提交这些内容。 - 如人类认为有必要修改 AI 输出的内容,鉴于上文所述的法律后果,请先让 AI 完成提交,再自己上场修改。 - 如人类认为 AIGC 的绝大部分都需要重做,人类可以先让 AI 完成提交,也可以直接撤回此轮的 AIGC。 @@ -67,7 +67,7 @@ AI 智能体应当在提交说明正文的末尾(trailer 之前)记录触发 - 如涉及多轮用户交互,请将每一轮的提示词都输出为各自的段落。 - 该段落应按照提交说明的其余部分使用的自然语言书写。 - - 如用户提示词使用了不同语言,不要翻译。 + - 如用户提示词使用了不同语言,不要翻译。 - 如原始提示词含有用户的敏感信息,请提醒用户 和/或 将其脱敏并注明:将敏感内容替换为 `[redacted]` 或 `[略]`。 英语提交说明的格式如下: diff --git a/docs/prompt-mode-design.md b/docs/prompt-mode-design.md index a884315..76ab2c4 100644 --- a/docs/prompt-mode-design.md +++ b/docs/prompt-mode-design.md @@ -2,8 +2,8 @@ # `--prompt` Mode Design -**Status:** Implemented -**Target:** v0.1.x +**Status:** Implemented +**Target:** v0.1.x **Design authority:** `docs/liyi-design.md` v8.10 **Scope note (v8.10):** This document covers the initial `--prompt` scope: coverage gaps (Untracked, MissingRelatedEdge, ReqNoRelated). The cognitive load inversion principle (design doc v8.10, *The cognitive load inversion: tool-guided agents*) calls for extending `--prompt` to all diagnostics — stale items, shifted spans, unreviewed specs — each with per-item resolution instructions. The generalized `--prompt` design is deferred to a future revision of this document. 
@@ -257,10 +257,10 @@ Other diagnostics (Stale, Shifted, etc.) are not coverage gaps and don't appear ## Testing Strategy 1. **Golden-file fixtures:** - - `prompt_output/mixed_gaps/` — fixture with all three gap types present. - - `prompt_output/clean/` — fixture with no gaps (empty `items` array). - - `prompt_output/errors_only/` — fixture with `ParseError` or `OrphanedSource` but no coverage gaps (verifies `exit_code: 2` with empty `items`). - - `prompt_output/multi_file/` — gaps spread across multiple files. + - `prompt_output/mixed_gaps/` — fixture with all three gap types present. + - `prompt_output/clean/` — fixture with no gaps (empty `items` array). + - `prompt_output/errors_only/` — fixture with `ParseError` or `OrphanedSource` but no coverage gaps (verifies `exit_code: 2` with empty `items`). + - `prompt_output/multi_file/` — gaps spread across multiple files. 2. **Unit tests:** Verify instruction generation for each of the three gap types. 3. **Integration test:** Parse `--prompt` output and validate against `schema/prompt.schema.json`. 4. **Instruction accuracy test:** For each instruction template, apply the described mutation and verify that a follow-up `liyi check` no longer reports the gap.