diff --git a/.claude/commands/release.md b/.claude/commands/release.md index a8fa051..7e3ebc7 100644 --- a/.claude/commands/release.md +++ b/.claude/commands/release.md @@ -32,19 +32,23 @@ ephemeral pre-releases and the stable version is chosen at release time. 3. Bump the version in `Cargo.toml` to the dev version (e.g., `0.3.0-dev.1`) 4. Run `cargo check` to update `Cargo.lock` 5. Run `cargo fmt --all` -6. Run `cargo test` to verify everything passes -7. Commit the version bump: +6. Run `cargo clippy -- -D warnings` to verify no lint issues +7. Run `cargo test` to verify everything passes +8. Commit the version bump: ``` git commit -m "chore: bump version to X.Y.Z-dev.N" ``` -8. Tag: `git tag vX.Y.Z-dev.N` -9. Push commit and tag: +9. Tag (annotated — required to trigger the Release workflow and show as verified): + ``` + git tag -a vX.Y.Z-dev.N -m "chore: release vX.Y.Z-dev.N" + ``` +10. Push commit and tag: ``` git push origin develop git push origin vX.Y.Z-dev.N ``` -10. Print: "Dev release vX.Y.Z-dev.N tagged and pushed. GitHub Actions will build and publish pre-release binaries." -11. Provide the releases URL +11. Print: "Dev release vX.Y.Z-dev.N tagged and pushed. GitHub Actions will build and publish pre-release binaries." +12. Provide the releases URL --- @@ -89,21 +93,41 @@ If already on `develop` or `main`, skip to Stage 2. ## Stage 3: Tag & Release 1. Checkout `main` and pull latest -2. Verify the version in `Cargo.toml` is the stable version (no pre-release suffix) +2. Verify the version in `Cargo.toml` matches the intended release (no pre-release suffix): + ``` + grep '^version' Cargo.toml + ``` 3. Run `cargo check` to update `Cargo.lock` 4. Run `cargo fmt --all` to ensure formatting is correct -5. Run `cargo test` to verify everything passes -6. If any changes from steps 3-5, commit them on a branch, PR into `main` -7. After merge confirmation, tag `vX.Y.Z` on main -8. Push the tag to trigger the release workflow -9. 
Print: "Release vX.Y.Z tagged and pushed. GitHub Actions will build and publish binaries." -10. Provide the releases URL -11. Clean up dev tags for this release cycle: +5. Run `cargo clippy -- -D warnings` to verify no lint issues +6. Run `cargo test` to verify everything passes +7. If any changes from steps 3-6, commit them on a branch, PR into `main` +8. After merge confirmation, create an annotated tag on main: + ``` + git tag -a vX.Y.Z -m "chore: release vX.Y.Z" + ``` +9. Verify the tag matches `Cargo.toml`: + ``` + TAG_VERSION=$(git describe --tags --exact-match | sed 's/^v//') + CARGO_VERSION=$(grep '^version' Cargo.toml | head -1 | sed 's/.*"\(.*\)".*/\1/') + ``` + If they don't match, stop and report the mismatch. +10. Push the tag to trigger the release workflow +11. Print: "Release vX.Y.Z tagged and pushed. GitHub Actions will build and publish binaries." +12. Provide the releases URL +13. Clean up dev tags for this release cycle: ``` git tag -l "vX.Y.Z-dev.*" | xargs -I {} git push origin :refs/tags/{} git tag -l "vX.Y.Z-dev.*" | xargs git tag -d ``` -12. Checkout `develop`, merge `main` back into `develop`, bump to next dev version, push +14. Merge `main` back into `develop` and bump to next dev version: + - Checkout `develop` and pull latest + - Merge `main` into `develop`: `git merge main` + - If conflicts, resolve and commit + - Bump `Cargo.toml` to next optimistic dev version (e.g., `0.4.0` → `0.5.0-dev.1`) + - Run `cargo check` to update `Cargo.lock` + - Commit: `git commit -m "chore: bump version to X.Y.Z-dev.1"` + - Push: `git push origin develop` ## Rules @@ -115,3 +139,4 @@ If already on `develop` or `main`, skip to Stage 2. 
- Ask the user before every destructive or visible action - If any step fails, stop and report the error — don't continue - Dev tags go on `develop`, stable tags go on `main` +- Always verify tag/version match before pushing tags diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9c702c0..6668e60 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -62,7 +62,8 @@ jobs: - uses: taiki-e/install-action@cargo-llvm-cov - uses: Swatinem/rust-cache@v2 - run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info - - uses: codecov/codecov-action@v5 + - uses: codecov/codecov-action@v6 with: files: lcov.info + token: ${{ secrets.CODECOV_TOKEN }} fail_ci_if_error: false diff --git a/.gitignore b/.gitignore index 7b0cb02..e78be4b 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,5 @@ Thumbs.db # Worktrees .trees/ +.worktrees/ +.claude/worktrees/ diff --git a/CLAUDE.md b/CLAUDE.md index 3831b8e..c44acf5 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -14,39 +14,55 @@ src/ │ ├── issue/ # issue commands (split by operation theme) │ │ ├── mod.rs # dispatch + re-exports │ │ ├── format.rs # row formatting, headers, points display -│ │ ├── list.rs # list + view + comments (read operations) +│ │ ├── list.rs # list + view + comments (read operations, unified JQL composition) │ │ ├── create.rs # create + edit (field-building) │ │ ├── workflow.rs # move + transitions + assign + comment + open │ │ ├── links.rs # link + unlink + link-types -│ │ └── helpers.rs # team/points resolution, prompts +│ │ ├── helpers.rs # team/points resolution, user resolution, prompts +│ │ └── assets.rs # linked assets (issue→asset lookup) +│ ├── assets.rs # assets search/view/tickets/schemas/types/schema (search enrichment, schema discovery) │ ├── board.rs # board list/view -│ ├── sprint.rs # sprint list/current (scrum-only, errors on kanban) +│ ├── sprint.rs # sprint list/current/add/remove (scrum-only, errors on kanban) │ ├── worklog.rs # worklog add/list 
│ ├── team.rs # team list (with cache + lazy org discovery) │ ├── auth.rs # auth login (API token default, --oauth for OAuth 2.0), auth status │ ├── init.rs # Interactive setup (prefetches org metadata + team cache + story points field) -│ └── project.rs # project fields (issue types, priorities for a project) +│ ├── project.rs # project fields (types, priorities, statuses, CMDB fields) +│ └── queue.rs # queue list/view (JSM service desks) ├── api/ │ ├── client.rs # JiraClient — HTTP methods, auth headers, rate limit retry, 429/401 handling │ ├── auth.rs # OAuth 2.0 flow, API token storage, keychain read/write, token refresh │ ├── pagination.rs # Offset-based (most endpoints) + cursor-based (JQL search) │ ├── rate_limit.rs # Retry-After parsing +│ ├── assets/ # Assets/CMDB API call implementations +│ │ ├── workspace.rs # workspace ID discovery + cache +│ │ ├── linked.rs # CMDB field discovery, asset extraction/enrichment (per-field + JSON) +│ │ ├── objects.rs # AQL search, get object, resolve key +│ │ └── tickets.rs # connected tickets │ └── jira/ # Jira-specific API call implementations (one file per resource) │ ├── issues.rs # search, get, create, edit, list comments │ ├── boards.rs # list boards, get board config │ ├── sprints.rs # list sprints, get sprint issues -│ ├── fields.rs # list fields, story points field discovery +│ ├── fields.rs # list fields, story points + CMDB field discovery +│ ├── statuses.rs # get all statuses (global, not project-scoped) │ ├── links.rs # create/delete issue links, list link types │ ├── teams.rs # org metadata (GraphQL), list teams │ ├── worklogs.rs # add/list worklogs │ ├── projects.rs # project details -│ └── users.rs # current user, assignable users +│ └── users.rs # current user, user search, assignable users +│ ├── jsm/ # JSM-specific API call implementations +│ │ ├── servicedesks.rs # list service desks, project meta orchestration +│ │ └── queues.rs # list queues, get queue issues +├── types/assets/ # Serde structs for 
Assets API responses (AssetObject, ConnectedTicket, LinkedAsset, etc.) ├── types/jira/ # Serde structs for API responses (Issue, Board, Sprint, User, Team, etc.) -├── cache.rs # XDG cache (~/.cache/jr/) — team list with 7-day TTL +├── types/jsm/ # Serde structs for JSM API responses (ServiceDesk, Queue, etc.) +├── cache.rs # XDG cache (~/.cache/jr/) — team list, project meta, workspace ID (all 7-day TTL) ├── config.rs # Global (~/.config/jr/config.toml) + per-project (.jr.toml), figment layering ├── output.rs # Table (comfy-table) and JSON formatting ├── adf.rs # Atlassian Document Format: text→ADF, markdown→ADF, ADF→text ├── duration.rs # Worklog duration parser (2h, 1h30m, 1d, 1w) +├── lib.rs # Crate root (re-exports for integration tests) +├── jql.rs # JQL utilities: escaping, validation, asset clause builder ├── partial_match.rs # Case-insensitive substring matching with disambiguation └── error.rs # JrError enum with exit codes (0/1/2/64/78/130) ``` @@ -93,8 +109,7 @@ See `docs/adr/` for detailed rationale: - **v1 design spec:** `docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md` - **v1 implementation plan:** `docs/superpowers/plans/2026-03-21-jr-implementation.md` -- **Feature specs (post-v1):** `docs/specs/{feature-name}.md` -- **Team assignment spec:** `docs/specs/team-assignment.md` +- **Feature specs (post-v1):** `docs/specs/` — one spec per feature, read before implementing When adding a new feature: 1. Read this file @@ -103,6 +118,13 @@ When adding a new feature: 4. Create a feature spec in `docs/specs/` before implementing 5. Follow TDD — write tests first +## Gotchas + +- **Cache format changes:** `~/.cache/jr/cmdb_fields.json` stores `(id, name)` tuples. Old format (ID-only) causes deserialization failure, handled as cache miss. If you change cache structs, old caches auto-expire (7-day TTL) or fail gracefully. +- **`list.rs` is large (~970 lines):** Contains both `handle_list` and `handle_view` plus all JQL composition logic. 
If modifying, read the full function you're changing — context matters. +- **`aqlFunction()` not `assetsQuery()`:** The Jira Assets JQL function is `aqlFunction()`. It requires the human-readable field **name**, not `cf[ID]` or `customfield_NNNNN`. AQL attribute for object key is `Key` (not `objectKey` — that's the JSON field name). +- **Status category colors are fixed:** `green` = Done, `yellow` = In Progress, `blue-gray` = To Do. These mappings are hardcoded in Jira Cloud across all instances. Used by `--open` filtering. + ## AI Agent Notes - `JR_BASE_URL` env var overrides the configured Jira instance URL (used by tests to inject wiremock) diff --git a/Cargo.lock b/Cargo.lock index dc3fb2c..c75885f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -338,18 +338,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "console" -version = "0.15.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "windows-sys 0.59.0", -] - [[package]] name = "console" version = "0.16.3" @@ -435,7 +423,7 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25f104b501bf2364e78d0d3974cbc774f738f5865306ed128e1e0d7499c0ad96" dependencies = [ - "console 0.16.3", + "console", "shell-words", "tempfile", "zeroize", @@ -1011,11 +999,11 @@ checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" [[package]] name = "insta" -version = "1.46.3" +version = "1.47.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e82db8c87c7f1ccecb34ce0c24399b8a73081427f3c7c50a5d597925356115e4" +checksum = "99322078b2c076829a1db959d49da554fabc4342257fc0ba5a070a1eb3a01cd8" dependencies = [ - "console 0.15.11", + "console", "once_cell", "serde", "similar", @@ -1125,7 +1113,7 @@ dependencies = [ [[package]] name = "jr" -version = "0.3.0" +version = "0.4.0" 
dependencies = [ "anyhow", "assert_cmd", @@ -1138,6 +1126,7 @@ dependencies = [ "dialoguer", "dirs", "figment", + "futures", "insta", "keyring", "open", @@ -1482,9 +1471,9 @@ dependencies = [ [[package]] name = "proptest" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" +checksum = "4b45fcc2344c680f5025fe57779faef368840d0bd1f42f216291f0dc4ace4744" dependencies = [ "bit-set", "bit-vec", @@ -2706,15 +2695,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.60.2" diff --git a/Cargo.toml b/Cargo.toml index a2b4baa..fb2f4d4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "jr" -version = "0.3.0" +version = "0.4.0" edition = "2024" description = "A fast CLI for Jira Cloud" repository = "https://github.com/Zious11/jira-cli" @@ -30,6 +30,7 @@ tokio = { version = "1", features = ["full"] } toml = "1" base64 = "0.22" chrono = { version = "0.4", features = ["serde"] } +futures = { version = "0.3", default-features = false, features = ["async-await"] } urlencoding = "2" [dev-dependencies] diff --git a/README.md b/README.md index c22f2f7..839d240 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,25 @@ # jr +[![CI](https://github.com/Zious11/jira-cli/actions/workflows/ci.yml/badge.svg?branch=develop)](https://github.com/Zious11/jira-cli/actions/workflows/ci.yml) +[![Release](https://img.shields.io/github/v/release/Zious11/jira-cli?label=release)](https://github.com/Zious11/jira-cli/releases/latest) 
+[![Pre-release](https://img.shields.io/github/v/release/Zious11/jira-cli?include_prereleases&label=dev)](https://github.com/Zious11/jira-cli/releases) +[![License: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE) +[![MSRV](https://img.shields.io/badge/MSRV-1.85-orange.svg)](https://blog.rust-lang.org/) +[![codecov](https://codecov.io/gh/Zious11/jira-cli/branch/develop/graph/badge.svg)](https://codecov.io/gh/Zious11/jira-cli) + A fast, agent-friendly CLI for Jira Cloud, written in Rust. Built for both humans and AI agents — commands support structured JSON output, actionable error messages with suggested next steps, and `--no-input` mode for fully non-interactive automation. +## Why jr? + +- **Fast** — native Rust binary, no JVM or Node runtime +- **Agent-friendly** — structured JSON output, non-interactive mode, idempotent operations, actionable error messages with exit codes +- **Smart defaults** — auto-discovers scrum boards, story points fields, and CMDB asset fields during `jr init` +- **Composable filters** — chain `--assignee`, `--status`, `--team`, `--asset`, `--open`, `--recent` on `issue list` +- **Assets/CMDB support** — search assets, view linked tickets, filter by asset on issues, enriched JSON output +- **Partial matching** — type `jr issue move KEY "prog"` and it matches "In Progress" +- **JSM queues** — list and view JSM service desk queues +- **Shell completions** — bash, zsh, fish + ## Install ### One-liner (macOS, Linux) @@ -48,7 +66,31 @@ jr auth login jr auth login --oauth # View your current sprint/board issues -jr issue list +jr issue list --project FOO + +# Sprint list (auto-discovers scrum board for project) +jr sprint list --project FOO + +# My assigned tickets +jr issue list --assignee me + +# Tickets I reported in the last 7 days +jr issue list --reporter me --recent 7d + +# Open issues assigned to me (excludes Done) +jr issue list --assignee me --open + +# Issues in a specific status +jr issue list --project FOO 
--status "In Progress" + +# Issues linked to a specific asset +jr issue list --project FOO --asset CUST-5 --open + +# Open tickets for an asset (quick lookup) +jr assets tickets CUST-5 --open + +# Discover available projects +jr project list # View a specific issue jr issue view KEY-123 @@ -74,8 +116,8 @@ jr issue comment KEY-123 "Deployed to staging" | `jr auth login` | Authenticate with API token (default) or `--oauth` for OAuth 2.0 | | `jr auth status` | Show authentication status | | `jr me` | Show current user info | -| `jr issue list` | List issues (smart defaults for scrum/kanban, `--team`, `--points`) | -| `jr issue view KEY` | View issue details (includes story points) | +| `jr issue list` | List issues (`--assignee`, `--reporter`, `--recent`, `--status`, `--open`, `--team`, `--asset KEY`, `--jql`, `--limit`/`--all`, `--points`, `--assets`) | +| `jr issue view KEY` | View issue details (per-field asset rows, enriched JSON, story points) | | `jr issue create` | Create an issue (`--team`, `--points`) | | `jr issue edit KEY` | Edit issue fields (`--team`, `--points`, `--no-points`) | | `jr issue move KEY [STATUS]` | Transition issue (partial match on status name) | @@ -87,14 +129,26 @@ jr issue comment KEY-123 "Deployed to staging" | `jr issue link KEY1 KEY2` | Link two issues (`--type blocks`, defaults to Relates) | | `jr issue unlink KEY1 KEY2` | Remove link(s) between issues (`--type` to filter) | | `jr issue link-types` | List available link types | -| `jr board list` | List boards | -| `jr board view` | Show current board issues | -| `jr sprint list` | List sprints (scrum only) | -| `jr sprint current` | Show current sprint issues (with points summary) | +| `jr issue assets KEY` | Show assets linked to an issue | +| `jr board list` | List boards (`--project`, `--type scrum\|kanban`) | +| `jr board view --board 42` | Show current board issues (`--board` or config, `--limit`/`--all`) | +| `jr sprint list --board 42` | List sprints (`--board` or config or 
auto-discover, scrum only) | +| `jr sprint current --board 42` | Show current sprint issues (with points summary) | +| `jr sprint add --sprint 100 KEY...` | Add issues to a sprint (`--current` for active sprint) | +| `jr sprint remove KEY...` | Move issues to backlog (removes from all sprints) | | `jr worklog add KEY 2h` | Log time (`1h30m`, `1d`, `1w`) | | `jr worklog list KEY` | List worklogs | +| `jr queue list` | List JSM queues for the project's service desk | +| `jr queue view <name>` | View issues in a queue (partial name match) | +| `jr assets search <aql>` | Search assets via AQL query (`--attributes` resolves names) | +| `jr assets view <key>` | View asset details (key or numeric ID) | +| `jr assets tickets <key>` | Show Jira issues connected to an asset (`--open`, `--status`, `--limit`) | +| `jr assets schemas` | List object schemas in the workspace | +| `jr assets types [--schema]` | List object types (all or filtered by schema) | +| `jr assets schema <type>` | Show attributes for an object type (partial match) | | `jr team list` | List available teams (`--refresh` to force update) | -| `jr project fields FOO` | Show valid issue types and priorities | +| `jr project list` | List accessible projects (`--type`, `--limit`/`--all`) | +| `jr project fields --project FOO` | Show valid issue types, priorities, statuses, and asset custom fields | | `jr completion bash\|zsh\|fish` | Generate shell completions | ## Global Flags @@ -148,7 +202,7 @@ board_id = 42 - `--stdin` flag on comment/create reads content from pipes - `--url-only` prints URLs instead of opening a browser - State-changing commands are idempotent (exit 0 if already in target state) -- Exit codes: 0=success, 1=error, 2=auth, 64=usage, 78=config, 130=interrupted +- Structured exit codes (see [Exit Codes](#exit-codes) table) ```bash # AI agent workflow example @@ -158,6 +212,30 @@ echo "Fixed the bug" | jr issue comment KEY-123 --stdin # Add comment jr issue move KEY-123 "Done" # Complete ``` +## Shell Completions + 
+```bash +# Bash (add to ~/.bashrc) +eval "$(jr completion bash)" + +# Zsh (add to ~/.zshrc) +eval "$(jr completion zsh)" + +# Fish (add to ~/.config/fish/config.fish) +jr completion fish | source +``` + +## Exit Codes + +| Code | Meaning | +|------|---------| +| 0 | Success | +| 1 | General error | +| 2 | Authentication error | +| 64 | Usage error (bad arguments) | +| 78 | Configuration error | +| 130 | Interrupted (Ctrl+C) | + ## License MIT diff --git a/docs/specs/assets-schema-discovery.md b/docs/specs/assets-schema-discovery.md new file mode 100644 index 0000000..f829212 --- /dev/null +++ b/docs/specs/assets-schema-discovery.md @@ -0,0 +1,270 @@ +# Assets Schema Discovery + +**Issue:** [#87](https://github.com/Zious11/jira-cli/issues/87) + +## Problem + +There is no way to discover what asset object types exist (Customer, Location, Hardware, Service) or what attributes each type has. Users and AI agents must guess AQL queries and inspect results to learn the schema. This is the assets equivalent of `project fields` — without a discovery command, the schema is invisible. + +## Root Cause + +The Jira Assets data model is hierarchical: **schemas → object types → attributes**. The existing `jr assets` commands (`search`, `view`, `tickets`) operate on objects but provide no way to explore the schema that defines them. + +## Solution + +Add three new subcommands to `jr assets`: + +| Command | Purpose | API Endpoint | +|---------|---------|-------------| +| `jr assets schemas` | List all object schemas | `GET /objectschema/list?includeCounts=true` | +| `jr assets types [--schema ID\|NAME]` | List object types (all or filtered by schema) | `GET /objectschema/{id}/objecttypes/flat` per schema | +| `jr assets schema [--schema ID\|NAME]` | Show attributes for an object type | `GET /objecttype/{id}/attributes` (existing method) | + +## API Details + +### `GET /objectschema/list` + +Paginated with `startAt`/`maxResults`/`isLast` — same envelope as `AssetsPage`. 
Pass `includeCounts=true` to get `objectCount` and `objectTypeCount`. + +Response fields per schema entry: +- `id`, `name`, `objectSchemaKey` ("ITSM", "HR"), `description` (optional), `status` +- `objectCount`, `objectTypeCount` (when `includeCounts=true`) + +### `GET /objectschema/{id}/objecttypes/flat` + +Returns a **bare JSON array** (no pagination envelope). Pass `includeObjectCounts=true` to populate `objectCount`. Each entry contains: +- `id`, `name`, `description` (optional), `position`, `objectCount`, `objectSchemaId` +- `inherited`, `abstractObjectType`, `parentObjectTypeInherited` + +### `GET /objecttype/{id}/attributes` + +Already implemented as `JiraClient::get_object_type_attributes()`. Returns a bare JSON array. Each entry contains: +- `id`, `name`, `position`, `system`, `hidden`, `label`, `editable` +- `minimumCardinality`, `maximumCardinality` — `minimumCardinality >= 1` means required +- `defaultType` (optional): `{ id: 0, name: "Text" }` — present for non-reference attributes +- `referenceType` (optional): `{ id, name }` — present for reference attributes (e.g., "Depends on") +- `referenceObjectType` (optional): `{ id, name, ... }` — target object type for references (e.g., "Service") +- `options` — comma-separated options for Select type attributes + +Known `defaultType` values: `0 = Text`, `6 = DateTime`, `10 = Select`. Reference attributes have `referenceType`/`referenceObjectType` instead of `defaultType`. + +## New Types + +### `src/types/assets/schema.rs` + +```rust +/// Object schema from GET /objectschema/list. +#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectSchema { + pub id: String, + pub name: String, + #[serde(rename = "objectSchemaKey")] + pub object_schema_key: String, + pub description: Option<String>, + #[serde(rename = "objectCount", default)] + pub object_count: i64, + #[serde(rename = "objectTypeCount", default)] + pub object_type_count: i64, +} + +/// Object type entry from GET /objectschema/{id}/objecttypes/flat. 
+#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectTypeEntry { + pub id: String, + pub name: String, + pub description: Option<String>, + #[serde(default)] + pub position: i32, + #[serde(rename = "objectCount", default)] + pub object_count: i64, + #[serde(rename = "objectSchemaId")] + pub object_schema_id: String, + #[serde(default)] + pub inherited: bool, + #[serde(rename = "abstractObjectType", default)] + pub abstract_object_type: bool, +} +``` + +### Extend `ObjectTypeAttributeDef` (in `src/types/assets/object.rs`) + +Add optional fields with `#[serde(default)]` so the existing enrichment code (issue #86) is unaffected: + +```rust +pub struct ObjectTypeAttributeDef { + // existing: id, name, system, hidden, label, position + #[serde(rename = "defaultType")] + pub default_type: Option<DefaultType>, + #[serde(rename = "referenceType")] + pub reference_type: Option<ReferenceType>, + #[serde(rename = "referenceObjectType")] + pub reference_object_type: Option<ReferenceObjectType>, + #[serde(rename = "minimumCardinality", default)] + pub minimum_cardinality: i32, + #[serde(rename = "maximumCardinality", default)] + pub maximum_cardinality: i32, + #[serde(default)] + pub editable: bool, + pub description: Option<String>, + pub options: Option<String>, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct DefaultType { + pub id: i32, + pub name: String, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ReferenceType { + pub id: String, + pub name: String, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ReferenceObjectType { + pub id: String, + pub name: String, +} +``` + +## New API Methods + +### `src/api/assets/schemas.rs` + +```rust +impl JiraClient { + /// List all object schemas in the workspace. + pub async fn list_object_schemas( + &self, + workspace_id: &str, + ) -> Result<Vec<ObjectSchema>> + + /// List all object types for a given schema (flat). 
+ pub async fn list_object_types( + &self, + workspace_id: &str, + schema_id: &str, + ) -> Result<Vec<ObjectTypeEntry>> +} +``` + +`list_object_schemas` auto-paginates using the existing `AssetsPage` pattern with `startAt`/`maxResults`/`isLast`. Passes `includeCounts=true`. + +`list_object_types` returns the bare array directly (no pagination). + +## CLI Commands + +### `jr assets schemas` + +No arguments or flags. + +**Table output:** + +| ID | Key | Name | Description | Types | Objects | +|----|-----|------|-------------|-------|---------| +| 6 | ITSM | ITSM | — | 34 | 95 | +| 1 | HR | Human Resources | — | 14 | 1023 | + +Columns: ID, Key, Name, Description (truncated or "—"), Types (`objectTypeCount`), Objects (`objectCount`). + +**JSON output:** Pass through the API array directly. + +### `jr assets types [--schema ID|NAME]` + +Optional `--schema` flag filters to a single schema. Supports partial match on schema name or exact match on schema ID. + +Without `--schema`: iterate all schemas from `list_object_schemas`, call `list_object_types` for each, inject schema name into results. + +**Table output:** + +| ID | Name | Schema | Description | Objects | +|----|------|--------|-------------|---------| +| 19 | Employee | Human Resources | — | 0 | +| 23 | Office | ITSM | Lorem ipsum... | 0 | + +**JSON output:** Flat array of object type entries. Each entry has an injected `schemaName` field for cross-schema context. + +### `jr assets schema <type> [--schema ID|NAME]` + +`<type>` is resolved via partial match on object type name. Resolution searches across all schemas (or scoped if `--schema` is provided). 
+ +**Table output:** + +``` +Object Type: Office (Schema: ITSM) + +┌─────┬──────────────────────┬───────────────────┬──────────┬──────────┐ +│ Pos ┆ Name ┆ Type ┆ Required ┆ Editable │ +╞═════╪══════════════════════╪═══════════════════╪══════════╪══════════╡ +│ 1 ┆ Name ┆ Text ┆ Yes ┆ Yes │ +│ 4 ┆ Location ┆ Text ┆ No ┆ Yes │ +│ 5 ┆ Tier ┆ Select ┆ Yes ┆ Yes │ +│ 6 ┆ Service relationships┆ Reference → Service┆ No ┆ Yes │ +└─────┴──────────────────────┴───────────────────┴──────────┴──────────┘ +``` + +Type column logic: +1. Has `defaultType` → show `defaultType.name` ("Text", "DateTime", "Select") +2. Has `referenceObjectType` → show `"Reference → {referenceObjectType.name}"` +3. Neither → show "Unknown" + +Required: `minimumCardinality >= 1` → "Yes", otherwise "No". + +System and hidden attributes are **filtered out** in table mode. All attributes are included in JSON output. + +**JSON output:** Full attribute definitions array from the API, unfiltered. + +## Type Resolution + +Type name resolution for `jr assets schema <type>`: + +1. Fetch all schemas via `list_object_schemas` +2. If `--schema` provided, resolve to a single schema (partial match on name, exact on ID) +3. Fetch object types for target schema(s) via `list_object_types` +4. Partial match `<type>` against all collected type names using `partial_match.rs` +5. If ambiguous across schemas, include schema name in disambiguation: "Matches: Employee (HR), Employee (ITSM)" +6. If ambiguous within same schema, standard disambiguation + +## Error Handling + +| Scenario | Message | +|----------|---------| +| No schemas found | "No asset schemas found in this workspace." | +| `--schema` no match | "No schema matching \"{input}\". Available: ITSM, HR, Services" | +| `--schema` ambiguous | "Ambiguous schema \"{input}\". Matches: {list}" | +| `<type>` no match | "No object type matching \"{input}\". Run \"jr assets types\" to see available types." | +| `<type>` ambiguous | "Ambiguous type \"{input}\". Matches: {list}. 
Use --schema to narrow results." | +| Assets unavailable | Already handled by `workspace.rs` (404/403 → user-friendly error) | + +## Files Changed + +| File | Change | +|------|--------| +| `src/types/assets/schema.rs` | **New** — `ObjectSchema`, `ObjectTypeEntry` | +| `src/types/assets/object.rs` | Extend `ObjectTypeAttributeDef` with `default_type`, `reference_type`, `reference_object_type`, `minimum_cardinality`, `maximum_cardinality`, `editable`, `description`, `options` | +| `src/types/assets/mod.rs` | Add `pub mod schema; pub use schema::*;` | +| `src/api/assets/schemas.rs` | **New** — `list_object_schemas`, `list_object_types` | +| `src/api/assets/mod.rs` | Add `pub mod schemas;` | +| `src/cli/mod.rs` | Add `Schemas`, `Types`, `Schema` variants to `AssetsCommand` | +| `src/cli/assets.rs` | Add `handle_schemas`, `handle_types`, `handle_schema` handlers | +| `CLAUDE.md` | Update `assets.rs` description | +| `README.md` | Add new commands to table | +| `tests/assets.rs` | Integration tests for new API methods and CLI commands | +| `tests/cli_smoke.rs` | Smoke tests for new subcommands | + +## Testing + +- **Unit tests:** Serde deserialization for `ObjectSchema`, `ObjectTypeEntry`, extended `ObjectTypeAttributeDef` (with and without reference fields) +- **Unit tests:** Type display logic (defaultType, referenceObjectType, neither) +- **Integration tests (wiremock):** `list_object_schemas` with pagination, `list_object_types` flat response, `get_object_type_attributes` with extended fields +- **CLI integration tests:** End-to-end `jr assets schemas`, `jr assets types`, `jr assets schema <type>` with both table and JSON output +- **CLI smoke tests:** Verify subcommands parse correctly (`--help` exit 0) + +## Edge Cases + +- **Single schema workspace:** All commands work. `--schema` is optional but accepted. +- **Empty schema:** `list_object_types` returns `[]`. `jr assets types` shows "No object types found." 
+- **Abstract object types:** Included in `types` output but not directly instantiable. No special handling needed — users can still inspect their attributes. +- **Type name collision across schemas:** Disambiguation message includes schema name parenthetically. +- **Extending `ObjectTypeAttributeDef`:** New fields use `Option` or `#[serde(default)]`. Existing callers (search enrichment from #86) only read `id`, `name`, `system`, `hidden`, `label`, `position` — all still present. The `CachedObjectTypeAttr` cache struct is unchanged since `schema` fetches attributes directly, not through the cache. diff --git a/docs/specs/assets-search-attribute-names.md b/docs/specs/assets-search-attribute-names.md new file mode 100644 index 0000000..a217541 --- /dev/null +++ b/docs/specs/assets-search-attribute-names.md @@ -0,0 +1,251 @@ +# Assets Search Attribute Names + +**Issue:** [#86](https://github.com/Zious11/jira-cli/issues/86) +**Status:** Design approved +**Date:** 2026-04-01 + +## Problem + +`jr assets search --attributes --output json` returns attribute data with only a numeric +`objectTypeAttributeId` — no human-readable name. In contrast, `jr assets view --attributes +--output json` includes the full `objectTypeAttribute` object with `name`, `position`, and +filtering metadata (`system`, `hidden`, `label`). + +An AI agent running a search gets attributes it cannot interpret without a follow-up +`assets view` call per result. + +Additionally, `assets search --attributes` in table mode only adds Created/Updated columns +but does not display actual attribute values. + +## Root Cause + +The AQL search endpoint (`POST /object/aql?includeAttributes=true`) returns inline +`AssetAttribute` structs that contain `objectTypeAttributeId` (a numeric string) but no +`objectTypeAttribute` object with the name. The per-object endpoint +(`GET /object/{id}/attributes`) returns the full `ObjectAttribute` struct with names — this is +what `assets view` uses. 
+ +There is no search endpoint that returns attribute names. Enrichment requires additional API +calls. + +## Solution + +Resolve attribute definitions by fetching them per object type via +`GET /objecttype/{id}/attributes`, cache the results, and inject `objectTypeAttribute` into +each search result's attributes. For table mode, display attribute values as inline +`Name: Value` pairs in an "Attributes" column. + +## API Details + +**Fetch attribute definitions** — `GET /objecttype/{id}/attributes` +- Response: array of `ObjectTypeAttributeDef` with `id`, `name`, `system`, `hidden`, `label`, + `position` (plus other fields serde ignores) +- One call per unique object type in search results (typically 1-3) +- Cacheable — schema-level data that rarely changes + +**Existing search endpoint** — `POST /object/aql?includeAttributes=true` +- Response includes `attributes` array per object, each with `objectTypeAttributeId` and + `objectAttributeValues` but no `objectTypeAttribute` +- No changes to how we call this endpoint + +## Enrichment Flow + +### `enrich_search_attributes` + +New function in `src/api/assets/objects.rs`: + +1. Collect unique `objectType.id` values from search results +2. For each unique type ID: + - Check `~/.cache/jr/object_type_attrs.json` for a cached entry + - Cache miss → call `GET /objecttype/{typeId}/attributes` → write to cache +3. Build `HashMap` mapping `objectTypeAttributeId` → definition +4. 
For each object's attributes, inject `objectTypeAttribute` from the map (skip attributes + with no match — defensive against schema drift) + + ### Cost + + - **Without enrichment:** 0 extra API calls, but opaque IDs + - **With enrichment:** K calls where K = unique object types (typically 1, rarely >3) + - **With cache (after first call):** 0 extra API calls + + ## Cache + + ### Structure + + `~/.cache/jr/object_type_attrs.json`: + + ```rust + pub struct ObjectTypeAttrCache { + pub fetched_at: DateTime<Utc>, + pub types: HashMap<String, Vec<CachedObjectTypeAttr>>, + } + + pub struct CachedObjectTypeAttr { + pub id: String, + pub name: String, + pub system: bool, + pub hidden: bool, + pub label: bool, + pub position: i32, + } + ``` + + ### Behavior + + - 7-day TTL (consistent with all other caches) + - Keyed by object type ID — multiple types coexist in one file + - Deserialization failure → treat as miss (consistent with `cmdb_fields.json` pattern) + - New types are merged into existing cache, not replaced + - The `CachedObjectTypeAttr` struct stores only the fields needed for enrichment and filtering, + not the full API response + + ## New API Method + + ### `src/api/assets/objects.rs` + + ```rust + pub async fn get_object_type_attributes( + &self, + workspace_id: &str, + object_type_id: &str, + ) -> Result<Vec<ObjectTypeAttributeDef>> + ``` + + Calls `GET /objecttype/{object_type_id}/attributes`. The existing `ObjectTypeAttributeDef` + struct can deserialize this response — serde ignores extra fields (`editable`, `sortable`, + `objectType`, etc.) that are present in the API response but absent from the struct. + + ## Output Changes + + ### JSON (`--output json`) + + When `--attributes` is passed, after enrichment each attribute gains an `objectTypeAttribute` + field. System and hidden attributes are filtered out (matching `assets view` behavior). + Attributes are sorted by position.
+ +**Before (current):** +```json +{ + "id": "88", + "label": "Acme Corp", + "objectKey": "OBJ-88", + "objectType": {"id": "23", "name": "Client"}, + "attributes": [ + { + "id": "81", + "objectTypeAttributeId": "81", + "objectAttributeValues": [{"value": "0", "displayValue": "0"}] + } + ] +} +``` + +**After (enriched):** +```json +{ + "id": "88", + "label": "Acme Corp", + "objectKey": "OBJ-88", + "objectType": {"id": "23", "name": "Client"}, + "attributes": [ + { + "id": "81", + "objectTypeAttributeId": "81", + "objectTypeAttribute": {"name": "Managed_Devices", "position": 5}, + "objectAttributeValues": [{"value": "0", "displayValue": "0"}] + } + ] +} +``` + +The enrichment is additive — `objectTypeAttribute` is injected into the existing attribute +JSON via `serde_json::Value` manipulation (insert key into the attribute object map). Only +`name` and `position` are included in the injected object — these are the fields consumers +need. The root-level object schema is preserved (same approach `assets view` uses for +injecting richer attributes). + +### Table (`--output table`) + +When `--attributes` is passed, replace the current Created/Updated columns with a single +"Attributes" column containing inline `Name: Value` pairs, pipe-delimited. Filter +system/hidden/label attributes, sort by position. + +**Before (current):** +``` +Key Type Name Created Updated +OBJ-88 Client Acme Corp 2025-12-17T14:58:00Z 2026-01-29T19:52:00Z +``` + +**After (enriched):** +``` +Key Type Name Attributes +OBJ-88 Client Acme Corp Location: New York, NY | Managed_Devices: 0 +``` + +Multi-value attributes use the first `displayValue` (or `value` as fallback). Attributes +with no values are omitted from the inline display. + +### Without `--attributes` + +No change. The default table (Key, Type, Name) and JSON output remain identical. + +## Handler Changes + +### `src/cli/assets.rs` — `handle_search` + +When `attributes` is `true`: +1. 
Fetch search results with `include_attributes=true` (existing) +2. Call `enrich_search_attributes(client, workspace_id, &mut objects)` (new) +3. For JSON: serialize enriched objects with `objectTypeAttribute` injected, filter + system/hidden, sort by position +4. For table: build "Attributes" column from enriched data + +The enrichment function is called once after search, before output formatting. + +## Files Changed + +| File | Change | +|------|--------| +| `src/api/assets/objects.rs` | Add `get_object_type_attributes`, `enrich_search_attributes` | +| `src/types/assets/object.rs` | No changes — reuse `ObjectTypeAttributeDef` | +| `src/cache.rs` | Add `ObjectTypeAttrCache`, `CachedObjectTypeAttr`, read/write functions | +| `src/cli/assets.rs` | Update `handle_search` for enriched JSON and table output | + +## Error Handling + +| Scenario | Behavior | +|----------|----------| +| Object type attributes fetch fails (401/404/500) | Log warning, skip enrichment for that type — attributes remain un-enriched (graceful degradation) | +| Attribute ID not found in type definitions | Skip that attribute in enriched output — defensive against schema drift | +| Cache file corrupt / old format | Treat as miss, re-fetch (consistent with `cmdb_fields.json`) | +| `--attributes` without `--output json` | Enrichment still runs for table mode | + +## Edge Cases + +| Scenario | Behavior | +|----------|----------| +| Mixed object types in results | Each type's definitions fetched/cached independently | +| No attributes on an object | "Attributes" column shows empty string | +| All attributes are system/hidden | "Attributes" column shows empty string | +| Multi-value attribute | Show first `displayValue` or `value` | +| Search with `--attributes` returns 0 results | No enrichment needed, empty table/JSON | +| `assets view` (existing) | Unchanged — still uses `GET /object/{id}/attributes` directly | + +## Testing + +- **Unit tests**: Cache read/write, enrichment logic (inject names, 
filter system/hidden, + handle missing IDs, sort by position), table column formatting +- **Integration tests (wiremock)**: Mock `POST /object/aql` + `GET /objecttype/{id}/attributes`, + verify enriched JSON output contains `objectTypeAttribute.name`, verify table output contains + inline attribute values +- **CLI smoke tests**: `assets search --help` still shows `--attributes` flag + +## What Doesn't Change + +- `assets view` — still uses `GET /object/{id}/attributes` directly +- `assets tickets` — no attributes involved +- `AssetObject`, `AssetAttribute`, `ObjectAttribute`, `ObjectTypeAttributeDef` types — no + changes to existing structs +- Search without `--attributes` — completely unchanged +- No new CLI flags or subcommands +- No new dependencies diff --git a/docs/specs/assets-tickets-status-filter.md b/docs/specs/assets-tickets-status-filter.md new file mode 100644 index 0000000..78cbd19 --- /dev/null +++ b/docs/specs/assets-tickets-status-filter.md @@ -0,0 +1,83 @@ +# Assets Tickets Status Filtering + +**Issue:** #89 +**Date:** 2026-04-01 +**Status:** Design + +## Problem + +`jr assets tickets OBJ-1` returns all tickets linked to an asset with no way to filter by status. For assets with many linked tickets, the only option is to fetch everything and filter externally. + +## Solution + +Add `--open` and `--status` client-side filtering flags to `assets tickets`. + +``` +jr assets tickets CUST-5 --open +jr assets tickets CUST-5 --status "In Progress" +jr assets tickets CUST-5 --open --limit 10 +``` + +## CLI Flags + +Add to `AssetsCommand::Tickets`: + +- `--open` — exclude tickets in the Done status category +- `--status ` — filter to a specific status (case-insensitive substring match) +- `--open` and `--status` conflict with each other (same pattern as `issue list`) +- Filtering applies **before** `--limit` truncation + +## Filter Logic + +The Assets connected-tickets API (`GET /objectconnectedtickets/{objectId}/tickets`) has no server-side filtering. 
All filtering is client-side on the fetched response. + +### `--open` + +Retain tickets where `status.colorName` is not `"green"`. The `colorName` field maps to Jira's fixed status categories: `"green"` = Done, `"yellow"` = In Progress, `"blue-gray"` = To Do. These mappings are fixed across all Jira Cloud instances. + +Tickets with `status: None` are included (unknown status is not assumed to be Done). + +### `--status` + +Case-insensitive substring match on `status.name` using the existing `partial_match` module. Statuses are extracted from the fetched tickets for disambiguation. + +If ambiguous: error listing matching statuses. If no match: error listing available statuses. + +### Ordering + +1. Fetch all tickets from API +2. Apply `--open` or `--status` filter +3. Apply `--limit` truncation +4. Display + +## Error Handling + +- `--status "xyz"` with no match: `No status matching "xyz". Available: In Progress, Done, To Do` +- `--status` ambiguous: `Ambiguous status "in". Matches: In Progress, In Review` +- Zero tickets after filtering: normal empty output, not an error +- Tickets with missing `status` field: included by `--open`, excluded by `--status` (no status name to match) + +## Changes by File + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Add `--open` and `--status` to `AssetsCommand::Tickets` with `conflicts_with` | +| `src/cli/assets.rs` | Filter tickets in `handle_tickets` before display/limit | + +## Testing Strategy + +### Unit tests + +- Filter function: `--open` excludes green, keeps yellow/blue-gray/None +- Filter function: `--status` partial match +- Filter applies before `--limit` + +### CLI smoke tests + +- `--open` and `--status` conflict produces clap error + +## API Constraints (Validated) + +- Connected-tickets endpoint has no query parameters for filtering (confirmed via Context7 API docs) +- `status.colorName` maps to fixed Jira status categories: `"green"` = Done, `"yellow"` = In Progress, `"blue-gray"` = To Do (confirmed 
via Perplexity + Context7 status category API) +- These category-color mappings are fixed across all Jira Cloud instances (confirmed) diff --git a/docs/specs/assets-view-default-attributes.md b/docs/specs/assets-view-default-attributes.md new file mode 100644 index 0000000..d7fb7b9 --- /dev/null +++ b/docs/specs/assets-view-default-attributes.md @@ -0,0 +1,105 @@ +# assets view default attributes + +**Issue:** [#85](https://github.com/Zious11/jira-cli/issues/85) +**Status:** Design approved +**Date:** 2026-04-01 + +## Problem + +`jr assets view OBJ-1` returns `"attributes": []` in JSON and shows no attribute data in table +mode unless `--attributes` is explicitly passed. The default output is missing the most valuable +information about an asset. An AI agent or user viewing an asset for the first time would +reasonably expect to see its attributes. + +## Root cause + +The `handle_view` function only calls `get_object_attributes()` when the `--attributes` flag is +passed. Without it, the object is fetched with `includeAttributes=false`, which returns an empty +attributes array. Even `includeAttributes=true` on the object endpoint only returns numeric +`objectTypeAttributeId` values without human-readable names — the separate +`GET /object/{id}/attributes` endpoint is required to get named attributes with `system`, +`hidden`, `label`, and `position` metadata. + +## Solution + +Flip the default for `assets view`: always fetch and display attributes unless `--no-attributes` +is passed. + +### `src/cli/mod.rs` + +**`AssetsCommand::View`**: Replace `#[arg(long)] attributes: bool` with +`#[arg(long)] no_attributes: bool`. + +This matches the existing `--no-color` and `--no-input` patterns in the codebase. Clap +auto-generates `--no-attributes` from the field name. + +**`AssetsCommand::Search`**: No change. `--attributes` remains opt-in. Search returns multiple +objects, and each would require a separate `/object/{id}/attributes` call — too expensive for a +default. 
+ +### `src/cli/assets.rs` + +**`handle_view`**: Invert the condition — `!no_attributes` replaces `attributes` as the guard +for the `get_object_attributes()` call. + +- **Table output**: No structural change. The second attributes table already renders correctly + when attributes are fetched. It now renders by default. +- **JSON output**: No structural change. The `attributes` array injection into the object JSON + already works. It now runs by default. + +### API calls + +No changes to `get_asset()` or `get_object_attributes()`. The object is still fetched with +`includeAttributes=false` (the inline attributes lack human-readable names and are not useful). +The named attributes come from the separate `/object/{id}/attributes` endpoint. + +**Call pattern per `assets view` invocation:** +- Default (3 calls): resolve key → get object → get object attributes +- With `--no-attributes` (2 calls): resolve key → get object + +## What doesn't change + +- `assets search` — `--attributes` remains opt-in +- `assets tickets` — no attributes involved +- API client methods — no changes +- Types (`AssetObject`, `ObjectAttribute`, etc.) — no changes +- Attribute filtering logic (exclude system, hidden, label for table; exclude system, hidden + for JSON) — no changes + +## JSON output impact + +**Before** (default): `{"attributes": []}` — empty, useless +**After** (default): `{"attributes": [{...}]}` — full named attributes + +This is a breaking change to the default JSON output, but the previous default was unusable. +Consumers that parse `attributes` will now get the data they expected. The `--no-attributes` +flag provides an escape hatch for the minimal view. + +## CLI breaking change + +The `--attributes` flag on `assets view` is removed and replaced with `--no-attributes`. This +is a pre-1.0 breaking change. There is no clean alias path because the semantics are inverted +(old flag meant "include", new default already includes). 
Scripts using `jr assets view --attributes` +will fail with an unrecognized flag error — the fix is to remove the flag (attributes are now +included by default). + +## Edge cases + +| Scenario | Behavior | +|----------|----------| +| Object with no custom attributes | Attributes table omitted (existing behavior: `if !attrs.is_empty()`) | +| Object with many attributes | All returned in single API call (endpoint is not paginated) | +| `--no-attributes` passed | Skip attributes fetch, same as old default behavior | +| `--output json --no-attributes` | Returns bare object with `"attributes": []` (same as old default) | + +## Testing + +- **Integration test**: API-layer simulation verifying JSON filter excludes system and hidden attributes +- **CLI smoke test**: `assets view --help` shows `--no-attributes`, not `--attributes` + +## Not in scope + +- Changing `assets search` default (keep `--attributes` opt-in) +- Adding new flags to `assets view` +- Caching attribute definitions +- Changing the attribute filtering logic (system/hidden/label exclusion) diff --git a/docs/specs/issue-list-asset-filter.md b/docs/specs/issue-list-asset-filter.md new file mode 100644 index 0000000..f7db7c1 --- /dev/null +++ b/docs/specs/issue-list-asset-filter.md @@ -0,0 +1,150 @@ +# Issue List `--asset` Filter + +**Issue:** #88 +**Date:** 2026-04-01 +**Status:** Design + +## Problem + +There is no way to filter `issue list` by linked asset. The only path from asset to issues is `jr assets tickets OBJ-1`, which returns a simpler response format (no labels, points, links) and cannot be composed with other `issue list` filters like `--status`, `--assignee`, `--team`, or `--open`. + +## Solution + +Add `--asset ` to `issue list` that generates an `aqlFunction()` JQL clause, composable with all existing filters. 
+ +``` +jr issue list --project PROJ --asset CUST-5 +jr issue list --asset CUST-5 --status "In Progress" --assignee me +jr issue list --asset CUST-5 --open --points +``` + +## CLI Surface + +Add `--asset ` to `IssueCommand::List`: + +- Accepts an asset object key (e.g., `CUST-5`, `SRV-42`) — the `SCHEMA-NUMBER` format used by Jira Assets +- Composes with all existing filters (`--status`, `--assignee`, `--reporter`, `--team`, `--recent`, `--open`, `--jql`) +- Automatically enables the `--assets` display column when `--asset` is used +- Fully non-interactive: no prompts, no `--no-input` concern + +## JQL Construction + +### aqlFunction() — the JQL Function + +The Jira Assets JQL function used here is **`aqlFunction()`**. It accepts an AQL (Assets Query Language) string and returns matching objects. It must be used with the **human-readable custom field name**, not the `cf[ID]` shorthand or `customfield_NNNNN` format. + +Supported operators: `IS`, `IS NOT`, `IN`, `NOT IN`. + +### Single CMDB Field (Common Case) + +``` +"Client" IN aqlFunction("Key = \"CUST-5\"") +``` + +### Multiple CMDB Fields + +When the instance has multiple Assets custom fields (e.g., "Client" and "Server"), OR them: + +``` +("Client" IN aqlFunction("Key = \"CUST-5\"") OR "Server" IN aqlFunction("Key = \"CUST-5\"")) +``` + +### Escaping + +- **Field name:** Wrapped in double quotes. Field names are admin-defined strings; quote defensively. +- **Asset key inside AQL:** Escaped with `jql::escape_value`, then nested inside the JQL string with backslash-escaped quotes. Asset keys follow `SCHEMA-NUMBER` format (alphanumeric + hyphen), so special characters are unlikely but handled defensively. + +```rust +// Per CMDB field, build: +// "Client" IN aqlFunction("Key = \"CUST-5\"") +format!( + "\"{}\" IN aqlFunction(\"Key = \\\"{}\\\"\")", + escape_value(&field_name), + escape_value(&asset_key), +) +``` + +Multiple fields are joined with ` OR ` and wrapped in parentheses when there are 2+. 
+ +## CMDB Field Discovery Changes + +### Current State + +`filter_cmdb_fields` returns `Vec` (IDs only). The cache stores `CmdbFieldsCache { field_ids: Vec }`. + +### Required Change + +`filter_cmdb_fields` returns `Vec<(String, String)>` — `(field_id, field_name)` pairs. The cache stores `CmdbFieldsCache { fields: Vec<(String, String)> }`. + +Stale caches auto-expire after 7 days. On deserialization failure of an old-format cache, it is treated as a cache miss (existing behavior: `serde_json::from_str` returns `Err`, propagated as `None`). + +Existing callers that only need IDs extract with `.iter().map(|(id, _)| id.clone()).collect()`. + +## Error Handling + +### No CMDB fields found + +If `get_or_fetch_cmdb_field_ids` returns empty and `--asset` was specified: + +``` +Error: --asset requires Assets custom fields on this Jira instance. +Assets requires Jira Service Management Premium or Enterprise. +``` + +### Invalid asset key format + +Validate the key matches `SCHEMA-NUMBER` pattern before building JQL: + +``` +Error: Invalid asset key "foo". Expected format: SCHEMA-NUMBER (e.g., CUST-5, SRV-42). +``` + +### aqlFunction returns no matches + +Not an error. JQL returns zero issues, same as any other filter with no matches. + +### `--asset` without project scope + +Allowed at the CLI level — the asset clause counts as a valid filter for the "no project or filters" guard. If Jira's API rejects a project-less `aqlFunction()` query, the API error propagates naturally through existing error handling. In practice, most users have a default project configured via `.jr.toml` or `--project`. + +## Automatic `--assets` Display Column + +When `--asset` is used, automatically enable the `--assets` display column without requiring the user to also pass `--assets`. Implementation: if `--asset` is set, treat `show_assets` as `true` regardless of whether the `--assets` display flag was passed. 
+ +## Testing Strategy + +### Unit Tests + +- `build_asset_clause` with single CMDB field, multiple CMDB fields +- `build_filter_clauses` with asset clause composed alongside other filters +- Asset key validation (valid keys, malformed keys) +- `filter_cmdb_fields` returning `(id, name)` tuples + +### Integration Tests (wiremock) + +- `issue list --asset CUST-5` produces correct JQL in the search request +- `--asset` combined with `--status`, `--assignee` composes correctly +- `--asset` with no CMDB fields returns the appropriate error +- `--asset` auto-enables assets display column + +## Changes by File + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Add `--asset` field to `IssueCommand::List` | +| `src/api/jira/fields.rs` | `filter_cmdb_fields` returns `Vec<(String, String)>` | +| `src/cache.rs` | `CmdbFieldsCache` stores `Vec<(String, String)>` | +| `src/api/assets/linked.rs` | `get_or_fetch_cmdb_field_ids` returns `Vec<(String, String)>`, add helper to extract IDs only | +| `src/cli/issue/list.rs` | Build asset JQL clause, auto-enable assets column, update filter guard | +| `src/cli/issue/assets.rs` | Adapt to new `(id, name)` tuple return | +| `tests/` | Integration tests for `--asset` flag | + +## API Constraints (Validated) + +- `aqlFunction()` requires the human-readable field **name**, not `cf[ID]` or `customfield_NNNNN` (Atlassian support docs, community confirmed) +- `aqlFunction()` composes with other JQL clauses via `AND` (confirmed) +- AQL attribute for object key is **`Key`** (a reserved AQL keyword) — e.g., `Key = "CUST-5"`. 
Note: `objectKey` is the JSON field name in REST API responses, but `Key` is the AQL query attribute (confirmed via Atlassian AQL docs) +- Asset object keys follow `SCHEMA-NUMBER` format (confirmed) +- Empty AQL results produce empty JQL results, not errors (confirmed) +- Available on all paid JSM plans: Standard, Premium, and Enterprise (confirmed via Atlassian docs) +- Deprecated functions like `attributeValue()` should be avoided; `aqlFunction()` is the current standard diff --git a/docs/specs/resolve-asset-custom-fields.md b/docs/specs/resolve-asset-custom-fields.md new file mode 100644 index 0000000..6cc5200 --- /dev/null +++ b/docs/specs/resolve-asset-custom-fields.md @@ -0,0 +1,185 @@ +# Resolve Asset-Typed Custom Fields from Jira Field Metadata + +**Issue:** #90 +**Date:** 2026-04-01 +**Status:** Design + +## Problem + +jr already discovers all CMDB-typed custom fields and displays linked assets in table output. However: + +1. **JSON output is opaque** — `issue view --output json` returns raw Jira API data where CMDB custom fields contain only `objectId` and `workspaceId`, not human-readable keys or labels. Table output enriches these, but JSON doesn't. +2. **Assets are lumped together** — `issue view` shows a single "Assets" row combining all CMDB fields. When an issue has multiple asset fields (e.g., "Customer Site" and "Affected Service"), there's no way to distinguish which field each asset belongs to. +3. **No custom field visibility** — `jr project fields` shows issue types, priorities, and statuses, but no information about CMDB custom fields. Users can't discover what asset fields exist. + +## Solution + +Three changes, all building on the existing CMDB field discovery infrastructure: + +1. Enrich CMDB custom fields in JSON output for `issue view` and `issue list` +2. Show per-field asset rows in `issue view` table output +3. Add CMDB custom fields to `project fields` output + +## 1. 
JSON Enrichment + +### Current behavior + +`issue view --output json` dumps the raw Jira API response. CMDB fields appear as: + +```json +"customfield_10191": [ + {"objectId": "18", "workspaceId": "e28955f0-..."} +] +``` + +### New behavior + +After fetching the issue, detect CMDB fields in `fields.extra`, resolve them via the existing `enrich_assets` pipeline, then inject `objectKey`, `label`, and `objectType` back into the JSON before printing: + +```json +"customfield_10191": [ + { + "objectId": "18", + "workspaceId": "e28955f0-...", + "objectKey": "CUST-5", + "label": "Acme Corp", + "objectType": "Client" + } +] +``` + +This is an **additive change** — existing fields (`objectId`, `workspaceId`) are preserved. New fields are injected alongside them. + +### Scope + +- **`issue view --output json`:** Always enrich CMDB fields (single issue, bounded API calls). +- **`issue list --output json`:** Enrich when `--assets` is active (same condition as table mode). Uses the same batch-deduplication logic that table mode already uses. + +### API constraint + +The Jira issue API never returns `objectKey` or `label` for CMDB fields — this is a documented limitation (JSDCLOUD-15201). Resolution requires separate Assets API calls (`GET /object/{id}`). There is no bulk endpoint; `jr` already handles this with concurrent `join_all` calls and deduplication. + +## 2. Per-Field Asset Rows in `issue view` + +### Current behavior + +`issue view` shows a single "Assets" row: + +``` +Assets CUST-5 (Acme Corp), SRV-42 (Email Server) +``` + +### New behavior + +Each CMDB field is shown as its own row using the field's configured name: + +``` +Customer Site CUST-5 (Acme Corp) +Affected Service SRV-42 (Email Server) +``` + +This requires the `(id, name)` pairs from the CMDB field cache (available from #88's `get_or_fetch_cmdb_fields`). 
Instead of extracting all assets at once, iterate per-field: extract assets for each field ID, enrich, then display as a separate row using the field name. + +If a CMDB field has no linked assets on the current issue, skip the row entirely. + +If only one CMDB field exists on the instance, continue showing it by its field name (not "Assets"). + +### JSON output (`issue view --output json`) + +The per-field structure is already present in JSON — each custom field is a separate key in `fields`. The enrichment from section 1 handles this. No additional JSON changes needed. + +## 3. CMDB Custom Fields in `project fields` + +### Current behavior + +`jr project fields` shows issue types, priorities, and statuses. No custom field information. + +### New behavior + +Add a "Custom Fields (Assets)" section listing discovered CMDB fields: + +``` +Custom Fields (Assets) — instance-wide: + - Client (customfield_10191) + - Affected Service (customfield_10245) +``` + +The "instance-wide" qualifier is important: `GET /rest/api/3/field` returns instance-level fields, not project-scoped ones. Project-to-field mapping requires admin permissions and is out of scope. + +### JSON output + +```json +{ + "project": "PROJ", + "issue_types": [...], + "priorities": [...], + "statuses_by_issue_type": [...], + "asset_fields": [ + {"id": "customfield_10191", "name": "Client"}, + {"id": "customfield_10245", "name": "Affected Service"} + ] +} +``` + +If no CMDB fields exist, `asset_fields` is an empty array in JSON. In table output, the section is omitted entirely. + +## Implementation Notes + +### Shared infrastructure + +All three features depend on `get_or_fetch_cmdb_fields` (returns `Vec<(String, String)>` of id/name pairs) and the existing `extract_linked_assets` + `enrich_assets` pipeline. No new API integrations are needed. + +### JSON enrichment mechanics + +For `issue view`, the flow is: +1. Fetch issue via `get_issue` +2. Discover CMDB field IDs (cached) +3. 
For each CMDB field, extract linked assets from `fields.extra` +4. Enrich all assets via `enrich_assets` (concurrent resolution) +5. For JSON: write enriched data back into the `serde_json::Value` at each `customfield_NNNNN` key +6. For table: display per-field rows + +For `issue list`, enrichment already happens for table mode in `handle_list`. The change is to also apply the resolved data to JSON output when `--assets` is active. + +### Performance + +- `issue view`: Single issue, typically 1-3 CMDB fields with 1-5 assets each. Negligible overhead. +- `issue list`: Already does concurrent enrichment with deduplication for table mode. JSON enrichment piggybacks on the same resolved data — no additional API calls. +- Future optimization: AQL batch resolution (`POST /object/aql` with `objectId IN (...)`) could replace per-object `GET /object/{id}` calls. This is out of scope for this feature but noted as a follow-up. + +## Error Handling + +- If enrichment fails for an asset (e.g., 404 from Assets API), leave the original raw data in place — don't error, don't remove the field. This matches the current table-mode behavior (graceful degradation). +- If no CMDB fields exist on the instance, all three features degrade gracefully: JSON has no enrichment needed, `issue view` shows no asset rows, `project fields` omits the section. 
+ +## Testing Strategy + +### Unit tests + +- JSON enrichment: given a `serde_json::Value` with raw CMDB fields, verify enriched fields are injected +- Per-field extraction: given `fields.extra` with multiple CMDB fields, verify per-field asset extraction +- `project fields` JSON: verify `asset_fields` array is present/absent based on CMDB field discovery + +### Integration tests (wiremock) + +- `issue view --output json` with CMDB fields returns enriched JSON +- `issue view` table output shows per-field rows +- `project fields` with CMDB fields shows the custom fields section +- `project fields` without CMDB fields omits the section + +## Changes by File + +| File | Change | +|------|--------| +| `src/cli/issue/list.rs` | `handle_view`: per-field rows in table, JSON enrichment; `handle_list`: JSON enrichment when `--assets` | +| `src/api/assets/linked.rs` | Add `extract_linked_assets_per_field` (returns `Vec<(field_name, Vec)>`) and `enrich_json_assets` (injects enriched data into `serde_json::Value`) | +| `src/cli/project.rs` | `handle_fields`: add CMDB fields section | +| `tests/` | Integration tests for all three features | + +## API Constraints (Validated) + +- Jira issue API returns only `objectId` and `workspaceId` for CMDB fields — never `objectKey` or `label` (confirmed, JSDCLOUD-15201) +- Separate Assets API call required per object to resolve — no bulk endpoint exists +- `GET /rest/api/3/field` returns instance-wide fields, not project-scoped (confirmed) +- Project-to-field context mapping requires admin permissions (out of scope) +- Available on all paid JSM plans: Standard, Premium, Enterprise diff --git a/docs/specs/sprint-issue-management.md b/docs/specs/sprint-issue-management.md new file mode 100644 index 0000000..411c76f --- /dev/null +++ b/docs/specs/sprint-issue-management.md @@ -0,0 +1,216 @@ +# Sprint Issue Management + +**Issue:** [#83](https://github.com/Zious11/jira-cli/issues/83) +**Status:** Design approved +**Date:** 2026-04-01 + +## 
Problem + + `jr sprint` currently supports `list` and `current` — both read-only. There is no way to add + or remove issues from a sprint. Common automation use cases (velocity tracking, sprint planning + scripts, backlog grooming) require this capability and currently fall back to raw `curl` calls + against the Jira Agile REST API. + + ## Solution + + Add two new subcommands: `sprint add` and `sprint remove`. + + ``` + jr sprint add --sprint 100 FOO-1 FOO-2  # add issues to a specific sprint + jr sprint add --current FOO-1 FOO-2     # add issues to the active sprint + jr sprint remove FOO-1 FOO-2            # move issues to backlog + ``` + + ### `src/cli/mod.rs` + + **`SprintCommand::Add`**: `--sprint <id>` flag (required unless `--current` is present), + `--current` flag (conflicts with `--sprint`), variadic positional `issues: Vec<String>`, + optional `--board` for active sprint resolution. + + **`SprintCommand::Remove`**: variadic positional `issues: Vec<String>` only. + + Sprint ID is a named flag (`--sprint`) rather than positional because clap's derive API cannot + handle an optional positional (`Option<u64>`) followed by a variadic positional (`Vec<String>`) + — the parser processes positionals left-to-right by fixed slot and cannot skip the optional. + + ### `src/cli/sprint.rs` + + **`handle_add`**: If `--current`, resolve board via existing `resolve_board_id` chain + (`--board` → config → auto-discover, scrum-only), then list active sprints to get the sprint + ID. Validate issue count <= 50. Call `add_issues_to_sprint`. Output follows state-change + pattern. + + **`handle_remove`**: Validate issue count <= 50. Call `move_issues_to_backlog`. Output follows + state-change pattern. + + ### `src/api/jira/sprints.rs` + + Two new methods: + + - `add_issues_to_sprint(sprint_id: u64, issues: &[String]) -> Result<()>` — calls + `POST /rest/agile/1.0/sprint/{sprintId}/issue` with `{"issues": [...]}`. Uses + `post_no_content` (204 response).
+
+- `move_issues_to_backlog(issues: &[String]) -> Result<()>` — calls
+  `POST /rest/agile/1.0/backlog/issue` with `{"issues": [...]}`. Uses `post_no_content`
+  (204 response).
+
+No new types needed. Request bodies are constructed inline with `serde_json::json!`.
+
+## API details
+
+**Add to sprint** — `POST /rest/agile/1.0/sprint/{sprintId}/issue`
+- Request: `{"issues": ["FOO-1", "FOO-2"]}`
+- Response: 204 No Content
+- Max 50 issues per call
+- Issues can only be moved to open or active sprints (400 for closed)
+- Idempotent: adding an issue already in the sprint returns 204
+- Sprint ID (`int64`) is globally unique — no board ID needed
+
+**Move to backlog** — `POST /rest/agile/1.0/backlog/issue`
+- Request: `{"issues": ["FOO-1", "FOO-2"]}`
+- Response: 204 No Content
+- Max 50 issues per call
+- Equivalent to removing future and active sprints from a given set of issues
+- Idempotent: moving an issue already in backlog returns 204
+- No sprint ID needed
+
+## CLI interface
+
+```
+jr sprint add --sprint <SPRINT_ID> <ISSUE>...
+jr sprint add --current <ISSUE>...
+jr sprint add --current --board <BOARD_ID> <ISSUE>...
+jr sprint remove <ISSUE>...
+```
+
+### Clap definitions
+
+```rust
+/// Add issues to a sprint
+Add {
+    /// Sprint ID (from `jr sprint list`)
+    #[arg(long, required_unless_present = "current")]
+    sprint: Option<u64>,
+    /// Use the active sprint instead of specifying an ID
+    #[arg(long, conflicts_with = "sprint")]
+    current: bool,
+    /// Issue keys to add (e.g. FOO-1 FOO-2)
+    #[arg(required = true, num_args = 1..)]
+    issues: Vec<String>,
+    /// Board ID (used with --current to resolve the active sprint)
+    #[arg(long)]
+    board: Option<u64>,
+},
+/// Remove issues from sprint (moves to backlog)
+Remove {
+    /// Issue keys to remove (e.g. 
FOO-1 FOO-2) + #[arg(required = true, num_args = 1..)] + issues: Vec, +}, +``` + +## Output + +### `sprint add` + +**Table:** `Added 3 issue(s) to sprint 100` (via `output::print_success`) + +**JSON:** +```json +{ + "sprint_id": 100, + "issues": ["FOO-1", "FOO-2", "FOO-3"], + "added": true +} +``` + +### `sprint remove` + +**Table:** `Moved 2 issue(s) to backlog` (via `output::print_success`) + +**JSON:** +```json +{ + "issues": ["FOO-1", "FOO-2"], + "removed": true +} +``` + +Both commands are idempotent — the API returns 204 whether the issues were already in the +target state or not. The `added`/`removed` field is `true` on any successful API call since +the API does not distinguish "newly moved" from "already there". + +## Error handling + +| Scenario | Behavior | +|----------|----------| +| >50 issues provided | Client-side error before API call: "Too many issues (got N). Maximum is 50 per operation." | +| Closed sprint (`add`) | API returns 400 — pass through API error message | +| Sprint not found (`add`) | API returns 404 — pass through API error message | +| No active sprint (`--current`) | Existing error path: "No active sprint found for board N." | +| Invalid issue key | API returns 400 — pass through API error message | +| No project configured (`--current`) | Existing error path: "No project configured. Run \"jr init\" or pass --project." 
| +| Permission denied | API returns 403 — pass through API error message | +| Neither `--sprint` nor `--current` on `add` | Clap enforces `required_unless_present` — automatic error | +| Both `--sprint` and `--current` on `add` | Clap enforces `conflicts_with` — automatic error | + +## Edge cases + +| Scenario | Behavior | +|----------|----------| +| Single issue | Works: `jr sprint add --sprint 100 FOO-1` | +| 50 issues (max) | Works: all sent in one API call | +| 51 issues | Client-side error before API call | +| Issue already in sprint (`add`) | API returns 204 — reported as success | +| Issue already in backlog (`remove`) | API returns 204 — reported as success | +| `--board` without `--current` (`add`) | `--board` is silently ignored (matches existing `sprint list`/`sprint current` behavior) | +| `--board` on `remove` | Not accepted (not in the variant definition) | + +## Handler flow + +### `handle_add` + +1. If `--current`: resolve board ID via `resolve_board_id(config, client, board, project, true)`, + then `list_sprints(board_id, Some("active"))`. Error if no active sprint. Use first sprint's ID. +2. Validate `issues.len() <= 50`. +3. Call `client.add_issues_to_sprint(sprint_id, &issues)`. +4. Output success (JSON or table). + +### `handle_remove` + +1. Validate `issues.len() <= 50`. +2. Call `client.move_issues_to_backlog(&issues)`. +3. Output success (JSON or table). + +## Dispatch changes + +`src/cli/sprint.rs` match arm and `src/cli/mod.rs` `SprintCommand` enum need two new variants. +The `handle` function in `sprint.rs` needs new match arms. For `Add`, the board resolution +path is only needed when `--current` is used — when `--sprint` is provided, no board/config +is needed. 
+ +## Testing + +- **Unit tests**: Clap validation — `--sprint` and `--current` conflict, one of them required + for `add`, variadic issues required +- **Integration tests (wiremock)**: Mock `POST /rest/agile/1.0/sprint/{sprintId}/issue` and + `POST /rest/agile/1.0/backlog/issue`, verify request body contains correct issue keys, + assert 204 handling produces correct output +- **CLI smoke tests**: `sprint add --help` shows `--sprint`, `--current`, `--board` flags; + `sprint remove --help` shows variadic `ISSUE` arg + +## What doesn't change + +- `sprint list` / `sprint current` — untouched +- Board resolution logic (`resolve_board_id`) — reused as-is +- Existing API client methods — no changes +- Types (`Sprint`, `SprintIssuesResult`) — no changes +- No new dependencies + +## Not in scope + +- Ranking options (`rankBeforeIssue`, `rankAfterIssue`) — the API supports them but they add + complexity without clear use cases for CLI automation +- Moving issues between sprints in one command — do `add` to new sprint (API handles the move) +- Batch operations >50 issues — user can loop; auto-chunking adds complexity diff --git a/docs/superpowers/plans/2026-03-21-jr-implementation.md b/docs/superpowers/plans/2026-03-21-jr-implementation.md index 6c95d30..01eaf36 100644 --- a/docs/superpowers/plans/2026-03-21-jr-implementation.md +++ b/docs/superpowers/plans/2026-03-21-jr-implementation.md @@ -3867,7 +3867,7 @@ async fn fields( ) -> Result<()> { let project_key = project .or_else(|| config.project_key(project_override)) - .ok_or_else(|| anyhow::anyhow!("No project specified. Use 'jr project fields FOO' or configure .jr.toml"))?; + .ok_or_else(|| anyhow::anyhow!("No project specified. 
Use 'jr project fields --project FOO' or configure .jr.toml"))?;
 
     let issue_types = client.get_project_issue_types(&project_key).await?;
     let priorities = client.get_priorities().await?;
diff --git a/docs/superpowers/plans/2026-03-23-unbounded-jql-guard.md b/docs/superpowers/plans/2026-03-23-unbounded-jql-guard.md
new file mode 100644
index 0000000..8522bea
--- /dev/null
+++ b/docs/superpowers/plans/2026-03-23-unbounded-jql-guard.md
@@ -0,0 +1,294 @@
+# Unbounded JQL Guard Implementation Plan
+
+> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
+
+**Goal:** Reject unbounded JQL queries in `issue list` before they hit the Jira API, with an actionable error message (closes GitHub issue #16).
+
+**Architecture:** Change `build_fallback_jql` from `-> String` to `-> Result<String>`, returning `JrError::UserError` (exit code 64) when all filters are `None`. Add a stderr warning to `board view` kanban path when no project is configured. 
+ +**Tech Stack:** Rust, anyhow, thiserror (JrError) + +**Spec:** `docs/superpowers/specs/2026-03-23-unbounded-jql-guard-design.md` + +--- + +## File Structure + +| File | Responsibility | Change | +|------|---------------|--------| +| `src/cli/issue/list.rs` | JQL construction, list/view handlers, unit tests | `build_fallback_jql` returns `Result`, callers add `?`, test updates | +| `src/cli/board.rs` | Board list/view handlers | Add kanban warning when no project configured | + +--- + +### Task 1: Update `build_fallback_jql` to reject unbounded queries + +**Files:** +- Modify: `src/cli/issue/list.rs:1-2` (imports) +- Modify: `src/cli/issue/list.rs:128-145` (function body) +- Modify: `src/cli/issue/list.rs:313-364` (tests) + +- [ ] **Step 1: Write the failing test** + +Replace the test `fallback_jql_no_filters_still_has_order_by` (lines 353-357) and add an error content assertion. Also update `use` import at line 315 since tests now need `crate::error::JrError`. The new test block for the error case: + +In `src/cli/issue/list.rs`, replace lines 353-357: + +```rust + #[test] + fn fallback_jql_errors_when_no_filters() { + let result = build_fallback_jql(None, None, None); + assert!(result.is_err(), "Expected error for unbounded query"); + let err_msg = result.unwrap_err().to_string(); + assert!( + err_msg.contains("--project"), + "Error should mention --project: {err_msg}" + ); + assert!( + err_msg.contains(".jr.toml"), + "Error should mention .jr.toml: {err_msg}" + ); + assert!( + err_msg.contains("jr init"), + "Error should mention jr init: {err_msg}" + ); + } +``` + +- [ ] **Step 2: Run test to verify it fails** + +```bash +cargo test --lib fallback_jql_errors_when_no_filters +``` +Expected: FAIL — `build_fallback_jql` currently returns `String`, not `Result`, so the test won't compile. + +- [ ] **Step 3: Implement the fix** + +In `src/cli/issue/list.rs`: + +**3a.** Add `use crate::error::JrError;` to the imports at the top of the file. 
The existing import line is:
+
+```rust
+use anyhow::Result;
+```
+
+Change it to:
+
+```rust
+use anyhow::Result;
+
+use crate::error::JrError;
+```
+
+**3b.** Change the function signature and add the guard. Replace the entire `build_fallback_jql` function (lines 128-145) with:
+
+```rust
+fn build_fallback_jql(
+    project_key: Option<&str>,
+    status: Option<&str>,
+    resolved_team: Option<&(String, String)>,
+) -> Result<String> {
+    if project_key.is_none() && status.is_none() && resolved_team.is_none() {
+        return Err(JrError::UserError(
+            "No project or filters specified. Use --project KEY, --status STATUS, or --team NAME. \
+             You can also set a default project in .jr.toml or run \"jr init\"."
+                .into(),
+        )
+        .into());
+    }
+    let mut parts: Vec<String> = Vec::new();
+    if let Some(pk) = project_key {
+        parts.push(format!("project = \"{}\"", pk));
+    }
+    if let Some(s) = status {
+        parts.push(format!("status = \"{}\"", s));
+    }
+    if let Some((field_id, team_uuid)) = resolved_team {
+        parts.push(format!("{} = \"{}\"", field_id, team_uuid));
+    }
+    let where_clause = parts.join(" AND ");
+    Ok(format!("{} ORDER BY updated DESC", where_clause))
+}
+```
+
+Note: `Err(JrError::UserError(...).into())` converts the `JrError` into `anyhow::Error` so it matches the `Result<String>` (which is `anyhow::Result<String>`). This ensures `main.rs` can `downcast_ref::<JrError>()` to get exit code 64. 
+ +**3c.** Update the three callers in `handle_list` to add `?`: + +Line 72-76 — change: +```rust + _ => build_fallback_jql( + project_key.as_deref(), + status.as_deref(), + resolved_team.as_ref(), + ), +``` +to: +```rust + _ => build_fallback_jql( + project_key.as_deref(), + status.as_deref(), + resolved_team.as_ref(), + )?, +``` + +Line 96-100 — change: +```rust + Err(_) => build_fallback_jql( + project_key.as_deref(), + status.as_deref(), + resolved_team.as_ref(), + ), +``` +to: +```rust + Err(_) => build_fallback_jql( + project_key.as_deref(), + status.as_deref(), + resolved_team.as_ref(), + )?, +``` + +Line 103-107 — change: +```rust + build_fallback_jql( + project_key.as_deref(), + status.as_deref(), + resolved_team.as_ref(), + ) +``` +to: +```rust + build_fallback_jql( + project_key.as_deref(), + status.as_deref(), + resolved_team.as_ref(), + )? +``` + +**3d.** Update the existing tests that call `build_fallback_jql` to unwrap the `Result`. Four tests need `.unwrap()`: + +`fallback_jql_order_by_not_joined_with_and` (line 319) — change: +```rust + let jql = build_fallback_jql(Some("PROJ"), None, None); +``` +to: +```rust + let jql = build_fallback_jql(Some("PROJ"), None, None).unwrap(); +``` + +`fallback_jql_with_team_has_valid_order_by` (line 330) — change: +```rust + let jql = build_fallback_jql(Some("PROJ"), None, Some(&team)); +``` +to: +```rust + let jql = build_fallback_jql(Some("PROJ"), None, Some(&team)).unwrap(); +``` + +`fallback_jql_with_all_filters` (line 342) — change: +```rust + let jql = build_fallback_jql(Some("PROJ"), Some("In Progress"), Some(&team)); +``` +to: +```rust + let jql = build_fallback_jql(Some("PROJ"), Some("In Progress"), Some(&team)).unwrap(); +``` + +`fallback_jql_with_status_only` (line 361) — change: +```rust + let jql = build_fallback_jql(None, Some("Done"), None); +``` +to: +```rust + let jql = build_fallback_jql(None, Some("Done"), None).unwrap(); +``` + +- [ ] **Step 4: Run all tests** + +```bash +cargo test --lib +``` 
+Expected: All tests pass, including the new `fallback_jql_errors_when_no_filters`. + +- [ ] **Step 5: Run clippy and fmt** + +```bash +cargo clippy --all --all-features --tests -- -D warnings +cargo fmt --all +``` +Expected: Zero warnings, formatting clean. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "fix: reject unbounded JQL in issue list with actionable error + +build_fallback_jql now returns Result and errors with exit +code 64 when no project, status, or team filter is provided. + +Closes #16" +``` + +--- + +### Task 2: Add kanban warning to `board view` + +**Files:** +- Modify: `src/cli/board.rs:54-65` (kanban path) + +- [ ] **Step 1: Add the warning** + +In `src/cli/board.rs`, after line 56 (`let project_key = config.project_key(None);`), add: + +```rust + if project_key.is_none() { + eprintln!("warning: no project configured for board. Showing issues across all projects. Set project in .jr.toml to scope results."); + } +``` + +The full kanban block (lines 54-65) should now read: + +```rust + } else { + // Kanban: search for issues not in Done status category + let project_key = config.project_key(None); + if project_key.is_none() { + eprintln!("warning: no project configured for board. Showing issues across all projects. Set project in .jr.toml to scope results."); + } + let mut jql_parts: Vec = Vec::new(); + if let Some(ref pk) = project_key { + jql_parts.push(format!("project = \"{}\"", pk)); + } + jql_parts.push("statusCategory != Done".into()); + jql_parts.push("ORDER BY rank ASC".into()); + let jql = jql_parts.join(" AND "); + client.search_issues(&jql, None, &[]).await? + }; +``` + +- [ ] **Step 2: Run all tests** + +```bash +cargo test +``` +Expected: All tests pass (no board integration tests are affected). + +- [ ] **Step 3: Run clippy and fmt** + +```bash +cargo clippy --all --all-features --tests -- -D warnings +cargo fmt --all +``` +Expected: Zero warnings, formatting clean. 
+ +- [ ] **Step 4: Commit** + +```bash +git add src/cli/board.rs +git commit -m "fix: warn when board view kanban has no project scope + +Emits a stderr warning when no project is configured, since the +query returns issues across all projects which may be a large set." +``` diff --git a/docs/superpowers/plans/2026-03-24-assets-cmdb.md b/docs/superpowers/plans/2026-03-24-assets-cmdb.md new file mode 100644 index 0000000..db5c628 --- /dev/null +++ b/docs/superpowers/plans/2026-03-24-assets-cmdb.md @@ -0,0 +1,1528 @@ +# Assets/CMDB Support Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `jr assets search`, `jr assets view`, and `jr assets tickets` commands for querying the Atlassian Assets/CMDB API. + +**Architecture:** New `src/api/assets/` and `src/types/assets/` peer modules alongside existing `jira/` and `jsm/` directories. Assets is workspace-scoped (not project-scoped). Workspace ID discovered via `/rest/servicedeskapi/assets/workspace` and cached site-wide. Assets API calls go through the Atlassian API gateway at `api.atlassian.com/ex/jira/{cloudId}/jsm/assets/workspace/{workspaceId}/v1/` via a new `assets_base_url` field on `JiraClient`. + +**Tech Stack:** Rust, reqwest, serde (custom deserializer for `isLast` string/bool), chrono, comfy-table, wiremock (tests), clap + +**Spec:** `docs/superpowers/specs/2026-03-24-assets-cmdb-design.md` + +**Prerequisite:** PR #40 (`feat/jsm-queues`) must be merged first — this plan depends on `ServiceDeskPage`, `ProjectMeta` cache, and `api/jsm/` module already existing. 
+ +--- + +## File Structure + +| File | Responsibility | Change | +|------|---------------|--------| +| `src/api/client.rs` | Add `assets_base_url` field, `get_assets()`, `post_assets()` | Modify | +| `src/api/pagination.rs` | Add `AssetsPage` with `deserialize_bool_or_string` | Modify | +| `src/cache.rs` | Add `WorkspaceCache`, `read_workspace_cache()`, `write_workspace_cache()` | Modify | +| `src/types/assets/mod.rs` | Re-exports for Assets types | Create | +| `src/types/assets/object.rs` | `AssetObject`, `ObjectType`, `AssetAttribute`, `ObjectAttributeValue` | Create | +| `src/types/assets/ticket.rs` | `ConnectedTicketsResponse`, `ConnectedTicket` | Create | +| `src/types/mod.rs` | Add `pub mod assets;` | Modify | +| `src/api/assets/mod.rs` | Re-exports for Assets API | Create | +| `src/api/assets/workspace.rs` | Workspace ID discovery + `get_or_fetch_workspace_id()` | Create | +| `src/api/assets/objects.rs` | `search_assets()`, `get_asset()`, `resolve_object_key()` | Create | +| `src/api/assets/tickets.rs` | `get_connected_tickets()` | Create | +| `src/api/mod.rs` | Add `pub mod assets;` | Modify | +| `src/cli/mod.rs` | Add `Assets` command + `AssetsCommand` enum | Modify | +| `src/cli/assets.rs` | `handle()`, `handle_search()`, `handle_view()`, `handle_tickets()` | Create | +| `src/main.rs` | Add dispatch for `Command::Assets` | Modify | +| `tests/assets.rs` | Integration tests with wiremock | Create | + +--- + +### Task 1: AssetsPage pagination type + bool/string deserializer + +**Files:** +- Modify: `src/api/pagination.rs` + +- [ ] **Step 1: Write unit tests for `AssetsPage` and `deserialize_bool_or_string`** + +Add these tests at the bottom of the existing `#[cfg(test)] mod tests` block in `src/api/pagination.rs`: + +```rust + #[test] + fn test_assets_page_has_more() { + let page: AssetsPage = AssetsPage { + start_at: 0, + max_results: 25, + total: 50, + is_last: false, + values: vec!["a".into()], + }; + assert!(page.has_more()); + 
assert_eq!(page.next_start(), 25); + } + + #[test] + fn test_assets_page_last_page() { + let page: AssetsPage = AssetsPage { + start_at: 25, + max_results: 25, + total: 30, + is_last: true, + values: vec!["a".into()], + }; + assert!(!page.has_more()); + } + + #[test] + fn test_assets_page_deserialize_is_last_bool() { + let json = r#"{ + "startAt": 0, + "maxResults": 25, + "total": 5, + "isLast": true, + "values": ["a", "b"] + }"#; + let page: AssetsPage = serde_json::from_str(json).unwrap(); + assert!(page.is_last); + assert_eq!(page.values.len(), 2); + } + + #[test] + fn test_assets_page_deserialize_is_last_string() { + let json = r#"{ + "startAt": 0, + "maxResults": 25, + "total": 5, + "isLast": "false", + "values": ["a"] + }"#; + let page: AssetsPage = serde_json::from_str(json).unwrap(); + assert!(!page.is_last); + } + + #[test] + fn test_assets_page_deserialize_is_last_string_true() { + let json = r#"{ + "startAt": 0, + "maxResults": 25, + "total": 5, + "isLast": "true", + "values": [] + }"#; + let page: AssetsPage = serde_json::from_str(json).unwrap(); + assert!(page.is_last); + assert!(page.values.is_empty()); + } +``` + +- [ ] **Step 2: Run tests — verify they fail** + +```bash +cargo test --lib pagination -- --nocapture +``` + +Expected: compilation errors — `AssetsPage` and `deserialize_bool_or_string` don't exist. + +- [ ] **Step 3: Implement `deserialize_bool_or_string` and `AssetsPage`** + +Add at the top of `src/api/pagination.rs`, after the existing `use serde::Deserialize;`: + +```rust +use serde::de::{self, Deserializer}; +``` + +Add the deserializer function before the `#[cfg(test)]` block: + +```rust +/// Deserialize a value that may be a boolean or a string representation of a boolean. +/// The Assets API returns `isLast` as `"true"`/`"false"` (string) in some contexts +/// and `true`/`false` (boolean) in others. 
+fn deserialize_bool_or_string<'de, D>(deserializer: D) -> Result<bool, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let value: serde_json::Value = Deserialize::deserialize(deserializer)?;
+    match value {
+        serde_json::Value::Bool(b) => Ok(b),
+        serde_json::Value::String(s) => s.parse::<bool>().map_err(de::Error::custom),
+        _ => Err(de::Error::custom("expected boolean or string")),
+    }
+}
+```
+
+Add the `AssetsPage` struct and impl after the `ServiceDeskPage` impl (before `#[cfg(test)]`):
+
+```rust
+/// Pagination used by the Assets/CMDB API (`POST /object/aql`).
+///
+/// Similar to `OffsetPage` (`startAt`/`maxResults`/`total`) but uses an `isLast`
+/// boolean (which may be returned as a string) instead of computing from offsets.
+/// `total` is capped at 1000 by the API.
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct AssetsPage<T> {
+    /// Zero-based starting index.
+    #[serde(default)]
+    pub start_at: u32,
+    /// Maximum items per page.
+    #[serde(default)]
+    pub max_results: u32,
+    /// Total matching items (capped at 1000).
+    #[serde(default)]
+    pub total: u32,
+    /// Whether this is the last page. May be a bool or string in API responses.
+    #[serde(deserialize_with = "deserialize_bool_or_string")]
+    pub is_last: bool,
+    /// The items in this page.
+    #[serde(default)]
+    pub values: Vec<T>,
+}
+
+impl<T> AssetsPage<T> {
+    /// Returns true if there are more pages after this one.
+    pub fn has_more(&self) -> bool {
+        !self.is_last
+    }
+
+    /// Returns the `startAt` value for the next page.
+    pub fn next_start(&self) -> u32 {
+        self.start_at + self.max_results
+    }
+}
+```
+
+Note: using `#[serde(rename_all = "camelCase")]` at struct level (matching `OffsetPage` pattern) so `start_at` → `startAt`, `max_results` → `maxResults` automatically. The `is_last` field gets its rename from `rename_all` (→ `isLast`) plus its custom deserializer. 
+ +- [ ] **Step 4: Run tests — verify they pass** + +```bash +cargo test --lib pagination -- --nocapture +``` + +Expected: all pagination tests pass. + +- [ ] **Step 5: Commit** + +```bash +git add src/api/pagination.rs +git commit -m "feat: add AssetsPage pagination type with bool/string isLast support + +AssetsPage handles pagination from the Assets/CMDB API which returns +isLast as either a boolean or string. Custom deserialize_bool_or_string +handles both formats. Uses rename_all camelCase matching OffsetPage." +``` + +--- + +### Task 2: WorkspaceCache + +**Files:** +- Modify: `src/cache.rs` + +- [ ] **Step 1: Write unit tests for workspace cache** + +Add these tests inside the existing `#[cfg(test)] mod tests` block in `src/cache.rs`: + +```rust + #[test] + fn read_missing_workspace_cache_returns_none() { + with_temp_cache(|| { + let result = read_workspace_cache().unwrap(); + assert!(result.is_none()); + }); + } + + #[test] + fn write_then_read_workspace_cache() { + with_temp_cache(|| { + write_workspace_cache("abc-123-def").unwrap(); + + let cache = read_workspace_cache().unwrap().expect("should exist"); + assert_eq!(cache.workspace_id, "abc-123-def"); + }); + } + + #[test] + fn expired_workspace_cache_returns_none() { + with_temp_cache(|| { + let expired = WorkspaceCache { + workspace_id: "old-id".into(), + fetched_at: Utc::now() - chrono::Duration::days(8), + }; + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + let content = serde_json::to_string_pretty(&expired).unwrap(); + std::fs::write(dir.join("workspace.json"), content).unwrap(); + + let result = read_workspace_cache().unwrap(); + assert!(result.is_none(), "expired workspace cache should return None"); + }); + } +``` + +- [ ] **Step 2: Run tests — verify they fail** + +```bash +cargo test --lib cache -- --nocapture +``` + +Expected: compilation errors. 
+ +- [ ] **Step 3: Implement WorkspaceCache** + +Add after the existing `write_project_meta` function (before `#[cfg(test)]`): + +```rust +#[derive(Debug, Serialize, Deserialize)] +pub struct WorkspaceCache { + pub workspace_id: String, + pub fetched_at: DateTime, +} + +pub fn read_workspace_cache() -> Result> { + let path = cache_dir().join("workspace.json"); + if !path.exists() { + return Ok(None); + } + + let content = std::fs::read_to_string(&path)?; + let cache: WorkspaceCache = serde_json::from_str(&content)?; + + let age = Utc::now() - cache.fetched_at; + if age.num_days() >= CACHE_TTL_DAYS { + return Ok(None); + } + + Ok(Some(cache)) +} + +pub fn write_workspace_cache(workspace_id: &str) -> Result<()> { + let dir = cache_dir(); + std::fs::create_dir_all(&dir)?; + + let cache = WorkspaceCache { + workspace_id: workspace_id.to_string(), + fetched_at: Utc::now(), + }; + + let content = serde_json::to_string_pretty(&cache)?; + std::fs::write(dir.join("workspace.json"), content)?; + Ok(()) +} +``` + +- [ ] **Step 4: Run tests — verify they pass** + +```bash +cargo test --lib cache -- --nocapture +``` + +- [ ] **Step 5: Commit** + +```bash +git add src/cache.rs +git commit -m "feat: add WorkspaceCache for Assets workspace ID + +Site-wide cache in ~/.cache/jr/workspace.json with 7-day TTL. +write_workspace_cache() sets fetched_at internally matching +write_team_cache() pattern." +``` + +--- + +### Task 3: JiraClient — assets_base_url + get_assets/post_assets + +**Files:** +- Modify: `src/api/client.rs` + +- [ ] **Step 1: Add `assets_base_url` field and update constructors** + +In `src/api/client.rs`, add `assets_base_url: Option` to the `JiraClient` struct: + +```rust +pub struct JiraClient { + client: Client, + base_url: String, + instance_url: String, + auth_header: String, + verbose: bool, + assets_base_url: Option, +} +``` + +In `from_config()`, construct `assets_base_url` from `cloud_id` after the existing `Ok(Self { ... })` block. Replace the `Ok(Self { ... 
})` with: + +```rust + let assets_base_url = config + .global + .instance + .cloud_id + .as_ref() + .map(|cloud_id| { + format!( + "https://api.atlassian.com/ex/jira/{}/jsm/assets", + cloud_id + ) + }); + + Ok(Self { + client, + base_url, + instance_url, + auth_header, + verbose, + assets_base_url, + }) +``` + +In `new_for_test()`, set `assets_base_url` to route to wiremock: + +```rust + pub fn new_for_test(base_url: String, auth_header: String) -> Self { + let assets_base_url = Some(format!("{}/jsm/assets", &base_url)); + Self { + client: Client::new(), + instance_url: base_url.clone(), + base_url, + auth_header, + verbose: false, + assets_base_url, + } + } +``` + +- [ ] **Step 2: Add `get_assets()` and `post_assets()` methods** + +Add after `post_to_instance()`: + +```rust + /// Perform a GET request against the Assets/CMDB API gateway. + /// + /// Constructs URL: `{assets_base_url}/workspace/{workspace_id}/v1/{path}`. + /// Requires `cloud_id` in config (set during `jr init`). + pub async fn get_assets( + &self, + workspace_id: &str, + path: &str, + ) -> anyhow::Result { + let base = self.assets_base_url.as_ref().ok_or_else(|| { + JrError::ConfigError( + "Cloud ID not configured. Run \"jr init\" to set up your instance.".into(), + ) + })?; + let url = format!( + "{}/workspace/{}/v1/{}", + base, + urlencoding::encode(workspace_id), + path + ); + let request = self.client.get(&url); + let response = self.send(request).await?; + Ok(response.json::().await?) + } + + /// Perform a POST request against the Assets/CMDB API gateway. + pub async fn post_assets( + &self, + workspace_id: &str, + path: &str, + body: &B, + ) -> anyhow::Result { + let base = self.assets_base_url.as_ref().ok_or_else(|| { + JrError::ConfigError( + "Cloud ID not configured. 
Run \"jr init\" to set up your instance.".into(), + ) + })?; + let url = format!( + "{}/workspace/{}/v1/{}", + base, + urlencoding::encode(workspace_id), + path + ); + let request = self.client.post(&url).json(body); + let response = self.send(request).await?; + Ok(response.json::().await?) + } +``` + +- [ ] **Step 3: Verify it compiles** + +```bash +cargo build +``` + +- [ ] **Step 4: Run all existing tests to verify nothing broke** + +```bash +cargo test +``` + +Expected: all existing tests pass. The `new_for_test()` change adds a field but doesn't break existing test behavior. + +- [ ] **Step 5: Run clippy and fmt** + +```bash +cargo clippy -- -D warnings +cargo fmt --all +``` + +- [ ] **Step 6: Commit** + +```bash +git add src/api/client.rs +git commit -m "feat: add assets_base_url, get_assets, post_assets to JiraClient + +Third base URL pattern for Assets/CMDB API at api.atlassian.com gateway. +Constructed from cloud_id in config. new_for_test routes to wiremock. +URL-encodes workspace_id in path segments." 
+```
+
+---
+
+### Task 4: Assets types (AssetObject, ConnectedTicket)
+
+**Files:**
+- Create: `src/types/assets/mod.rs`
+- Create: `src/types/assets/object.rs`
+- Create: `src/types/assets/ticket.rs`
+- Modify: `src/types/mod.rs`
+
+- [ ] **Step 1: Create `src/types/assets/object.rs`**
+
+```rust
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct AssetObject {
+    pub id: String,
+    pub label: String,
+    #[serde(rename = "objectKey")]
+    pub object_key: String,
+    #[serde(rename = "objectType")]
+    pub object_type: ObjectType,
+    pub created: Option<String>,
+    pub updated: Option<String>,
+    #[serde(default)]
+    pub attributes: Vec<AssetAttribute>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectType {
+    pub id: String,
+    pub name: String,
+    pub description: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct AssetAttribute {
+    pub id: String,
+    #[serde(rename = "objectTypeAttributeId")]
+    pub object_type_attribute_id: String,
+    #[serde(rename = "objectAttributeValues", default)]
+    pub values: Vec<ObjectAttributeValue>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectAttributeValue {
+    pub value: Option<String>,
+    #[serde(rename = "displayValue")]
+    pub display_value: Option<String>,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn deserialize_asset_object_minimal() {
+        let json = r#"{
+            "id": "88",
+            "label": "Acme Corp",
+            "objectKey": "OBJ-88",
+            "objectType": { "id": "23", "name": "Client" }
+        }"#;
+        let obj: AssetObject = serde_json::from_str(json).unwrap();
+        assert_eq!(obj.id, "88");
+        assert_eq!(obj.label, "Acme Corp");
+        assert_eq!(obj.object_key, "OBJ-88");
+        assert_eq!(obj.object_type.name, "Client");
+        assert!(obj.attributes.is_empty());
+        assert!(obj.created.is_none());
+    }
+
+    #[test]
+    fn deserialize_asset_object_with_attributes() {
+        let json = r#"{
+            "id": "88",
+            "label": "Acme Corp",
+            "objectKey": "OBJ-88",
+            "objectType": { "id": "23", "name": "Client" },
+            "created": "2025-12-17T14:58:00.000Z",
+            "updated": "2026-01-29T19:52:00.000Z",
+            "attributes": [
+                {
+                    "id": "637",
+                    "objectTypeAttributeId": "134",
+                    "objectAttributeValues": [
+                        { "value": "contact@acme.com", "displayValue": "contact@acme.com" }
+                    ]
+                }
+            ]
+        }"#;
+        let obj: AssetObject = serde_json::from_str(json).unwrap();
+        assert_eq!(obj.attributes.len(), 1);
+        assert_eq!(
+            obj.attributes[0].values[0].display_value.as_deref(),
+            Some("contact@acme.com")
+        );
+    }
+}
+```
+
+- [ ] **Step 2: Create `src/types/assets/ticket.rs`**
+
+```rust
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ConnectedTicketsResponse {
+    #[serde(default)]
+    pub tickets: Vec<ConnectedTicket>,
+    #[serde(rename = "allTicketsQuery")]
+    pub all_tickets_query: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ConnectedTicket {
+    pub key: String,
+    pub id: String,
+    pub title: String,
+    pub reporter: Option<String>,
+    pub created: Option<String>,
+    pub updated: Option<String>,
+    pub status: Option<TicketStatus>,
+    #[serde(rename = "type")]
+    pub issue_type: Option<TicketType>,
+    pub priority: Option<TicketPriority>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TicketStatus {
+    pub name: String,
+    #[serde(rename = "colorName")]
+    pub color_name: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TicketType {
+    pub name: String,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TicketPriority {
+    pub name: String,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn deserialize_connected_tickets_response() {
+        let json = r#"{
+            "tickets": [
+                {
+                    "key": "PROJ-42",
+                    "id": "10968",
+                    "title": "VPN access not working",
+                    "reporter": "abc123",
+                    "created": "2026-02-17T18:31:56.953Z",
+                    "updated": "2026-03-22T18:59:23.333Z",
+                    "status": { "name": "In Progress", "colorName": "yellow" },
+                    "type": { "name": "Service Request" },
+                    "priority": { "name": "High" }
+                }
+            ],
+            "allTicketsQuery": "issueFunction in assetsObject(\"objectId = 88\")"
+        }"#;
+        let resp: ConnectedTicketsResponse = serde_json::from_str(json).unwrap();
+        assert_eq!(resp.tickets.len(), 1);
+        assert_eq!(resp.tickets[0].key, "PROJ-42");
+        assert_eq!(resp.tickets[0].title, "VPN access not working");
+        assert_eq!(resp.tickets[0].status.as_ref().unwrap().name, "In Progress");
+        assert!(resp.all_tickets_query.is_some());
+    }
+
+    #[test]
+    fn deserialize_empty_tickets() {
+        let json = r#"{ "tickets": [] }"#;
+        let resp: ConnectedTicketsResponse = serde_json::from_str(json).unwrap();
+        assert!(resp.tickets.is_empty());
+        assert!(resp.all_tickets_query.is_none());
+    }
+}
+```
+
+- [ ] **Step 3: Create `src/types/assets/mod.rs`**
+
+```rust
+pub mod object;
+pub mod ticket;
+
+pub use object::*;
+pub use ticket::*;
+```
+
+- [ ] **Step 4: Add `pub mod assets;` to `src/types/mod.rs`**
+
+```rust
+pub mod assets;
+pub mod jira;
+pub mod jsm;
+```
+
+- [ ] **Step 5: Verify and run tests**
+
+```bash
+cargo build
+cargo test --lib types::assets -- --nocapture
+```
+
+Expected: all 4 type tests pass.
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/types/assets/ src/types/mod.rs
+git commit -m "feat: add Assets types for AssetObject and ConnectedTicket
+
+AssetObject supports optional attributes with displayValue.
+ConnectedTicket has different field names than platform Issue
+(title not summary, type not issuetype). Includes unit tests."
+```
+
+---
+
+### Task 5: Assets API — workspace discovery + object methods
+
+**Files:**
+- Create: `src/api/assets/mod.rs`
+- Create: `src/api/assets/workspace.rs`
+- Create: `src/api/assets/objects.rs`
+- Create: `src/api/assets/tickets.rs`
+- Modify: `src/api/mod.rs`
+
+- [ ] **Step 1: Create `src/api/assets/mod.rs`**
+
+```rust
+pub mod objects;
+pub mod tickets;
+pub mod workspace;
+```
+
+- [ ] **Step 2: Add `pub mod assets;` to `src/api/mod.rs`**
+
+```rust
+pub mod assets;
+pub mod auth;
+pub mod client;
+pub mod jira;
+pub mod jsm;
+pub mod pagination;
+pub mod rate_limit;
+```
+
+- [ ] **Step 3: Create `src/api/assets/workspace.rs`**
+
+```rust
+use anyhow::Result;
+use serde::Deserialize;
+
+use crate::api::client::JiraClient;
+use crate::cache;
+use crate::error::JrError;
+
+#[derive(Deserialize)]
+struct WorkspaceResponse {
+    #[serde(rename = "workspaceId")]
+    workspace_id: String,
+}
+
+/// Get the Assets workspace ID, using cache when available.
+///
+/// 1. Check cache — return if fresh.
+/// 2. GET /rest/servicedeskapi/assets/workspace on instance URL.
+/// 3. Cache and return.
+pub async fn get_or_fetch_workspace_id(client: &JiraClient) -> Result<String> {
+    if let Some(cached) = cache::read_workspace_cache()? {
+        return Ok(cached.workspace_id);
+    }
+
+    let resp: WorkspaceResponse = client
+        .get_from_instance("/rest/servicedeskapi/assets/workspace")
+        .await
+        .map_err(|e| {
+            // If the endpoint doesn't exist or is forbidden, Assets isn't available
+            if e.to_string().contains("404") || e.to_string().contains("403") {
+                JrError::UserError(
+                    "Assets is not available on this Jira site. \
+                     Assets requires Jira Service Management Premium or Enterprise."
+                        .into(),
+                )
+                .into()
+            } else {
+                e
+            }
+        })?;
+
+    let _ = cache::write_workspace_cache(&resp.workspace_id);
+
+    Ok(resp.workspace_id)
+}
+```
+
+- [ ] **Step 4: Create `src/api/assets/objects.rs`**
+
+```rust
+use anyhow::Result;
+
+use crate::api::client::JiraClient;
+use crate::api::pagination::AssetsPage;
+use crate::error::JrError;
+use crate::types::assets::AssetObject;
+
+impl JiraClient {
+    /// Search assets via AQL with auto-pagination.
+    pub async fn search_assets(
+        &self,
+        workspace_id: &str,
+        aql: &str,
+        limit: Option<u32>,
+        include_attributes: bool,
+    ) -> Result<Vec<AssetObject>> {
+        let mut all = Vec::new();
+        let mut start_at = 0u32;
+        let max_page_size = 25u32;
+
+        loop {
+            let page_size = match limit {
+                Some(cap) => {
+                    let remaining = cap.saturating_sub(all.len() as u32);
+                    if remaining == 0 {
+                        break;
+                    }
+                    remaining.min(max_page_size)
+                }
+                None => max_page_size,
+            };
+
+            let path = format!(
+                "object/aql?startAt={}&maxResults={}&includeAttributes={}",
+                start_at, page_size, include_attributes
+            );
+            let body = serde_json::json!({ "qlQuery": aql });
+            let page: AssetsPage<AssetObject> =
+                self.post_assets(workspace_id, &path, &body).await?;
+            let has_more = page.has_more();
+            let next = page.next_start();
+            all.extend(page.values);
+
+            if let Some(cap) = limit {
+                if all.len() >= cap as usize {
+                    all.truncate(cap as usize);
+                    break;
+                }
+            }
+            if !has_more {
+                break;
+            }
+            start_at = next;
+        }
+        Ok(all)
+    }
+
+    /// Get a single asset by its numeric ID.
+    pub async fn get_asset(
+        &self,
+        workspace_id: &str,
+        object_id: &str,
+        include_attributes: bool,
+    ) -> Result<AssetObject> {
+        let path = format!(
+            "object/{}?includeAttributes={}",
+            urlencoding::encode(object_id),
+            include_attributes
+        );
+        self.get_assets(workspace_id, &path).await
+    }
+}
+
+/// Resolve an object key (e.g., "OBJ-1") to its numeric ID.
+/// If the input is purely numeric, returns it as-is.
+pub async fn resolve_object_key(
+    client: &JiraClient,
+    workspace_id: &str,
+    key_or_id: &str,
+) -> Result<String> {
+    // If purely numeric, treat as ID directly
+    if key_or_id.chars().all(|c| c.is_ascii_digit()) {
+        return Ok(key_or_id.to_string());
+    }
+
+    // Resolve via AQL
+    let results = client
+        .search_assets(
+            workspace_id,
+            &format!("objectKey = \"{}\"", key_or_id),
+            Some(1),
+            false,
+        )
+        .await?;
+
+    results
+        .into_iter()
+        .next()
+        .map(|obj| obj.id)
+        .ok_or_else(|| {
+            JrError::UserError(format!(
+                "No asset matching \"{}\" found. Check the object key and try again.",
+                key_or_id
+            ))
+            .into()
+        })
+}
+```
+
+- [ ] **Step 5: Create `src/api/assets/tickets.rs`**
+
+```rust
+use anyhow::Result;
+
+use crate::api::client::JiraClient;
+use crate::types::assets::ConnectedTicketsResponse;
+
+impl JiraClient {
+    /// Get Jira issues connected to an asset object.
+    /// No pagination — returns all connected tickets in one response.
+    pub async fn get_connected_tickets(
+        &self,
+        workspace_id: &str,
+        object_id: &str,
+    ) -> Result<ConnectedTicketsResponse> {
+        let path = format!(
+            "objectconnectedtickets/{}/tickets",
+            urlencoding::encode(object_id)
+        );
+        self.get_assets(workspace_id, &path).await
+    }
+}
+```
+
+- [ ] **Step 6: Verify it compiles**
+
+```bash
+cargo build
+```
+
+- [ ] **Step 7: Commit**
+
+```bash
+git add src/api/assets/ src/api/mod.rs
+git commit -m "feat: add Assets API — workspace discovery, AQL search, connected tickets
+
+get_or_fetch_workspace_id() discovers and caches workspace ID.
+search_assets() auto-paginates AQL queries via AssetsPage.
+get_asset() fetches single object by ID.
+resolve_object_key() resolves object keys to numeric IDs via AQL.
+get_connected_tickets() returns issues linked to an asset.
+All use get_assets/post_assets on api.atlassian.com gateway."
+``` + +--- + +### Task 6: Integration tests + +**Files:** +- Create: `tests/assets.rs` + +- [ ] **Step 1: Create integration tests** + +```rust +#[allow(dead_code)] +mod common; + +use serde_json::json; +use wiremock::matchers::{body_json, method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn search_assets_returns_objects() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("startAt", "0")) + .and(query_param("maxResults", "25")) + .and(query_param("includeAttributes", "false")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + }, + { + "id": "71", + "label": "Globex Inc", + "objectKey": "OBJ-71", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client.search_assets("ws-123", "objectType = Client", None, false).await.unwrap(); + assert_eq!(results.len(), 2); + assert_eq!(results[0].label, "Acme Corp"); + assert_eq!(results[1].object_key, "OBJ-71"); +} + +#[tokio::test] +async fn search_assets_empty() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 0, + "isLast": true, + "values": [] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client.search_assets("ws-123", "objectType = Nonexistent", None, false).await.unwrap(); + 
assert!(results.is_empty()); +} + +#[tokio::test] +async fn search_assets_with_limit() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("maxResults", "1")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 1, + "total": 5, + "isLast": false, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client.search_assets("ws-123", "objectType = Client", Some(1), false).await.unwrap(); + assert_eq!(results.len(), 1); +} + +#[tokio::test] +async fn search_assets_is_last_as_string() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": "true", + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client.search_assets("ws-123", "objectType = Client", None, false).await.unwrap(); + assert_eq!(results.len(), 1); +} + +#[tokio::test] +async fn get_asset_returns_object() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/70")) + .and(query_param("includeAttributes", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" }, + "created": 
"2025-12-17T14:58:00.000Z", + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "contact@acme.com", "displayValue": "contact@acme.com" } + ] + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let obj = client.get_asset("ws-123", "70", true).await.unwrap(); + assert_eq!(obj.label, "Acme Corp"); + assert_eq!(obj.attributes.len(), 1); +} + +#[tokio::test] +async fn get_connected_tickets_returns_tickets() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectconnectedtickets/70/tickets")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "tickets": [ + { + "key": "PROJ-42", + "id": "10968", + "title": "VPN access not working", + "status": { "name": "In Progress", "colorName": "yellow" }, + "type": { "name": "Service Request" }, + "priority": { "name": "High" } + } + ], + "allTicketsQuery": "issueFunction in assetsObject(\"objectId = 70\")" + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let resp = client.get_connected_tickets("ws-123", "70").await.unwrap(); + assert_eq!(resp.tickets.len(), 1); + assert_eq!(resp.tickets[0].key, "PROJ-42"); + assert_eq!(resp.tickets[0].title, "VPN access not working"); + assert!(resp.all_tickets_query.is_some()); +} + +#[tokio::test] +async fn get_connected_tickets_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectconnectedtickets/99/tickets")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "tickets": [] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let resp = client.get_connected_tickets("ws-123", 
"99").await.unwrap(); + assert!(resp.tickets.is_empty()); +} +``` + +- [ ] **Step 2: Run tests** + +```bash +cargo test --test assets -- --nocapture +``` + +Expected: all 7 tests pass. + +- [ ] **Step 3: Commit** + +```bash +git add tests/assets.rs +git commit -m "test: add integration tests for Assets API + +Tests AQL search (normal, empty, limit, isLast as string), +get object with attributes, connected tickets (normal + empty). +All use wiremock with assets_base_url routing." +``` + +--- + +### Task 7: CLI — assets commands + +**Files:** +- Create: `src/cli/assets.rs` +- Modify: `src/cli/mod.rs` +- Modify: `src/main.rs` + +- [ ] **Step 1: Add `AssetsCommand` enum to `src/cli/mod.rs`** + +Add `pub mod assets;` to the module declarations (alphabetical): + +```rust +pub mod assets; +pub mod auth; +pub mod board; +... +``` + +Add `Assets` variant to `Command` enum (alphabetical, before `Auth`): + +```rust + /// Manage Assets/CMDB objects + Assets { + #[command(subcommand)] + command: AssetsCommand, + }, +``` + +Add `AssetsCommand` enum (before `AuthCommand`): + +```rust +#[derive(Subcommand)] +pub enum AssetsCommand { + /// Search assets with AQL query + Search { + /// AQL query (e.g. "objectType = Client") + query: String, + /// Maximum number of results + #[arg(long)] + limit: Option, + /// Include object attributes in output + #[arg(long)] + attributes: bool, + }, + /// View asset details + View { + /// Object key (e.g. OBJ-1) or numeric ID + key: String, + /// Include object attributes in output + #[arg(long)] + attributes: bool, + }, + /// Show Jira issues connected to an asset + Tickets { + /// Object key (e.g. 
OBJ-1) or numeric ID + key: String, + /// Maximum number of tickets to show + #[arg(long)] + limit: Option, + }, +} +``` + +- [ ] **Step 2: Create `src/cli/assets.rs`** + +```rust +use anyhow::Result; + +use crate::api::assets::{objects, workspace}; +use crate::api::client::JiraClient; +use crate::cli::{AssetsCommand, OutputFormat}; +use crate::output; + +pub async fn handle( + command: AssetsCommand, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let workspace_id = workspace::get_or_fetch_workspace_id(client).await?; + + match command { + AssetsCommand::Search { + query, + limit, + attributes, + } => handle_search(&workspace_id, &query, limit, attributes, output_format, client).await, + AssetsCommand::View { key, attributes } => { + handle_view(&workspace_id, &key, attributes, output_format, client).await + } + AssetsCommand::Tickets { key, limit } => { + handle_tickets(&workspace_id, &key, limit, output_format, client).await + } + } +} + +async fn handle_search( + workspace_id: &str, + query: &str, + limit: Option, + attributes: bool, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let objects = client + .search_assets(workspace_id, query, limit, attributes) + .await?; + + let rows: Vec> = objects + .iter() + .map(|o| { + vec![ + o.object_key.clone(), + o.object_type.name.clone(), + o.label.clone(), + ] + }) + .collect(); + + output::print_output(output_format, &["Key", "Type", "Name"], &rows, &objects) +} + +async fn handle_view( + workspace_id: &str, + key: &str, + attributes: bool, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let object_id = objects::resolve_object_key(client, workspace_id, key).await?; + let object = client.get_asset(workspace_id, &object_id, attributes).await?; + + match output_format { + OutputFormat::Json => { + println!("{}", output::render_json(&object)?); + } + OutputFormat::Table => { + let mut rows = vec![ + vec!["Key".into(), 
object.object_key.clone()], + vec!["Type".into(), object.object_type.name.clone()], + vec!["Name".into(), object.label.clone()], + ]; + + if let Some(ref created) = object.created { + rows.push(vec!["Created".into(), created.clone()]); + } + if let Some(ref updated) = object.updated { + rows.push(vec!["Updated".into(), updated.clone()]); + } + + println!("{}", output::render_table(&["Field", "Value"], &rows)); + + if attributes && !object.attributes.is_empty() { + println!(); + let attr_rows: Vec> = object + .attributes + .iter() + .flat_map(|attr| { + attr.values.iter().map(move |v| { + vec![ + attr.object_type_attribute_id.clone(), + v.display_value + .clone() + .or_else(|| v.value.clone()) + .unwrap_or_default(), + ] + }) + }) + .collect(); + println!( + "{}", + output::render_table(&["Attribute ID", "Value"], &attr_rows) + ); + } + } + } + Ok(()) +} + +async fn handle_tickets( + workspace_id: &str, + key: &str, + limit: Option, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let object_id = objects::resolve_object_key(client, workspace_id, key).await?; + let resp = client + .get_connected_tickets(workspace_id, &object_id) + .await?; + + let tickets = match limit { + Some(n) => resp.tickets.into_iter().take(n as usize).collect::>(), + None => resp.tickets, + }; + + let rows: Vec> = tickets + .iter() + .map(|t| { + vec![ + t.key.clone(), + t.issue_type + .as_ref() + .map(|it| it.name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + t.title.clone(), + t.status + .as_ref() + .map(|s| s.name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + t.priority + .as_ref() + .map(|p| p.name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + ] + }) + .collect(); + + output::print_output( + output_format, + &["Key", "Type", "Title", "Status", "Priority"], + &rows, + &tickets, + ) +} +``` + +- [ ] **Step 3: Add dispatch in `src/main.rs`** + +Add the `Assets` arm to the match block, before `Auth`: + +```rust + cli::Command::Assets { command } => 
{ + let config = config::Config::load()?; + let client = api::client::JiraClient::from_config(&config, cli.verbose)?; + cli::assets::handle(command, &cli.output, &client).await + } +``` + +- [ ] **Step 4: Verify it compiles and all tests pass** + +```bash +cargo build +cargo test +cargo clippy -- -D warnings +cargo fmt --all +``` + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/assets.rs src/cli/mod.rs src/main.rs +git commit -m "feat: add jr assets search, view, and tickets commands + +New top-level assets command for CMDB operations. +- jr assets search : search objects via AQL with pagination +- jr assets view : view object details with optional attributes +- jr assets tickets : show connected Jira issues +- Object key resolution via AQL (keys like OBJ-1 to numeric IDs) +- Workspace ID auto-discovered and cached +- JSON output support via --output json" +``` + +--- + +### Task 8: Update README and CLAUDE.md + +**Files:** +- Modify: `README.md` +- Modify: `CLAUDE.md` + +- [ ] **Step 1: Update README.md** + +Add asset commands to the command table after the queue entries: + +```markdown +| `jr assets search ` | Search assets via AQL query | +| `jr assets view ` | View asset details (key or numeric ID) | +| `jr assets tickets ` | Show Jira issues connected to an asset | +``` + +- [ ] **Step 2: Update CLAUDE.md** + +In the `src/` tree: +- Add `assets.rs` under `cli/` +- Add `api/assets/` section +- Add `types/assets/` line +- Update `cache.rs` description + +- [ ] **Step 3: Commit** + +```bash +git add README.md CLAUDE.md +git commit -m "docs: add assets commands to README and CLAUDE.md + +Document jr assets search, view, and tickets. Update architecture +with api/assets/ and types/assets/ directories." 
+``` + +--- + +### Task 9: Object key resolution unit tests + +**Files:** +- Modify: `src/api/assets/objects.rs` + +- [ ] **Step 1: Add unit test for numeric ID detection** + +Add at the bottom of `src/api/assets/objects.rs`: + +```rust +#[cfg(test)] +mod tests { + #[test] + fn numeric_id_detected() { + assert!("123".chars().all(|c| c.is_ascii_digit())); + assert!("0".chars().all(|c| c.is_ascii_digit())); + } + + #[test] + fn object_key_not_numeric() { + assert!(!"OBJ-1".chars().all(|c| c.is_ascii_digit())); + assert!(!"SCHEMA-88".chars().all(|c| c.is_ascii_digit())); + assert!(!"abc".chars().all(|c| c.is_ascii_digit())); + } + + #[test] + fn empty_string_is_numeric() { + // Empty string passes chars().all() vacuously — edge case + // resolve_object_key would treat "" as numeric ID, which is harmless + // as the API will return 404 + assert!("".chars().all(|c| c.is_ascii_digit())); + } +} +``` + +- [ ] **Step 2: Run tests** + +```bash +cargo test --lib api::assets -- --nocapture +``` + +- [ ] **Step 3: Commit** + +```bash +git add src/api/assets/objects.rs +git commit -m "test: add unit tests for object key vs numeric ID detection" +``` diff --git a/docs/superpowers/plans/2026-03-24-default-result-limit.md b/docs/superpowers/plans/2026-03-24-default-result-limit.md new file mode 100644 index 0000000..2b3ecdc --- /dev/null +++ b/docs/superpowers/plans/2026-03-24-default-result-limit.md @@ -0,0 +1,702 @@ +# Default Result Limit Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add a default 30-result limit to `jr issue list` with `--all` flag for unlimited, and a truncation message showing approximate total count. 
+ +**Architecture:** Modify `search_issues()` to return `SearchResult { issues, has_more }`, add `approximate_count()` API method, add `--all` flag to CLI with `conflicts_with = "limit"`, and print truncation hint to stderr when results are capped. Add `strip_order_by()` helper to the `jql` module for count queries. + +**Tech Stack:** Rust, clap 4 (derive), reqwest, serde, wiremock (tests), assert_cmd + predicates (integration tests) + +**Spec:** `docs/superpowers/specs/2026-03-24-default-result-limit-design.md` + +--- + +## File Structure + +| File | Responsibility | Change | +|------|---------------|--------| +| `src/api/jira/issues.rs` | Issue search + count API methods | Add `SearchResult` struct, change `search_issues()` return type, add `approximate_count()` + `ApproximateCountResponse` | +| `src/jql.rs` | JQL string utilities | Add `strip_order_by()` function | +| `src/cli/mod.rs` | CLI argument definitions | Add `all: bool` to `IssueCommand::List` | +| `src/cli/issue/list.rs` | Issue list handler | Add `DEFAULT_LIMIT`, effective limit resolution, truncation message logic | +| `src/cli/board.rs` | Board view handler | Update `search_issues()` caller to destructure `SearchResult` | +| `tests/issue_commands.rs` | Integration tests for issue API | Update `search_issues()` callers, add new tests | +| `tests/common/fixtures.rs` | Test fixtures | Add `issue_search_response_with_next_page()` and `approximate_count_response()` helpers | + +**Not changed:** `src/cli/issue/assets.rs` — verified it calls `get_issue()`, not `search_issues()`, so no changes needed. + +--- + +### Task 1: Add `SearchResult` struct, update `search_issues()`, and fix all callers + +This task must be atomic — changing the return type and updating all callers in one commit to maintain a compilable state. 
+
+**Files:**
+- Modify: `src/api/jira/issues.rs`
+- Modify: `src/cli/issue/list.rs`
+- Modify: `src/cli/board.rs`
+- Modify: `tests/issue_commands.rs`
+
+- [ ] **Step 1: Write unit tests for `SearchResult`**
+
+Add at the bottom of `src/api/jira/issues.rs`:
+
+```rust
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn search_result_has_more_false_when_no_truncation() {
+        let result = SearchResult {
+            issues: vec![],
+            has_more: false,
+        };
+        assert!(!result.has_more);
+    }
+
+    #[test]
+    fn search_result_has_more_true_when_truncated() {
+        let result = SearchResult {
+            issues: vec![],
+            has_more: true,
+        };
+        assert!(result.has_more);
+    }
+}
+```
+
+- [ ] **Step 2: Add `SearchResult` struct and update `search_issues()`**
+
+Add the struct above the `impl JiraClient` block in `src/api/jira/issues.rs`:
+
+```rust
+/// Result of a paginated issue search, including whether more results exist.
+pub struct SearchResult {
+    pub issues: Vec<Issue>,
+    pub has_more: bool,
+}
+```
+
+Change the `search_issues()` signature from `Result<Vec<Issue>>` to `Result<SearchResult>`.
+
+Replace the entire function body with:
+
+```rust
+    pub async fn search_issues(
+        &self,
+        jql: &str,
+        limit: Option<u32>,
+        extra_fields: &[&str],
+    ) -> Result<SearchResult> {
+        let max_per_page = limit.unwrap_or(50).min(100);
+        let mut all_issues: Vec<Issue> = Vec::new();
+        let mut next_page_token: Option<String> = None;
+
+        let mut fields = vec![
+            "summary",
+            "status",
+            "issuetype",
+            "priority",
+            "assignee",
+            "project",
+            "description",
+        ];
+        fields.extend_from_slice(extra_fields);
+
+        let mut more_available = false;
+
+        loop {
+            let mut body = serde_json::json!({
+                "jql": jql,
+                "maxResults": max_per_page,
+                "fields": fields
+            });
+
+            if let Some(ref token) = next_page_token {
+                body["nextPageToken"] = serde_json::json!(token);
+            }
+
+            let page: CursorPage = self.post("/rest/api/3/search/jql", &body).await?;
+
+            let page_has_more = page.has_more();
+            let token = page.next_page_token.clone();
+            all_issues.extend(page.issues);
+
+            if let Some(max) = limit {
+                if all_issues.len() >= max as usize {
+                    more_available = all_issues.len() > max as usize || page_has_more;
+                    all_issues.truncate(max as usize);
+                    break;
+                }
+            }
+
+            if !page_has_more {
+                break;
+            }
+
+            next_page_token = token;
+        }
+
+        Ok(SearchResult {
+            issues: all_issues,
+            has_more: more_available,
+        })
+    }
+```
+
+- [ ] **Step 3: Update all callers**
+
+In `src/cli/issue/list.rs` at line 147, change:
+
+```rust
+    let issues = client.search_issues(&effective_jql, limit, &extra).await?;
+```
+
+to:
+
+```rust
+    let search_result = client.search_issues(&effective_jql, limit, &extra).await?;
+    let issues = search_result.issues;
+```
+
+In `src/cli/board.rs` at line 69, change:
+
+```rust
+    client.search_issues(&jql, None, &[]).await?
+``` + +to: + +```rust + client.search_issues(&jql, None, &[]).await?.issues +``` + +In `tests/issue_commands.rs`, update `test_search_issues` (lines 24-29): + +```rust + let result = client + .search_issues("assignee = currentUser()", None, &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 1); + assert_eq!(result.issues[0].key, "FOO-1"); + assert!(!result.has_more); +``` + +Update `test_search_issues_with_story_points` (lines 99-109): + +```rust + let result = client + .search_issues("project = FOO", None, &["customfield_10031"]) + .await + .unwrap(); + + assert_eq!(result.issues.len(), 2); + assert_eq!( + result.issues[0].fields.story_points("customfield_10031"), + Some(5.0) + ); + assert_eq!(result.issues[1].fields.story_points("customfield_10031"), None); + assert!(!result.has_more); +``` + +- [ ] **Step 4: Run all tests to verify compilation and correctness** + +Run: `cargo test --all-features` +Expected: ALL PASS + +- [ ] **Step 5: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 6: Commit** + +```bash +git add src/api/jira/issues.rs src/cli/issue/list.rs src/cli/board.rs tests/issue_commands.rs +git commit -m "feat: add SearchResult struct, update search_issues return type and all callers" +``` + +--- + +### Task 2: Add `strip_order_by()` JQL helper + +**Files:** +- Modify: `src/jql.rs` + +- [ ] **Step 1: Write unit tests for `strip_order_by()`** + +Add these tests inside the first `mod tests` block in `src/jql.rs` (after the existing `trailing_backslash` test, before the closing `}`). The `strip_order_by` function should be placed after the `escape_value` function (after line 8, before the first `#[cfg(test)]` at line 10). 
+ +```rust + #[test] + fn strip_order_by_removes_clause() { + assert_eq!( + strip_order_by("project = PROJ ORDER BY updated DESC"), + "project = PROJ" + ); + } + + #[test] + fn strip_order_by_no_clause() { + assert_eq!(strip_order_by("project = PROJ"), "project = PROJ"); + } + + #[test] + fn strip_order_by_case_insensitive() { + assert_eq!( + strip_order_by("project = PROJ order by rank ASC"), + "project = PROJ" + ); + } + + #[test] + fn strip_order_by_trims_whitespace() { + assert_eq!( + strip_order_by("project = PROJ ORDER BY rank ASC"), + "project = PROJ" + ); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib strip_order_by` +Expected: FAIL — `strip_order_by` not found + +- [ ] **Step 3: Implement `strip_order_by()`** + +Add to `src/jql.rs` after the `escape_value` function (after line 8), before the first `#[cfg(test)]` block (line 10): + +```rust +/// Strip `ORDER BY` clause from JQL for use with count-only endpoints. +/// +/// The approximate-count endpoint only needs the WHERE clause. ORDER BY is +/// meaningless for a count and may cause issues with bounded-JQL validation. 
+pub fn strip_order_by(jql: &str) -> &str { + let upper = jql.to_uppercase(); + if let Some(pos) = upper.find(" ORDER BY") { + jql[..pos].trim_end() + } else { + jql + } +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test --lib strip_order_by` +Expected: PASS + +- [ ] **Step 5: Commit** + +```bash +git add src/jql.rs +git commit -m "feat: add strip_order_by JQL helper for count queries" +``` + +--- + +### Task 3: Add `approximate_count()` API method + +**Files:** +- Modify: `src/api/jira/issues.rs` + +- [ ] **Step 1: Write unit tests for `ApproximateCountResponse` deserialization** + +Add to the existing `mod tests` block in `src/api/jira/issues.rs`: + +```rust + #[test] + fn approximate_count_response_deserializes() { + let json = r#"{"count": 1234}"#; + let resp: ApproximateCountResponse = serde_json::from_str(json).unwrap(); + assert_eq!(resp.count, 1234); + } + + #[test] + fn approximate_count_response_zero() { + let json = r#"{"count": 0}"#; + let resp: ApproximateCountResponse = serde_json::from_str(json).unwrap(); + assert_eq!(resp.count, 0); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib approximate_count_response` +Expected: FAIL — `ApproximateCountResponse` not found + +- [ ] **Step 3: Add `ApproximateCountResponse` and `approximate_count()` method** + +Add `use serde::Deserialize;` to the file-level imports at the top of `src/api/jira/issues.rs` (alongside the existing `use` statements). + +Add the response struct above the `impl JiraClient` block (file-private — no `pub`): + +```rust +#[derive(Deserialize)] +struct ApproximateCountResponse { + count: u64, +} +``` + +Add the method inside the `impl JiraClient` block, after `search_issues()`: + +```rust + /// Get an approximate count of issues matching a JQL query. + /// + /// Uses the dedicated count endpoint which is lightweight (no issue data fetched). 
+ /// The JQL should not include ORDER BY — use `jql::strip_order_by()` before calling. + pub async fn approximate_count(&self, jql: &str) -> Result { + let body = serde_json::json!({ "jql": jql }); + let resp: ApproximateCountResponse = self + .post("/rest/api/3/search/approximate-count", &body) + .await?; + Ok(resp.count) + } +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test --lib approximate_count_response` +Expected: PASS + +- [ ] **Step 5: Commit** + +```bash +git add src/api/jira/issues.rs +git commit -m "feat: add approximate_count API method" +``` + +--- + +### Task 4: Add `--all` flag to CLI and default limit logic + +**Files:** +- Modify: `src/cli/mod.rs` +- Modify: `src/cli/issue/list.rs` + +- [ ] **Step 1: Write unit tests for effective limit resolution** + +Add to the `mod tests` block in `src/cli/issue/list.rs`: + +```rust + #[test] + fn effective_limit_defaults_to_30() { + assert_eq!(resolve_effective_limit(None, false), Some(30)); + } + + #[test] + fn effective_limit_respects_explicit_limit() { + assert_eq!(resolve_effective_limit(Some(50), false), Some(50)); + } + + #[test] + fn effective_limit_all_returns_none() { + assert_eq!(resolve_effective_limit(None, true), None); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib effective_limit` +Expected: FAIL — `resolve_effective_limit` not found + +- [ ] **Step 3: Add `--all` flag to `IssueCommand::List`** + +In `src/cli/mod.rs`, inside the `List` variant of `IssueCommand` (after the `limit` field at line 165), add: + +```rust + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, +``` + +- [ ] **Step 4: Implement `resolve_effective_limit()` and update `handle_list()`** + +Add to `src/cli/issue/list.rs` near the other helper functions (e.g., near `resolve_show_points`): + +```rust +const DEFAULT_LIMIT: u32 = 30; + +/// Resolve the effective limit from CLI flags. 
+fn resolve_effective_limit(limit: Option<u32>, all: bool) -> Option<u32> {
+    if all {
+        None
+    } else {
+        Some(limit.unwrap_or(DEFAULT_LIMIT))
+    }
+}
+```
+
+Update the destructuring at line 28 to include `all`:
+
+```rust
+    let IssueCommand::List {
+        jql,
+        status,
+        team,
+        limit,
+        all,
+        points: show_points,
+        assets: show_assets,
+    } = command
+    else {
+        unreachable!()
+    };
+```
+
+Add after destructuring, before `let sp_field_id`:
+
+```rust
+    let effective_limit = resolve_effective_limit(limit, all);
+```
+
+Update the `search_issues` call to use `effective_limit` instead of `limit`:
+
+```rust
+    let search_result = client
+        .search_issues(&effective_jql, effective_limit, &extra)
+        .await?;
+    let issues = search_result.issues;
+```
+
+- [ ] **Step 5: Add truncation message logic**
+
+After the `output::print_output(...)` call (the last statement before `Ok(())`), add:
+
+```rust
+    if search_result.has_more && !all {
+        let count_jql = crate::jql::strip_order_by(&effective_jql);
+        match client.approximate_count(count_jql).await {
+            Ok(total) if total > 0 => {
+                eprintln!(
+                    "Showing {} of ~{} results. Use --limit or --all to see more.",
+                    issues.len(),
+                    total
+                );
+            }
+            Ok(_) | Err(_) => {
+                eprintln!(
+                    "Showing {} results. Use --limit or --all to see more.",
+                    issues.len()
+                );
+            }
+        }
+    }
+```
+
+Note on ownership: `let issues = search_result.issues;` moves `issues` out of `search_result`, but `search_result.has_more` is `bool` (`Copy`), so accessing it after the partial move is valid Rust.
+ +- [ ] **Step 6: Run all tests** + +Run: `cargo test --all-features` +Expected: ALL PASS + +- [ ] **Step 7: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/list.rs +git commit -m "feat: add --all flag and default 30-result limit to issue list (#43)" +``` + +--- + +### Task 5: Integration tests + +**Files:** +- Modify: `tests/common/fixtures.rs` +- Modify: `tests/issue_commands.rs` + +- [ ] **Step 1: Add test fixture helpers** + +In `tests/common/fixtures.rs`, add: + +```rust +/// Search response with `nextPageToken` set (indicating more results exist). +pub fn issue_search_response_with_next_page(issues: Vec) -> Value { + json!({ "issues": issues, "nextPageToken": "next-page-token-abc" }) +} + +/// Response for the approximate-count endpoint. +pub fn approximate_count_response(count: u64) -> Value { + json!({ "count": count }) +} +``` + +- [ ] **Step 2: Write integration tests** + +In `tests/issue_commands.rs`, add: + +```rust +#[tokio::test] +async fn test_search_issues_has_more_flag() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response_with_next_page(vec![ + common::fixtures::issue_response("FOO-1", "Test issue", "To Do"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", Some(1), &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 1); + assert!(result.has_more); +} + +#[tokio::test] +async fn test_search_issues_no_more_results() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + 
common::fixtures::issue_search_response(vec![ + common::fixtures::issue_response("FOO-1", "Test issue", "To Do"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", Some(10), &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 1); + assert!(!result.has_more); +} + +#[tokio::test] +async fn test_search_issues_no_limit_fetches_all() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![ + common::fixtures::issue_response("FOO-1", "Issue 1", "To Do"), + common::fixtures::issue_response("FOO-2", "Issue 2", "To Do"), + common::fixtures::issue_response("FOO-3", "Issue 3", "To Do"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", None, &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 3); + assert!(!result.has_more); +} + +#[tokio::test] +async fn test_approximate_count() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/approximate-count")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::approximate_count_response(42)), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let count = client.approximate_count("project = FOO").await.unwrap(); + assert_eq!(count, 42); +} + +#[tokio::test] +async fn test_approximate_count_zero() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/approximate-count")) + .respond_with( + ResponseTemplate::new(200) + 
.set_body_json(common::fixtures::approximate_count_response(0)), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let count = client.approximate_count("project = FOO").await.unwrap(); + assert_eq!(count, 0); +} + +#[tokio::test] +async fn test_approximate_count_server_error_returns_err() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/approximate-count")) + .respond_with(ResponseTemplate::new(500)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.approximate_count("project = FOO").await; + assert!(result.is_err()); +} +``` + +- [ ] **Step 3: Run integration tests** + +Run: `cargo test --test issue_commands` +Expected: ALL PASS + +- [ ] **Step 4: Run full test suite + clippy** + +Run: `cargo test --all-features && cargo clippy --all --all-features --tests -- -D warnings` +Expected: ALL PASS, no warnings + +- [ ] **Step 5: Commit** + +```bash +git add tests/common/fixtures.rs tests/issue_commands.rs +git commit -m "test: add integration tests for SearchResult, approximate_count, and truncation" +``` + +--- + +### Task 6: Final verification and format check + +**Files:** None (verification only) + +- [ ] **Step 1: Run cargo fmt** + +Run: `cargo fmt --all -- --check` +Expected: No formatting issues (if there are, run `cargo fmt --all` and include in commit) + +- [ ] **Step 2: Run full CI-equivalent check** + +Run: `cargo fmt --all -- --check && cargo clippy --all --all-features --tests -- -D warnings && cargo test --all-features` +Expected: All three pass + +- [ ] **Step 3: Fix any issues found, commit if needed** + +If `cargo fmt` requires changes: +```bash +cargo fmt --all +git add -u +git commit -m "style: format code" +``` diff --git a/docs/superpowers/plans/2026-03-24-issue-linked-assets.md 
b/docs/superpowers/plans/2026-03-24-issue-linked-assets.md new file mode 100644 index 0000000..e18d25d --- /dev/null +++ b/docs/superpowers/plans/2026-03-24-issue-linked-assets.md @@ -0,0 +1,1501 @@ +# Issue Linked Assets Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Expose CMDB/Assets objects linked to Jira issues via `jr issue view`, `jr issue list --assets`, and `jr issue assets KEY`. + +**Architecture:** Auto-discover CMDB custom field IDs from `GET /rest/api/3/field` (cached 7-day TTL). Parse CMDB field values adaptively from issue responses (handles both modern `{label, objectKey}` and legacy `{workspaceId, objectId}` shapes). Enrich with Assets API when only IDs are available. + +**Tech Stack:** Rust, serde_json::Value for adaptive parsing, futures::future::join_all for parallel enrichment, wiremock for integration tests. 
+ +**Spec:** `docs/superpowers/specs/2026-03-24-issue-linked-assets-design.md` + +--- + +## File Structure + +### New Files + +| File | Responsibility | +|------|---------------| +| `src/types/assets/linked.rs` | `LinkedAsset` struct + display formatting + JSON serialization | +| `src/api/assets/linked.rs` | Cache orchestration (`get_or_fetch_cmdb_field_ids`), adaptive parsing (`extract_linked_assets`), parallel enrichment (`enrich_assets`) | +| `src/cli/issue/assets.rs` | `handle_issue_assets()` command handler for `jr issue assets KEY` | +| `tests/cmdb_fields.rs` | Integration tests for field discovery + linked asset parsing + enrichment | + +### Modified Files + +| File | Change | +|------|--------| +| `src/types/assets/mod.rs` | Add `pub mod linked;` and re-export | +| `src/api/assets/mod.rs` | Add `pub mod linked;` | +| `src/api/jira/fields.rs` | Add `find_cmdb_field_ids()` | +| `src/cache.rs` | Add `CmdbFieldsCache` struct + read/write functions | +| `src/cli/mod.rs` | Add `Assets` variant to `IssueCommand`, add `--assets` flag to `List` | +| `src/cli/issue/mod.rs` | Add `mod assets;` and wire up dispatch | +| `src/cli/issue/list.rs` | Add Assets row to `handle_view`, Assets column to `handle_list` | +| `src/cli/issue/format.rs` | Add `format_issue_row_with_assets()` and headers variant | + +--- + +### Task 1: LinkedAsset Type + +**Files:** +- Create: `src/types/assets/linked.rs` +- Modify: `src/types/assets/mod.rs` + +- [ ] **Step 1: Write the LinkedAsset struct and display formatting** + +```rust +// src/types/assets/linked.rs +use serde::Serialize; + +/// An asset reference extracted from a CMDB custom field on a Jira issue. 
+#[derive(Debug, Clone, Default, Serialize)]
+pub struct LinkedAsset {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub key: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub name: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(rename = "type")]
+    pub asset_type: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub id: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub workspace_id: Option<String>,
+}
+
+impl LinkedAsset {
+    /// Human-readable display: "OBJ-1 (Acme Corp)", "OBJ-1", or "#12345 (run "jr init" to resolve asset names)".
+    pub fn display(&self) -> String {
+        match (&self.key, &self.name) {
+            (Some(key), Some(name)) => format!("{} ({})", key, name),
+            (Some(key), None) => key.clone(),
+            (None, Some(name)) => name.clone(),
+            (None, None) => match &self.id {
+                Some(id) => format!("#{} (run \"jr init\" to resolve asset names)", id),
+                None => "(unknown)".into(),
+            },
+        }
+    }
+}
+
+/// Format a list of linked assets for display in a table cell.
+pub fn format_linked_assets(assets: &[LinkedAsset]) -> String {
+    if assets.is_empty() {
+        return "(none)".into();
+    }
+    assets
+        .iter()
+        .map(|a| a.display())
+        .collect::<Vec<_>>()
+        .join(", ")
+}
+
+/// Format for list table: first asset + count if multiple.
+pub fn format_linked_assets_short(assets: &[LinkedAsset]) -> String { + match assets.len() { + 0 => "-".into(), + 1 => assets[0].display(), + n => format!("{} (+{} more)", assets[0].display(), n - 1), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn display_key_and_name() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme Corp".into()), + ..Default::default() + }; + assert_eq!(a.display(), "OBJ-1 (Acme Corp)"); + } + + #[test] + fn display_key_only() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + ..Default::default() + }; + assert_eq!(a.display(), "OBJ-1"); + } + + #[test] + fn display_name_only() { + let a = LinkedAsset { + name: Some("Acme Corp".into()), + ..Default::default() + }; + assert_eq!(a.display(), "Acme Corp"); + } + + #[test] + fn display_id_fallback_with_hint() { + let a = LinkedAsset { + id: Some("12345".into()), + ..Default::default() + }; + assert_eq!( + a.display(), + "#12345 (run \"jr init\" to resolve asset names)" + ); + } + + #[test] + fn display_nothing() { + let a = LinkedAsset::default(); + assert_eq!(a.display(), "(unknown)"); + } + + #[test] + fn format_empty_list() { + assert_eq!(format_linked_assets(&[]), "(none)"); + } + + #[test] + fn format_single_asset() { + let assets = vec![LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }]; + assert_eq!(format_linked_assets(&assets), "OBJ-1 (Acme)"); + } + + #[test] + fn format_multiple_assets() { + let assets = vec![ + LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }, + LinkedAsset { + key: Some("OBJ-2".into()), + name: Some("Globex".into()), + ..Default::default() + }, + ]; + assert_eq!( + format_linked_assets(&assets), + "OBJ-1 (Acme), OBJ-2 (Globex)" + ); + } + + #[test] + fn format_short_empty() { + assert_eq!(format_linked_assets_short(&[]), "-"); + } + + #[test] + fn format_short_single() { + let assets = vec![LinkedAsset { + key: 
Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }]; + assert_eq!(format_linked_assets_short(&assets), "OBJ-1 (Acme)"); + } + + #[test] + fn format_short_multiple() { + let assets = vec![ + LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }, + LinkedAsset { + key: Some("OBJ-2".into()), + ..Default::default() + }, + LinkedAsset { + key: Some("OBJ-3".into()), + ..Default::default() + }, + ]; + assert_eq!( + format_linked_assets_short(&assets), + "OBJ-1 (Acme) (+2 more)" + ); + } + + #[test] + fn serialize_json_skips_none() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }; + let json = serde_json::to_value(&a).unwrap(); + assert_eq!(json.get("key").unwrap(), "OBJ-1"); + assert_eq!(json.get("name").unwrap(), "Acme"); + assert!(json.get("id").is_none()); + assert!(json.get("workspace_id").is_none()); + } +} +``` + +- [ ] **Step 2: Register the module** + +Add to `src/types/assets/mod.rs`: + +```rust +pub mod linked; +pub mod object; +pub mod ticket; + +pub use linked::*; +pub use object::*; +pub use ticket::*; +``` + +- [ ] **Step 3: Run tests** + +Run: `cargo test --lib types::assets::linked` +Expected: All 11 tests PASS + +- [ ] **Step 4: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 5: Commit** + +```bash +git add src/types/assets/linked.rs src/types/assets/mod.rs +git commit -m "feat: add LinkedAsset type with display formatting" +``` + +--- + +### Task 2: CMDB Field Discovery + +**Files:** +- Modify: `src/api/jira/fields.rs` + +- [ ] **Step 1: Write the failing test** + +Add to the existing `tests` module in `src/api/jira/fields.rs`: + +```rust +#[test] +fn filter_cmdb_fields_finds_assets_type() { + let fields = vec![make_field( + "customfield_10191", + "Client", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + )]; + let result = 
filter_cmdb_fields(&fields);
+    assert_eq!(result, vec!["customfield_10191"]);
+}
+
+#[test]
+fn filter_cmdb_fields_ignores_non_cmdb() {
+    let fields = vec![
+        make_field(
+            "customfield_10031",
+            "Story Points",
+            true,
+            "number",
+            "com.atlassian.jira.plugin.system.customfieldtypes:float",
+        ),
+        make_field(
+            "customfield_10191",
+            "Client",
+            true,
+            "any",
+            "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype",
+        ),
+    ];
+    let result = filter_cmdb_fields(&fields);
+    assert_eq!(result, vec!["customfield_10191"]);
+}
+
+#[test]
+fn filter_cmdb_fields_empty_when_no_cmdb() {
+    let fields = vec![make_field(
+        "customfield_10031",
+        "Story Points",
+        true,
+        "number",
+        "com.atlassian.jira.plugin.system.customfieldtypes:float",
+    )];
+    let result = filter_cmdb_fields(&fields);
+    assert!(result.is_empty());
+}
+
+#[test]
+fn filter_cmdb_fields_multiple() {
+    let fields = vec![
+        make_field(
+            "customfield_10191",
+            "Client",
+            true,
+            "any",
+            "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype",
+        ),
+        make_field(
+            "customfield_10245",
+            "Server",
+            true,
+            "any",
+            "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype",
+        ),
+    ];
+    let result = filter_cmdb_fields(&fields);
+    assert_eq!(result, vec!["customfield_10191", "customfield_10245"]);
+}
+```
+
+- [ ] **Step 2: Run tests to verify they fail**
+
+Run: `cargo test --lib api::jira::fields::tests::filter_cmdb`
+Expected: FAIL — `filter_cmdb_fields` not found
+
+- [ ] **Step 3: Implement filter_cmdb_fields and find_cmdb_field_ids**
+
+Add to `src/api/jira/fields.rs` (after the existing `filter_story_points_fields` function):
+
+```rust
+const CMDB_SCHEMA_TYPE: &str = "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype";
+
+pub fn filter_cmdb_fields(fields: &[Field]) -> Vec<String> {
+    fields
+        .iter()
+        .filter(|f| {
+            f.custom == Some(true)
+                && f.schema
+                    .as_ref()
+                    .and_then(|s| s.custom.as_deref())
+                    .map(|c| c == CMDB_SCHEMA_TYPE)
+                    .unwrap_or(false)
+        })
+        .map(|f| f.id.clone())
+        .collect()
+}
+```
+
+Add to the `impl JiraClient` block:
+
+```rust
+pub async fn find_cmdb_field_ids(&self) -> Result<Vec<String>> {
+    let fields = self.list_fields().await?;
+    Ok(filter_cmdb_fields(&fields))
+}
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+Run: `cargo test --lib api::jira::fields::tests::filter_cmdb`
+Expected: All 4 tests PASS
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add src/api/jira/fields.rs
+git commit -m "feat: add CMDB field discovery via schema.custom filter"
+```
+
+---
+
+### Task 3: CMDB Fields Cache
+
+**Files:**
+- Modify: `src/cache.rs`
+
+- [ ] **Step 1: Write the failing tests**
+
+Add to the existing `tests` module in `src/cache.rs`:
+
+```rust
+#[test]
+fn read_missing_cmdb_fields_cache_returns_none() {
+    with_temp_cache(|| {
+        let result = read_cmdb_fields_cache().unwrap();
+        assert!(result.is_none());
+    });
+}
+
+#[test]
+fn write_then_read_cmdb_fields_cache() {
+    with_temp_cache(|| {
+        write_cmdb_fields_cache(&["customfield_10191".into(), "customfield_10245".into()])
+            .unwrap();
+
+        let cache = read_cmdb_fields_cache()
+            .unwrap()
+            .expect("should exist");
+        assert_eq!(cache.field_ids, vec!["customfield_10191", "customfield_10245"]);
+    });
+}
+
+#[test]
+fn expired_cmdb_fields_cache_returns_none() {
+    with_temp_cache(|| {
+        let expired = CmdbFieldsCache {
+            field_ids: vec!["customfield_10191".into()],
+            fetched_at: Utc::now() - chrono::Duration::days(8),
+        };
+        let dir = cache_dir();
+        std::fs::create_dir_all(&dir).unwrap();
+        let content = serde_json::to_string_pretty(&expired).unwrap();
+        std::fs::write(dir.join("cmdb_fields.json"), content).unwrap();
+
+        let result = read_cmdb_fields_cache().unwrap();
+        assert!(result.is_none(), "expired cmdb fields cache should return None");
+    });
+}
+```
+
+- [ ] **Step 2: Run tests to verify they fail**
+
+Run: `cargo test --lib cache::tests::cmdb`
+Expected: FAIL — `CmdbFieldsCache` not found
+
+- [ ] **Step 3: Implement CmdbFieldsCache**
+
+Add to `src/cache.rs` (after the `WorkspaceCache` section):
+
+```rust
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CmdbFieldsCache {
+    pub field_ids: Vec<String>,
+    pub fetched_at: DateTime<Utc>,
+}
+
+pub fn read_cmdb_fields_cache() -> Result<Option<CmdbFieldsCache>> {
+    let path = cache_dir().join("cmdb_fields.json");
+    if !path.exists() {
+        return Ok(None);
+    }
+
+    let content = std::fs::read_to_string(&path)?;
+    let cache: CmdbFieldsCache = serde_json::from_str(&content)?;
+
+    let age = Utc::now() - cache.fetched_at;
+    if age.num_days() >= CACHE_TTL_DAYS {
+        return Ok(None);
+    }
+
+    Ok(Some(cache))
+}
+
+pub fn write_cmdb_fields_cache(field_ids: &[String]) -> Result<()> {
+    let dir = cache_dir();
+    std::fs::create_dir_all(&dir)?;
+
+    let cache = CmdbFieldsCache {
+        field_ids: field_ids.to_vec(),
+        fetched_at: Utc::now(),
+    };
+
+    let content = serde_json::to_string_pretty(&cache)?;
+    std::fs::write(dir.join("cmdb_fields.json"), content)?;
+    Ok(())
+}
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+Run: `cargo test --lib cache::tests::cmdb`
+Expected: All 3 tests PASS
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add src/cache.rs
+git commit -m "feat: add CmdbFieldsCache with 7-day TTL"
+```
+
+---
+
+### Task 4: Adaptive Parsing & Enrichment
+
+**Files:**
+- Create: `src/api/assets/linked.rs`
+- Modify: `src/api/assets/mod.rs`
+
+- [ ] **Step 1: Write unit tests for adaptive parsing**
+
+Create `src/api/assets/linked.rs` with tests first:
+
+```rust
+use std::collections::HashMap;
+
+use anyhow::Result;
+use serde_json::Value;
+
+use crate::api::assets::workspace::get_or_fetch_workspace_id;
+use crate::api::client::JiraClient;
+use crate::cache;
+use crate::types::assets::LinkedAsset;
+
+/// Get CMDB field IDs, using cache when available.
+pub async fn get_or_fetch_cmdb_field_ids(client: &JiraClient) -> Result<Vec<String>> {
+    if let Some(cached) = cache::read_cmdb_fields_cache()? {
+        return Ok(cached.field_ids);
+    }
+
+    let field_ids = client.find_cmdb_field_ids().await?;
+    let _ = cache::write_cmdb_fields_cache(&field_ids);
+    Ok(field_ids)
+}
+
+/// Extract linked assets from issue extra fields using discovered CMDB field IDs.
+pub fn extract_linked_assets(
+    extra: &HashMap<String, Value>,
+    cmdb_field_ids: &[String],
+) -> Vec<LinkedAsset> {
+    let mut assets = Vec::new();
+
+    for field_id in cmdb_field_ids {
+        let Some(value) = extra.get(field_id) else {
+            continue;
+        };
+        if value.is_null() {
+            continue;
+        }
+
+        match value {
+            Value::Array(arr) => {
+                for item in arr {
+                    if let Some(asset) = parse_cmdb_value(item) {
+                        assets.push(asset);
+                    }
+                }
+            }
+            Value::Object(_) => {
+                if let Some(asset) = parse_cmdb_value(value) {
+                    assets.push(asset);
+                }
+            }
+            Value::String(s) => {
+                assets.push(LinkedAsset {
+                    name: Some(s.clone()),
+                    ..Default::default()
+                });
+            }
+            _ => {}
+        }
+    }
+
+    assets
+}
+
+fn parse_cmdb_value(value: &Value) -> Option<LinkedAsset> {
+    let obj = value.as_object()?;
+
+    let label = obj.get("label").and_then(|v| v.as_str()).map(String::from);
+    let object_key = obj
+        .get("objectKey")
+        .and_then(|v| v.as_str())
+        .map(String::from);
+    let object_id = obj.get("objectId").and_then(|v| {
+        v.as_str()
+            .map(String::from)
+            .or_else(|| v.as_u64().map(|n| n.to_string()))
+    });
+    let workspace_id = obj
+        .get("workspaceId")
+        .and_then(|v| v.as_str())
+        .map(String::from);
+
+    // Only create an asset if we got at least something useful.
+    if label.is_none() && object_key.is_none() && object_id.is_none() {
+        return None;
+    }
+
+    Some(LinkedAsset {
+        key: object_key,
+        name: label,
+        asset_type: None,
+        id: object_id,
+        workspace_id,
+    })
+}
+
+/// Enrich assets that only have IDs by fetching from the Assets API.
+pub async fn enrich_assets(
+    client: &JiraClient,
+    assets: &mut [LinkedAsset],
+) {
+    // Only enrich assets that have an ID but are missing key/name.
+ let needs_enrichment: Vec = assets + .iter() + .enumerate() + .filter(|(_, a)| a.id.is_some() && a.key.is_none() && a.name.is_none()) + .map(|(i, _)| i) + .collect(); + + if needs_enrichment.is_empty() { + return; + } + + // Get workspace ID — required for Assets API calls. + let workspace_id = match get_or_fetch_workspace_id(client).await { + Ok(wid) => wid, + Err(_) => return, // Degrade gracefully + }; + + let futures: Vec<_> = needs_enrichment + .iter() + .map(|&idx| { + let wid = workspace_id.clone(); + let oid = assets[idx].id.clone().unwrap(); + async move { + let result = client.get_asset(&wid, &oid, false).await; + (idx, result) + } + }) + .collect(); + + let results = futures::future::join_all(futures).await; + + for (idx, result) in results { + if let Ok(obj) = result { + assets[idx].key = Some(obj.object_key); + assets[idx].name = Some(obj.label); + assets[idx].asset_type = Some(obj.object_type.name); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + fn make_extra(field_id: &str, value: Value) -> HashMap { + let mut map = HashMap::new(); + map.insert(field_id.to_string(), value); + map + } + + #[test] + fn parse_modern_label_and_key() { + let extra = make_extra( + "customfield_10191", + json!([{"label": "Acme Corp", "objectKey": "OBJ-1"}]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); + assert!(assets[0].id.is_none()); + } + + #[test] + fn parse_legacy_ids_only() { + let extra = make_extra( + "customfield_10191", + json!([{"workspaceId": "ws-1", "objectId": "88", "id": "ws-1:88"}]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].id.as_deref(), Some("88")); + assert_eq!(assets[0].workspace_id.as_deref(), Some("ws-1")); + 
assert!(assets[0].key.is_none()); + assert!(assets[0].name.is_none()); + } + + #[test] + fn parse_mixed_fields() { + let extra = make_extra( + "customfield_10191", + json!([{ + "label": "Acme Corp", + "objectKey": "OBJ-1", + "workspaceId": "ws-1", + "objectId": "88" + }]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); + assert_eq!(assets[0].id.as_deref(), Some("88")); + } + + #[test] + fn parse_null_field_skipped() { + let extra = make_extra("customfield_10191", Value::Null); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_empty_array() { + let extra = make_extra("customfield_10191", json!([])); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_missing_field_skipped() { + let extra = HashMap::new(); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_string_value_as_name() { + let extra = make_extra("customfield_10191", json!("Some Asset")); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].name.as_deref(), Some("Some Asset")); + } + + #[test] + fn parse_multiple_cmdb_fields() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".into(), + json!([{"label": "Acme", "objectKey": "OBJ-1"}]), + ); + extra.insert( + "customfield_10245".into(), + json!([{"label": "Server-1", "objectKey": "SRV-1"}]), + ); + let field_ids = vec!["customfield_10191".into(), "customfield_10245".into()]; + let assets = extract_linked_assets(&extra, &field_ids); + assert_eq!(assets.len(), 2); + } + + #[test] + fn parse_multiple_objects_in_array() { + let extra = 
make_extra( + "customfield_10191", + json!([ + {"label": "Acme", "objectKey": "OBJ-1"}, + {"label": "Globex", "objectKey": "OBJ-2"} + ]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 2); + assert_eq!(assets[0].name.as_deref(), Some("Acme")); + assert_eq!(assets[1].name.as_deref(), Some("Globex")); + } + + #[test] + fn parse_single_object_not_array() { + let extra = make_extra( + "customfield_10191", + json!({"label": "Acme", "objectKey": "OBJ-1"}), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + } + + #[test] + fn parse_empty_object_skipped() { + let extra = make_extra("customfield_10191", json!([{}])); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_numeric_object_id() { + let extra = make_extra( + "customfield_10191", + json!([{"objectId": 88}]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].id.as_deref(), Some("88")); + } +} +``` + +- [ ] **Step 2: Register the module** + +Add `pub mod linked;` to `src/api/assets/mod.rs`: + +```rust +pub mod linked; +pub mod objects; +pub mod tickets; +pub mod workspace; +``` + +- [ ] **Step 3: Run tests to verify they pass** + +Run: `cargo test --lib api::assets::linked::tests` +Expected: All 12 tests PASS + +- [ ] **Step 4: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 5: Commit** + +```bash +git add src/api/assets/linked.rs src/api/assets/mod.rs +git commit -m "feat: add adaptive CMDB field parsing and enrichment" +``` + +--- + +### Task 5: CLI — Add IssueCommand::Assets and --assets flag + +**Files:** +- Modify: `src/cli/mod.rs` + +- [ ] **Step 1: Add `Assets` variant to `IssueCommand` and 
`--assets` flag to `List`** + +In `src/cli/mod.rs`, add to the `IssueCommand` enum: + +After the existing `LinkTypes` variant, add: + +```rust +/// Show assets linked to an issue +Assets { + /// Issue key (e.g., FOO-123) + key: String, +}, +``` + +In the existing `List` variant, add the `--assets` flag after `points`: + +```rust +/// Show linked assets column +#[arg(long)] +assets: bool, +``` + +- [ ] **Step 2: Do NOT commit yet** — the match will be non-exhaustive. Continue to Task 6 which adds the handler and dispatch in the same commit. + +--- + +### Task 6: CLI — jr issue assets command handler + +**Files:** +- Create: `src/cli/issue/assets.rs` +- Modify: `src/cli/issue/mod.rs` + +- [ ] **Step 1: Create the command handler** + +```rust +// src/cli/issue/assets.rs +use anyhow::Result; + +use crate::api::assets::linked::{ + enrich_assets, extract_linked_assets, get_or_fetch_cmdb_field_ids, +}; +use crate::api::client::JiraClient; +use crate::cli::OutputFormat; +use crate::error::JrError; +use crate::output; + +pub(super) async fn handle_issue_assets( + key: &str, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let cmdb_field_ids = get_or_fetch_cmdb_field_ids(client).await?; + + if cmdb_field_ids.is_empty() { + return Err(JrError::UserError( + "No Assets custom fields found on this Jira instance. \ + Assets requires Jira Service Management Premium or Enterprise." 
+                .into(),
+        )
+        .into());
+    }
+
+    let extra_fields: Vec<&str> = cmdb_field_ids.iter().map(|s| s.as_str()).collect();
+    let issue = client.get_issue(key, &extra_fields).await?;
+    let mut assets = extract_linked_assets(&issue.fields.extra, &cmdb_field_ids);
+
+    if assets.is_empty() {
+        eprintln!("No assets linked to {}.", key);
+        return Ok(());
+    }
+
+    enrich_assets(client, &mut assets).await;
+
+    match output_format {
+        OutputFormat::Json => {
+            println!("{}", output::render_json(&assets)?);
+        }
+        OutputFormat::Table => {
+            let rows: Vec<Vec<String>> = assets
+                .iter()
+                .map(|a| {
+                    vec![
+                        a.key.clone().unwrap_or_else(|| {
+                            a.id.as_ref()
+                                .map(|id| format!("#{}", id))
+                                .unwrap_or_else(|| "-".into())
+                        }),
+                        a.asset_type.clone().unwrap_or_else(|| "-".into()),
+                        a.name.clone().unwrap_or_else(|| "-".into()),
+                    ]
+                })
+                .collect();
+
+            output::print_output(output_format, &["Key", "Type", "Name"], &rows, &assets)?;
+        }
+    }
+
+    Ok(())
+}
+```
+
+- [ ] **Step 2: Wire up dispatch in mod.rs**
+
+Modify `src/cli/issue/mod.rs`:
+
+Add `mod assets;` at the top with the other module declarations.
+
+Add to the `match command` block in the `handle` function:
+
+```rust
+IssueCommand::Assets { key } => {
+    assets::handle_issue_assets(&key, output_format, client).await
+}
+```
+
+- [ ] **Step 3: Run to verify it compiles**
+
+Run: `cargo check`
+Expected: Clean compile
+
+- [ ] **Step 4: Run clippy**
+
+Run: `cargo clippy --all --all-features --tests -- -D warnings`
+Expected: No warnings
+
+- [ ] **Step 5: Commit** (includes Task 5 CLI changes + Task 6 handler)
+
+```bash
+git add src/cli/mod.rs src/cli/issue/assets.rs src/cli/issue/mod.rs
+git commit -m "feat: add jr issue assets command and --assets flag"
+```
+
+---
+
+### Task 7: CLI — Assets row in jr issue view
+
+**Files:**
+- Modify: `src/cli/issue/list.rs`
+
+- [ ] **Step 1: Add Assets row to handle_view**
+
+In `src/cli/issue/list.rs`, modify the `handle_view` function.
+ +Add at the top of the file: + +```rust +use crate::api::assets::linked::{ + enrich_assets, extract_linked_assets, get_or_fetch_cmdb_field_ids, +}; +use crate::types::assets::linked::format_linked_assets; +``` + +In `handle_view`, after the line that builds `extra` from `sp_field_id` (around line 258), add CMDB field discovery: + +```rust +let cmdb_field_ids = get_or_fetch_cmdb_field_ids(client).await.unwrap_or_default(); +let mut extra: Vec<&str> = sp_field_id.iter().copied().collect(); +for f in &cmdb_field_ids { + extra.push(f.as_str()); +} +``` + +(Replace the existing `let extra: Vec<&str> = sp_field_id.iter().copied().collect();` line.) + +Then after the Links row (around line 391, after `rows.push(vec!["Links".into(), links_display]);`), add: + +```rust +if !cmdb_field_ids.is_empty() { + let mut linked = extract_linked_assets(&issue.fields.extra, &cmdb_field_ids); + enrich_assets(client, &mut linked).await; + let display = if linked.is_empty() { + "(none)".into() + } else { + format_linked_assets(&linked) + }; + rows.push(vec!["Assets".into(), display]); +} +``` + +- [ ] **Step 2: Run to verify it compiles** + +Run: `cargo check` +Expected: Clean compile + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "feat: show linked assets row in jr issue view" +``` + +--- + +### Task 8: CLI — --assets column in jr issue list + +**Files:** +- Modify: `src/cli/issue/list.rs` +- Modify: `src/cli/issue/format.rs` + +- [ ] **Step 1: Extend format.rs to support assets column** + +Modify the existing `format_issue_row` in `src/cli/issue/format.rs` to accept an optional assets parameter, avoiding duplication: + +```rust +use crate::types::assets::LinkedAsset; +use crate::types::assets::linked::format_linked_assets_short; +``` + +Replace the existing `format_issue_row` signature and body: + +```rust +/// Build a single table row for an issue, optionally including story points and/or assets. 
+pub fn format_issue_row(
+    issue: &Issue,
+    sp_field_id: Option<&str>,
+    assets: Option<&[LinkedAsset]>,
+) -> Vec<String> {
+    let mut row = Vec::new();
+    row.push(issue.key.clone());
+    row.push(
+        issue.fields.issue_type.as_ref().map(|t| t.name.clone()).unwrap_or_default(),
+    );
+    row.push(
+        issue.fields.status.as_ref().map(|s| s.name.clone()).unwrap_or_default(),
+    );
+    row.push(
+        issue.fields.priority.as_ref().map(|p| p.name.clone()).unwrap_or_default(),
+    );
+    if let Some(field_id) = sp_field_id {
+        row.push(
+            issue.fields.story_points(field_id).map(format_points).unwrap_or_else(|| "-".into()),
+        );
+    }
+    row.push(
+        issue.fields.assignee.as_ref().map(|a| a.display_name.clone()).unwrap_or_else(|| "Unassigned".into()),
+    );
+    if let Some(linked) = assets {
+        row.push(format_linked_assets_short(linked));
+    }
+    row.push(issue.fields.summary.clone());
+    row
+}
+```
+
+Update `format_issue_rows_public` to pass `None` for the new parameter:
+
+```rust
+pub fn format_issue_rows_public(issues: &[Issue]) -> Vec<Vec<String>> {
+    issues
+        .iter()
+        .map(|issue| format_issue_row(issue, None, None))
+        .collect()
+}
+```
+
+Update `issue_table_headers` to accept assets flag:
+
+```rust
+pub fn issue_table_headers(show_points: bool, show_assets: bool) -> Vec<&'static str> {
+    let mut headers = vec!["Key", "Type", "Status", "Priority"];
+    if show_points {
+        headers.push("Points");
+    }
+    headers.push("Assignee");
+    if show_assets {
+        headers.push("Assets");
+    }
+    headers.push("Summary");
+    headers
+}
+```
+
+**Note:** All existing callers of `format_issue_row(issue, sp_field_id)` must be updated to `format_issue_row(issue, sp_field_id, None)`. All callers of `issue_table_headers(show_points)` must be updated to `issue_table_headers(show_points, false)`. Search for these with `cargo check` — the compiler will find them all. 
+
+- [ ] **Step 2: Update handle_list in list.rs**
+
+In `handle_list`, extract the new `assets` flag from the command match:
+
+Update the destructuring at the top of `handle_list` to include the new flag:
+
+```rust
+let IssueCommand::List {
+    jql,
+    status,
+    team,
+    limit,
+    points: show_points,
+    assets: show_assets,
+} = command
+```
+
+After the `extra` fields setup, add CMDB field discovery when `--assets` is passed:
+
+```rust
+let cmdb_field_ids = if show_assets {
+    let ids = get_or_fetch_cmdb_field_ids(client).await.unwrap_or_default();
+    if ids.is_empty() {
+        eprintln!(
+            "warning: --assets ignored. No Assets custom fields found on this Jira instance."
+        );
+    }
+    ids
+} else {
+    Vec::new()
+};
+for f in &cmdb_field_ids {
+    extra.push(f.as_str());
+}
+```
+
+After fetching issues and before building rows, extract and enrich assets for each issue:
+
+```rust
+let show_assets_col = show_assets && !cmdb_field_ids.is_empty();
+let mut issue_assets: Vec<Vec<LinkedAsset>> = Vec::new();
+if show_assets_col {
+    for issue in &issues {
+        let mut linked = extract_linked_assets(&issue.fields.extra, &cmdb_field_ids);
+        enrich_assets(client, &mut linked).await;
+        issue_assets.push(linked);
+    }
+}
+```
+
+Update the row building to pass assets:
+
+```rust
+let rows: Vec<Vec<String>> = issues
+    .iter()
+    .enumerate()
+    .map(|(i, issue)| {
+        let assets = if show_assets_col {
+            Some(issue_assets[i].as_slice())
+        } else {
+            None
+        };
+        format::format_issue_row(issue, effective_sp, assets)
+    })
+    .collect();
+```
+
+Update the headers call to pass the assets flag:
+
+```rust
+let headers = format::issue_table_headers(
+    effective_sp.is_some(),
+    show_assets_col,
+);
+output::print_output(output_format, &headers, &rows, &issues)?;
+```
+
+- [ ] **Step 3: Run to verify it compiles**
+
+Run: `cargo check`
+Expected: Clean compile
+
+- [ ] **Step 4: Run all existing tests**
+
+Run: `cargo test`
+Expected: All tests PASS (existing list tests still work)
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add 
src/cli/issue/list.rs src/cli/issue/format.rs +git commit -m "feat: add --assets column to jr issue list" +``` + +--- + +### Task 9: Integration Tests + +**Files:** +- Create: `tests/cmdb_fields.rs` + +- [ ] **Step 1: Write integration tests** + +```rust +#[allow(dead_code)] +mod common; + +use serde_json::json; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +fn fields_response_with_cmdb() -> serde_json::Value { + json!([ + { + "id": "summary", + "name": "Summary", + "custom": false, + "schema": { "type": "string" } + }, + { + "id": "customfield_10191", + "name": "Client", + "custom": true, + "schema": { + "type": "any", + "custom": "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + "customId": 10191 + } + }, + { + "id": "customfield_10031", + "name": "Story Points", + "custom": true, + "schema": { + "type": "number", + "custom": "com.atlassian.jira.plugin.system.customfieldtypes:float", + "customId": 10031 + } + } + ]) +} + +fn fields_response_no_cmdb() -> serde_json::Value { + json!([ + { + "id": "summary", + "name": "Summary", + "custom": false, + "schema": { "type": "string" } + } + ]) +} + +#[tokio::test] +async fn discover_cmdb_field_ids() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/field")) + .respond_with(ResponseTemplate::new(200).set_body_json(fields_response_with_cmdb())) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let ids = client.find_cmdb_field_ids().await.unwrap(); + assert_eq!(ids, vec!["customfield_10191"]); +} + +#[tokio::test] +async fn discover_cmdb_field_ids_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/field")) + .respond_with(ResponseTemplate::new(200).set_body_json(fields_response_no_cmdb())) + .mount(&server) + .await; + + let client = + 
jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let ids = client.find_cmdb_field_ids().await.unwrap(); + assert!(ids.is_empty()); +} + +#[tokio::test] +async fn issue_with_modern_cmdb_fields() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/PROJ-1")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "key": "PROJ-1", + "fields": { + "summary": "Test issue", + "customfield_10191": [ + { + "label": "Acme Corp", + "objectKey": "OBJ-1" + } + ] + } + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issue = client + .get_issue("PROJ-1", &["customfield_10191"]) + .await + .unwrap(); + + let cmdb_ids = vec!["customfield_10191".to_string()]; + let assets = + jr::api::assets::linked::extract_linked_assets(&issue.fields.extra, &cmdb_ids); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); +} + +#[tokio::test] +async fn issue_with_null_cmdb_field() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/PROJ-2")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "key": "PROJ-2", + "fields": { + "summary": "No assets", + "customfield_10191": null + } + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issue = client + .get_issue("PROJ-2", &["customfield_10191"]) + .await + .unwrap(); + + let cmdb_ids = vec!["customfield_10191".to_string()]; + let assets = + jr::api::assets::linked::extract_linked_assets(&issue.fields.extra, &cmdb_ids); + assert!(assets.is_empty()); +} + +#[tokio::test] +async fn enrichment_resolves_ids_to_names() { + let server = MockServer::start().await; + + // Mock workspace discovery + 
Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, "start": 0, "limit": 25, "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + // Mock asset fetch + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/88")) + .and(query_param("includeAttributes", "false")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "88", + "label": "Acme Corp", + "objectKey": "OBJ-88", + "objectType": { "id": "13", "name": "Client" } + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + let mut assets = vec![jr::types::assets::LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-123".into()), + ..Default::default() + }]; + + jr::api::assets::linked::enrich_assets(&client, &mut assets).await; + + assert_eq!(assets[0].key.as_deref(), Some("OBJ-88")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); + assert_eq!(assets[0].asset_type.as_deref(), Some("Client")); +} +``` + +- [ ] **Step 2: Run integration tests** + +Run: `cargo test --test cmdb_fields` +Expected: All 5 tests PASS + +- [ ] **Step 3: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS (no regressions) + +- [ ] **Step 4: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 5: Commit** + +```bash +git add tests/cmdb_fields.rs +git commit -m "test: add integration tests for linked assets" +``` + +--- + +### Task 10: Documentation & Final Verification + +**Files:** +- Modify: `README.md` +- Modify: `CLAUDE.md` + +- [ ] **Step 1: Update README command table** + +In the Commands table in `README.md`, update the `jr issue view KEY` description and add the new command: + +``` +| `jr issue view KEY` | View issue details (includes story points, 
linked assets) | +``` + +Add after the `jr issue link-types` row: + +``` +| `jr issue assets KEY` | Show assets linked to an issue | +``` + +Add to the `jr issue list` row: + +``` +| `jr issue list` | List issues (smart defaults for scrum/kanban, `--team`, `--points`, `--assets`) | +``` + +- [ ] **Step 2: Update CLAUDE.md architecture** + +In the `src/cli/issue/` section of CLAUDE.md, add: + +``` +│ │ ├── assets.rs # linked assets (issue→asset lookup) +``` + +In the `src/api/assets/` section: + +``` +│ │ ├── linked.rs # CMDB field discovery cache, adaptive parsing, enrichment +``` + +In the `src/types/assets/` section: + +``` +├── types/assets/ # Serde structs for Assets API responses (AssetObject, ConnectedTicket, LinkedAsset, etc.) +``` + +- [ ] **Step 3: Run full test suite one final time** + +Run: `cargo test && cargo clippy --all --all-features --tests -- -D warnings && cargo fmt --all -- --check` +Expected: All pass, clean clippy, clean fmt + +- [ ] **Step 4: Commit** + +```bash +git add README.md CLAUDE.md +git commit -m "docs: add issue assets command to README and CLAUDE.md" +``` diff --git a/docs/superpowers/plans/2026-03-24-jsm-queues.md b/docs/superpowers/plans/2026-03-24-jsm-queues.md new file mode 100644 index 0000000..de1e15f --- /dev/null +++ b/docs/superpowers/plans/2026-03-24-jsm-queues.md @@ -0,0 +1,1645 @@ +# JSM Queue Support Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `jr queue list` and `jr queue view` commands for Jira Service Management projects, with project type auto-detection and caching. + +**Architecture:** New `src/api/jsm/` and `src/types/jsm/` sibling modules alongside existing `jira/` directories. Project type is auto-detected via platform API, cached per-project with 7-day TTL. 
JSM API calls use `get_from_instance()` (not `get()`) to hit `/rest/servicedeskapi/` on the real instance URL.
+
+**Tech Stack:** Rust, reqwest, serde, chrono, comfy-table, wiremock (tests), clap
+
+**Spec:** `docs/superpowers/specs/2026-03-24-jsm-queues-design.md`
+
+---
+
+## File Structure
+
+| File | Responsibility | Change |
+|------|---------------|--------|
+| `src/api/pagination.rs` | Add `ServiceDeskPage` | Modify |
+| `src/cache.rs` | Add `ProjectMeta`, `read_project_meta()`, `write_project_meta()` | Modify |
+| `src/types/jsm/mod.rs` | Re-exports for JSM types | Create |
+| `src/types/jsm/servicedesk.rs` | `ServiceDesk` struct | Create |
+| `src/types/jsm/queue.rs` | `Queue`, `QueueIssue`, `QueueIssueFields` | Create |
+| `src/types/mod.rs` | Add `pub mod jsm;` | Modify |
+| `src/api/jsm/mod.rs` | Re-exports for JSM API | Create |
+| `src/api/jsm/servicedesks.rs` | `list_service_desks()`, `get_or_fetch_project_meta()` | Create |
+| `src/api/jsm/queues.rs` | `list_queues()`, `get_queue_issues()` | Create |
+| `src/api/mod.rs` | Add `pub mod jsm;` | Modify |
+| `src/cli/mod.rs` | Add `Queue` command + `QueueCommand` enum | Modify |
+| `src/cli/queue.rs` | `handle()`, `handle_list()`, `handle_view()` | Create |
+| `src/main.rs` | Add dispatch for `Command::Queue` | Modify |
+| `tests/queue.rs` | Integration tests for queue commands | Create |
+| `tests/project_meta.rs` | Integration tests for project type detection | Create |
+
+---
+
+### Task 1: ServiceDeskPage pagination type
+
+**Files:**
+- Modify: `src/api/pagination.rs`
+
+- [ ] **Step 1: Write unit tests for `ServiceDeskPage`**
+
+Add these tests at the bottom of the existing `#[cfg(test)] mod tests` block in `src/api/pagination.rs`:
+
+```rust
+    #[test]
+    fn test_service_desk_page_has_more() {
+        let page: ServiceDeskPage<String> = ServiceDeskPage {
+            size: 5,
+            start: 0,
+            limit: 50,
+            is_last_page: false,
+            values: vec!["a".into(), "b".into(), "c".into(), "d".into(), "e".into()],
+        };
+        assert!(page.has_more());
+        assert_eq!(page.next_start(), 5);
+    }
+
+    #[test]
+    fn test_service_desk_page_last_page() {
+        let page: ServiceDeskPage<String> = ServiceDeskPage {
+            size: 3,
+            start: 10,
+            limit: 50,
+            is_last_page: true,
+            values: vec!["a".into(), "b".into(), "c".into()],
+        };
+        assert!(!page.has_more());
+        assert_eq!(page.next_start(), 13);
+    }
+
+    #[test]
+    fn test_service_desk_page_empty() {
+        let page: ServiceDeskPage<String> = ServiceDeskPage {
+            size: 0,
+            start: 0,
+            limit: 50,
+            is_last_page: true,
+            values: vec![],
+        };
+        assert!(!page.has_more());
+        assert_eq!(page.next_start(), 0);
+        assert!(page.values.is_empty());
+    }
+
+    #[test]
+    fn test_service_desk_page_deserialize() {
+        let json = r#"{
+            "size": 2,
+            "start": 0,
+            "limit": 50,
+            "isLastPage": false,
+            "values": ["item1", "item2"]
+        }"#;
+        let page: ServiceDeskPage<String> = serde_json::from_str(json).unwrap();
+        assert_eq!(page.size, 2);
+        assert_eq!(page.values.len(), 2);
+        assert!(!page.is_last_page);
+    }
+```
+
+- [ ] **Step 2: Run tests — verify they fail**
+
+```bash
+cargo test --lib pagination -- --nocapture
+```
+
+Expected: compilation errors — `ServiceDeskPage` doesn't exist yet.
+
+- [ ] **Step 3: Implement `ServiceDeskPage`**
+
+Add this after the `CursorPage` impl block (before the `#[cfg(test)]` module) in `src/api/pagination.rs`:
+
+```rust
+/// Offset-based pagination used by Jira Service Management `/rest/servicedeskapi/` endpoints.
+///
+/// Uses different field names than `OffsetPage`: `size` (items in page) instead of `total`,
+/// `isLastPage` boolean instead of computed from startAt+maxResults, and `start`/`limit`
+/// instead of `startAt`/`maxResults`.
+#[derive(Debug, Deserialize)]
+pub struct ServiceDeskPage<T> {
+    /// Count of items in the current page.
+    pub size: u32,
+    /// Zero-based starting index.
+    pub start: u32,
+    /// Maximum items per page.
+    pub limit: u32,
+    /// Whether this is the last page of results.
+    #[serde(rename = "isLastPage")]
+    pub is_last_page: bool,
+    /// The items in this page.
+    #[serde(default)]
+    pub values: Vec<T>,
+}
+
+impl<T> ServiceDeskPage<T> {
+    /// Returns true if there are more pages after this one.
+    pub fn has_more(&self) -> bool {
+        !self.is_last_page
+    }
+
+    /// Returns the `start` value for the next page.
+    pub fn next_start(&self) -> u32 {
+        self.start + self.size
+    }
+}
+```
+
+- [ ] **Step 4: Run tests — verify they pass**
+
+```bash
+cargo test --lib pagination -- --nocapture
+```
+
+Expected: all pagination tests pass including the 4 new ones.
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add src/api/pagination.rs
+git commit -m "feat: add ServiceDeskPage pagination type for JSM API
+
+ServiceDeskPage handles the PagedDTO format used by
+/rest/servicedeskapi/ endpoints which differs from the platform API's
+OffsetPage format (isLastPage boolean, size/start/limit fields)."
+```
+
+---
+
+### Task 2: ProjectMeta cache
+
+**Files:**
+- Modify: `src/cache.rs`
+
+- [ ] **Step 1: Write unit tests for `ProjectMeta` cache**
+
+Add these tests inside the existing `#[cfg(test)] mod tests` block in `src/cache.rs`:
+
+```rust
+    #[test]
+    fn read_missing_project_meta_returns_none() {
+        with_temp_cache(|| {
+            let result = read_project_meta("NOEXIST").unwrap();
+            assert!(result.is_none());
+        });
+    }
+
+    #[test]
+    fn write_then_read_project_meta() {
+        with_temp_cache(|| {
+            let meta = ProjectMeta {
+                project_type: "service_desk".into(),
+                simplified: false,
+                project_id: "10042".into(),
+                service_desk_id: Some("15".into()),
+                fetched_at: Utc::now(),
+            };
+            write_project_meta("HELPDESK", &meta).unwrap();
+
+            let loaded = read_project_meta("HELPDESK").unwrap().expect("should exist");
+            assert_eq!(loaded.project_type, "service_desk");
+            assert_eq!(loaded.service_desk_id.as_deref(), Some("15"));
+            assert_eq!(loaded.project_id, "10042");
+            assert!(!loaded.simplified);
+        });
+    }
+
+    #[test]
+    fn expired_project_meta_returns_none() {
+        with_temp_cache(|| {
+            let 
meta = ProjectMeta { + project_type: "service_desk".into(), + simplified: false, + project_id: "10042".into(), + service_desk_id: Some("15".into()), + fetched_at: Utc::now() - chrono::Duration::days(8), + }; + write_project_meta("HELPDESK", &meta).unwrap(); + + let result = read_project_meta("HELPDESK").unwrap(); + assert!(result.is_none(), "expired project meta should return None"); + }); + } + + #[test] + fn project_meta_multiple_projects() { + with_temp_cache(|| { + let jsm = ProjectMeta { + project_type: "service_desk".into(), + simplified: false, + project_id: "10042".into(), + service_desk_id: Some("15".into()), + fetched_at: Utc::now(), + }; + let software = ProjectMeta { + project_type: "software".into(), + simplified: true, + project_id: "10001".into(), + service_desk_id: None, + fetched_at: Utc::now(), + }; + write_project_meta("HELPDESK", &jsm).unwrap(); + write_project_meta("DEV", &software).unwrap(); + + let jsm_loaded = read_project_meta("HELPDESK").unwrap().expect("should exist"); + assert_eq!(jsm_loaded.project_type, "service_desk"); + + let sw_loaded = read_project_meta("DEV").unwrap().expect("should exist"); + assert_eq!(sw_loaded.project_type, "software"); + assert!(sw_loaded.service_desk_id.is_none()); + }); + } +``` + +- [ ] **Step 2: Run tests — verify they fail** + +```bash +cargo test --lib cache -- --nocapture +``` + +Expected: compilation errors — `ProjectMeta`, `read_project_meta`, `write_project_meta` don't exist yet. 
+
+- [ ] **Step 3: Implement `ProjectMeta` and cache functions**
+
+Add to `src/cache.rs`, after the existing `write_team_cache` function (before the `#[cfg(test)]` block):
+
+```rust
+use std::collections::HashMap;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ProjectMeta {
+    pub project_type: String,
+    pub simplified: bool,
+    pub project_id: String,
+    pub service_desk_id: Option<String>,
+    pub fetched_at: DateTime<Utc>,
+}
+
+pub fn read_project_meta(project_key: &str) -> Result<Option<ProjectMeta>> {
+    let path = cache_dir().join("project_meta.json");
+    if !path.exists() {
+        return Ok(None);
+    }
+
+    let content = std::fs::read_to_string(&path)?;
+    let map: HashMap<String, ProjectMeta> = serde_json::from_str(&content)?;
+
+    match map.get(project_key) {
+        Some(meta) => {
+            let age = Utc::now() - meta.fetched_at;
+            if age.num_days() >= CACHE_TTL_DAYS {
+                Ok(None)
+            } else {
+                Ok(Some(meta.clone()))
+            }
+        }
+        None => Ok(None),
+    }
+}
+
+pub fn write_project_meta(project_key: &str, meta: &ProjectMeta) -> Result<()> {
+    let dir = cache_dir();
+    std::fs::create_dir_all(&dir)?;
+
+    let path = dir.join("project_meta.json");
+
+    // Read existing map or start fresh
+    let mut map: HashMap<String, ProjectMeta> = if path.exists() {
+        let content = std::fs::read_to_string(&path)?;
+        serde_json::from_str(&content).unwrap_or_default()
+    } else {
+        HashMap::new()
+    };
+
+    map.insert(project_key.to_string(), meta.clone());
+
+    let content = serde_json::to_string_pretty(&map)?;
+    std::fs::write(&path, content)?;
+    Ok(())
+}
+```
+
+Also add `use std::collections::HashMap;` to the top of the file if not already present.
+
+- [ ] **Step 4: Run tests — verify they pass**
+
+```bash
+cargo test --lib cache -- --nocapture
+```
+
+Expected: all cache tests pass including the 4 new ones.
+
+- [ ] **Step 5: Run clippy**
+
+```bash
+cargo clippy -- -D warnings
+```
+
+Expected: no warnings. 
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/cache.rs
+git commit -m "feat: add ProjectMeta cache for project type detection
+
+Per-project cache in ~/.cache/jr/project_meta.json stores
+projectTypeKey, simplified flag, projectId, and serviceDeskId
+with 7-day TTL. Supports multiple projects in the same cache file."
+```
+
+---
+
+### Task 3: JSM types (ServiceDesk, Queue, QueueIssue)
+
+**Files:**
+- Create: `src/types/jsm/mod.rs`
+- Create: `src/types/jsm/servicedesk.rs`
+- Create: `src/types/jsm/queue.rs`
+- Modify: `src/types/mod.rs`
+
+- [ ] **Step 1: Create `src/types/jsm/servicedesk.rs`**
+
+```rust
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ServiceDesk {
+    pub id: String,
+    #[serde(rename = "projectId")]
+    pub project_id: String,
+    #[serde(rename = "projectName")]
+    pub project_name: String,
+}
+```
+
+- [ ] **Step 2: Create `src/types/jsm/queue.rs`**
+
+```rust
+use serde::{Deserialize, Serialize};
+
+use crate::types::jira::issue::{IssueType, Priority, Status};
+use crate::types::jira::User;
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct Queue {
+    pub id: String,
+    pub name: String,
+    pub jql: Option<String>,
+    pub fields: Option<Vec<String>>,
+    #[serde(rename = "issueCount")]
+    pub issue_count: Option<u64>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct QueueIssue {
+    pub key: String,
+    pub fields: QueueIssueFields,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct QueueIssueFields {
+    pub summary: Option<String>,
+    pub status: Option<Status>,
+    pub issuetype: Option<IssueType>,
+    pub priority: Option<Priority>,
+    pub assignee: Option<User>,
+    pub reporter: Option<User>,
+    pub created: Option<String>,
+}
+```
+
+- [ ] **Step 3: Create `src/types/jsm/mod.rs`**
+
+```rust
+pub mod queue;
+pub mod servicedesk;
+
+pub use queue::*;
+pub use servicedesk::*;
+```
+
+- [ ] **Step 4: Add `pub mod jsm;` to `src/types/mod.rs`**
+
+The file currently contains only `pub mod jira;`. 
Add: + +```rust +pub mod jira; +pub mod jsm; +``` + +- [ ] **Step 5: Verify it compiles** + +```bash +cargo build +``` + +Expected: compiles with no errors. There may be "unused" warnings — that's fine, they'll be used in the next task. + +- [ ] **Step 6: Write deserialization unit test** + +Add to the bottom of `src/types/jsm/queue.rs`: + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_queue_with_all_fields() { + let json = r#"{ + "id": "10", + "name": "Triage", + "jql": "project = HELPDESK AND status = New", + "fields": ["issuetype", "issuekey", "summary", "status"], + "issueCount": 12 + }"#; + let queue: Queue = serde_json::from_str(json).unwrap(); + assert_eq!(queue.id, "10"); + assert_eq!(queue.name, "Triage"); + assert_eq!(queue.issue_count, Some(12)); + assert!(queue.jql.is_some()); + } + + #[test] + fn deserialize_queue_without_optional_fields() { + let json = r#"{ + "id": "20", + "name": "All open" + }"#; + let queue: Queue = serde_json::from_str(json).unwrap(); + assert_eq!(queue.id, "20"); + assert!(queue.issue_count.is_none()); + assert!(queue.jql.is_none()); + assert!(queue.fields.is_none()); + } + + #[test] + fn deserialize_queue_issue_minimal() { + let json = r#"{ + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working" + } + }"#; + let issue: QueueIssue = serde_json::from_str(json).unwrap(); + assert_eq!(issue.key, "HELPDESK-42"); + assert_eq!(issue.fields.summary.as_deref(), Some("VPN not working")); + assert!(issue.fields.status.is_none()); + assert!(issue.fields.assignee.is_none()); + } + + #[test] + fn deserialize_queue_issue_full() { + let json = r#"{ + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working", + "status": { "name": "New", "statusCategory": { "name": "To Do", "key": "new" } }, + "issuetype": { "name": "Service Request" }, + "priority": { "name": "High" }, + "assignee": { "accountId": "abc123", "displayName": "Jane D." 
}, + "reporter": { "accountId": "def456", "displayName": "John S." }, + "created": "2026-03-24T10:00:00.000+0000" + } + }"#; + let issue: QueueIssue = serde_json::from_str(json).unwrap(); + assert_eq!(issue.key, "HELPDESK-42"); + assert_eq!(issue.fields.status.as_ref().unwrap().name, "New"); + assert_eq!( + issue.fields.assignee.as_ref().unwrap().display_name, + "Jane D." + ); + } +} +``` + +- [ ] **Step 7: Run tests** + +```bash +cargo test --lib types::jsm -- --nocapture +``` + +Expected: all 4 tests pass. + +- [ ] **Step 8: Commit** + +```bash +git add src/types/jsm/ src/types/mod.rs +git commit -m "feat: add JSM types for ServiceDesk, Queue, and QueueIssue + +First JSM type definitions in the new types/jsm/ sibling module. +Queue issues use a limited field set (only queue-configured fields), +reusing Status, IssueType, Priority, and User from types/jira/." +``` + +--- + +### Task 4: JSM API — service desks + project meta orchestration + +**Files:** +- Create: `src/api/jsm/mod.rs` +- Create: `src/api/jsm/servicedesks.rs` +- Modify: `src/api/mod.rs` + +- [ ] **Step 1: Create `src/api/jsm/mod.rs`** + +```rust +pub mod queues; +pub mod servicedesks; +``` + +- [ ] **Step 2: Add `pub mod jsm;` to `src/api/mod.rs`** + +The file currently contains: + +```rust +pub mod auth; +pub mod client; +pub mod jira; +pub mod pagination; +pub mod rate_limit; +``` + +Add `pub mod jsm;` after `pub mod jira;`: + +```rust +pub mod auth; +pub mod client; +pub mod jira; +pub mod jsm; +pub mod pagination; +pub mod rate_limit; +``` + +- [ ] **Step 3: Create `src/api/jsm/servicedesks.rs`** + +```rust +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::ServiceDeskPage; +use crate::cache::{self, ProjectMeta}; +use crate::error::JrError; +use crate::types::jsm::ServiceDesk; +use chrono::Utc; + +impl JiraClient { + /// List all service desks, auto-paginating. 
+    pub async fn list_service_desks(&self) -> Result<Vec<ServiceDesk>> {
+        let mut all = Vec::new();
+        let mut start = 0u32;
+        let page_size = 50u32;
+
+        loop {
+            let path = format!(
+                "/rest/servicedeskapi/servicedesk?start={}&limit={}",
+                start, page_size
+            );
+            let page: ServiceDeskPage<ServiceDesk> =
+                self.get_from_instance(&path).await?;
+            let has_more = page.has_more();
+            let next = page.next_start();
+            all.extend(page.values);
+            if !has_more {
+                break;
+            }
+            start = next;
+        }
+        Ok(all)
+    }
+}
+
+/// Fetch project metadata, using cache when available.
+///
+/// 1. Check cache for project_key — return if fresh.
+/// 2. GET /rest/api/3/project/{key} — extract projectTypeKey, simplified, id.
+/// 3. If service_desk: list service desks, match by projectId to find serviceDeskId.
+/// 4. Write to cache and return.
+pub async fn get_or_fetch_project_meta(
+    client: &JiraClient,
+    project_key: &str,
+) -> Result<ProjectMeta> {
+    // Check cache first
+    if let Some(cached) = cache::read_project_meta(project_key)? {
+        return Ok(cached);
+    }
+
+    // Fetch project details from platform API
+    let project: serde_json::Value = client
+        .get(&format!(
+            "/rest/api/3/project/{}",
+            urlencoding::encode(project_key)
+        ))
+        .await?;
+
+    let project_type = project
+        .get("projectTypeKey")
+        .and_then(|v| v.as_str())
+        .unwrap_or("software")
+        .to_string();
+
+    let simplified = project
+        .get("simplified")
+        .and_then(|v| v.as_bool())
+        .unwrap_or(false);
+
+    let project_id = project
+        .get("id")
+        .and_then(|v| v.as_str())
+        .unwrap_or("")
+        .to_string();
+
+    // If it's a service desk, resolve the serviceDeskId
+    let service_desk_id = if project_type == "service_desk" {
+        let desks = client.list_service_desks().await?;
+        desks
+            .iter()
+            .find(|d| d.project_id == project_id)
+            .map(|d| d.id.clone())
+    } else {
+        None
+    };
+
+    let meta = ProjectMeta {
+        project_type,
+        simplified,
+        project_id,
+        service_desk_id,
+        fetched_at: Utc::now(),
+    };
+
+    // Write to cache (best-effort — don't fail the command if cache write fails)
+    let _ = cache::write_project_meta(project_key, &meta);
+
+    Ok(meta)
+}
+
+/// Require the project to be a JSM service desk. Returns the serviceDeskId or errors.
+pub async fn require_service_desk(
+    client: &JiraClient,
+    project_key: &str,
+) -> Result<String> {
+    let meta = get_or_fetch_project_meta(client, project_key).await?;
+
+    if meta.project_type != "service_desk" {
+        let type_label = match meta.project_type.as_str() {
+            "software" => "Jira Software",
+            "business" => "Jira Work Management",
+            _ => "Jira",
+        };
+        return Err(JrError::UserError(format!(
+            "\"{}\" is a {} project. Queue commands require a Jira Service Management project. \
+             Run \"jr project fields {}\" to see available commands.",
+            project_key, type_label, project_key
+        ))
+        .into());
+    }
+
+    meta.service_desk_id.ok_or_else(|| {
+        JrError::UserError(format!(
+            "No service desk found for project \"{}\". \
+             The project may not be configured as a service desk.",
+            project_key
+        ))
+        .into()
+    })
+}
+```
+
+- [ ] **Step 4: Create placeholder `src/api/jsm/queues.rs`**
+
+Create an empty placeholder so the `mod.rs` compiles:
+
+```rust
+// Queue API methods — implemented in Task 5.
+```
+
+- [ ] **Step 5: Verify it compiles**
+
+```bash
+cargo build
+```
+
+Expected: compiles. Warnings about unused imports/functions are expected until CLI wiring.
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/api/jsm/ src/api/mod.rs
+git commit -m "feat: add JSM service desk API + project meta orchestration
+
+list_service_desks() auto-paginates through /rest/servicedeskapi/servicedesk.
+get_or_fetch_project_meta() resolves projectTypeKey and serviceDeskId with
+7-day cache. require_service_desk() gates JSM commands with helpful errors.
+
+Uses get_from_instance() to hit instance URL directly (not OAuth proxy)." 
+``` + +--- + +### Task 5: JSM API — queue methods + +**Files:** +- Modify: `src/api/jsm/queues.rs` + +- [ ] **Step 1: Write integration tests for queue API methods** + +Create `tests/queue.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use serde_json::json; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn list_queues_returns_all_queues() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + .and(query_param("includeCount", "true")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { "id": "10", "name": "Triage", "jql": "project = HELPDESK AND status = New", "issueCount": 12 }, + { "id": "20", "name": "In Progress", "jql": "project = HELPDESK AND status = \"In Progress\"", "issueCount": 7 } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let queues = client.list_queues("15").await.unwrap(); + assert_eq!(queues.len(), 2); + assert_eq!(queues[0].name, "Triage"); + assert_eq!(queues[0].issue_count, Some(12)); + assert_eq!(queues[1].name, "In Progress"); +} + +#[tokio::test] +async fn list_queues_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 0, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let queues = client.list_queues("15").await.unwrap(); + assert!(queues.is_empty()); +} + +#[tokio::test] +async fn 
get_queue_issues_returns_issues() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working", + "status": { "name": "New", "statusCategory": { "name": "To Do", "key": "new" } }, + "issuetype": { "name": "Service Request" }, + "priority": { "name": "High" }, + "assignee": null + } + }, + { + "key": "HELPDESK-41", + "fields": { + "summary": "Need license renewal", + "status": { "name": "New", "statusCategory": { "name": "To Do", "key": "new" } }, + "issuetype": { "name": "Service Request" }, + "assignee": { "accountId": "abc", "displayName": "Jane D." } + } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issues = client.get_queue_issues("15", "10", None).await.unwrap(); + assert_eq!(issues.len(), 2); + assert_eq!(issues[0].key, "HELPDESK-42"); + assert!(issues[0].fields.assignee.is_none()); + assert_eq!( + issues[1].fields.assignee.as_ref().unwrap().display_name, + "Jane D." 
+ ); +} + +#[tokio::test] +async fn get_queue_issues_with_limit() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "0")) + .and(query_param("limit", "1")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 1, + "isLastPage": false, + "values": [ + { + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working" + } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issues = client.get_queue_issues("15", "10", Some(1)).await.unwrap(); + assert_eq!(issues.len(), 1); + assert_eq!(issues[0].key, "HELPDESK-42"); +} + +#[tokio::test] +async fn get_queue_issues_paginated() { + let server = MockServer::start().await; + + // Page 1 + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 1, + "isLastPage": false, + "values": [ + { "key": "HELPDESK-2", "fields": { "summary": "Issue A" } } + ] + }))) + .mount(&server) + .await; + + // Page 2 + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "1")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 1, + "limit": 1, + "isLastPage": true, + "values": [ + { "key": "HELPDESK-1", "fields": { "summary": "Issue B" } } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issues = client.get_queue_issues("15", "10", None).await.unwrap(); + assert_eq!(issues.len(), 2); + assert_eq!(issues[0].key, "HELPDESK-2"); + 
assert_eq!(issues[1].key, "HELPDESK-1"); +} +``` + +- [ ] **Step 2: Run tests — verify they fail** + +```bash +cargo test --test queue -- --nocapture +``` + +Expected: compilation errors — `list_queues` and `get_queue_issues` don't exist yet. + +- [ ] **Step 3: Implement queue API methods** + +Replace `src/api/jsm/queues.rs` with: + +```rust +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::ServiceDeskPage; +use crate::types::jsm::Queue; +use crate::types::jsm::QueueIssue; + +impl JiraClient { + /// List all queues for a service desk, auto-paginating. + pub async fn list_queues(&self, service_desk_id: &str) -> Result> { + let base = format!( + "/rest/servicedeskapi/servicedesk/{}/queue", + service_desk_id + ); + let mut all = Vec::new(); + let mut start = 0u32; + let page_size = 50u32; + + loop { + let path = format!( + "{}?includeCount=true&start={}&limit={}", + base, start, page_size + ); + let page: ServiceDeskPage = + self.get_from_instance(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + if !has_more { + break; + } + start = next; + } + Ok(all) + } + + /// Get issues in a queue, with optional limit and auto-pagination. 
+ pub async fn get_queue_issues( + &self, + service_desk_id: &str, + queue_id: &str, + limit: Option, + ) -> Result> { + let base = format!( + "/rest/servicedeskapi/servicedesk/{}/queue/{}/issue", + service_desk_id, queue_id + ); + let mut all = Vec::new(); + let mut start = 0u32; + let max_page_size = 50u32; + + loop { + let page_size = match limit { + Some(cap) => { + let remaining = cap.saturating_sub(all.len() as u32); + if remaining == 0 { + break; + } + remaining.min(max_page_size) + } + None => max_page_size, + }; + let path = format!( + "{}?start={}&limit={}", + base, start, page_size + ); + let page: ServiceDeskPage = + self.get_from_instance(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + + if let Some(cap) = limit { + if all.len() >= cap as usize { + all.truncate(cap as usize); + break; + } + } + if !has_more { + break; + } + start = next; + } + Ok(all) + } +} +``` + +- [ ] **Step 4: Run tests — verify they pass** + +```bash +cargo test --test queue -- --nocapture +``` + +Expected: all 5 queue integration tests pass. + +- [ ] **Step 5: Commit** + +```bash +git add src/api/jsm/queues.rs tests/queue.rs +git commit -m "feat: add JSM queue API methods with auto-pagination + +list_queues() and get_queue_issues() wrap /rest/servicedeskapi/ +queue endpoints with ServiceDeskPage pagination. get_queue_issues +supports --limit with dynamic page sizing (same pattern as +list_comments). Includes 5 wiremock integration tests." 
+``` + +--- + +### Task 6: CLI — queue commands + +**Files:** +- Create: `src/cli/queue.rs` +- Modify: `src/cli/mod.rs` +- Modify: `src/main.rs` + +- [ ] **Step 1: Add `QueueCommand` enum to `src/cli/mod.rs`** + +Add `pub mod queue;` to the top of `src/cli/mod.rs` after the existing module declarations: + +```rust +pub mod auth; +pub mod board; +pub mod init; +pub mod issue; +pub mod project; +pub mod queue; +pub mod sprint; +pub mod team; +pub mod worklog; +``` + +Add the `Queue` variant to the `Command` enum (after `Team`): + +```rust + /// Manage JSM queues + Queue { + #[command(subcommand)] + command: QueueCommand, + }, +``` + +Add the `QueueCommand` enum after `WorklogCommand`: + +```rust +#[derive(Subcommand)] +pub enum QueueCommand { + /// List queues for the service desk + List, + /// View issues in a queue + View { + /// Queue name (partial match supported) + name: Option, + /// Queue ID (use if name is ambiguous) + #[arg(long)] + id: Option, + /// Maximum number of issues to return + #[arg(long)] + limit: Option, + }, +} +``` + +- [ ] **Step 2: Create `src/cli/queue.rs`** + +```rust +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::jsm::servicedesks; +use crate::cli::{OutputFormat, QueueCommand}; +use crate::config::Config; +use crate::error::JrError; +use crate::output; +use crate::partial_match::{self, MatchResult}; + +pub async fn handle( + command: QueueCommand, + output_format: &OutputFormat, + config: &Config, + client: &JiraClient, + project_override: Option<&str>, +) -> Result<()> { + let project_key = config + .project_key(project_override) + .ok_or_else(|| { + JrError::UserError( + "No project configured. 
Run \"jr init\" or pass --project.".into(), + ) + })?; + + let service_desk_id = + servicedesks::require_service_desk(client, &project_key).await?; + + match command { + QueueCommand::List => handle_list(&service_desk_id, output_format, client).await, + QueueCommand::View { name, id, limit } => { + handle_view(&service_desk_id, name, id, limit, output_format, client).await + } + } +} + +async fn handle_list( + service_desk_id: &str, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let queues = client.list_queues(service_desk_id).await?; + + let rows: Vec> = queues + .iter() + .map(|q| { + vec![ + q.name.clone(), + q.issue_count + .map(|c| c.to_string()) + .unwrap_or_else(|| "—".into()), + ] + }) + .collect(); + + output::print_output( + output_format, + &["Queue", "Issues"], + &rows, + &queues, + ) +} + +async fn handle_view( + service_desk_id: &str, + name: Option, + id: Option, + limit: Option, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + // Resolve the queue ID + let queue_id = match id { + Some(id) => id, + None => { + let name = name.ok_or_else(|| { + JrError::UserError( + "Specify a queue name or use --id. \ + Run \"jr queue list\" to see available queues." + .into(), + ) + })?; + resolve_queue_by_name(service_desk_id, &name, client).await? 
+ } + }; + + let issues = client + .get_queue_issues(service_desk_id, &queue_id, limit) + .await?; + + let rows: Vec> = issues + .iter() + .map(|i| { + vec![ + i.key.clone(), + i.fields + .issuetype + .as_ref() + .map(|t| t.name.clone()) + .unwrap_or_else(|| "—".into()), + i.fields + .summary + .clone() + .unwrap_or_else(|| "—".into()), + i.fields + .status + .as_ref() + .map(|s| s.name.clone()) + .unwrap_or_else(|| "—".into()), + i.fields + .assignee + .as_ref() + .map(|u| u.display_name.clone()) + .unwrap_or_else(|| "—".into()), + ] + }) + .collect(); + + output::print_output( + output_format, + &["Key", "Type", "Summary", "Status", "Assignee"], + &rows, + &issues, + ) +} + +async fn resolve_queue_by_name( + service_desk_id: &str, + name: &str, + client: &JiraClient, +) -> Result { + let queues = client.list_queues(service_desk_id).await?; + let names: Vec = queues.iter().map(|q| q.name.clone()).collect(); + + match partial_match::partial_match(name, &names) { + MatchResult::Exact(matched_name) => { + // Find matching queues by this exact name + let matching: Vec<&crate::types::jsm::Queue> = + queues.iter().filter(|q| q.name == matched_name).collect(); + + if matching.len() > 1 { + // Duplicate queue names — need --id + let ids: Vec = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). Use --id {} to specify.", + matched_name, + ids.join(", "), + ids[0] + )) + .into()) + } else { + Ok(matching[0].id.clone()) + } + } + MatchResult::Ambiguous(matches) => Err(JrError::UserError(format!( + "\"{}\" matches multiple queues: {}. Be more specific or use --id.", + name, + matches + .iter() + .map(|m| format!("\"{}\"", m)) + .collect::>() + .join(", ") + )) + .into()), + MatchResult::None(_) => Err(JrError::UserError(format!( + "No queue matching \"{}\" found. 
\ + Run \"jr queue list\" to see available queues.", + name + )) + .into()), + } +} +``` + +- [ ] **Step 3: Add dispatch in `src/main.rs`** + +Add the `Queue` arm to the match block in the `run` function, after the `Team` arm: + +```rust + cli::Command::Queue { command } => { + let config = config::Config::load()?; + let client = api::client::JiraClient::from_config(&config, cli.verbose)?; + cli::queue::handle( + command, + &cli.output, + &config, + &client, + cli.project.as_deref(), + ) + .await + } +``` + +- [ ] **Step 4: Verify it compiles** + +```bash +cargo build +``` + +Expected: compiles with no errors. + +- [ ] **Step 5: Run all tests** + +```bash +cargo test +``` + +Expected: all tests pass. + +- [ ] **Step 6: Run clippy** + +```bash +cargo clippy -- -D warnings +``` + +Expected: no warnings. + +- [ ] **Step 7: Run formatter** + +```bash +cargo fmt --all -- --check +``` + +Expected: no formatting issues. If there are, run `cargo fmt --all`. + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/queue.rs src/cli/mod.rs src/main.rs +git commit -m "feat: add jr queue list and jr queue view commands + +New top-level queue command for JSM service desks. +- jr queue list: shows all queues with issue counts +- jr queue view : shows issues in a queue with partial name matching +- --id flag for disambiguation when queue names are duplicates +- Auto-detects project type and errors for non-JSM projects +- JSON output support via --output json" +``` + +--- + +### Task 7: Update README and CLAUDE.md + +**Files:** +- Modify: `README.md` +- Modify: `CLAUDE.md` + +- [ ] **Step 1: Update README.md command table** + +Add a row for the queue commands in the command table in `README.md`. 
Find the existing command table and add after the `worklog` entries: + +```markdown +| `jr queue list` | List JSM queues for the project's service desk | +| `jr queue view ` | View issues in a queue (partial name match) | +``` + +- [ ] **Step 2: Update CLAUDE.md architecture section** + +In the `src/` tree in `CLAUDE.md`: + +Add under `├── cli/`: +``` +│ ├── queue.rs # queue list/view (JSM service desks) +``` + +Add new directory entries: +``` +├── api/ +│ ├── jsm/ # JSM-specific API call implementations +│ │ ├── servicedesks.rs # list service desks, project meta orchestration +│ │ └── queues.rs # list queues, get queue issues +├── types/jsm/ # Serde structs for JSM API responses (ServiceDesk, Queue, etc.) +``` + +- [ ] **Step 3: Commit** + +```bash +git add README.md CLAUDE.md +git commit -m "docs: add queue commands to README and CLAUDE.md + +Document jr queue list and jr queue view. Update architecture +section with new api/jsm/ and types/jsm/ directories." +``` + +--- + +### Task 8: Queue name matching unit tests + +**Files:** +- Modify: `src/cli/queue.rs` + +- [ ] **Step 1: Add unit tests for queue name resolution** + +Add at the bottom of `src/cli/queue.rs`: + +```rust +#[cfg(test)] +mod tests { + use crate::types::jsm::Queue; + + fn make_queue(id: &str, name: &str) -> Queue { + Queue { + id: id.into(), + name: name.into(), + jql: None, + fields: None, + issue_count: None, + } + } + + fn find_queue_id(name: &str, queues: &[Queue]) -> Result { + let names: Vec = queues.iter().map(|q| q.name.clone()).collect(); + match crate::partial_match::partial_match(name, &names) { + crate::partial_match::MatchResult::Exact(matched_name) => { + let matching: Vec<&Queue> = + queues.iter().filter(|q| q.name == matched_name).collect(); + if matching.len() > 1 { + Err(format!("duplicate: {}", matching.len())) + } else { + Ok(matching[0].id.clone()) + } + } + crate::partial_match::MatchResult::Ambiguous(m) => { + Err(format!("ambiguous: {}", m.len())) + } + 
crate::partial_match::MatchResult::None(_) => Err("none".into()), + } + } + + #[test] + fn exact_match() { + let queues = vec![make_queue("10", "Triage"), make_queue("20", "In Progress")]; + assert_eq!(find_queue_id("Triage", &queues).unwrap(), "10"); + } + + #[test] + fn partial_match() { + let queues = vec![make_queue("10", "Triage"), make_queue("20", "In Progress")]; + assert_eq!(find_queue_id("tri", &queues).unwrap(), "10"); + } + + #[test] + fn ambiguous_match() { + let queues = vec![ + make_queue("10", "Escalated - Client"), + make_queue("20", "Escalated - External"), + ]; + let err = find_queue_id("esc", &queues).unwrap_err(); + assert!(err.starts_with("ambiguous")); + } + + #[test] + fn no_match() { + let queues = vec![make_queue("10", "Triage")]; + let err = find_queue_id("nonexistent", &queues).unwrap_err(); + assert_eq!(err, "none"); + } + + #[test] + fn duplicate_names() { + let queues = vec![make_queue("10", "Triage"), make_queue("20", "Triage")]; + let err = find_queue_id("Triage", &queues).unwrap_err(); + assert!(err.starts_with("duplicate")); + } +} +``` + +- [ ] **Step 2: Run tests** + +```bash +cargo test --lib cli::queue -- --nocapture +``` + +Expected: all 5 tests pass. + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/queue.rs +git commit -m "test: add unit tests for queue name matching + +Tests exact match, partial match, ambiguous match, no match, +and duplicate queue name scenarios." 
+``` + +--- + +### Task 9: Project meta integration tests + +**Files:** +- Create: `tests/project_meta.rs` + +- [ ] **Step 1: Create integration tests** + +Create `tests/project_meta.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use serde_json::json; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn project_meta_cache_miss_fetches_from_api() { + let server = MockServer::start().await; + + // Mock platform API for project details + Mock::given(method("GET")) + .and(path("/rest/api/3/project/HELPDESK")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "10042", + "key": "HELPDESK", + "name": "Help Desk", + "projectTypeKey": "service_desk", + "simplified": false + }))) + .expect(1) + .mount(&server) + .await; + + // Mock servicedeskapi for service desk list + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { "id": "15", "projectId": "10042", "projectName": "Help Desk" } + ] + }))) + .expect(1) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let meta = jr::api::jsm::servicedesks::get_or_fetch_project_meta(&client, "HELPDESK") + .await + .unwrap(); + + assert_eq!(meta.project_type, "service_desk"); + assert_eq!(meta.project_id, "10042"); + assert_eq!(meta.service_desk_id.as_deref(), Some("15")); + assert!(!meta.simplified); +} + +#[tokio::test] +async fn project_meta_software_project_has_no_service_desk_id() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/project/DEV")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "10001", + "key": "DEV", + "name": "Development", + "projectTypeKey": "software", + "simplified": true + }))) + 
.expect(1) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let meta = jr::api::jsm::servicedesks::get_or_fetch_project_meta(&client, "DEV") + .await + .unwrap(); + + assert_eq!(meta.project_type, "software"); + assert!(meta.service_desk_id.is_none()); + assert!(meta.simplified); +} + +#[tokio::test] +async fn require_service_desk_errors_for_software_project() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/project/DEV")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "10001", + "key": "DEV", + "name": "Development", + "projectTypeKey": "software", + "simplified": true + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let result = jr::api::jsm::servicedesks::require_service_desk(&client, "DEV").await; + + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("Jira Software project")); + assert!(err.contains("Queue commands require")); +} +``` + +- [ ] **Step 2: Run tests** + +```bash +cargo test --test project_meta -- --nocapture +``` + +Expected: all 3 tests pass. + +Note: These tests use `new_for_test` which sets `instance_url == base_url`, so `get_from_instance` and `get` both hit the wiremock server. This correctly tests the behavior. + +- [ ] **Step 3: Commit** + +```bash +git add tests/project_meta.rs +git commit -m "test: add integration tests for project meta detection + +Tests cache-miss-to-API flow for JSM and software projects, +and require_service_desk error for non-JSM projects." 
+```
diff --git a/docs/superpowers/plans/2026-03-25-common-filter-flags.md b/docs/superpowers/plans/2026-03-25-common-filter-flags.md
new file mode 100644
index 0000000..55b100e
--- /dev/null
+++ b/docs/superpowers/plans/2026-03-25-common-filter-flags.md
@@ -0,0 +1,856 @@
+# Common Filter Flags Implementation Plan
+
+> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
+
+**Goal:** Add `--assignee`, `--reporter`, and `--recent` filter flags to `jr issue list` that compose additively with each other and with `--jql`.
+
+**Architecture:** Three new `Option<String>` flags on `IssueCommand::List`. A `validate_duration()` function in `jql.rs` for client-side `--recent` validation. A `resolve_user()` helper in `helpers.rs` using the user search API + `partial_match` for disambiguation. A `search_users()` method on `JiraClient`. The JQL construction in `handle_list()` is refactored to a unified flow where all flags (including `--jql`) compose via AND. The implicit `assignee = currentUser()` in scrum/kanban paths is removed.
+ +**Tech Stack:** Rust, clap 4 (derive), reqwest, serde, wiremock (tests) + +**Spec:** `docs/superpowers/specs/2026-03-24-common-filter-flags-design.md` + +--- + +## File Structure + +| File | Responsibility | Change | +|------|---------------|--------| +| `src/jql.rs` | JQL utilities | Add `validate_duration()` | +| `src/api/jira/users.rs` | User API methods | Add `search_users()` | +| `src/cli/mod.rs` | CLI argument definitions | Add `assignee`, `reporter`, `recent` to `IssueCommand::List` | +| `src/cli/issue/helpers.rs` | Issue command helpers | Add `resolve_user()` | +| `src/cli/issue/list.rs` | Issue list handler | Refactor JQL construction, remove implicit `assignee = currentUser()`, compose all flags, integrate new flags | +| `tests/issue_commands.rs` | Integration tests | Add user search + filter composition tests | +| `tests/common/fixtures.rs` | Test fixtures | Add `user_search_response()` | + +**Not changed:** `src/cli/issue/assets.rs` (does not call `search_issues`), `src/config.rs`, `src/cache.rs` + +--- + +### Task 1: Add `validate_duration()` to `src/jql.rs` + +**Files:** +- Modify: `src/jql.rs` + +- [ ] **Step 1: Write unit tests** + +Add inside the first `mod tests` block in `src/jql.rs`, after the `strip_order_by_trims_whitespace` test: + +```rust + #[test] + fn validate_duration_valid_days() { + assert!(validate_duration("7d").is_ok()); + } + + #[test] + fn validate_duration_valid_weeks() { + assert!(validate_duration("4w").is_ok()); + } + + #[test] + fn validate_duration_valid_months_uppercase() { + assert!(validate_duration("2M").is_ok()); + } + + #[test] + fn validate_duration_valid_years() { + assert!(validate_duration("1y").is_ok()); + } + + #[test] + fn validate_duration_valid_hours() { + assert!(validate_duration("5h").is_ok()); + } + + #[test] + fn validate_duration_valid_minutes() { + assert!(validate_duration("10m").is_ok()); + } + + #[test] + fn validate_duration_valid_zero() { + assert!(validate_duration("0d").is_ok()); + } + + 
#[test] + fn validate_duration_invalid_unit() { + assert!(validate_duration("7x").is_err()); + } + + #[test] + fn validate_duration_reversed() { + assert!(validate_duration("d7").is_err()); + } + + #[test] + fn validate_duration_empty() { + assert!(validate_duration("").is_err()); + } + + #[test] + fn validate_duration_combined_units() { + assert!(validate_duration("4w2d").is_err()); + } + + #[test] + fn validate_duration_no_digits() { + assert!(validate_duration("d").is_err()); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib validate_duration` +Expected: FAIL — `validate_duration` not found + +- [ ] **Step 3: Implement `validate_duration()`** + +Add to `src/jql.rs` after the `strip_order_by` function, before the first `#[cfg(test)]`: + +```rust +/// Validate a JQL relative date duration string. +/// +/// JQL relative dates use the format `` where unit is one of: +/// `y` (years), `M` (months), `w` (weeks), `d` (days), `h` (hours), `m` (minutes). +/// Units are case-sensitive — `M` is months, `m` is minutes. +/// Combined units like `4w2d` are not supported by Jira. +pub fn validate_duration(s: &str) -> Result<(), String> { + if s.len() < 2 { + return Err(format!( + "Invalid duration '{s}'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)." + )); + } + let (digits, unit) = s.split_at(s.len() - 1); + if digits.is_empty() || !digits.chars().all(|c| c.is_ascii_digit()) { + return Err(format!( + "Invalid duration '{s}'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)." + )); + } + if !matches!(unit, "y" | "M" | "w" | "d" | "h" | "m") { + return Err(format!( + "Invalid duration '{s}'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)." 
+ )); + } + Ok(()) +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test --lib validate_duration` +Expected: PASS (12 tests) + +- [ ] **Step 5: Commit** + +```bash +git add src/jql.rs +git commit -m "feat: add validate_duration for JQL relative date validation" +``` + +--- + +### Task 2: Add `search_users()` API method + +**Files:** +- Modify: `src/api/jira/users.rs` + +- [ ] **Step 1: Write the `search_users()` method** + +The user search endpoint response format is uncertain (may be flat array `[User, ...]` or paginated `{ values: [...] }`). Add a method that handles both by using `serde_json::Value` and extracting users manually: + +```rust + /// Search for users by name or email prefix. + /// + /// Returns active and inactive users — caller should filter by `active` field. + pub async fn search_users(&self, query: &str) -> Result> { + let path = format!( + "/rest/api/3/user/search?query={}", + urlencoding::encode(query) + ); + // The endpoint may return a flat array or a paginated object with "values". + let raw: serde_json::Value = self.get(&path).await?; + let users: Vec = if raw.is_array() { + serde_json::from_value(raw)? + } else if let Some(values) = raw.get("values") { + serde_json::from_value(values.clone())? + } else { + Vec::new() + }; + Ok(users) + } +``` + +Note: `serde_json::Value` and `serde_json::from_value` are used fully qualified — no additional `use` statement needed. Tests for `search_users()` are deferred to Task 5 because it's a thin HTTP wrapper requiring wiremock. 
+ +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 3: Commit** + +```bash +git add src/api/jira/users.rs +git commit -m "feat: add search_users API method for user name lookup" +``` + +--- + +### Task 3: Add `resolve_user()` helper + +**Files:** +- Modify: `src/cli/issue/helpers.rs` + +- [ ] **Step 1: Write unit test for `me` keyword resolution** + +Add at the bottom of `src/cli/issue/helpers.rs`: + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn is_me_keyword_lowercase() { + assert!(is_me_keyword("me")); + } + + #[test] + fn is_me_keyword_uppercase() { + assert!(is_me_keyword("ME")); + } + + #[test] + fn is_me_keyword_mixed_case() { + assert!(is_me_keyword("Me")); + } + + #[test] + fn is_me_keyword_not_me() { + assert!(!is_me_keyword("Jane")); + } +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib is_me_keyword` +Expected: FAIL — `is_me_keyword` not found + +- [ ] **Step 3: Add `is_me_keyword()` helper and `resolve_user()` function** + +Add to `src/cli/issue/helpers.rs`: + +```rust +/// Check if a user input string is the "me" keyword (case-insensitive). +fn is_me_keyword(input: &str) -> bool { + input.eq_ignore_ascii_case("me") +} + +/// Resolve a user flag value to a JQL fragment. +/// +/// - `"me"` (case-insensitive) → `"currentUser()"` (no API call) +/// - Any other value → search users API, filter active, disambiguate via partial_match +/// +/// Returns the JQL value to use (either `"currentUser()"` or an unquoted accountId). 
+pub(super) async fn resolve_user( + client: &JiraClient, + name: &str, + no_input: bool, +) -> Result { + if is_me_keyword(name) { + return Ok("currentUser()".to_string()); + } + + let users = client.search_users(name).await?; + let active_users: Vec<_> = users + .into_iter() + .filter(|u| u.active == Some(true)) + .collect(); + + if active_users.is_empty() { + anyhow::bail!( + "No active user found matching \"{}\". The user may be deactivated.", + name + ); + } + + if active_users.len() == 1 { + return Ok(active_users[0].account_id.clone()); + } + + // Multiple matches — disambiguate + let display_names: Vec = active_users.iter().map(|u| u.display_name.clone()).collect(); + match crate::partial_match::partial_match(name, &display_names) { + crate::partial_match::MatchResult::Exact(matched_name) => { + let user = active_users + .iter() + .find(|u| u.display_name == matched_name) + .expect("matched name must exist in active_users"); + Ok(user.account_id.clone()) + } + crate::partial_match::MatchResult::Ambiguous(matches) => { + if no_input { + anyhow::bail!( + "Multiple users match \"{}\": {}. Use a more specific name.", + name, + matches.join(", ") + ); + } + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let user = active_users + .iter() + .find(|u| &u.display_name == selected_name) + .expect("selected name must exist in active_users"); + Ok(user.account_id.clone()) + } + crate::partial_match::MatchResult::None(_) => { + anyhow::bail!( + "No active user found matching \"{}\". 
The user may be deactivated.",
+                name
+            )
+        }
+    }
+}
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+Run: `cargo test --lib is_me_keyword`
+Expected: PASS (4 tests)
+
+- [ ] **Step 5: Run clippy**
+
+Run: `cargo clippy --all --all-features --tests -- -D warnings`
+Expected: No warnings
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/cli/issue/helpers.rs
+git commit -m "feat: add resolve_user helper for --assignee/--reporter name resolution"
+```
+
+---
+
+### Task 4: Add CLI flags and refactor JQL construction
+
+This is the core task. It adds the three new flags, refactors JQL construction to a unified flow, removes implicit `assignee = currentUser()`, and makes `--jql` compose with filter flags. All existing tests must be updated atomically.
+
+**Files:**
+- Modify: `src/cli/mod.rs`
+- Modify: `src/cli/issue/list.rs`
+
+- [ ] **Step 1: Add three new flags to `IssueCommand::List`**
+
+In `src/cli/mod.rs`, inside the `List` variant (after the `all` field at line 168), add:
+
+```rust
+    /// Filter by assignee ("me" for current user, or a name to search)
+    #[arg(long)]
+    assignee: Option<String>,
+    /// Filter by reporter ("me" for current user, or a name to search)
+    #[arg(long)]
+    reporter: Option<String>,
+    /// Show issues created within duration (e.g., 7d, 4w, 2M)
+    #[arg(long)]
+    recent: Option<String>,
+```
+
+- [ ] **Step 2: Write unit tests for JQL composition**
+
+Add to the `mod tests` block in `src/cli/issue/list.rs`:
+
+```rust
+    #[test]
+    fn build_jql_parts_assignee_me() {
+        let parts = build_filter_clauses(
+            Some("currentUser()"), // assignee
+            None,                  // reporter
+            None,                  // status
+            None,                  // team
+            None,                  // recent
+        );
+        assert_eq!(parts, vec!["assignee = currentUser()"]);
+    }
+
+    #[test]
+    fn build_jql_parts_reporter_account_id() {
+        let parts = build_filter_clauses(
+            None,
+            Some("5b10ac8d82e05b22cc7d4ef5"),
+            None,
+            None,
+            None,
+        );
+        assert_eq!(parts, vec!["reporter = 5b10ac8d82e05b22cc7d4ef5"]);
+    }
+
+    #[test]
+    fn build_jql_parts_recent() {
+        let 
parts = build_filter_clauses(None, None, None, None, Some("7d"));
+        assert_eq!(parts, vec!["created >= -7d"]);
+    }
+
+    #[test]
+    fn build_jql_parts_all_filters() {
+        let parts = build_filter_clauses(
+            Some("currentUser()"),
+            Some("currentUser()"),
+            Some("In Progress"),
+            Some(r#"customfield_10001 = "uuid-123""#),
+            Some("30d"),
+        );
+        assert_eq!(parts.len(), 5);
+        assert!(parts.contains(&"assignee = currentUser()".to_string()));
+        assert!(parts.contains(&"reporter = currentUser()".to_string()));
+        assert!(parts.contains(&"status = \"In Progress\"".to_string()));
+        assert!(parts.contains(&r#"customfield_10001 = "uuid-123""#.to_string()));
+        assert!(parts.contains(&"created >= -30d".to_string()));
+    }
+
+    #[test]
+    fn build_jql_parts_empty() {
+        let parts = build_filter_clauses(None, None, None, None, None);
+        assert!(parts.is_empty());
+    }
+```
+
+- [ ] **Step 3: Run tests to verify they fail**
+
+Run: `cargo test --lib build_jql_parts`
+Expected: FAIL — `build_filter_clauses` not found
+
+- [ ] **Step 4: Implement `build_filter_clauses()` and refactor `handle_list()`**
+
+Add `build_filter_clauses()` to `src/cli/issue/list.rs` near the other helper functions:
+
+```rust
+/// Build JQL filter clauses from resolved flag values.
+fn build_filter_clauses(
+    assignee_jql: Option<&str>,
+    reporter_jql: Option<&str>,
+    status: Option<&str>,
+    team_clause: Option<&str>,
+    recent: Option<&str>,
+) -> Vec<String> {
+    let mut parts = Vec::new();
+    if let Some(a) = assignee_jql {
+        parts.push(format!("assignee = {a}"));
+    }
+    if let Some(r) = reporter_jql {
+        parts.push(format!("reporter = {r}"));
+    }
+    if let Some(s) = status {
+        parts.push(format!("status = \"{}\"", crate::jql::escape_value(s)));
+    }
+    if let Some(t) = team_clause {
+        parts.push(t.to_string());
+    }
+    if let Some(d) = recent {
+        parts.push(format!("created >= -{d}"));
+    }
+    parts
+}
+```
+
+Now replace the entire JQL construction section of `handle_list()` (lines 28-131). 
Here is the complete refactored function from destructuring through `effective_jql`: + +```rust + let IssueCommand::List { + jql, + status, + team, + limit, + all, + assignee, + reporter, + recent, + points: show_points, + assets: show_assets, + } = command + else { + unreachable!() + }; + + let effective_limit = resolve_effective_limit(limit, all); + + // Validate --recent duration format early + if let Some(ref d) = recent { + crate::jql::validate_duration(d).map_err(|e| JrError::UserError(e))?; + } + + // Resolve --assignee and --reporter to JQL values + let assignee_jql = if let Some(ref name) = assignee { + Some(helpers::resolve_user(client, name, no_input).await?) + } else { + None + }; + let reporter_jql = if let Some(ref name) = reporter { + Some(helpers::resolve_user(client, name, no_input).await?) + } else { + None + }; + + let sp_field_id = config.global.fields.story_points_field_id.as_deref(); + let mut extra: Vec<&str> = sp_field_id.iter().copied().collect(); + + // Resolve team name to (field_id, uuid) before building JQL + let resolved_team = if let Some(ref team_name) = team { + Some(helpers::resolve_team_field(config, client, team_name, no_input).await?) 
+ } else { + None + }; + + // Build pre-formatted team clause for build_filter_clauses + let team_clause = resolved_team.as_ref().map(|(field_id, team_uuid)| { + format!( + "{} = \"{}\"", + field_id, + crate::jql::escape_value(team_uuid) + ) + }); + + // Build filter clauses from all flag values + let filter_parts = build_filter_clauses( + assignee_jql.as_deref(), + reporter_jql.as_deref(), + status.as_deref(), + team_clause.as_deref(), + recent.as_deref(), + ); + let has_filters = !filter_parts.is_empty(); + + // Build base JQL + order by + let (base_parts, order_by): (Vec, &str) = if let Some(raw_jql) = jql { + // --jql provided: use as base, filter clauses will be appended + (vec![raw_jql], "updated DESC") + } else { + let board_id = config.project.board_id; + let project_key = config.project_key(project_override); + + if let Some(bid) = board_id { + match client.get_board_config(bid).await { + Ok(board_config) => { + let board_type = board_config.board_type.to_lowercase(); + if board_type == "scrum" { + match client.list_sprints(bid, Some("active")).await { + Ok(sprints) if !sprints.is_empty() => { + let sprint = &sprints[0]; + (vec![format!("sprint = {}", sprint.id)], "rank ASC") + } + _ => { + // No active sprint — fall through to fallback + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") + } + } + } else { + // Kanban: statusCategory != Done, no implicit assignee + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + parts.push("statusCategory != Done".into()); + (parts, "rank ASC") + } + } + Err(_) => { + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") + } + } + } else { + let mut parts = Vec::new(); + if 
let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") + } + }; + + // Combine base + filters + let mut all_parts = base_parts; + all_parts.extend(filter_parts); + + // Guard against unbounded query + if all_parts.is_empty() { + return Err(JrError::UserError( + "No project or filters specified. Use --project, --assignee, --reporter, --status, --team, or --recent. \ + You can also set a default project in .jr.toml or run \"jr init\"." + .into(), + ) + .into()); + } + + let where_clause = all_parts.join(" AND "); + let effective_jql = format!("{where_clause} ORDER BY {order_by}"); +``` + +The rest of `handle_list()` (from the `cmdb_field_ids` section onward) stays unchanged. + +- [ ] **Step 5: Update existing tests and add new guard test** + +The `build_fallback_jql` function is now replaced by the inline guard in `handle_list()`. Remove `build_fallback_jql` and all its tests (`fallback_jql_order_by_not_joined_with_and`, `fallback_jql_with_team_has_valid_order_by`, `fallback_jql_with_all_filters`, `fallback_jql_errors_when_no_filters`, `fallback_jql_with_status_only`, `fallback_jql_escapes_special_chars_in_status`). + +The `build_filter_clauses` unit tests (Step 2) cover the JQL assembly logic that `build_fallback_jql` previously handled. 
+ +Add a test verifying the `--jql` + filter flag composition: + +```rust + #[test] + fn build_jql_parts_jql_plus_status_compose() { + // --jql "type = Bug" --status "Done" should AND together + let filter = build_filter_clauses( + None, + None, + Some("Done"), + None, + None, + ); + // In handle_list, base_parts = ["type = Bug"], filter appended + let mut all_parts = vec!["type = Bug".to_string()]; + all_parts.extend(filter); + let jql = all_parts.join(" AND "); + assert_eq!(jql, r#"type = Bug AND status = "Done""#); + } +``` + +- [ ] **Step 6: Run all tests** + +Run: `cargo test --all-features` +Expected: ALL PASS + +- [ ] **Step 7: Run clippy** + +Run: `cargo clippy --all --all-features --tests -- -D warnings` +Expected: No warnings + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/list.rs +git commit -m "feat: add --assignee, --reporter, --recent flags with unified JQL composition (#44)" +``` + +--- + +### Task 5: Integration tests + +**Files:** +- Modify: `tests/common/fixtures.rs` +- Modify: `tests/issue_commands.rs` + +- [ ] **Step 1: Add test fixture helper** + +In `tests/common/fixtures.rs`, add: + +```rust +/// User search response — flat array of User objects. 
+pub fn user_search_response(users: Vec<(&str, &str, bool)>) -> Value {
+    let user_objects: Vec<Value> = users
+        .into_iter()
+        .map(|(account_id, display_name, active)| {
+            json!({
+                "accountId": account_id,
+                "displayName": display_name,
+                "emailAddress": format!("{}@test.com", display_name.to_lowercase().replace(' ', ".")),
+                "active": active,
+            })
+        })
+        .collect();
+    json!(user_objects)
+}
+```
+
+- [ ] **Step 2: Write integration tests**
+
+In `tests/issue_commands.rs`, add:
+
+```rust
+#[tokio::test]
+async fn test_search_users_single_result() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/user/search"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(
+            common::fixtures::user_search_response(vec![
+                ("acc-123", "Jane Doe", true),
+            ]),
+        ))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let users = client.search_users("Jane").await.unwrap();
+    assert_eq!(users.len(), 1);
+    assert_eq!(users[0].account_id, "acc-123");
+    assert_eq!(users[0].display_name, "Jane Doe");
+    assert_eq!(users[0].active, Some(true));
+}
+
+#[tokio::test]
+async fn test_search_users_empty() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/user/search"))
+        .respond_with(
+            ResponseTemplate::new(200).set_body_json(
+                common::fixtures::user_search_response(vec![]),
+            ),
+        )
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let users = client.search_users("Nobody").await.unwrap();
+    assert!(users.is_empty());
+}
+
+#[tokio::test]
+async fn test_search_users_multiple() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/user/search"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(
+            common::fixtures::user_search_response(vec![
+                ("acc-1", "Jane Doe", 
true), + ("acc-2", "Jane Smith", true), + ("acc-3", "Jane Inactive", false), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client.search_users("Jane").await.unwrap(); + assert_eq!(users.len(), 3); + // Caller is responsible for filtering active users +} +``` + +- [ ] **Step 3: Add `resolve_user()` integration tests** + +In `tests/issue_commands.rs`, add: + +```rust +#[tokio::test] +async fn test_resolve_user_me_keyword() { + // "me" should return "currentUser()" without making any API call + // No mock server needed — the function short-circuits + let server = MockServer::start().await; + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = jr::cli::issue::helpers::resolve_user(&client, "me", false).await.unwrap(); + assert_eq!(result, "currentUser()"); +} + +#[tokio::test] +async fn test_resolve_user_single_active_match() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![ + ("acc-123", "Jane Doe", true), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = jr::cli::issue::helpers::resolve_user(&client, "Jane", true).await.unwrap(); + assert_eq!(result, "acc-123"); +} + +#[tokio::test] +async fn test_resolve_user_no_active_match() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![ + ("acc-123", "Jane Doe", false), // inactive + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic 
dGVzdDp0ZXN0".to_string());
+    let result = jr::cli::issue::helpers::resolve_user(&client, "Jane", true).await;
+    assert!(result.is_err());
+    assert!(result.unwrap_err().to_string().contains("No active user found"));
+}
+```
+
+Note: `resolve_user()` needs to be `pub` (not just `pub(super)`) for integration tests to access it. Update visibility in `helpers.rs` if needed, or test through the CLI binary instead.
+
+- [ ] **Step 4: Run integration tests**
+
+Run: `cargo test --test issue_commands`
+Expected: ALL PASS
+
+- [ ] **Step 5: Run full test suite + clippy**
+
+Run: `cargo test --all-features && cargo clippy --all --all-features --tests -- -D warnings`
+Expected: ALL PASS, no warnings
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add tests/common/fixtures.rs tests/issue_commands.rs
+git commit -m "test: add integration tests for user search and filter flags"
+```
+
+---
+
+### Task 6: Final verification and format check
+
+**Files:** None (verification only)
+
+- [ ] **Step 1: Run cargo fmt**
+
+Run: `cargo fmt --all -- --check`
+Expected: No formatting issues (if there are, run `cargo fmt --all` and include in commit)
+
+- [ ] **Step 2: Run full CI-equivalent check**
+
+Run: `cargo fmt --all -- --check && cargo clippy --all --all-features --tests -- -D warnings && cargo test --all-features`
+Expected: All three pass
+
+- [ ] **Step 3: Fix any issues found, commit if needed**
+
+If `cargo fmt` requires changes:
+```bash
+cargo fmt --all
+git add -u
+git commit -m "style: format code"
+```
diff --git a/docs/superpowers/plans/2026-03-25-jql-project-scope.md b/docs/superpowers/plans/2026-03-25-jql-project-scope.md
new file mode 100644
index 0000000..dfea9d3
--- /dev/null
+++ b/docs/superpowers/plans/2026-03-25-jql-project-scope.md
@@ -0,0 +1,388 @@
+# Fix `--jql` + `--project` Scope Composition — Implementation Plan
+
+> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement 
this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Fix bug #54 where `--jql` overrides `--project` scope, so that project and JQL compose with AND logic. + +**Architecture:** Extract project-key resolution before the JQL vs board-aware branch in `handle_list`. Fix `strip_order_by` to handle position-0 ORDER BY. Add a pure `build_jql_base_parts` function for testability. + +**Tech Stack:** Rust, clap 4, wiremock (tests) + +**Spec:** `docs/superpowers/specs/2026-03-25-jql-project-scope-design.md` + +--- + +### Task 1: Fix `strip_order_by` to handle ORDER BY at position 0 + +**Files:** +- Modify: `src/jql.rs:40-47` (the `strip_order_by` function) +- Test: `src/jql.rs` (inline `#[cfg(test)]` module, after line 105) + +- [ ] **Step 1: Write the failing test** + +In `src/jql.rs`, inside the existing `mod tests` block, after the `strip_order_by_trims_whitespace` test (around line 105), add: + +```rust + #[test] + fn strip_order_by_at_position_zero() { + assert_eq!(strip_order_by("ORDER BY created DESC"), ""); + } + + #[test] + fn strip_order_by_at_position_zero_lowercase() { + assert_eq!(strip_order_by("order by rank ASC"), ""); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib jql::tests::strip_order_by_at_position` +Expected: FAIL — both tests fail because the current implementation only searches for `" ORDER BY"` (with leading space) and misses `"ORDER BY"` at position 0. 
+ +- [ ] **Step 3: Fix `strip_order_by`** + +Replace the current `strip_order_by` function in `src/jql.rs:40-47` with: + +```rust +pub fn strip_order_by(jql: &str) -> &str { + let upper = jql.to_ascii_uppercase(); + if let Some(pos) = upper.find(" ORDER BY") { + jql[..pos].trim_end() + } else if upper.starts_with("ORDER BY") { + "" + } else { + jql + } +} +``` + +- [ ] **Step 4: Run all `jql` tests to verify everything passes** + +Run: `cargo test --lib jql::tests` +Expected: All tests PASS (existing tests unchanged, new tests pass). + +- [ ] **Step 5: Commit** + +```bash +git add src/jql.rs +git commit -m "fix: handle ORDER BY at position 0 in strip_order_by" +``` + +--- + +### Task 2: Extract `build_jql_base_parts` and fix project scoping with `--jql` + +**Files:** +- Modify: `src/cli/issue/list.rs:89-144` (the `if let Some(raw_jql) = jql` block) +- Test: `src/cli/issue/list.rs` (inline `#[cfg(test)]` module) + +**Context:** The current code in `handle_list` resolves the project key *inside* the `else` branch (line 96). The fix hoists `project_key` resolution above the `if/else` so it's available in both branches. To make the JQL composition directly testable, extract the `--jql` branch logic into a pure function. 
+ +- [ ] **Step 1: Write failing unit tests** + +In `src/cli/issue/list.rs`, inside the existing `mod tests` block, add these tests at the end (before the closing `}`): + +```rust + #[test] + fn build_jql_base_parts_jql_with_project() { + let (parts, order_by) = + build_jql_base_parts(Some("priority = Highest"), Some("PROJ")); + assert_eq!( + parts, + vec![ + "project = \"PROJ\"".to_string(), + "priority = Highest".to_string(), + ] + ); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_without_project() { + let (parts, order_by) = + build_jql_base_parts(Some("priority = Highest"), None); + assert_eq!(parts, vec!["priority = Highest".to_string()]); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_with_order_by_and_project() { + let (parts, order_by) = + build_jql_base_parts(Some("priority = Highest ORDER BY created DESC"), Some("PROJ")); + assert_eq!( + parts, + vec![ + "project = \"PROJ\"".to_string(), + "priority = Highest".to_string(), + ] + ); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_order_by_only_with_project() { + let (parts, order_by) = + build_jql_base_parts(Some("ORDER BY created DESC"), Some("PROJ")); + assert_eq!(parts, vec!["project = \"PROJ\"".to_string()]); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_order_by_only_no_project() { + let (parts, order_by) = + build_jql_base_parts(Some("ORDER BY created DESC"), None); + assert!(parts.is_empty()); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_no_jql() { + let (parts, order_by) = build_jql_base_parts(None, Some("PROJ")); + assert!(parts.is_empty()); + assert_eq!(order_by, ""); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib cli::issue::list::tests::build_jql_base_parts` +Expected: FAIL — `build_jql_base_parts` function does not exist yet. 
+
+- [ ] **Step 3: Implement `build_jql_base_parts`**
+
+In `src/cli/issue/list.rs`, add this function above `handle_list` (after the imports, around line 17):
+
+```rust
+/// Build base JQL parts when `--jql` is provided.
+///
+/// Returns `(base_parts, order_by)`. When `jql` is `None` (no `--jql` flag),
+/// returns empty parts and an empty order_by — the caller handles the
+/// board-aware logic in that case.
+fn build_jql_base_parts(
+    jql: Option<&str>,
+    project_key: Option<&str>,
+) -> (Vec<String>, &'static str) {
+    let Some(raw_jql) = jql else {
+        return (Vec::new(), "");
+    };
+
+    let stripped = crate::jql::strip_order_by(raw_jql);
+    let mut parts = Vec::new();
+
+    if let Some(pk) = project_key {
+        parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk)));
+    }
+    if !stripped.is_empty() {
+        parts.push(stripped.to_string());
+    }
+
+    (parts, "updated DESC")
+}
+```
+
+- [ ] **Step 4: Run unit tests to verify they pass**
+
+Run: `cargo test --lib cli::issue::list::tests::build_jql_base_parts`
+Expected: All 6 tests PASS. 
+ +- [ ] **Step 5: Refactor `handle_list` to use `build_jql_base_parts`** + +In `src/cli/issue/list.rs`, replace lines 89-144 (the entire `// Build base JQL + order by` block) with: + +```rust + // Resolve project key once, before the JQL vs board-aware branch + let project_key = config.project_key(project_override); + + // Build base JQL + order by + let (base_parts, order_by): (Vec, &str) = { + let (jql_parts, jql_order) = + build_jql_base_parts(jql.as_deref(), project_key.as_deref()); + if jql.is_some() { + (jql_parts, jql_order) + } else { + let board_id = config.project.board_id; + + if let Some(bid) = board_id { + match client.get_board_config(bid).await { + Ok(board_config) => { + let board_type = board_config.board_type.to_lowercase(); + if board_type == "scrum" { + match client.list_sprints(bid, Some("active")).await { + Ok(sprints) if !sprints.is_empty() => { + let sprint = &sprints[0]; + (vec![format!("sprint = {}", sprint.id)], "rank ASC") + } + _ => { + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") + } + } + } else { + // Kanban: statusCategory != Done, no implicit assignee + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + parts.push("statusCategory != Done".into()); + (parts, "rank ASC") + } + } + Err(_) => { + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") + } + } + } else { + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") + } + } + }; +``` + +Note: The `else` branch is structurally the same as before — it now references the hoisted `project_key` variable instead 
of calling `config.project_key(project_override)` inline. + +- [ ] **Step 6: Run all tests** + +Run: `cargo test` +Expected: All tests PASS (unit + integration). No behavior change for non-`--jql` paths. + +- [ ] **Step 7: Run clippy and format** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no format issues. + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "fix: compose --jql with --project scope using AND logic (#54)" +``` + +--- + +### Task 3: Add integration test for `--jql` + `--project` composition + +**Files:** +- Modify: `tests/issue_commands.rs` (add import + new test at end of file) + +**Context:** The existing integration tests call `client.search_issues(jql, ...)` with a pre-built JQL string. This test verifies the composed JQL is correctly sent in the POST body to the API using `body_partial_json` to match the `jql` field in the request body, ensuring the mock only matches if the correct composed JQL is sent. 
+ +- [ ] **Step 1: Add `body_partial_json` import** + +In `tests/issue_commands.rs`, update the import line (line 4) from: + +```rust +use wiremock::matchers::{method, path}; +``` + +to: + +```rust +use wiremock::matchers::{body_partial_json, method, path}; +``` + +- [ ] **Step 2: Write the integration test** + +Add at the end of `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_search_issues_jql_with_project_scope() { + let server = MockServer::start().await; + + // The mock only matches if the POST body contains the expected composed JQL + let expected_jql = r#"project = "PROJ" AND priority = Highest ORDER BY updated DESC"#; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "jql": expected_jql + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "High priority issue", + "To Do", + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // This is the JQL that handle_list would compose when given + // --project PROJ --jql "priority = Highest" + let result = client.search_issues(expected_jql, Some(30), &[]).await.unwrap(); + assert_eq!(result.issues.len(), 1); + assert_eq!(result.issues[0].key, "PROJ-1"); +} +``` + +- [ ] **Step 3: Run the test to verify it passes** + +Run: `cargo test --test issue_commands test_search_issues_jql_with_project_scope` +Expected: PASS — wiremock matches the POST body containing the composed JQL. + +- [ ] **Step 4: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. 
+ +- [ ] **Step 5: Commit** + +```bash +git add tests/issue_commands.rs +git commit -m "test: add integration test for --jql + --project composition (#54)" +``` + +--- + +### Task 4: Format and final verification + +**Files:** +- All modified files from Tasks 1-3 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all` + +- [ ] **Step 4: If formatting changed anything, commit** + +Run: `cargo fmt --all -- --check` +If it reports changes: +```bash +git add src/jql.rs src/cli/issue/list.rs tests/issue_commands.rs +git commit -m "style: format code" +``` diff --git a/docs/superpowers/plans/2026-03-25-open-flag.md b/docs/superpowers/plans/2026-03-25-open-flag.md new file mode 100644 index 0000000..2fefb84 --- /dev/null +++ b/docs/superpowers/plans/2026-03-25-open-flag.md @@ -0,0 +1,256 @@ +# `--open` Flag Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `--open` boolean flag to `jr issue list` that appends `statusCategory != "Done"` to the JQL query, excluding completed issues. + +**Architecture:** Single flag addition threading through the existing JQL composition pipeline. `--open` is passed to `build_filter_clauses()` which appends one clause. Conflicts with `--status` via clap's `conflicts_with`. 
+ +**Tech Stack:** Rust, clap 4 derive macros, existing JQL composition in `list.rs` + +**Spec:** `docs/superpowers/specs/2026-03-25-open-flag-design.md` + +--- + +### Task 1: Add `--open` flag to CLI definition and thread through handler + +**Files:** +- Modify: `src/cli/mod.rs:153-184` (add `open` field to `IssueCommand::List`) +- Modify: `src/cli/issue/list.rs:28-42` (destructure `open`) +- Modify: `src/cli/issue/list.rs:78-85` (pass `open` to `build_filter_clauses`) +- Modify: `src/cli/issue/list.rs:322-346` (add `open` parameter, append clause) +- Modify: `src/cli/issue/list.rs:148-156` (update guard error message) + +- [ ] **Step 1: Write failing tests for `build_filter_clauses` with `open`** + +Add three tests to the existing `#[cfg(test)] mod tests` block at the bottom of `src/cli/issue/list.rs`: + +```rust +#[test] +fn build_jql_parts_open() { + let parts = build_filter_clauses(None, None, None, None, None, true); + assert_eq!(parts, vec!["statusCategory != \"Done\""]); +} + +#[test] +fn build_jql_parts_open_with_assignee() { + let parts = build_filter_clauses(Some("currentUser()"), None, None, None, None, true); + assert_eq!(parts.len(), 2); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"statusCategory != \"Done\"".to_string())); +} + +#[test] +fn build_jql_parts_all_filters_with_open() { + let parts = build_filter_clauses( + Some("currentUser()"), + Some("currentUser()"), + None, // status conflicts with open, so None here + Some(r#"customfield_10001 = "uuid-123""#), + Some("30d"), + true, + ); + assert_eq!(parts.len(), 5); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"reporter = currentUser()".to_string())); + assert!(parts.contains(&"statusCategory != \"Done\"".to_string())); + assert!(parts.contains(&r#"customfield_10001 = "uuid-123""#.to_string())); + assert!(parts.contains(&"created >= -30d".to_string())); +} +``` + +- [ ] **Step 2: Run tests to verify 
they fail**
+
+Run: `cargo test --lib build_jql_parts_open 2>&1 | head -20`
+Expected: Compilation error — `build_filter_clauses` doesn't accept 6 arguments yet.
+
+- [ ] **Step 3: Update `build_filter_clauses` signature, body, and all existing callers**
+
+In `src/cli/issue/list.rs`, change `build_filter_clauses` (line 322-346) to accept `open: bool` and append the clause. **Also update all existing call sites and test calls in the same step** to avoid compilation failures:
+
+```rust
+/// Build JQL filter clauses from resolved flag values.
+fn build_filter_clauses(
+    assignee_jql: Option<&str>,
+    reporter_jql: Option<&str>,
+    status: Option<&str>,
+    team_clause: Option<&str>,
+    recent: Option<&str>,
+    open: bool,
+) -> Vec<String> {
+    let mut parts = Vec::new();
+    if let Some(a) = assignee_jql {
+        parts.push(format!("assignee = {a}"));
+    }
+    if let Some(r) = reporter_jql {
+        parts.push(format!("reporter = {r}"));
+    }
+    if let Some(s) = status {
+        parts.push(format!("status = \"{}\"", crate::jql::escape_value(s)));
+    }
+    if open {
+        parts.push("statusCategory != \"Done\"".to_string());
+    }
+    if let Some(t) = team_clause {
+        parts.push(t.to_string());
+    }
+    if let Some(d) = recent {
+        parts.push(format!("created >= -{d}"));
+    }
+    parts
+}
+```
+
+Also update the call site at `src/cli/issue/list.rs:79-85` to pass `false` for now:
+
+```rust
+    let filter_parts = build_filter_clauses(
+        assignee_jql.as_deref(),
+        reporter_jql.as_deref(),
+        status.as_deref(),
+        team_clause.as_deref(),
+        recent.as_deref(),
+        false, // will be replaced with `open` in step 6
+    );
+```
+
+Update all existing test calls that pass 5 args to pass 6 (add `false` as the last argument):
+
+- `build_jql_parts_assignee_me`: `build_filter_clauses(Some("currentUser()"), None, None, None, None, false);`
+- `build_jql_parts_reporter_account_id`: `build_filter_clauses(None, Some("5b10ac8d82e05b22cc7d4ef5"), None, None, None, false);`
+- `build_jql_parts_recent`: `build_filter_clauses(None, None, None, None, 
Some("7d"), false);` +- `build_jql_parts_all_filters`: `build_filter_clauses(Some("currentUser()"), Some("currentUser()"), Some("In Progress"), Some(r#"customfield_10001 = "uuid-123""#), Some("30d"), false);` +- `build_jql_parts_empty`: `build_filter_clauses(None, None, None, None, None, false);` +- `build_jql_parts_jql_plus_status_compose`: `build_filter_clauses(None, None, Some("Done"), None, None, false);` +- `build_jql_parts_status_escaping`: `build_filter_clauses(None, None, Some(r#"He said "hi" \o/"#), None, None, false);` + +- [ ] **Step 4: Run tests to verify new tests pass** + +Run: `cargo test --lib build_jql_parts 2>&1` +Expected: All `build_jql_parts_*` tests pass (10 tests total — 7 existing + 3 new). + +- [ ] **Step 5: Add `--open` flag to clap definition** + +In `src/cli/mod.rs`, add the `open` field to `IssueCommand::List` (after `recent`, before `points`): + +```rust + /// Show only open issues (excludes Done status category) + #[arg(long, conflicts_with = "status")] + open: bool, +``` + +- [ ] **Step 6: Thread `open` through `handle_list`** + +In `src/cli/issue/list.rs`, update the destructure (line 28-42) to include `open`: + +```rust + let IssueCommand::List { + jql, + status, + team, + limit, + all, + assignee, + reporter, + recent, + open, + points: show_points, + assets: show_assets, + } = command + else { + unreachable!() + }; +``` + +Replace the `false` placeholder in the `build_filter_clauses` call with `open`: + +```rust + let filter_parts = build_filter_clauses( + assignee_jql.as_deref(), + reporter_jql.as_deref(), + status.as_deref(), + team_clause.as_deref(), + recent.as_deref(), + open, + ); +``` + +- [ ] **Step 7: Update guard error message** + +In `src/cli/issue/list.rs` (line 151), add `--open` to the error message: + +```rust + "No project or filters specified. Use --project, --assignee, --reporter, --status, --open, --team, --recent, or --jql. \ + You can also set a default project in .jr.toml or run \"jr init\"." 
+``` + +- [ ] **Step 8: Run full test suite** + +Run: `cargo test 2>&1` +Expected: All tests pass. No compilation errors. + +Run: `cargo clippy -- -D warnings 2>&1` +Expected: No warnings. + +- [ ] **Step 9: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/list.rs +git commit -m "feat: add --open flag to exclude Done issues from jr issue list (#45)" +``` + +--- + +### Task 2: Update documentation + +**Files:** +- Modify: `README.md:90` (add `--open` to command table) + +- [ ] **Step 1: Update README** + +In `README.md`, update the `jr issue list` row (line 90) to include `--open`: + +```markdown +| `jr issue list` | List issues (`--assignee`, `--reporter`, `--recent`, `--status`, `--open`, `--team`, `--jql`, `--limit`/`--all`, `--points`, `--assets`) | +``` + +Also add an example to the Quick Start section (after the existing `--reporter me --recent 7d` line): + +```markdown +# Open issues assigned to me (excludes Done) +jr issue list --assignee me --open +``` + +- [ ] **Step 2: Verify build still passes** + +Run: `cargo test 2>&1` +Expected: All tests pass (docs changes don't affect tests, but verify nothing was accidentally broken). + +- [ ] **Step 3: Commit** + +```bash +git add README.md +git commit -m "docs: add --open flag to README command table and examples" +``` + +--- + +### Task 3: Format and final verification + +- [ ] **Step 1: Run formatter** + +Run: `cargo fmt --all -- --check 2>&1` +If any formatting issues: `cargo fmt --all` + +- [ ] **Step 2: Run full CI checks** + +Run: `cargo clippy -- -D warnings 2>&1 && cargo test 2>&1` +Expected: Zero warnings, all tests pass. 
+ +- [ ] **Step 3: Commit formatting if needed** + +```bash +# Only if cargo fmt made changes: +git add -A +git commit -m "style: format code" +``` diff --git a/docs/superpowers/plans/2026-03-25-project-fields-global-flag.md b/docs/superpowers/plans/2026-03-25-project-fields-global-flag.md new file mode 100644 index 0000000..92d0b38 --- /dev/null +++ b/docs/superpowers/plans/2026-03-25-project-fields-global-flag.md @@ -0,0 +1,193 @@ +# Fix `project fields` Global `--project` Flag — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Fix bug #56 — make `project fields` use the global `--project` flag instead of a positional argument, consistent with all other subcommands. + +**Architecture:** Remove the positional `project` field from `ProjectCommand::Fields` (clap derive enum), update the handler to resolve the project key solely from `config.project_key(project_override)`, and update the README example. 
+
+**Tech Stack:** Rust, clap 4 (derive API)
+
+**Spec:** `docs/superpowers/specs/2026-03-25-project-fields-global-flag-design.md`
+
+---
+
+### Task 1: Remove positional arg and update handler
+
+**Files:**
+- Modify: `src/cli/mod.rs` (the `ProjectCommand` enum, around line 356-361)
+- Modify: `src/cli/project.rs` (the `handle` dispatch and `handle_fields` function)
+
+- [ ] **Step 1: Change `Fields` from struct variant to unit variant**
+
+In `src/cli/mod.rs`, replace the `Fields` variant in the `ProjectCommand` enum:
+
+```rust
+// Before (lines 356-360):
+    /// Show valid issue types, priorities, and statuses
+    Fields {
+        /// Project key (uses configured project if omitted)
+        project: Option<String>,
+    },
+
+// After:
+    /// Show valid issue types, priorities, and statuses
+    Fields,
+```
+
+- [ ] **Step 2: Update the dispatch in `handle`**
+
+In `src/cli/project.rs`, replace the `Fields` match arm in the `handle` function:
+
+```rust
+// Before (lines 21-23):
+        ProjectCommand::Fields { project } => {
+            handle_fields(project, config, client, output_format, project_override).await
+        }
+
+// After:
+        ProjectCommand::Fields => {
+            handle_fields(config, client, output_format, project_override).await
+        }
+```
+
+- [ ] **Step 3: Update `handle_fields` signature and project resolution**
+
+In `src/cli/project.rs`, replace the `handle_fields` function signature and project resolution:
+
+```rust
+// Before (lines 60-73):
+async fn handle_fields(
+    project: Option<String>,
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    project_override: Option<&str>,
+) -> Result<()> {
+    let project_key = project
+        .or_else(|| config.project_key(project_override))
+        .ok_or_else(|| {
+            anyhow::anyhow!(
+                "No project specified. Run \"jr project list\" to see available projects."
+ ) + })?; + +// After: +async fn handle_fields( + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, +) -> Result<()> { + let project_key = config.project_key(project_override).ok_or_else(|| { + anyhow::anyhow!( + "No project specified. Run \"jr project list\" to see available projects." + ) + })?; +``` + +The rest of `handle_fields` (lines 75-107) remains unchanged. + +- [ ] **Step 4: Run all tests** + +Run: `cargo test` +Expected: All tests PASS (no regressions — no existing tests depend on the positional arg). + +- [ ] **Step 5: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no format issues. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/mod.rs src/cli/project.rs +git commit -m "fix: use global --project flag for project fields (#56)" +``` + +--- + +### Task 2: Update documentation + +**Files:** +- Modify: `README.md` (line ~123) +- Modify: `docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md` (lines 37 and 409) +- Modify: `docs/superpowers/plans/2026-03-21-jr-implementation.md` (line 3870) + +- [ ] **Step 1: Update the `project fields` example in README** + +In `README.md`, find line ~123: + +```markdown +| `jr project fields FOO` | Show valid issue types and priorities | +``` + +Replace with: + +```markdown +| `jr project fields --project FOO` | Show valid issue types, priorities, and statuses | +``` + +Note: The description is also updated to include "and statuses" since PR #61 added statuses support. 
+ +- [ ] **Step 2: Update v1 design spec examples** + +In `docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md`, update two references: + +Line 37 — replace: +``` +jr project fields FOO # List valid issue types, priorities, statuses for a project +``` +with: +``` +jr project fields --project FOO # List valid issue types, priorities, statuses for a project +``` + +Line 409 — replace: +``` +`jr project fields FOO --output json` returns valid issue types, priorities, and statuses for a project. +``` +with: +``` +`jr project fields --project FOO --output json` returns valid issue types, priorities, and statuses for a project. +``` + +- [ ] **Step 3: Update v1 implementation plan error message** + +In `docs/superpowers/plans/2026-03-21-jr-implementation.md`, line 3870 — replace: +``` +.ok_or_else(|| anyhow::anyhow!("No project specified. Use 'jr project fields FOO' or configure .jr.toml"))?; +``` +with: +``` +.ok_or_else(|| anyhow::anyhow!("No project specified. Use 'jr project fields --project FOO' or configure .jr.toml"))?; +``` + +- [ ] **Step 4: Commit** + +```bash +git add README.md docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md docs/superpowers/plans/2026-03-21-jr-implementation.md +git commit -m "docs: update project fields examples to use --project flag (#56)" +``` + +--- + +### Task 3: Final verification + +**Files:** +- All modified files from Tasks 1-2 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all && cargo fmt --all -- --check` +Expected: No format issues. 
diff --git a/docs/superpowers/plans/2026-03-25-project-fields-statuses.md b/docs/superpowers/plans/2026-03-25-project-fields-statuses.md new file mode 100644 index 0000000..dc3f3ad --- /dev/null +++ b/docs/superpowers/plans/2026-03-25-project-fields-statuses.md @@ -0,0 +1,317 @@ +# Add Statuses to `project fields` — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Fix bug #55 — add project statuses grouped by issue type to `jr project fields` output (table and JSON). + +**Architecture:** Add `get_project_statuses` API method calling `/rest/api/3/project/{key}/statuses`, then update `handle_fields` CLI handler to fetch and render statuses in both table and JSON formats. + +**Tech Stack:** Rust, serde, wiremock (tests) + +**Spec:** `docs/superpowers/specs/2026-03-25-project-fields-statuses-design.md` + +--- + +### Task 1: Add `get_project_statuses` API method with integration test + +**Files:** +- Modify: `src/api/jira/projects.rs:1-72` (add structs after line 19, add method inside `impl JiraClient` block) +- Modify: `tests/common/fixtures.rs:204` (add fixture at end of file) +- Modify: `tests/project_commands.rs:157` (add test at end of file) + +- [ ] **Step 1: Add the fixture helper** + +In `tests/common/fixtures.rs`, add at the end of the file (after the `project_response` function, before the final newline): + +```rust +/// Project statuses response — top-level array of issue types with nested statuses. 
+pub fn project_statuses_response() -> Value { + json!([ + { + "id": "3", + "name": "Task", + "self": "https://test.atlassian.net/rest/api/3/issueType/3", + "subtask": false, + "statuses": [ + { + "id": "10000", + "name": "To Do", + "description": "Work that has not been started.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/open.png", + "self": "https://test.atlassian.net/rest/api/3/status/10000" + }, + { + "id": "10001", + "name": "In Progress", + "description": "The issue is currently being worked on.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/inprogress.png", + "self": "https://test.atlassian.net/rest/api/3/status/10001" + }, + { + "id": "10002", + "name": "Done", + "description": "Work has been completed.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/closed.png", + "self": "https://test.atlassian.net/rest/api/3/status/10002" + } + ] + }, + { + "id": "1", + "name": "Bug", + "self": "https://test.atlassian.net/rest/api/3/issueType/1", + "subtask": false, + "statuses": [ + { + "id": "10000", + "name": "To Do", + "description": "Work that has not been started.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/open.png", + "self": "https://test.atlassian.net/rest/api/3/status/10000" + }, + { + "id": "10002", + "name": "Done", + "description": "Work has been completed.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/closed.png", + "self": "https://test.atlassian.net/rest/api/3/status/10002" + } + ] + } + ]) +} +``` + +- [ ] **Step 2: Write the failing integration test** + +In `tests/project_commands.rs`, add at the end of the file: + +```rust +#[tokio::test] +async fn test_get_project_statuses() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/FOO/statuses")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::project_statuses_response()), + ) + .mount(&server) + .await; + + let client = + 
jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.get_project_statuses("FOO").await.unwrap(); + assert_eq!(result.len(), 2); + assert_eq!(result[0].name, "Task"); + assert_eq!(result[0].id, "3"); + assert_eq!(result[0].subtask, Some(false)); + assert_eq!(result[0].statuses.len(), 3); + assert_eq!(result[0].statuses[0].name, "To Do"); + assert_eq!(result[0].statuses[0].id, "10000"); + assert_eq!( + result[0].statuses[0].description.as_deref(), + Some("Work that has not been started.") + ); + assert_eq!(result[0].statuses[1].name, "In Progress"); + assert_eq!(result[0].statuses[2].name, "Done"); + assert_eq!(result[1].name, "Bug"); + assert_eq!(result[1].statuses.len(), 2); +} + +#[tokio::test] +async fn test_get_project_statuses_empty() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/FOO/statuses")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.get_project_statuses("FOO").await.unwrap(); + assert!(result.is_empty()); +} +``` + +- [ ] **Step 3: Run the tests to verify they fail** + +Run: `cargo test --test project_commands test_get_project_statuses` +Expected: FAIL — both `test_get_project_statuses` and `test_get_project_statuses_empty` fail because `get_project_statuses` method does not exist yet. 
+
+- [ ] **Step 4: Add serde structs and API method**
+
+In `src/api/jira/projects.rs`, add the two new structs after `PriorityMetadata` and before the `impl JiraClient` block:
+
+```rust
+#[derive(Debug, Deserialize, Serialize)]
+pub struct StatusMetadata {
+    pub id: String,
+    pub name: String,
+    pub description: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct IssueTypeWithStatuses {
+    pub id: String,
+    pub name: String,
+    pub subtask: Option<bool>,
+    pub statuses: Vec<StatusMetadata>,
+}
+```
+
+Then add the new method inside the `impl JiraClient` block, after `get_priorities` and before `list_projects`:
+
+```rust
+    pub async fn get_project_statuses(
+        &self,
+        project_key: &str,
+    ) -> Result<Vec<IssueTypeWithStatuses>> {
+        self.get(&format!("/rest/api/3/project/{project_key}/statuses"))
+            .await
+    }
+```
+
+- [ ] **Step 5: Run the tests to verify they pass**
+
+Run: `cargo test --test project_commands test_get_project_statuses`
+Expected: Both `test_get_project_statuses` and `test_get_project_statuses_empty` PASS.
+
+- [ ] **Step 6: Run all tests**
+
+Run: `cargo test`
+Expected: All tests PASS (no regressions).
+
+- [ ] **Step 7: Run clippy and format**
+
+Run: `cargo fmt --all && cargo clippy -- -D warnings && cargo fmt --all -- --check`
+Expected: No warnings, no format issues.
+
+- [ ] **Step 8: Commit**
+
+```bash
+git add src/api/jira/projects.rs tests/common/fixtures.rs tests/project_commands.rs
+git commit -m "feat: add get_project_statuses API method (#55)"
+```
+
+---
+
+### Task 2: Update `handle_fields` to fetch and render statuses
+
+**Files:**
+- Modify: `src/cli/project.rs` (the `handle_fields` function)
+
+**Context:** The `handle_fields` function currently fetches issue types and priorities, then renders them in table or JSON format. We need to add a third fetch for statuses, then render them in both output formats. The spec requires:
+- Table: "Statuses by Issue Type" section after Priorities, skip issue types with empty statuses, omit section if all empty
+- JSON: `statuses_by_issue_type` field containing the full `Vec<IssueTypeWithStatuses>`
+- Error handling: `?` propagation (hard fail), not `unwrap_or_default()`
+
+**Note on testing:** No handler-level test is added for the rendering changes. The handler output is `println!` to stdout — testing it would require stdout capture, which is not done anywhere in this codebase. The API-layer test in Task 1 covers deserialization correctness. The rendering logic is trivial iteration; visual correctness will be verified during live testing.
+
+- [ ] **Step 1: Add the statuses fetch**
+
+In `src/cli/project.rs`, in the `handle_fields` function, add after `let priorities = client.get_priorities().await?;`:
+
+```rust
+    let statuses = client.get_project_statuses(&project_key).await?;
+```
+
+- [ ] **Step 2: Update the JSON output**
+
+In `src/cli/project.rs`, replace the JSON branch (the `OutputFormat::Json` arm) with:
+
+```rust
+            println!(
+                "{}",
+                serde_json::json!({
+                    "project": project_key,
+                    "issue_types": issue_types,
+                    "priorities": priorities,
+                    "statuses_by_issue_type": statuses,
+                })
+            );
+```
+
+- [ ] **Step 3: Update the table output**
+
+In `src/cli/project.rs`, replace the Table branch (the `OutputFormat::Table` arm) with:
+
+```rust
+        OutputFormat::Table => {
+            println!("Project: {project_key}\n");
+            println!("Issue Types:");
+            for t in &issue_types {
+                let suffix = if t.subtask == Some(true) {
+                    " (subtask)"
+                } else {
+                    ""
+                };
+                println!("  - {}{}", t.name, suffix);
+            }
+            println!("\nPriorities:");
+            for p in &priorities {
+                println!("  - {}", p.name);
+            }
+            let has_statuses = statuses.iter().any(|it| !it.statuses.is_empty());
+            if has_statuses {
+                println!("\nStatuses by Issue Type:");
+                for it in &statuses {
+                    if it.statuses.is_empty() {
+                        continue;
+                    }
+                    println!("  {}:", it.name);
+                    for s in &it.statuses {
+                        println!("      - {}", s.name);
+ } + } + } + } +``` + +- [ ] **Step 4: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 5: Run clippy and format** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no format issues. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/project.rs +git commit -m "fix: add statuses to project fields output (#55)" +``` + +--- + +### Task 3: Format and final verification + +**Files:** +- All modified files from Tasks 1-2 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all` + +- [ ] **Step 4: If formatting changed anything, commit** + +Run: `cargo fmt --all -- --check` +If it reports changes: +```bash +git add src/api/jira/projects.rs src/cli/project.rs tests/common/fixtures.rs tests/project_commands.rs +git commit -m "style: format code" +``` diff --git a/docs/superpowers/plans/2026-03-25-project-list.md b/docs/superpowers/plans/2026-03-25-project-list.md new file mode 100644 index 0000000..d74de2d --- /dev/null +++ b/docs/superpowers/plans/2026-03-25-project-list.md @@ -0,0 +1,636 @@ +# `jr project list` Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `jr project list` command for project discovery, with `--type`/`--limit`/`--all` flags, plus error message enhancements suggesting valid projects. 
+
+**Architecture:** New types (`ProjectSummary`, `ProjectLead`) in the types layer, a `list_projects` API method calling `GET /rest/api/3/project/search` with offset pagination via `OffsetPage`, a `handle_list` CLI handler with table/JSON output, a `suggest_projects` helper for error enhancement, and README updates.
+
+**Tech Stack:** Rust, clap 4 derive macros, reqwest, serde, comfy-table, wiremock (tests)
+
+**Spec:** `docs/superpowers/specs/2026-03-25-project-list-design.md`
+
+---
+
+### Task 1: Add `ProjectSummary` and `ProjectLead` types
+
+**Files:**
+- Modify: `src/types/jira/project.rs` (add types alongside existing `Project`)
+
+- [ ] **Step 1: Add the new types**
+
+In `src/types/jira/project.rs`, add below the existing `Project` struct:
+
+```rust
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct ProjectSummary {
+    pub key: String,
+    pub name: String,
+    #[serde(rename = "projectTypeKey")]
+    pub project_type_key: String,
+    pub lead: Option<ProjectLead>,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct ProjectLead {
+    #[serde(rename = "displayName")]
+    pub display_name: String,
+    #[serde(rename = "accountId")]
+    pub account_id: String,
+}
+```
+
+- [ ] **Step 2: Verify it compiles**
+
+Run: `cargo check 2>&1`
+Expected: Compiles with no errors (types are defined but unused — that's fine at this stage).
+
+- [ ] **Step 3: Commit**
+
+```bash
+git add src/types/jira/project.rs
+git commit -m "feat: add ProjectSummary and ProjectLead types for project search API"
+```
+
+---
+
+### Task 2: Add `list_projects` API method with integration tests
+
+**Files:**
+- Modify: `src/api/jira/projects.rs` (add `list_projects` method)
+- Create: `tests/project_commands.rs` (integration tests)
+- Modify: `tests/common/fixtures.rs` (add project search fixture)
+
+The existing `src/api/jira/projects.rs` already has `impl JiraClient` with `get_project_issue_types` and `get_priorities`. The new method goes in the same `impl` block.
+
+The API uses query params in the URL path string (e.g., `/rest/api/3/project/search?orderBy=key&maxResults=50`), passed to `self.get(&path)`. The response is deserialized as `OffsetPage<ProjectSummary>` — the `values` key is handled by `OffsetPage`. For the `--all` case (`max_results` is `None`), paginate using `has_more()` and `next_start()`, collecting pages into a single `Vec<ProjectSummary>`.
+
+- [ ] **Step 1: Add project search fixture to test helpers**
+
+In `tests/common/fixtures.rs`, add at the end of the file:
+
+```rust
+/// Project search response — paginated envelope with `values` array.
+pub fn project_search_response(projects: Vec<Value>) -> Value {
+    let total = projects.len() as u32;
+    json!({
+        "values": projects,
+        "startAt": 0,
+        "maxResults": 50,
+        "total": total,
+    })
+}
+
+pub fn project_response(key: &str, name: &str, type_key: &str, lead_name: Option<&str>) -> Value {
+    let lead = lead_name.map(|name| json!({
+        "accountId": format!("acc-{}", key.to_lowercase()),
+        "displayName": name,
+    }));
+    json!({
+        "key": key,
+        "name": name,
+        "projectTypeKey": type_key,
+        "lead": lead,
+    })
+}
+```
+
+- [ ] **Step 2: Write integration tests**
+
+Create `tests/project_commands.rs`:
+
+```rust
+#[allow(dead_code)]
+mod common;
+
+use wiremock::matchers::{method, path};
+use wiremock::{Mock, MockServer, ResponseTemplate};
+
+#[tokio::test]
+async fn test_list_projects() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/project/search"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(
+            common::fixtures::project_search_response(vec![
+                common::fixtures::project_response("FOO", "Project Foo", "software", Some("Jane Doe")),
+                common::fixtures::project_response("BAR", "Project Bar", "service_desk", Some("John Smith")),
+            ]),
+        ))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let projects = client.list_projects(None, Some(50)).await.unwrap();
+    assert_eq!(projects.len(), 2);
+    assert_eq!(projects[0].key, "FOO");
+    assert_eq!(projects[0].name, "Project Foo");
+    assert_eq!(projects[0].project_type_key, "software");
+    assert_eq!(projects[0].lead.as_ref().unwrap().display_name, "Jane Doe");
+    assert_eq!(projects[1].key, "BAR");
+    assert_eq!(projects[1].project_type_key, "service_desk");
+}
+
+#[tokio::test]
+async fn test_list_projects_empty() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/project/search"))
+        .respond_with(
+            ResponseTemplate::new(200)
+                .set_body_json(common::fixtures::project_search_response(vec![])),
+        )
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let projects = client.list_projects(None, Some(50)).await.unwrap();
+    assert!(projects.is_empty());
+}
+
+#[tokio::test]
+async fn test_list_projects_lead_missing() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/project/search"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(
+            common::fixtures::project_search_response(vec![
+                common::fixtures::project_response("FOO", "Project Foo", "software", None),
+            ]),
+        ))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let projects = client.list_projects(None, Some(50)).await.unwrap();
+    assert_eq!(projects.len(), 1);
+    assert!(projects[0].lead.is_none());
+}
+
+#[tokio::test]
+async fn test_list_projects_with_type_filter() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/project/search"))
+        .and(wiremock::matchers::query_param("typeKey", "software"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(
+            common::fixtures::project_search_response(vec![
+                common::fixtures::project_response("FOO", "Project Foo", "software", Some("Jane Doe")),
+            ]),
+        ))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let projects = client.list_projects(Some("software"), Some(50)).await.unwrap();
+    assert_eq!(projects.len(), 1);
+    assert_eq!(projects[0].project_type_key, "software");
+}
+```
+
+- [ ] **Step 3: Run tests to verify they fail**
+
+Run: `cargo test --test project_commands 2>&1 | head -20`
+Expected: Compilation error — `list_projects` method doesn't exist yet. (Note: the `query_param` matcher import will also need `use wiremock::matchers::query_param;` or the fully qualified path used above.)
+
+- [ ] **Step 4: Implement `list_projects`**
+
+In `src/api/jira/projects.rs`, add to the existing `impl JiraClient` block, **before** the closing `}`:
+
+```rust
+    pub async fn list_projects(
+        &self,
+        type_key: Option<&str>,
+        max_results: Option<u32>,
+    ) -> Result<Vec<ProjectSummary>> {
+        use crate::api::pagination::OffsetPage;
+        use crate::types::jira::ProjectSummary;
+
+        let page_size = max_results.map(|m| m.min(50)).unwrap_or(50);
+        let mut all_projects: Vec<ProjectSummary> = Vec::new();
+        let mut start_at: u32 = 0;
+
+        loop {
+            let mut path = format!(
+                "/rest/api/3/project/search?orderBy=key&startAt={}&maxResults={}",
+                start_at, page_size
+            );
+            if let Some(tk) = type_key {
+                path.push_str(&format!("&typeKey={}", urlencoding::encode(tk)));
+            }
+
+            let page: OffsetPage<ProjectSummary> = self.get(&path).await?;
+            let has_more = page.has_more();
+            let next = page.next_start();
+            all_projects.extend(page.values.unwrap_or_default());
+
+            // If caller specified a limit, stop after one page
+            if max_results.is_some() || !has_more {
+                break;
+            }
+            start_at = next;
+        }
+
+        Ok(all_projects)
+    }
+```
+
+- [ ] **Step 5: Run tests to verify they pass**
+
+Run: `cargo test --test project_commands 2>&1`
+Expected: All 4 tests pass.
+
+- [ ] **Step 6: Run clippy**
+
+Run: `cargo clippy -- -D warnings 2>&1`
+Expected: No warnings.
+
+- [ ] **Step 7: Commit**
+
+```bash
+git add src/api/jira/projects.rs tests/project_commands.rs tests/common/fixtures.rs
+git commit -m "feat: add list_projects API method with integration tests (#47)"
+```
+
+---
+
+### Task 3: Add `List` variant to `ProjectCommand` and wire up CLI handler
+
+**Files:**
+- Modify: `src/cli/mod.rs:342-349` (add `List` variant to `ProjectCommand`)
+- Modify: `src/cli/project.rs` (add `handle_list`, update match arm, update `Fields` error message)
+
+- [ ] **Step 1: Add `List` variant to `ProjectCommand`**
+
+In `src/cli/mod.rs`, replace the `ProjectCommand` enum (lines 342-349):
+
+```rust
+#[derive(Subcommand)]
+pub enum ProjectCommand {
+    /// List accessible projects
+    List {
+        /// Filter by project type (software, service_desk, business)
+        #[arg(long = "type")]
+        project_type: Option<String>,
+        /// Maximum number of results (default: 50)
+        #[arg(long)]
+        limit: Option<u32>,
+        /// Fetch all projects (paginate through all pages)
+        #[arg(long, conflicts_with = "limit")]
+        all: bool,
+    },
+    /// Show valid issue types, priorities, and statuses
+    Fields {
+        /// Project key (uses configured project if omitted)
+        project: Option<String>,
+    },
+}
+```
+
+- [ ] **Step 2: Add `handle_list` and update the handler**
+
+Replace the entire contents of `src/cli/project.rs` with:
+
+```rust
+use anyhow::Result;
+
+use crate::api::client::JiraClient;
+use crate::cli::{OutputFormat, ProjectCommand};
+use crate::config::Config;
+use crate::output;
+
+pub async fn handle(
+    command: ProjectCommand,
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    project_override: Option<&str>,
+) -> Result<()> {
+    match command {
+        ProjectCommand::List {
+            project_type,
+            limit,
+            all,
+        } => handle_list(client, output_format, project_type.as_deref(), limit, all).await,
+        ProjectCommand::Fields { project } => {
+            handle_fields(project, config, client, output_format, project_override).await
+        }
+    }
+}
+
+async fn handle_list(
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    project_type: Option<&str>,
+    limit: Option<u32>,
+    all: bool,
+) -> Result<()> {
+    let max_results = if all { None } else { Some(limit.unwrap_or(50)) };
+    let projects = client.list_projects(project_type, max_results).await?;
+
+    let rows: Vec<Vec<String>> = projects
+        .iter()
+        .map(|p| {
+            vec![
+                p.key.clone(),
+                p.name.clone(),
+                p.lead
+                    .as_ref()
+                    .map(|l| l.display_name.clone())
+                    .unwrap_or_else(|| "\u{2014}".into()),
+                p.project_type_key.clone(),
+            ]
+        })
+        .collect();
+
+    output::print_output(output_format, &["Key", "Name", "Lead", "Type"], &rows, &projects)
+}
+
+async fn handle_fields(
+    project: Option<String>,
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    project_override: Option<&str>,
+) -> Result<()> {
+    let project_key = project
+        .or_else(|| config.project_key(project_override))
+        .ok_or_else(|| {
+            anyhow::anyhow!(
+                "No project specified. Run \"jr project list\" to see available projects."
+            )
+        })?;
+
+    let issue_types = client.get_project_issue_types(&project_key).await?;
+    let priorities = client.get_priorities().await?;
+
+    match output_format {
+        OutputFormat::Json => {
+            println!(
+                "{}",
+                serde_json::json!({
+                    "project": project_key,
+                    "issue_types": issue_types,
+                    "priorities": priorities,
+                })
+            );
+        }
+        OutputFormat::Table => {
+            println!("Project: {project_key}\n");
+            println!("Issue Types:");
+            for t in &issue_types {
+                let suffix = if t.subtask == Some(true) {
+                    " (subtask)"
+                } else {
+                    ""
+                };
+                println!("  - {}{}", t.name, suffix);
+            }
+            println!("\nPriorities:");
+            for p in &priorities {
+                println!("  - {}", p.name);
+            }
+        }
+    }
+    Ok(())
+}
+```
+
+Note: The `"No project specified"` error message in `handle_fields` now includes the `jr project list` hint (spec deliverable #3, touchpoint 1).
+
+- [ ] **Step 3: Verify it compiles and all tests pass**
+
+Run: `cargo test 2>&1`
+Expected: All tests pass. The new `List` variant is wired through.
+
+Run: `cargo clippy -- -D warnings 2>&1`
+Expected: No warnings.
+
+- [ ] **Step 4: Commit**
+
+```bash
+git add src/cli/mod.rs src/cli/project.rs
+git commit -m "feat: add jr project list command with --type, --limit, --all flags (#47)"
+```
+
+---
+
+### Task 4: Add `suggest_projects` helper and error message enhancements
+
+**Files:**
+- Modify: `src/cli/project.rs` (add `suggest_projects` function)
+- Modify: `src/cli/issue/create.rs` (enhance project 404 error)
+- Modify: `src/cli/queue.rs` (enhance service desk lookup error)
+
+The `suggest_projects` function needs `pub` visibility so `create.rs` and `queue.rs` can call it. It lives in `project.rs` because it belongs with project logic.
+
+- [ ] **Step 1: Add `suggest_projects` to `src/cli/project.rs`**
+
+Add at the end of `src/cli/project.rs`, **before** any `#[cfg(test)]` block (if one exists) or at the end of the file:
+
+```rust
+/// Suggest valid projects when an invalid key is used.
+///
+/// Returns a hint string like `Did you mean "FOO"? Run "jr project list" to see available projects.`
+/// If no close match is found or the API call fails, returns a generic hint.
+pub async fn suggest_projects(client: &JiraClient, invalid_key: &str) -> String {
+    let generic = "Run \"jr project list\" to see available projects.".to_string();
+
+    let projects = match client.list_projects(None, Some(50)).await {
+        Ok(p) => p,
+        Err(_) => return generic,
+    };
+
+    let keys: Vec<String> = projects.iter().map(|p| p.key.clone()).collect();
+    match crate::partial_match::partial_match(invalid_key, &keys) {
+        crate::partial_match::MatchResult::Exact(matched) => {
+            format!("Did you mean \"{matched}\"? {generic}")
+        }
+        crate::partial_match::MatchResult::Ambiguous(matches) => {
+            let quoted: Vec<String> = matches.iter().map(|m| format!("\"{m}\"")).collect();
+            format!("Did you mean {}? {generic}", quoted.join(" or "))
+        }
+        crate::partial_match::MatchResult::None(_) => generic,
+    }
+}
+```
+
+- [ ] **Step 2: Write unit tests for `suggest_projects`**
+
+Add a `#[cfg(test)]` module at the bottom of `src/cli/project.rs`:
+
+```rust
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // suggest_projects is async and calls the API, so we test the matching logic
+    // by testing the partial_match behavior on project keys directly.
+    // The integration of suggest_projects with the API is covered by the
+    // integration tests in tests/project_commands.rs.
+
+    #[test]
+    fn suggest_projects_match_logic_exact() {
+        let keys = vec!["FOO".to_string(), "BAR".to_string(), "BAZ".to_string()];
+        match crate::partial_match::partial_match("FOO", &keys) {
+            crate::partial_match::MatchResult::Exact(m) => assert_eq!(m, "FOO"),
+            _ => panic!("Expected exact match"),
+        }
+    }
+
+    #[test]
+    fn suggest_projects_match_logic_partial() {
+        let keys = vec!["FOO".to_string(), "BAR".to_string(), "BAZ".to_string()];
+        match crate::partial_match::partial_match("FO", &keys) {
+            crate::partial_match::MatchResult::Exact(m) => assert_eq!(m, "FOO"),
+            _ => panic!("Expected unique partial match"),
+        }
+    }
+
+    #[test]
+    fn suggest_projects_match_logic_ambiguous() {
+        let keys = vec!["FOO".to_string(), "BAR".to_string(), "BAZ".to_string()];
+        match crate::partial_match::partial_match("BA", &keys) {
+            crate::partial_match::MatchResult::Ambiguous(matches) => {
+                assert_eq!(matches.len(), 2);
+                assert!(matches.contains(&"BAR".to_string()));
+                assert!(matches.contains(&"BAZ".to_string()));
+            }
+            _ => panic!("Expected ambiguous match"),
+        }
+    }
+
+    #[test]
+    fn suggest_projects_match_logic_none() {
+        let keys = vec!["FOO".to_string(), "BAR".to_string()];
+        match crate::partial_match::partial_match("ZZZ", &keys) {
+            crate::partial_match::MatchResult::None(_) => {} // expected
+            _ => panic!("Expected no match"),
+        }
+    }
+}
+```
+
+- [ ] **Step 3: Enhance error in `src/cli/issue/create.rs`**
+
+In `src/cli/issue/create.rs`, find the project key resolution block (lines 37-49). Replace the `.ok_or_else` error with a static hint (no API call needed since the user didn't provide a key at all):
+
+```rust
+    let project_key = project
+        .or_else(|| config.project_key(project_override))
+        .or_else(|| {
+            if no_input {
+                None
+            } else {
+                helpers::prompt_input("Project key").ok()
+            }
+        })
+        .ok_or_else(|| {
+            anyhow::anyhow!(
+                "Project key is required. Use --project or configure .jr.toml. \
+                 Run \"jr project list\" to see available projects."
+            )
+        })?;
+```
+
+Note: The spec mentions enhancing 404 errors when a key IS provided but is invalid. However, in `create.rs` the project key is embedded in the JSON body sent to `POST /rest/api/3/issue` — Jira returns a generic error, not a project-specific 404. Catching and enhancing this specific error would require parsing Jira's error response body to detect project-related failures, which adds fragile coupling to Jira's error message format. The static hint on the "no project" path is sufficient — users who provide a wrong key get Jira's own error plus can discover valid keys via `jr project list`.
+
+- [ ] **Step 4: Enhance error in `src/cli/queue.rs`**
+
+In `src/cli/queue.rs`, find the project key resolution (line 18-20). Replace it with:
+
+```rust
+    let project_key = config.project_key(project_override).ok_or_else(|| {
+        JrError::UserError(
+            "No project configured. Run \"jr init\" or pass --project. \
+             Run \"jr project list\" to see available projects."
+                .into(),
+        )
+    })?;
+```
+
+Same rationale as `create.rs` — the "no project configured" path gets a static hint. The `require_service_desk` call that follows uses the project key to look up a service desk; if it fails, Jira's error message is returned. Adding a dynamic `suggest_projects` call here would require an extra API round-trip on every queue error, which isn't worth the complexity for a path that already tells users what went wrong.
+ +- [ ] **Step 5: Verify it compiles and all tests pass** + +Run: `cargo test 2>&1` +Expected: All tests pass. + +Run: `cargo clippy -- -D warnings 2>&1` +Expected: No warnings. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/project.rs src/cli/issue/create.rs src/cli/queue.rs +git commit -m "feat: add suggest_projects helper and enhance error messages (#47)" +``` + +--- + +### Task 5: Update documentation + +**Files:** +- Modify: `README.md` (add `jr project list` to command table and quick start) + +- [ ] **Step 1: Update README command table** + +In `README.md`, find the `jr project fields FOO` row (line 119) and add a new row **before** it: + +```markdown +| `jr project list` | List accessible projects (`--type`, `--limit`/`--all`) | +``` + +So the two project rows will be: +```markdown +| `jr project list` | List accessible projects (`--type`, `--limit`/`--all`) | +| `jr project fields FOO` | Show valid issue types and priorities | +``` + +- [ ] **Step 2: Add quick start example** + +In `README.md`, add after the `jr issue list --assignee me --open` line (line 67), before `# View a specific issue`: + +```markdown + +# Discover available projects +jr project list +``` + +- [ ] **Step 3: Verify build still passes** + +Run: `cargo test 2>&1` +Expected: All tests pass. + +- [ ] **Step 4: Commit** + +```bash +git add README.md +git commit -m "docs: add jr project list to README command table and examples" +``` + +--- + +### Task 6: Format and final verification + +- [ ] **Step 1: Run formatter** + +Run: `cargo fmt --all -- --check 2>&1` +If any formatting issues: `cargo fmt --all` + +- [ ] **Step 2: Run full CI checks** + +Run: `cargo clippy -- -D warnings 2>&1 && cargo test 2>&1` +Expected: Zero warnings, all tests pass. 
+ +- [ ] **Step 3: Commit formatting if needed** + +```bash +# Only if cargo fmt made changes: +git add -A +git commit -m "style: format code" +``` diff --git a/docs/superpowers/plans/2026-03-26-asset-attribute-names.md b/docs/superpowers/plans/2026-03-26-asset-attribute-names.md new file mode 100644 index 0000000..13d1670 --- /dev/null +++ b/docs/superpowers/plans/2026-03-26-asset-attribute-names.md @@ -0,0 +1,552 @@ +# Asset Attribute Names — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Replace raw numeric `Attribute ID` column in `jr assets view --attributes` with human-readable attribute names. + +**Architecture:** Add new serde types for the richer `/object/{id}/attributes` API response (which includes attribute names inline), add a new `get_object_attributes()` API method, and update the CLI `handle_view` to use the new endpoint with filtering/sorting. Existing types and methods remain unchanged for search and linked asset enrichment. 
+ +**Tech Stack:** Rust, serde, reqwest, comfy-table + +**Spec:** `docs/superpowers/specs/2026-03-26-asset-attribute-names-design.md` + +--- + +### Task 1: Add new serde types with tests + +**Files:** +- Modify: `src/types/assets/object.rs:38` (add new types before `#[cfg(test)]` block) + +- [ ] **Step 1: Write the failing tests** + +In `src/types/assets/object.rs`, add these tests inside the existing `#[cfg(test)] mod tests` block (after line 86, before the closing `}`): + +```rust + #[test] + fn deserialize_object_attribute_with_name() { + let json = r#"{ + "id": "637", + "objectTypeAttributeId": "134", + "objectTypeAttribute": { + "id": "134", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 4 + }, + "objectAttributeValues": [ + { "value": "New York, NY", "displayValue": "New York, NY" } + ] + }"#; + let attr: ObjectAttribute = serde_json::from_str(json).unwrap(); + assert_eq!(attr.id, "637"); + assert_eq!(attr.object_type_attribute_id, "134"); + assert_eq!(attr.object_type_attribute.name, "Location"); + assert!(!attr.object_type_attribute.system); + assert!(!attr.object_type_attribute.hidden); + assert!(!attr.object_type_attribute.label); + assert_eq!(attr.object_type_attribute.position, 4); + assert_eq!(attr.values.len(), 1); + assert_eq!( + attr.values[0].display_value.as_deref(), + Some("New York, NY") + ); + } + + #[test] + fn deserialize_object_attribute_defaults() { + // system, hidden, label, position all absent — should default to false/0 + let json = r#"{ + "id": "640", + "objectTypeAttributeId": "135", + "objectTypeAttribute": { + "id": "135", + "name": "Name" + }, + "objectAttributeValues": [] + }"#; + let attr: ObjectAttribute = serde_json::from_str(json).unwrap(); + assert_eq!(attr.object_type_attribute.name, "Name"); + assert!(!attr.object_type_attribute.system); + assert!(!attr.object_type_attribute.hidden); + assert!(!attr.object_type_attribute.label); + assert_eq!(attr.object_type_attribute.position, 
0);
+        assert!(attr.values.is_empty());
+    }
+
+    #[test]
+    fn deserialize_object_attribute_system() {
+        let json = r#"{
+            "id": "638",
+            "objectTypeAttributeId": "136",
+            "objectTypeAttribute": {
+                "id": "136",
+                "name": "Created",
+                "system": true,
+                "hidden": false,
+                "label": false,
+                "position": 2
+            },
+            "objectAttributeValues": [
+                { "value": "2021-02-16T20:04:41.527Z", "displayValue": "16/Feb/21 8:04 PM" }
+            ]
+        }"#;
+        let attr: ObjectAttribute = serde_json::from_str(json).unwrap();
+        assert!(attr.object_type_attribute.system);
+        assert_eq!(
+            attr.values[0].display_value.as_deref(),
+            Some("16/Feb/21 8:04 PM")
+        );
+    }
+```
+
+- [ ] **Step 2: Run tests to verify they fail**
+
+Run: `cargo test --lib -- assets::object::tests::deserialize_object_attribute`
+Expected: FAIL with "cannot find type `ObjectAttribute` in this scope"
+
+- [ ] **Step 3: Implement the new types**
+
+In `src/types/assets/object.rs`, add these types after line 38 (after the `ObjectAttributeValue` struct, before the `#[cfg(test)]` block):
+
+```rust
+/// A single attribute entry from `GET /object/{id}/attributes`.
+/// Includes the full attribute definition with name, unlike `AssetAttribute`
+/// which only has the numeric `objectTypeAttributeId`.
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectAttribute {
+    pub id: String,
+    #[serde(rename = "objectTypeAttributeId")]
+    pub object_type_attribute_id: String,
+    #[serde(rename = "objectTypeAttribute")]
+    pub object_type_attribute: ObjectTypeAttributeDef,
+    #[serde(rename = "objectAttributeValues", default)]
+    pub values: Vec<ObjectAttributeValue>,
+}
+
+/// Attribute definition from the object type schema.
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectTypeAttributeDef {
+    pub id: String,
+    pub name: String,
+    #[serde(default)]
+    pub system: bool,
+    #[serde(default)]
+    pub hidden: bool,
+    #[serde(default)]
+    pub label: bool,
+    #[serde(default)]
+    pub position: i32,
+}
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+Run: `cargo test --lib -- assets::object::tests::deserialize_object_attribute`
+Expected: All 3 tests PASS.
+
+- [ ] **Step 5: Run all tests**
+
+Run: `cargo test`
+Expected: All tests PASS.
+
+- [ ] **Step 6: Run clippy and format**
+
+Run: `cargo fmt --all && cargo clippy -- -D warnings`
+Expected: No warnings, no format issues.
+
+- [ ] **Step 7: Commit**
+
+```bash
+git add src/types/assets/object.rs
+git commit -m "feat: add ObjectAttribute and ObjectTypeAttributeDef types (#58)"
+```
+
+---
+
+### Task 2: Add `get_object_attributes()` API method
+
+**Files:**
+- Modify: `src/api/assets/objects.rs:6` (add import)
+- Modify: `src/api/assets/objects.rs:75` (add method after `get_asset`)
+
+- [ ] **Step 1: Add the import**
+
+In `src/api/assets/objects.rs`, change line 6:
+
+```rust
+use crate::types::assets::AssetObject;
+```
+
+To:
+
+```rust
+use crate::types::assets::{AssetObject, ObjectAttribute};
+```
+
+- [ ] **Step 2: Add the API method**
+
+In `src/api/assets/objects.rs`, add this method inside the `impl JiraClient` block, after `get_asset` (after line 75, before the closing `}`):
+
+```rust
+    /// Get all attributes for a single object, with full attribute definitions
+    /// including human-readable names.
+    pub async fn get_object_attributes(
+        &self,
+        workspace_id: &str,
+        object_id: &str,
+    ) -> Result<Vec<ObjectAttribute>> {
+        let path = format!("object/{}/attributes", urlencoding::encode(object_id));
+        self.get_assets(workspace_id, &path).await
+    }
+```
+
+- [ ] **Step 3: Run all tests**
+
+Run: `cargo test`
+Expected: All tests PASS.
+ +- [ ] **Step 4: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings` +Expected: No warnings, no format issues. + +- [ ] **Step 5: Commit** + +```bash +git add src/api/assets/objects.rs +git commit -m "feat: add get_object_attributes() API method (#58)" +``` + +--- + +### Task 3: Update `handle_view` to display attribute names + +**Files:** +- Modify: `src/cli/assets.rs:94-95` (change `get_asset` call) +- Modify: `src/cli/assets.rs:98-141` (replace both `Json` and `Table` branches) + +- [ ] **Step 1: Change `get_asset` to not fetch attributes** + +In `src/cli/assets.rs`, replace line 94-95: + +```rust + let object = client + .get_asset(workspace_id, &object_id, attributes) +``` + +With: + +```rust + let object = client + .get_asset(workspace_id, &object_id, false) +``` + +- [ ] **Step 2: Replace the JSON output branch** + +In `src/cli/assets.rs`, replace lines 99-101: + +```rust + OutputFormat::Json => { + println!("{}", output::render_json(&object)?); + } +``` + +With: + +```rust + OutputFormat::Json => { + if attributes { + let mut attrs = client + .get_object_attributes(workspace_id, &object_id) + .await?; + // JSON: filter system and hidden only (keep label for programmatic consumers) + attrs.retain(|a| { + !a.object_type_attribute.system + && !a.object_type_attribute.hidden + }); + attrs.sort_by_key(|a| a.object_type_attribute.position); + let combined = serde_json::json!({ + "object": object, + "attributes": attrs, + }); + println!("{}", serde_json::to_string_pretty(&combined)?); + } else { + println!("{}", output::render_json(&object)?); + } + } +``` + +- [ ] **Step 3: Replace the attribute rendering block in the Table branch** + +In `src/cli/assets.rs`, replace lines 118-139: + +```rust + if attributes && !object.attributes.is_empty() { + println!(); + let attr_rows: Vec> = object + .attributes + .iter() + .flat_map(|attr| { + attr.values.iter().map(move |v| { + vec![ + attr.object_type_attribute_id.clone(), + v.display_value + 
.clone()
+                                    .or_else(|| v.value.clone())
+                                    .unwrap_or_default(),
+                            ]
+                        })
+                    })
+                    .collect();
+                println!(
+                    "{}",
+                    output::render_table(&["Attribute ID", "Value"], &attr_rows)
+                );
+            }
+```
+
+With:
+
+```rust
+            if attributes {
+                let mut attrs = client
+                    .get_object_attributes(workspace_id, &object_id)
+                    .await?;
+                attrs.retain(|a| {
+                    !a.object_type_attribute.system
+                        && !a.object_type_attribute.hidden
+                        && !a.object_type_attribute.label
+                });
+                attrs.sort_by_key(|a| a.object_type_attribute.position);
+
+                if !attrs.is_empty() {
+                    println!();
+                    let attr_rows: Vec<Vec<String>> = attrs
+                        .iter()
+                        .flat_map(|attr| {
+                            attr.values.iter().map(move |v| {
+                                vec![
+                                    attr.object_type_attribute.name.clone(),
+                                    v.display_value
+                                        .clone()
+                                        .or_else(|| v.value.clone())
+                                        .unwrap_or_default(),
+                                ]
+                            })
+                        })
+                        .collect();
+                    println!(
+                        "{}",
+                        output::render_table(&["Attribute", "Value"], &attr_rows)
+                    );
+                }
+            }
+```
+
+- [ ] **Step 4: Run all tests**
+
+Run: `cargo test`
+Expected: All tests PASS.
+
+- [ ] **Step 5: Run clippy and format**
+
+Run: `cargo fmt --all && cargo clippy -- -D warnings`
+Expected: No warnings, no format issues.
+ +- [ ] **Step 6: Commit** + +```bash +git add src/cli/assets.rs +git commit -m "feat: display attribute names instead of numeric IDs in assets view (#58)" +``` + +--- + +### Task 4: Add integration test for `get_object_attributes()` + +**Files:** +- Modify: `tests/assets.rs:284` (add test at end of file) + +- [ ] **Step 1: Write the integration test** + +In `tests/assets.rs`, add this test after `get_connected_tickets_empty` (after line 284): + +```rust +#[tokio::test] +async fn get_object_attributes_returns_named_attributes() { + let server = MockServer::start().await; + + // Mock returns a mix of system, label, hidden, and user-defined attributes + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/object/88/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectTypeAttribute": { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + "objectAttributeValues": [ + { "value": "OBJ-88", "displayValue": "OBJ-88" } + ] + }, + { + "id": "640", + "objectTypeAttributeId": "135", + "objectTypeAttribute": { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1 + }, + "objectAttributeValues": [ + { "value": "Acme Corp", "displayValue": "Acme Corp" } + ] + }, + { + "id": "641", + "objectTypeAttributeId": "140", + "objectTypeAttribute": { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + }, + "objectAttributeValues": [ + { "value": "New York, NY", "displayValue": "New York, NY" } + ] + }, + { + "id": "642", + "objectTypeAttributeId": "141", + "objectTypeAttribute": { + "id": "141", + "name": "Internal Notes", + "system": false, + "hidden": true, + "label": false, + "position": 6 + }, + "objectAttributeValues": [ + { "value": "secret", "displayValue": "secret" } + ] + }, + { + "id": "643", + 
"objectTypeAttributeId": "142", + "objectTypeAttribute": { + "id": "142", + "name": "Seats", + "system": false, + "hidden": false, + "label": false, + "position": 4 + }, + "objectAttributeValues": [ + { "value": "10", "displayValue": "10" } + ] + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let attrs = client + .get_object_attributes("ws-123", "88") + .await + .unwrap(); + + // All 5 attributes returned from API + assert_eq!(attrs.len(), 5); + + // Verify attribute names are present + assert_eq!(attrs[0].object_type_attribute.name, "Key"); + assert!(attrs[0].object_type_attribute.system); + + // Verify label attribute + assert_eq!(attrs[1].object_type_attribute.name, "Name"); + assert!(attrs[1].object_type_attribute.label); + + // Verify hidden attribute + assert_eq!(attrs[3].object_type_attribute.name, "Internal Notes"); + assert!(attrs[3].object_type_attribute.hidden); + + // Simulate the CLI filter: exclude system, hidden, label + let mut visible: Vec<_> = attrs + .into_iter() + .filter(|a| { + !a.object_type_attribute.system + && !a.object_type_attribute.hidden + && !a.object_type_attribute.label + }) + .collect(); + visible.sort_by_key(|a| a.object_type_attribute.position); + + // Only user-defined, non-hidden attributes remain + assert_eq!(visible.len(), 2); + // Sorted by position: Seats (4) before Location (5) + assert_eq!(visible[0].object_type_attribute.name, "Seats"); + assert_eq!(visible[0].object_type_attribute.position, 4); + assert_eq!(visible[1].object_type_attribute.name, "Location"); + assert_eq!(visible[1].object_type_attribute.position, 5); + + // Verify displayValue is available + assert_eq!( + visible[1].values[0].display_value.as_deref(), + Some("New York, NY") + ); +} +``` + +- [ ] **Step 2: Run the new test** + +Run: `cargo test --test assets -- get_object_attributes_returns_named_attributes` +Expected: PASS + +- [ ] **Step 3: Run all tests** 
+ +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 4: Commit** + +```bash +git add tests/assets.rs +git commit -m "test: add integration test for get_object_attributes (#58)" +``` + +--- + +### Task 5: Final verification + +**Files:** +- All modified files from Tasks 1-3 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all -- --check` +Expected: No format issues. diff --git a/docs/superpowers/plans/2026-03-26-board-flag.md b/docs/superpowers/plans/2026-03-26-board-flag.md new file mode 100644 index 0000000..a8fa976 --- /dev/null +++ b/docs/superpowers/plans/2026-03-26-board-flag.md @@ -0,0 +1,360 @@ +# Add `--board` Flag — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add a `--board ` CLI flag to `sprint list`, `sprint current`, and `board view` so users can specify a board without editing `.jr.toml`. + +**Architecture:** Convert three unit variants in `SprintCommand` and `BoardCommand` to struct variants with `board: Option`. Add `Config::board_id()` resolver that prioritizes CLI override over config fallback. Update handlers and error messages. 
+
+**Tech Stack:** Rust, clap 4 (derive API)
+
+**Spec:** `docs/superpowers/specs/2026-03-26-board-flag-design.md`
+
+---
+
+### Task 1: Add `Config::board_id()` resolver with tests
+
+**Files:**
+- Modify: `src/config.rs:109-113` (add method after `project_key`)
+- Test: `src/config.rs` (inline `#[cfg(test)]` module, after `test_project_key_cli_override`)
+
+- [ ] **Step 1: Write the failing test**
+
+In `src/config.rs`, add this test after `test_project_key_cli_override` (after line 236):
+
+```rust
+#[test]
+fn test_board_id_cli_override() {
+    let config = Config {
+        global: GlobalConfig::default(),
+        project: ProjectConfig {
+            project: None,
+            board_id: Some(42),
+        },
+    };
+    // CLI override wins
+    assert_eq!(config.board_id(Some(99)), Some(99));
+    // Config fallback
+    assert_eq!(config.board_id(None), Some(42));
+    // Neither set
+    let empty = Config::default();
+    assert_eq!(empty.board_id(None), None);
+}
+```
+
+- [ ] **Step 2: Run test to verify it fails**
+
+Run: `cargo test test_board_id_cli_override`
+Expected: FAIL with "no method named `board_id` found"
+
+- [ ] **Step 3: Implement `board_id()` method**
+
+In `src/config.rs`, add this method after `project_key` (after line 113):
+
+```rust
+pub fn board_id(&self, cli_override: Option<u64>) -> Option<u64> {
+    cli_override.or(self.project.board_id)
+}
+```
+
+- [ ] **Step 4: Run test to verify it passes**
+
+Run: `cargo test test_board_id_cli_override`
+Expected: PASS
+
+- [ ] **Step 5: Run all tests**
+
+Run: `cargo test`
+Expected: All tests PASS.
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/config.rs
+git commit -m "feat: add Config::board_id() resolver for CLI override (#57)"
+```
+
+---
+
+### Task 2: Convert enum variants and update handlers
+
+**Files:**
+- Modify: `src/cli/mod.rs:363-377` (BoardCommand and SprintCommand enums)
+- Modify: `src/cli/sprint.rs:10-34` (handle function)
+- Modify: `src/cli/board.rs:9-19,34-41` (handle and handle_view functions)
+
+- [ ] **Step 1: Convert `BoardCommand::View` to struct variant**
+
+In `src/cli/mod.rs`, replace lines 363-369:
+
+```rust
+#[derive(Subcommand)]
+pub enum BoardCommand {
+    /// List boards
+    List,
+    /// View current board issues
+    View,
+}
+```
+
+With:
+
+```rust
+#[derive(Subcommand)]
+pub enum BoardCommand {
+    /// List boards
+    List,
+    /// View current board issues
+    View {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+}
+```
+
+- [ ] **Step 2: Convert `SprintCommand` variants to struct variants**
+
+In `src/cli/mod.rs`, replace lines 371-377:
+
+```rust
+#[derive(Subcommand)]
+pub enum SprintCommand {
+    /// List sprints
+    List,
+    /// Show current sprint issues
+    Current,
+}
+```
+
+With:
+
+```rust
+#[derive(Subcommand)]
+pub enum SprintCommand {
+    /// List sprints
+    List {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+    /// Show current sprint issues
+    Current {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+}
+```
+
+- [ ] **Step 3: Update `board.rs` handler dispatch**
+
+In `src/cli/board.rs`, replace lines 15-18:
+
+```rust
+    match command {
+        BoardCommand::List => handle_list(client, output_format).await,
+        BoardCommand::View => handle_view(config, client, output_format).await,
+    }
+```
+
+With:
+
+```rust
+    match command {
+        BoardCommand::List => handle_list(client, output_format).await,
+        BoardCommand::View { board } => {
+            handle_view(config, client, output_format, board).await
+        }
+    }
+```
+
+- [ ] **Step 4: 
Update `handle_view` signature and board_id resolution**
+
+In `src/cli/board.rs`, replace lines 34-41:
+
+```rust
+async fn handle_view(
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+) -> Result<()> {
+    let board_id = config.project.board_id.ok_or_else(|| {
+        anyhow::anyhow!("No board_id configured. Set board_id in .jr.toml or run \"jr init\".")
+    })?;
+```
+
+With:
+
+```rust
+async fn handle_view(
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    board_override: Option<u64>,
+) -> Result<()> {
+    let board_id = config.board_id(board_override).ok_or_else(|| {
+        anyhow::anyhow!(
+            "No board configured. Use --board or set board_id in .jr.toml.\n\
+             Run \"jr board list\" to see available boards."
+        )
+    })?;
+```
+
+The rest of `handle_view` (lines 43-82) remains unchanged.
+
+- [ ] **Step 5: Update `sprint.rs` handler — extract board from variants and update error message**
+
+In `src/cli/sprint.rs`, replace lines 10-35:
+
+```rust
+/// Handle all sprint subcommands.
+pub async fn handle(
+    command: SprintCommand,
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+) -> Result<()> {
+    let board_id = config.project.board_id.ok_or_else(|| {
+        anyhow::anyhow!("No board_id configured. Set board_id in .jr.toml or run \"jr init\".")
+    })?;
+
+    // Guard: sprints only make sense for scrum boards
+    let board_config = client.get_board_config(board_id).await?;
+    let board_type = board_config.board_type.to_lowercase();
+    if board_type != "scrum" {
+        bail!(
+            "Sprint commands are only available for scrum boards. Board {} is a {} board.",
+            board_id,
+            board_config.board_type
+        );
+    }
+
+    match command {
+        SprintCommand::List => handle_list(board_id, client, output_format).await,
+        SprintCommand::Current => handle_current(board_id, client, output_format, config).await,
+    }
+}
+```
+
+With:
+
+```rust
+/// Handle all sprint subcommands.
+pub async fn handle( + command: SprintCommand, + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, +) -> Result<()> { + let board_override = match &command { + SprintCommand::List { board } => *board, + SprintCommand::Current { board } => *board, + }; + + let board_id = config.board_id(board_override).ok_or_else(|| { + anyhow::anyhow!( + "No board configured. Use --board or set board_id in .jr.toml.\n\ + Run \"jr board list\" to see available boards." + ) + })?; + + // Guard: sprints only make sense for scrum boards + let board_config = client.get_board_config(board_id).await?; + let board_type = board_config.board_type.to_lowercase(); + if board_type != "scrum" { + bail!( + "Sprint commands are only available for scrum boards. Board {} is a {} board.", + board_id, + board_config.board_type + ); + } + + match command { + SprintCommand::List { .. } => handle_list(board_id, client, output_format).await, + SprintCommand::Current { .. } => { + handle_current(board_id, client, output_format, config).await + } + } +} +``` + +Note: This extracts `board` from each variant at the top using separate match arms, then uses `{ .. }` in the dispatch match to ignore the already-consumed field. This avoids duplicating the board_id resolution in each dispatch arm. + +- [ ] **Step 6: Run all tests** + +Run: `cargo test` +Expected: All tests PASS (existing `compute_sprint_summary` tests are unaffected). + +- [ ] **Step 7: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no format issues. 
+ +- [ ] **Step 8: Commit** + +```bash +git add src/cli/mod.rs src/cli/sprint.rs src/cli/board.rs +git commit -m "feat: add --board flag to sprint and board commands (#57)" +``` + +--- + +### Task 3: Update documentation + +**Files:** +- Modify: `README.md` (lines 110-113) + +- [ ] **Step 1: Update README command table** + +In `README.md`, replace lines 110-113: + +```markdown +| `jr board list` | List boards | +| `jr board view` | Show current board issues | +| `jr sprint list` | List sprints (scrum only) | +| `jr sprint current` | Show current sprint issues (with points summary) | +``` + +With: + +```markdown +| `jr board list` | List boards | +| `jr board view --board 42` | Show current board issues (`--board` or config) | +| `jr sprint list --board 42` | List sprints (`--board` or config, scrum only) | +| `jr sprint current --board 42` | Show current sprint issues (with points summary) | +``` + +- [ ] **Step 2: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings` +Expected: No warnings. + +- [ ] **Step 3: Commit** + +```bash +git add README.md +git commit -m "docs: update README with --board flag examples (#57)" +``` + +--- + +### Task 4: Final verification + +**Files:** +- All modified files from Tasks 1-3 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all && cargo fmt --all -- --check` +Expected: No format issues. 
diff --git a/docs/superpowers/plans/2026-03-26-issue-view-fields.md b/docs/superpowers/plans/2026-03-26-issue-view-fields.md new file mode 100644 index 0000000..51fd7a9 --- /dev/null +++ b/docs/superpowers/plans/2026-03-26-issue-view-fields.md @@ -0,0 +1,564 @@ +# Issue View Standard Fields — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `created`, `updated`, `reporter`, `resolution`, `components`, and `fixVersions` to issue view JSON output, and show `created`, `updated`, `reporter` in the table view. + +**Architecture:** Add 3 new serde types (`Resolution`, `Component`, `Version`) and 6 new fields to `IssueFields`. Update the API field lists in both `get_issue()` and `search_issues()`. Add Reporter/Created/Updated rows to the table view. JSON output requires no code change — serde derives handle it. 
+ +**Tech Stack:** Rust, serde, reqwest, comfy-table, wiremock + +**Spec:** `docs/superpowers/specs/2026-03-26-issue-view-fields-design.md` + +--- + +### Task 1: Add new types and fields to `IssueFields` with tests + +**Files:** +- Modify: `src/types/jira/issue.rs:56-72` (add new types before `IssueFields`, add fields to `IssueFields`) + +- [ ] **Step 1: Write the failing tests** + +In `src/types/jira/issue.rs`, add these tests inside the existing `#[cfg(test)] mod tests` block (after the closing `}` of `issuelinks_empty_array` at line 262, before the module's closing `}` at line 263): + +```rust + #[test] + fn new_fields_present() { + let json = json!({ + "summary": "test", + "created": "2026-03-20T14:32:00.000+0000", + "updated": "2026-03-25T09:15:22.000+0000", + "reporter": {"accountId": "abc123", "displayName": "Jane Smith"}, + "resolution": {"name": "Fixed"}, + "components": [{"name": "Backend"}, {"name": "API"}], + "fixVersions": [{"name": "v2.0", "released": false, "releaseDate": "2026-04-01"}] + }); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert_eq!(fields.created.as_deref(), Some("2026-03-20T14:32:00.000+0000")); + assert_eq!(fields.updated.as_deref(), Some("2026-03-25T09:15:22.000+0000")); + let reporter = fields.reporter.unwrap(); + assert_eq!(reporter.display_name, "Jane Smith"); + assert_eq!(reporter.account_id, "abc123"); + assert_eq!(fields.resolution.unwrap().name, "Fixed"); + let components = fields.components.unwrap(); + assert_eq!(components.len(), 2); + assert_eq!(components[0].name, "Backend"); + assert_eq!(components[1].name, "API"); + let versions = fields.fix_versions.unwrap(); + assert_eq!(versions.len(), 1); + assert_eq!(versions[0].name, "v2.0"); + assert_eq!(versions[0].released, Some(false)); + assert_eq!(versions[0].release_date.as_deref(), Some("2026-04-01")); + // New typed fields should NOT appear in extra + assert!(!fields.extra.contains_key("created")); + assert!(!fields.extra.contains_key("updated")); 
+ assert!(!fields.extra.contains_key("reporter")); + assert!(!fields.extra.contains_key("resolution")); + assert!(!fields.extra.contains_key("components")); + assert!(!fields.extra.contains_key("fixVersions")); + } + + #[test] + fn new_fields_absent() { + let json = json!({"summary": "test"}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert!(fields.created.is_none()); + assert!(fields.updated.is_none()); + assert!(fields.reporter.is_none()); + assert!(fields.resolution.is_none()); + assert!(fields.components.is_none()); + assert!(fields.fix_versions.is_none()); + } + + #[test] + fn new_fields_null() { + let json = json!({ + "summary": "test", + "created": null, + "updated": null, + "reporter": null, + "resolution": null, + "components": null, + "fixVersions": null + }); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert!(fields.created.is_none()); + assert!(fields.updated.is_none()); + assert!(fields.reporter.is_none()); + assert!(fields.resolution.is_none()); + assert!(fields.components.is_none()); + assert!(fields.fix_versions.is_none()); + } + + #[test] + fn components_empty_array() { + let json = json!({"summary": "test", "components": []}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert_eq!(fields.components, Some(vec![])); + } + + #[test] + fn fix_versions_empty_array() { + let json = json!({"summary": "test", "fixVersions": []}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert_eq!(fields.fix_versions, Some(vec![])); + } + + #[test] + fn version_optional_fields_absent() { + let json = json!({"summary": "test", "fixVersions": [{"name": "v1.0"}]}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + let v = &fields.fix_versions.unwrap()[0]; + assert_eq!(v.name, "v1.0"); + assert!(v.released.is_none()); + assert!(v.release_date.is_none()); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib -- 
jira::issue::tests::new_fields` +Expected: FAIL with "unknown field" or "cannot find type" errors because `Resolution`, `Component`, `Version` types and the 6 new fields on `IssueFields` don't exist yet. + +- [ ] **Step 3: Implement the new types and fields** + +In `src/types/jira/issue.rs`, add these 3 types after `IssueProject` (after line 107, before `Transition`). Note: `PartialEq` is added beyond the spec's derives — it is required for the `assert_eq!(fields.components, Some(vec![]))` assertions in the empty-array tests: + +```rust +#[derive(Debug, Deserialize, PartialEq, Serialize)] +pub struct Resolution { + pub name: String, +} + +#[derive(Debug, Deserialize, PartialEq, Serialize)] +pub struct Component { + pub name: String, +} + +#[derive(Debug, Deserialize, PartialEq, Serialize)] +pub struct Version { + pub name: String, + pub released: Option<bool>, + #[serde(rename = "releaseDate")] + pub release_date: Option<String>, +} +``` + +Then modify `IssueFields` (lines 56-72) to add the 6 new fields. Replace the entire struct (keep the pre-existing fields' types exactly as in the current definition at lines 56-72 — verify them against the current struct if they differ from what's shown here): + +```rust +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct IssueFields { + pub summary: String, + pub description: Option<String>, + pub status: Option<IssueStatus>, + #[serde(rename = "issuetype")] + pub issue_type: Option<IssueType>, + pub priority: Option<IssuePriority>, + pub assignee: Option<IssueAssignee>, + pub reporter: Option<IssueAssignee>, + pub project: Option<IssueProject>, + pub created: Option<String>, + pub updated: Option<String>, + pub resolution: Option<Resolution>, + #[serde(default)] + pub components: Option<Vec<Component>>, + #[serde(rename = "fixVersions", default)] + pub fix_versions: Option<Vec<Version>>, + #[serde(default)] + pub labels: Option<Vec<String>>, + pub parent: Option<IssueParent>, + pub issuelinks: Option<Vec<IssueLink>>, + #[serde(flatten)] + pub extra: HashMap<String, serde_json::Value>, +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test --lib -- jira::issue::tests::new_fields` +Expected: All 6 new tests PASS. + +- [ ] **Step 5: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. 
+ +- [ ] **Step 6: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings` +Expected: No warnings, no format issues. + +- [ ] **Step 7: Commit** + +```bash +git add src/types/jira/issue.rs +git commit -m "feat: add Resolution, Component, Version types and new fields to IssueFields (#59)" +``` + +--- + +### Task 2: Update API field lists + +**Files:** +- Modify: `src/api/jira/issues.rs:96-97` (`get_issue` field string) +- Modify: `src/api/jira/issues.rs:31-39` (`search_issues` field vec) + +**TDD note:** The unit tests in Task 1 already verify that `IssueFields` correctly deserializes the 6 new fields. The API field list changes here are purely additive (telling the Jira API to return more fields) — the deserialization is already tested. Integration tests in Task 4 verify end-to-end behavior with wiremock. + +- [ ] **Step 1: Update `get_issue()` field string** + +In `src/api/jira/issues.rs`, replace lines 96-97: + +```rust + let mut fields = + "summary,status,issuetype,priority,assignee,project,description,labels,parent,issuelinks".to_string(); +``` + +With: + +```rust + let mut fields = + "summary,status,issuetype,priority,assignee,reporter,project,description,labels,parent,issuelinks,created,updated,resolution,components,fixVersions".to_string(); +``` + +- [ ] **Step 2: Update `search_issues()` field vec** + +In `src/api/jira/issues.rs`, replace lines 31-39: + +```rust + let mut fields = vec![ + "summary", + "status", + "issuetype", + "priority", + "assignee", + "project", + "description", + ]; +``` + +With: + +```rust + let mut fields = vec![ + "summary", + "status", + "issuetype", + "priority", + "assignee", + "reporter", + "project", + "description", + "created", + "updated", + "resolution", + "components", + "fixVersions", + ]; +``` + +- [ ] **Step 3: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. 
+ +- [ ] **Step 4: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings` +Expected: No warnings, no format issues. + +- [ ] **Step 5: Commit** + +```bash +git add src/api/jira/issues.rs +git commit -m "feat: add standard fields to get_issue and search_issues API requests (#59)" +``` + +--- + +### Task 3: Add Reporter, Created, Updated to table view + +**Files:** +- Modify: `src/cli/issue/list.rs:441-499` (`handle_view` table rows) + +**TDD note:** `handle_view` is async and prints to stdout via `println!`, making direct unit testing impractical without stdout capture. The table row construction uses the same typed fields tested in Task 1, and `format_comment_date` is already tested in existing unit tests (lines 614-631 of `list.rs`). End-to-end table verification is done via live testing in Task 5. + +- [ ] **Step 1: Restructure the `rows` vec in `handle_view`** + +In `src/cli/issue/list.rs`, replace lines 441-499 (the initial `let mut rows = vec![...]` block through the closing `];`): + +```rust + let mut rows = vec![ + vec!["Key".into(), issue.key.clone()], + vec!["Summary".into(), issue.fields.summary.clone()], + vec![ + "Type".into(), + issue + .fields + .issue_type + .as_ref() + .map(|t| t.name.clone()) + .unwrap_or_default(), + ], + vec![ + "Status".into(), + issue + .fields + .status + .as_ref() + .map(|s| s.name.clone()) + .unwrap_or_default(), + ], + vec![ + "Priority".into(), + issue + .fields + .priority + .as_ref() + .map(|p| p.name.clone()) + .unwrap_or_default(), + ], + vec![ + "Assignee".into(), + issue + .fields + .assignee + .as_ref() + .map(|a| a.display_name.clone()) + .unwrap_or_else(|| "Unassigned".into()), + ], + vec![ + "Reporter".into(), + issue + .fields + .reporter + .as_ref() + .map(|r| r.display_name.clone()) + .unwrap_or_else(|| "(none)".into()), + ], + vec![ + "Created".into(), + issue + .fields + .created + .as_deref() + .map(format_comment_date) + .unwrap_or_default(), + ], + vec![ + "Updated".into(), + 
issue + .fields + .updated + .as_deref() + .map(format_comment_date) + .unwrap_or_default(), + ], + vec![ + "Project".into(), + issue + .fields + .project + .as_ref() + .map(|p| format!("{} ({})", p.name.as_deref().unwrap_or(""), p.key)) + .unwrap_or_default(), + ], + vec![ + "Labels".into(), + issue + .fields + .labels + .as_ref() + .filter(|l| !l.is_empty()) + .map(|l| l.join(", ")) + .unwrap_or_else(|| "(none)".into()), + ], + ]; +``` + +This inserts Reporter, Created, Updated between Assignee and Project. The code after line 499 (Parent push, Links push, Assets, Points, Description) remains unchanged. + +- [ ] **Step 2: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 3: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings` +Expected: No warnings, no format issues. + +- [ ] **Step 4: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "feat: display reporter, created, updated in issue view table (#59)" +``` + +--- + +### Task 4: Add integration test for new fields + +**Files:** +- Modify: `tests/issue_commands.rs` (add test at end of file) +- Modify: `tests/common/fixtures.rs` (add fixture helper) + +- [ ] **Step 1: Add fixture helper for issue with all fields** + +In `tests/common/fixtures.rs`, add this function at the end of the file (after `project_response`): + +```rust +pub fn issue_response_with_standard_fields(key: &str, summary: &str) -> Value { + json!({ + "key": key, + "fields": { + "summary": summary, + "status": {"name": "In Progress", "statusCategory": {"name": "In Progress", "key": "indeterminate"}}, + "issuetype": {"name": "Bug"}, + "priority": {"name": "High"}, + "assignee": {"accountId": "abc123", "displayName": "John Doe"}, + "reporter": {"accountId": "def456", "displayName": "Jane Smith"}, + "project": {"key": key.split('-').next().unwrap_or("TEST"), "name": "Test Project"}, + "created": "2026-03-20T14:32:00.000+0000", + "updated": "2026-03-25T09:15:22.000+0000", + 
"resolution": {"name": "Fixed"}, + "components": [{"name": "Backend"}, {"name": "API"}], + "fixVersions": [{"name": "v2.0", "released": false, "releaseDate": "2026-04-01"}], + "labels": ["bug"], + "parent": null, + "issuelinks": [] + } + }) +} +``` + +- [ ] **Step 2: Write the integration test** + +In `tests/issue_commands.rs`, add this test at the end of the file: + +```rust +#[tokio::test] +async fn get_issue_includes_standard_fields() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-42")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response_with_standard_fields( + "FOO-42", + "Test with all fields", + )), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let issue = client.get_issue("FOO-42", &[]).await.unwrap(); + + // Verify new fields are deserialized + assert_eq!( + issue.fields.created.as_deref(), + Some("2026-03-20T14:32:00.000+0000") + ); + assert_eq!( + issue.fields.updated.as_deref(), + Some("2026-03-25T09:15:22.000+0000") + ); + + let reporter = issue.fields.reporter.as_ref().unwrap(); + assert_eq!(reporter.display_name, "Jane Smith"); + assert_eq!(reporter.account_id, "def456"); + + assert_eq!(issue.fields.resolution.as_ref().unwrap().name, "Fixed"); + + let components = issue.fields.components.as_ref().unwrap(); + assert_eq!(components.len(), 2); + assert_eq!(components[0].name, "Backend"); + assert_eq!(components[1].name, "API"); + + let versions = issue.fields.fix_versions.as_ref().unwrap(); + assert_eq!(versions.len(), 1); + assert_eq!(versions[0].name, "v2.0"); + assert_eq!(versions[0].released, Some(false)); + assert_eq!(versions[0].release_date.as_deref(), Some("2026-04-01")); + + // Verify JSON serialization includes the new fields + let json_str = serde_json::to_string(&issue).unwrap(); + assert!(json_str.contains("\"created\"")); + 
assert!(json_str.contains("\"reporter\"")); + assert!(json_str.contains("\"resolution\"")); + assert!(json_str.contains("\"components\"")); + assert!(json_str.contains("\"fixVersions\"")); +} + +#[tokio::test] +async fn get_issue_null_standard_fields() { + let server = MockServer::start().await; + + // Issue with all new fields null/absent + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-43")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-43", + "Minimal issue", + "To Do", + )), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let issue = client.get_issue("FOO-43", &[]).await.unwrap(); + + // All new fields should be None (the fixture doesn't include them) + assert!(issue.fields.created.is_none()); + assert!(issue.fields.updated.is_none()); + assert!(issue.fields.reporter.is_none()); + assert!(issue.fields.resolution.is_none()); + assert!(issue.fields.components.is_none()); + assert!(issue.fields.fix_versions.is_none()); +} +``` + +- [ ] **Step 3: Run the new tests** + +Run: `cargo test --test issue_commands -- get_issue_includes_standard_fields && cargo test --test issue_commands -- get_issue_null_standard_fields` +Expected: Both PASS. + +- [ ] **Step 4: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 5: Commit** + +```bash +git add tests/issue_commands.rs tests/common/fixtures.rs +git commit -m "test: add integration tests for standard issue fields (#59)" +``` + +--- + +### Task 5: Final verification + +**Files:** +- All modified files from Tasks 1-4 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all -- --check` +Expected: No format issues. 
diff --git a/docs/superpowers/plans/2026-03-26-jrerror-exit-codes.md b/docs/superpowers/plans/2026-03-26-jrerror-exit-codes.md new file mode 100644 index 0000000..9d33210 --- /dev/null +++ b/docs/superpowers/plans/2026-03-26-jrerror-exit-codes.md @@ -0,0 +1,270 @@ +# JrError Exit Codes Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Replace 13 `anyhow::anyhow!`/`bail!` calls with typed `JrError` variants so `main.rs` can map exit codes correctly (78 for config errors, 64 for user input errors). + +**Architecture:** Mechanical replacements across 8 files + one format string change in `error.rs`. No new types or variants. + +**Tech Stack:** Rust, thiserror, anyhow + +--- + +### Task 1: Change ConfigError display format and add unit tests + +**Files:** +- Modify: `src/error.rs:14` +- Test: `src/error.rs` (inline unit tests) + +- [ ] **Step 1: Write failing unit tests for exit code mapping** + +Add a `#[cfg(test)]` module at the bottom of `src/error.rs`: + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn config_error_exit_code() { + let err = JrError::ConfigError("test".into()); + assert_eq!(err.exit_code(), 78); + } + + #[test] + fn user_error_exit_code() { + let err = JrError::UserError("test".into()); + assert_eq!(err.exit_code(), 64); + } + + #[test] + fn config_error_display_no_prefix() { + let err = JrError::ConfigError("No board_id configured.".into()); + assert_eq!(err.to_string(), "No board_id configured."); + } + + #[test] + fn user_error_display_passthrough() { + let err = JrError::UserError("Invalid selection".into()); + assert_eq!(err.to_string(), "Invalid selection"); + } +} +``` + +- [ ] **Step 2: Run tests to verify the display test fails** + +Run: `cargo test --lib error::tests` +Expected: `config_error_display_no_prefix` 
FAILS because current format prepends "Configuration error: ". + +- [ ] **Step 3: Change ConfigError format string** + +In `src/error.rs:14`, change: +```rust + #[error("Configuration error: {0}")] + ConfigError(String), +``` +to: +```rust + #[error("{0}")] + ConfigError(String), +``` + +- [ ] **Step 4: Run tests to verify all pass** + +Run: `cargo test --lib error::tests` +Expected: All 4 tests PASS. + +- [ ] **Step 5: Commit** + +```bash +git add src/error.rs +git commit -m "fix: remove redundant ConfigError display prefix (#30)" +``` + +--- + +### Task 2: Replace anyhow::anyhow! with JrError::ConfigError (7 locations) + +**Files:** +- Modify: `src/cli/board.rs:39-41` +- Modify: `src/cli/sprint.rs:16-18` +- Modify: `src/api/client.rs:35-36` +- Modify: `src/cli/team.rs:85-87` +- Modify: `src/config.rs:97-98` +- Modify: `src/cli/issue/helpers.rs:19-23` +- Modify: `src/cli/issue/helpers.rs:77-81` + +Each replacement follows the same pattern. Replace: +```rust +anyhow::anyhow!("message text") +``` +with: +```rust +crate::error::JrError::ConfigError("message text".into()) +``` + +Note: some files may already import `JrError` or `crate::error`. Check each file's imports and add `use crate::error::JrError;` if not already present. Prefer the shortest unambiguous path available in each file. + +- [ ] **Step 1: Replace in `src/cli/board.rs:40`** + +Change the `anyhow::anyhow!(...)` call to `JrError::ConfigError("...".into())`, keeping the existing message text exactly as-is. Add `use crate::error::JrError;` if not present. + +- [ ] **Step 2: Replace in `src/cli/sprint.rs:17`** + +Same pattern: `anyhow::anyhow!(...)` → `JrError::ConfigError("...".into())`. + +- [ ] **Step 3: Replace in `src/api/client.rs:36`** + +This file already uses `JrError::ConfigError` at lines 278 and 301, so the import exists. Change line 36 only. + +- [ ] **Step 4: Replace in `src/cli/team.rs:86`** + +Same pattern. Add import if needed. 
+ +- [ ] **Step 5: Replace in `src/config.rs:98`** + +Same pattern. Add import if needed. + +- [ ] **Step 6: Replace in `src/cli/issue/helpers.rs:20` and `:78`** + +Two locations in the same file. Same pattern for both. + +- [ ] **Step 7: Run tests and clippy** + +Run: `cargo test && cargo clippy -- -D warnings` +Expected: All tests pass, no clippy warnings. No test should break — error messages are identical. + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/board.rs src/cli/sprint.rs src/api/client.rs src/cli/team.rs src/config.rs src/cli/issue/helpers.rs +git commit -m "fix: use JrError::ConfigError for config-missing guards (#30)" +``` + +--- + +### Task 3: Replace anyhow::anyhow!/bail! with JrError::UserError (6 locations) + +**Files:** +- Modify: `src/cli/issue/create.rs:47-52, 63, 74` +- Modify: `src/cli/project.rs:69-73` +- Modify: `src/cli/issue/workflow.rs:121, 123` + +Same mechanical pattern. Replace: +```rust +anyhow::anyhow!("message text") +``` +with: +```rust +JrError::UserError("message text".into()) +``` + +For `bail!("message")` at `workflow.rs:123`, replace with: +```rust +return Err(JrError::UserError("Selection out of range".into()).into()); +``` + +- [ ] **Step 1: Replace in `src/cli/issue/create.rs:48, 63, 74`** + +Three `anyhow::anyhow!(...)` calls → `JrError::UserError("...".into())`. Add import if needed. + +- [ ] **Step 2: Replace in `src/cli/project.rs:70`** + +Same pattern. Add import if needed. + +- [ ] **Step 3: Replace in `src/cli/issue/workflow.rs:121` and `:123`** + +Line 121: `anyhow::anyhow!("Invalid selection")` → `JrError::UserError("Invalid selection".into())` + +Line 123: `bail!("Selection out of range")` → `return Err(JrError::UserError("Selection out of range".into()).into())` + +Add import if needed. + +- [ ] **Step 4: Run tests and clippy** + +Run: `cargo test && cargo clippy -- -D warnings` +Expected: All tests pass, no clippy warnings. 
+ +- [ ] **Step 5: Commit** + +```bash +git add src/cli/issue/create.rs src/cli/project.rs src/cli/issue/workflow.rs +git commit -m "fix: use JrError::UserError for missing-input guards (#30)" +``` + +--- + +### Task 4: Add inline unit tests for error type at call sites + +**Files:** +- Modify: `src/cli/board.rs` (add `#[cfg(test)]` module) +- Modify: `src/cli/issue/create.rs` (add `#[cfg(test)]` module) + +Handler functions are `pub(super)` — not reachable from `tests/`. Instead, add inline unit tests in the modules where the errors are raised. This follows the same pattern used elsewhere in the codebase (unit tests inline, integration tests for API calls). + +- [ ] **Step 1: Add ConfigError unit test in `src/cli/board.rs`** + +Add a `#[cfg(test)]` module at the bottom of `board.rs`: + +```rust +#[cfg(test)] +mod tests { + use super::*; + use crate::error::JrError; + + #[test] + fn missing_board_id_returns_config_error() { + // board_id = None triggers the ConfigError guard + let result: Option<u64> = None; + let err = result + .ok_or_else(|| { + JrError::ConfigError( + "No board_id configured. Set board_id in .jr.toml or run \"jr init\".".into(), + ) + }) + .unwrap_err(); + assert_eq!(err.exit_code(), 78); + assert!(err.to_string().contains("No board_id configured")); + } +} +``` + +- [ ] **Step 2: Add UserError unit test in `src/cli/issue/create.rs`** + +Add a `#[cfg(test)]` module at the bottom of `create.rs`: + +```rust +#[cfg(test)] +mod tests { + use crate::error::JrError; + + #[test] + fn missing_project_returns_user_error() { + let result: Option<String> = None; + let err = result + .ok_or_else(|| { + JrError::UserError("Project key is required. Use --project or configure .jr.toml. 
Run \"jr project list\" to see available projects.".into()) + }) + .unwrap_err(); + assert_eq!(err.exit_code(), 64); + assert!(err.to_string().contains("Project key is required")); + } +} +``` + +- [ ] **Step 3: Run all tests** + +Run: `cargo test` +Expected: All tests pass including the new unit tests. + +- [ ] **Step 4: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: Clean. + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/board.rs src/cli/issue/create.rs +git commit -m "test: add unit tests for exit code mapping at error sites (#30)" +``` diff --git a/docs/superpowers/plans/2026-03-26-kanban-jql-fix.md b/docs/superpowers/plans/2026-03-26-kanban-jql-fix.md new file mode 100644 index 0000000..c3fbbe9 --- /dev/null +++ b/docs/superpowers/plans/2026-03-26-kanban-jql-fix.md @@ -0,0 +1,139 @@ +# Fix Kanban JQL `AND ORDER BY` Bug — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Fix invalid JQL in `board view` kanban path where `ORDER BY rank ASC` is incorrectly joined with predicates via `AND`, producing a 400 error from Jira Cloud. + +**Architecture:** Extract kanban JQL construction into a pure helper function `build_kanban_jql()`, fix the ORDER BY separation, and add unit tests. The handler calls the helper instead of building JQL inline. Note: the spec shows an inline fix in its "Code Change" section, but the spec's Testing section prescribes extracting a helper for testability — this plan follows the helper approach. 
+ +**Tech Stack:** Rust, clap 4 (derive API) + +**Spec:** `docs/superpowers/specs/2026-03-26-kanban-jql-fix-design.md` + +--- + +### Task 1: Extract helper, fix JQL, and add tests + +**Files:** +- Modify: `src/cli/board.rs` + +- [ ] **Step 1: Write the failing tests** + +In `src/cli/board.rs`, add a `#[cfg(test)]` module at the end of the file (after line 82): + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn build_kanban_jql_with_project() { + let jql = build_kanban_jql(Some("FOO")); + assert_eq!( + jql, + "project = \"FOO\" AND statusCategory != Done ORDER BY rank ASC" + ); + } + + #[test] + fn build_kanban_jql_without_project() { + let jql = build_kanban_jql(None); + assert_eq!(jql, "statusCategory != Done ORDER BY rank ASC"); + } + + #[test] + fn build_kanban_jql_escapes_special_characters() { + let jql = build_kanban_jql(Some("FOO\"BAR")); + assert_eq!( + jql, + "project = \"FOO\\\"BAR\" AND statusCategory != Done ORDER BY rank ASC" + ); + } +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib -- board::tests` +Expected: FAIL with "cannot find function `build_kanban_jql`" + +- [ ] **Step 3: Implement `build_kanban_jql` and update `handle_view`** + +In `src/cli/board.rs`, add this function before `handle_view` (after `handle_list`, around line 33). Note: `crate::jql::escape_value` is used as a full path intentionally — there is no `use crate::jql` import in this file, matching the existing inline usage pattern. + +```rust +/// Build JQL for kanban board view: all non-Done issues, ordered by rank. 
+fn build_kanban_jql(project_key: Option<&str>) -> String { + let mut parts: Vec<String> = Vec::new(); + if let Some(pk) = project_key { + parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + } + parts.push("statusCategory != Done".into()); + let where_clause = parts.join(" AND "); + format!("{where_clause} ORDER BY rank ASC") +} +``` + +Then in `handle_view`, find the kanban JQL construction block (lines 62-69, inside the `else` branch). Replace these lines: + +```rust + let mut jql_parts: Vec<String> = Vec::new(); + if let Some(ref pk) = project_key { + jql_parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + } + jql_parts.push("statusCategory != Done".into()); + jql_parts.push("ORDER BY rank ASC".into()); + let jql = jql_parts.join(" AND "); + client.search_issues(&jql, None, &[]).await?.issues +``` + +With: + +```rust + let jql = build_kanban_jql(project_key.as_deref()); + client.search_issues(&jql, None, &[]).await?.issues +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test --lib -- board::tests` +Expected: All 3 tests PASS. + +- [ ] **Step 5: Run all tests** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 6: Run clippy and format** + +Run: `cargo fmt --all && cargo clippy -- -D warnings` +Expected: No warnings, no format issues. + +- [ ] **Step 7: Commit** + +```bash +git add src/cli/board.rs +git commit -m "fix: kanban board view builds valid JQL (ORDER BY separated from predicates) (#31)" +``` + +--- + +### Task 2: Final verification + +**Files:** +- All modified files from Task 1 + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter** + +Run: `cargo fmt --all -- --check` +Expected: No format issues. 
diff --git a/docs/superpowers/plans/2026-03-27-board-auto-resolve.md b/docs/superpowers/plans/2026-03-27-board-auto-resolve.md new file mode 100644 index 0000000..edb22c1 --- /dev/null +++ b/docs/superpowers/plans/2026-03-27-board-auto-resolve.md @@ -0,0 +1,931 @@ +# Board Auto-Resolve Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Auto-resolve board IDs from project keys so `jr sprint list --project PROJ` just works, and add `--type` filter to `jr board list`. + +**Architecture:** Three layers of change: (1) types — add `BoardLocation` struct, (2) API — add filter params to `list_boards()`, (3) CLI — add `resolve_board_id()` helper, wire it into sprint/board handlers, add `--type` to board list, thread global `--project` flag. + +**Tech Stack:** Rust, clap (derive), serde, wiremock (tests), tokio + +--- + +### Task 1: Add `BoardLocation` type and update `Board` struct + +**Files:** +- Modify: `src/types/jira/board.rs` + +- [ ] **Step 1: Write the unit test for `BoardLocation` deserialization** + +Add a test module at the bottom of `src/types/jira/board.rs`: + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn board_deserializes_with_location() { + let json = r#"{ + "id": 42, + "name": "My Board", + "type": "scrum", + "location": { + "projectKey": "PROJ", + "projectName": "My Project" + } + }"#; + let board: Board = serde_json::from_str(json).unwrap(); + assert_eq!(board.id, 42); + assert_eq!(board.board_type, "scrum"); + let loc = board.location.unwrap(); + assert_eq!(loc.project_key.as_deref(), Some("PROJ")); + assert_eq!(loc.project_name.as_deref(), Some("My Project")); + } + + #[test] + fn board_deserializes_without_location() { + let json = r#"{ + "id": 99, + "name": "No Location Board", + "type": "kanban" + }"#; + let board: Board = 
serde_json::from_str(json).unwrap(); + assert_eq!(board.id, 99); + assert!(board.location.is_none()); + } +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --lib board::tests -- --nocapture` +Expected: FAIL — `Board` struct has no `location` field + +- [ ] **Step 3: Implement `BoardLocation` and update `Board`** + +Replace the entire content of `src/types/jira/board.rs`: + +```rust +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct Board { + pub id: u64, + pub name: String, + #[serde(rename = "type")] + pub board_type: String, + #[serde(default)] + pub location: Option<BoardLocation>, +} + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct BoardLocation { + #[serde(default, rename = "projectKey")] + pub project_key: Option<String>, + #[serde(default, rename = "projectName")] + pub project_name: Option<String>, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct BoardConfig { + pub id: u64, + pub name: String, + #[serde(rename = "type", default)] + pub board_type: String, +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `cargo test --lib board::tests -- --nocapture` +Expected: PASS (2 tests) + +- [ ] **Step 5: Run full test suite to check nothing broke** + +Run: `cargo test` +Expected: All tests pass — existing code ignores the new `location` field + +- [ ] **Step 6: Commit** + +```bash +git add src/types/jira/board.rs +git commit -m "feat: add BoardLocation type to Board struct (#70)" +``` + +--- + +### Task 2: Add filter parameters to `list_boards()` API + +**Files:** +- Modify: `src/api/jira/boards.rs` + +- [ ] **Step 1: Write the integration test for filtered board listing** + +Create `tests/board_commands.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +fn board_response(id: u64, name: &str, board_type: &str, project_key: &str) -> serde_json::Value { + 
serde_json::json!({ + "id": id, + "name": name, + "type": board_type, + "location": { + "projectKey": project_key, + "projectName": format!("{} Project", project_key) + } + }) +} + +fn board_list_response(boards: Vec<serde_json::Value>) -> serde_json::Value { + let total = boards.len() as u32; + serde_json::json!({ + "values": boards, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} + +#[tokio::test] +async fn list_boards_with_project_and_type_filter() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + board_list_response(vec![ + board_response(42, "My Board", "scrum", "PROJ"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let boards = client.list_boards(Some("PROJ"), Some("scrum")).await.unwrap(); + assert_eq!(boards.len(), 1); + assert_eq!(boards[0].id, 42); + assert_eq!(boards[0].name, "My Board"); +} + +#[tokio::test] +async fn list_boards_without_filters() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .respond_with(ResponseTemplate::new(200).set_body_json( + board_list_response(vec![ + board_response(1, "Board A", "scrum", "FOO"), + board_response(2, "Board B", "kanban", "BAR"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let boards = client.list_boards(None, None).await.unwrap(); + assert_eq!(boards.len(), 2); +} + +#[tokio::test] +async fn list_boards_empty_result() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "NOPE")) + .respond_with(ResponseTemplate::new(200).set_body_json( + 
board_list_response(vec![]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let boards = client.list_boards(Some("NOPE"), None).await.unwrap(); + assert!(boards.is_empty()); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test board_commands -- --nocapture` +Expected: FAIL — `list_boards` doesn't accept arguments + +- [ ] **Step 3: Implement filtered `list_boards()`** + +Replace the `list_boards` method in `src/api/jira/boards.rs`: + +```rust +use crate::api::client::JiraClient; +use crate::api::pagination::OffsetPage; +use crate::types::jira::{Board, BoardConfig}; +use anyhow::Result; + +impl JiraClient { + /// List boards accessible to the authenticated user, optionally filtered. + pub async fn list_boards( + &self, + project_key: Option<&str>, + board_type: Option<&str>, + ) -> Result<Vec<Board>> { + let mut all_boards: Vec<Board> = Vec::new(); + let mut start_at: u32 = 0; + let max_results: u32 = 50; + + loop { + let mut path = format!( + "/rest/agile/1.0/board?startAt={}&maxResults={}", + start_at, max_results + ); + if let Some(pk) = project_key { + path.push_str(&format!("&projectKeyOrId={pk}")); + } + if let Some(bt) = board_type { + path.push_str(&format!("&type={bt}")); + } + let page: OffsetPage<Board> = self.get(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all_boards.extend(page.values.unwrap_or_default()); + + if !has_more { + break; + } + start_at = next; + } + + Ok(all_boards) + } + + /// Get the configuration for a specific board. 
+ pub async fn get_board_config(&self, board_id: u64) -> Result { + let path = format!("/rest/agile/1.0/board/{}/configuration", board_id); + self.get(&path).await + } +} +``` + +- [ ] **Step 4: Fix the only existing caller — `board.rs` `handle_list`** + +In `src/cli/board.rs`, change line 23 from: +```rust +let boards = client.list_boards().await?; +``` +to: +```rust +let boards = client.list_boards(None, None).await?; +``` + +- [ ] **Step 5: Run tests to verify they pass** + +Run: `cargo test --test board_commands -- --nocapture && cargo test --lib` +Expected: All pass + +- [ ] **Step 6: Commit** + +```bash +git add src/api/jira/boards.rs src/cli/board.rs tests/board_commands.rs +git commit -m "feat: add project and type filters to list_boards API (#70)" +``` + +--- + +### Task 3: Add `resolve_board_id()` helper and wire into sprint/board handlers + +This is the core task. It adds the auto-resolve helper, threads `--project` through main.rs, updates both handlers, and adds `--type` to `BoardCommand::List`. 
+ +**Files:** +- Modify: `src/cli/mod.rs` (BoardCommand::List enum variant) +- Modify: `src/cli/board.rs` (add resolve_board_id, update handle/handle_list/handle_view) +- Modify: `src/cli/sprint.rs` (update handle to use resolve_board_id) +- Modify: `src/main.rs` (thread cli.project to board/sprint) + +- [ ] **Step 1: Write integration tests for auto-resolve scenarios** + +Add to `tests/board_commands.rs`: + +```rust +fn board_config_response(id: u64, board_type: &str) -> serde_json::Value { + serde_json::json!({ + "id": id, + "name": "Board Config", + "type": board_type + }) +} + +fn sprint_list_response(sprints: Vec) -> serde_json::Value { + let total = sprints.len() as u32; + serde_json::json!({ + "values": sprints, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} + +fn sprint_response(id: u64, name: &str, state: &str) -> serde_json::Value { + serde_json::json!({ + "id": id, + "name": name, + "state": state, + "startDate": "2026-03-20T00:00:00.000Z", + "endDate": "2026-04-03T00:00:00.000Z" + }) +} + +fn sprint_issues_response(issues: Vec) -> serde_json::Value { + let total = issues.len() as u32; + serde_json::json!({ + "issues": issues, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} + +#[tokio::test] +async fn resolve_board_auto_discovers_single_scrum_board() { + let server = MockServer::start().await; + + // list_boards filtered by project+scrum returns 1 board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + board_list_response(vec![ + board_response(42, "PROJ Scrum Board", "scrum", "PROJ"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + // resolve_board_id with no --board, no config, but project_override="PROJ" and 
require_scrum=true + let board_id = + jr::cli::board::resolve_board_id(&config, &client, None, Some("PROJ"), true) + .await + .unwrap(); + assert_eq!(board_id, 42); +} + +#[tokio::test] +async fn resolve_board_errors_on_multiple_boards() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + board_list_response(vec![ + board_response(42, "Board A", "scrum", "PROJ"), + board_response(99, "Board B", "scrum", "PROJ"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let err = jr::cli::board::resolve_board_id(&config, &client, None, Some("PROJ"), true) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Multiple scrum boards"), "got: {msg}"); + assert!(msg.contains("42"), "should list board ID 42, got: {msg}"); + assert!(msg.contains("99"), "should list board ID 99, got: {msg}"); +} + +#[tokio::test] +async fn resolve_board_errors_on_no_boards() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "NOPE")) + .and(query_param("type", "scrum")) + .respond_with( + ResponseTemplate::new(200).set_body_json(board_list_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let err = jr::cli::board::resolve_board_id(&config, &client, None, Some("NOPE"), true) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("No scrum boards found"), "got: {msg}"); + assert!(msg.contains("NOPE"), "should mention project key, got: {msg}"); +} + 
+#[tokio::test] +async fn resolve_board_uses_explicit_board_override() { + // No server mocks needed — resolve_board_id should return immediately + let server = MockServer::start().await; + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let board_id = + jr::cli::board::resolve_board_id(&config, &client, Some(42), None, true) + .await + .unwrap(); + assert_eq!(board_id, 42); + // No HTTP requests should have been made +} + +#[tokio::test] +async fn resolve_board_errors_without_project_or_board() { + let server = MockServer::start().await; + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let err = jr::cli::board::resolve_board_id(&config, &client, None, None, true) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("No board configured"), "got: {msg}"); + assert!(msg.contains("--project"), "should suggest --project, got: {msg}"); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --test board_commands resolve_board -- --nocapture` +Expected: FAIL — `resolve_board_id` doesn't exist + +- [ ] **Step 3: Update `BoardCommand::List` to struct variant with `--type`** + +In `src/cli/mod.rs`, replace lines 361-370: + +```rust +#[derive(Subcommand)] +pub enum BoardCommand { + /// List boards + List { + /// Filter by board type + #[arg(long = "type", value_parser = clap::builder::PossibleValuesParser::new(["scrum", "kanban"]))] + board_type: Option, + }, + /// View current board issues + View { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option, + }, +} +``` + +- [ ] **Step 4: Thread `cli.project` to board and sprint handlers in `main.rs`** + +In `src/main.rs`, change line 129 from: +```rust +cli::board::handle(command, &config, &client, &cli.output).await +``` +to: +```rust 
+cli::board::handle(command, &config, &client, &cli.output, cli.project.as_deref()).await +``` + +Change line 134 from: +```rust +cli::sprint::handle(command, &config, &client, &cli.output).await +``` +to: +```rust +cli::sprint::handle(command, &config, &client, &cli.output, cli.project.as_deref()).await +``` + +- [ ] **Step 5: Implement `resolve_board_id()` and update board handlers** + +Replace the full content of `src/cli/board.rs`. Note: this removes the `missing_board_id_returns_config_error` unit test, which is now covered by the `resolve_board_errors_without_project_or_board` integration test added in Step 1. + +```rust +use anyhow::{Result, bail}; + +use crate::api::client::JiraClient; +use crate::cli::{BoardCommand, OutputFormat}; +use crate::config::Config; +use crate::error::JrError; +use crate::output; + +/// Resolve a board ID from CLI override, config, or auto-discovery. +/// +/// Resolution order: +/// 1. CLI `--board` override +/// 2. Config `board_id` from `.jr.toml` +/// 3. Auto-discover via Jira API using project key +pub async fn resolve_board_id( + config: &Config, + client: &JiraClient, + board_override: Option, + project_override: Option<&str>, + require_scrum: bool, +) -> Result { + // Step 1: CLI override + if let Some(id) = board_override { + return Ok(id); + } + + // Step 2: Config + if let Some(id) = config.project.board_id { + return Ok(id); + } + + // Step 3: Auto-discover + let project_key = config.project_key(project_override).ok_or_else(|| { + JrError::ConfigError( + "No board configured and no project specified. \ + Use --board , set board_id in .jr.toml, or specify --project to auto-discover." + .into(), + ) + })?; + + let type_filter = if require_scrum { Some("scrum") } else { None }; + let boards = client.list_boards(Some(&project_key), type_filter).await?; + + match boards.len() { + 0 => { + let board_kind = if require_scrum { "scrum boards" } else { "boards" }; + bail!( + "No {} found for project {}. 
\ + Verify the project key is correct, then try \"jr board list --project {}\".", + board_kind, + project_key, + project_key, + ); + } + 1 => { + let board = &boards[0]; + eprintln!("Using board {} - {} ({})", board.id, board.name, board.board_type); + Ok(board.id) + } + _ => { + let board_kind = if require_scrum { "scrum boards" } else { "boards" }; + let mut msg = format!("Multiple {} found for project {}:\n", board_kind, project_key); + for b in &boards { + if require_scrum { + msg.push_str(&format!(" {} {}\n", b.id, b.name)); + } else { + msg.push_str(&format!(" {} {} {}\n", b.id, b.board_type, b.name)); + } + } + msg.push_str("Use --board to select one, or set board_id in .jr.toml."); + bail!("{}", msg); + } + } +} + +/// Handle all board subcommands. +pub async fn handle( + command: BoardCommand, + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, +) -> Result<()> { + match command { + BoardCommand::List { board_type } => { + handle_list(client, output_format, project_override, board_type.as_deref()).await + } + BoardCommand::View { board } => { + handle_view(config, client, output_format, board, project_override).await + } + } +} + +async fn handle_list( + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, + board_type_filter: Option<&str>, +) -> Result<()> { + let boards = client.list_boards(project_override, board_type_filter).await?; + + let rows: Vec> = boards + .iter() + .map(|b| { + let project = b + .location + .as_ref() + .and_then(|loc| loc.project_key.as_deref()) + .unwrap_or("-"); + vec![ + b.id.to_string(), + b.board_type.clone(), + project.to_string(), + b.name.clone(), + ] + }) + .collect(); + + output::print_output(output_format, &["ID", "Type", "Project", "Name"], &rows, &boards)?; + + Ok(()) +} + +/// Build JQL for kanban board view: all non-Done issues, ordered by rank. 
+fn build_kanban_jql(project_key: Option<&str>) -> String { + let mut parts: Vec = Vec::new(); + if let Some(pk) = project_key { + parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + } + parts.push("statusCategory != Done".into()); + let where_clause = parts.join(" AND "); + format!("{where_clause} ORDER BY rank ASC") +} + +async fn handle_view( + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + board_override: Option, + project_override: Option<&str>, +) -> Result<()> { + let board_id = resolve_board_id(config, client, board_override, project_override, false).await?; + + let board_config = client.get_board_config(board_id).await?; + let board_type = board_config.board_type.to_lowercase(); + + let issues = if board_type == "scrum" { + // For scrum boards, fetch the active sprint's issues + let sprints = client.list_sprints(board_id, Some("active")).await?; + if sprints.is_empty() { + bail!("No active sprint found for board {}.", board_id); + } + let sprint = &sprints[0]; + client.get_sprint_issues(sprint.id, None, &[]).await? + } else { + // Kanban: search for issues not in Done status category + let project_key = config.project_key(project_override); + if project_key.is_none() { + eprintln!( + "warning: no project configured for board. Showing issues across all projects. Set project in .jr.toml to scope results." 
+ ); + } + let jql = build_kanban_jql(project_key.as_deref()); + client.search_issues(&jql, None, &[]).await?.issues + }; + + let rows = super::issue::format_issue_rows_public(&issues); + + output::print_output( + output_format, + &["Key", "Type", "Status", "Priority", "Assignee", "Summary"], + &rows, + &issues, + )?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn build_kanban_jql_with_project() { + let jql = build_kanban_jql(Some("FOO")); + assert_eq!( + jql, + "project = \"FOO\" AND statusCategory != Done ORDER BY rank ASC" + ); + } + + #[test] + fn build_kanban_jql_without_project() { + let jql = build_kanban_jql(None); + assert_eq!(jql, "statusCategory != Done ORDER BY rank ASC"); + } + + #[test] + fn build_kanban_jql_escapes_special_characters() { + let jql = build_kanban_jql(Some("FOO\"BAR")); + assert_eq!( + jql, + "project = \"FOO\\\"BAR\" AND statusCategory != Done ORDER BY rank ASC" + ); + } +} +``` + +- [ ] **Step 6: Update sprint handler to use `resolve_board_id()`** + +Replace `src/cli/sprint.rs` handler function (lines 10-47) with: + +```rust +/// Handle all sprint subcommands. +pub async fn handle( + command: SprintCommand, + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, +) -> Result<()> { + let board_override = match &command { + SprintCommand::List { board } => *board, + SprintCommand::Current { board } => *board, + }; + + let board_id = + crate::cli::board::resolve_board_id(config, client, board_override, project_override, true) + .await?; + + // Guard: sprints only make sense for scrum boards. + // When resolve_board_id auto-discovers (step 3), it already filters to scrum. + // This guard catches the case where --board or config provides a kanban board directly. 
+ let board_config = client.get_board_config(board_id).await?; + let board_type = board_config.board_type.to_lowercase(); + if board_type != "scrum" { + bail!( + "Sprint commands are only available for scrum boards. Board {} is a {} board.", + board_id, + board_config.board_type + ); + } + + match command { + SprintCommand::List { .. } => handle_list(board_id, client, output_format).await, + SprintCommand::Current { .. } => { + handle_current(board_id, client, output_format, config).await + } + } +} +``` + +- [ ] **Step 7: Run all tests** + +Run: `cargo test` +Expected: All pass + +- [ ] **Step 8: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: Clean + +- [ ] **Step 9: Commit** + +```bash +git add src/cli/mod.rs src/cli/board.rs src/cli/sprint.rs src/main.rs tests/board_commands.rs +git commit -m "feat: add board auto-resolve with --project and --type filters (#70)" +``` + +--- + +### Task 4: Add test fixtures to `tests/common/fixtures.rs` + +Move the inline fixture helpers from `tests/board_commands.rs` into the shared fixtures file for reuse. 
+ +**Files:** +- Modify: `tests/common/fixtures.rs` +- Modify: `tests/board_commands.rs` + +- [ ] **Step 1: Add board fixture helpers to `tests/common/fixtures.rs`** + +Append to the end of `tests/common/fixtures.rs`: + +```rust +pub fn board_response(id: u64, name: &str, board_type: &str, project_key: &str) -> Value { + json!({ + "id": id, + "name": name, + "type": board_type, + "location": { + "projectKey": project_key, + "projectName": format!("{} Project", project_key) + } + }) +} + +pub fn board_list_response(boards: Vec) -> Value { + let total = boards.len() as u32; + json!({ + "values": boards, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} + +pub fn board_config_response(id: u64, board_type: &str) -> Value { + json!({ + "id": id, + "name": "Board Config", + "type": board_type + }) +} + +pub fn sprint_response(id: u64, name: &str, state: &str) -> Value { + json!({ + "id": id, + "name": name, + "state": state, + "startDate": "2026-03-20T00:00:00.000Z", + "endDate": "2026-04-03T00:00:00.000Z" + }) +} + +pub fn sprint_list_response(sprints: Vec) -> Value { + let total = sprints.len() as u32; + json!({ + "values": sprints, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} + +pub fn sprint_issues_response(issues: Vec) -> Value { + let total = issues.len() as u32; + json!({ + "issues": issues, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} +``` + +- [ ] **Step 2: Update `tests/board_commands.rs` to use shared fixtures** + +Remove the local `board_response`, `board_list_response`, `board_config_response`, `sprint_list_response`, `sprint_response`, and `sprint_issues_response` functions from `tests/board_commands.rs` and replace all calls with `common::fixtures::board_response(...)`, `common::fixtures::board_list_response(...)`, etc. 
+ +- [ ] **Step 3: Run tests to verify everything still passes** + +Run: `cargo test --test board_commands -- --nocapture` +Expected: All pass + +- [ ] **Step 4: Commit** + +```bash +git add tests/common/fixtures.rs tests/board_commands.rs +git commit -m "refactor: move board test fixtures to shared fixtures file (#70)" +``` + +--- + +### Task 5: Update README and verify + +**Files:** +- Modify: `README.md` + +- [ ] **Step 1: Update the board list and sprint list entries in the Commands table** + +In `README.md`, update the `jr board list` row (line 110) from: +``` +| `jr board list` | List boards | +``` +to: +``` +| `jr board list` | List boards (`--project`, `--type scrum\|kanban`) | +``` + +Update the `jr sprint list` row (line 112) from: +``` +| `jr sprint list --board 42` | List sprints (`--board` or config, scrum only) | +``` +to: +``` +| `jr sprint list --board 42` | List sprints (`--board` or config or auto-discover, scrum only) | +``` + +- [ ] **Step 2: Add auto-discover example to Quick Start** + +After the existing `jr issue list --project FOO` line (line 58), add: + +```bash +# Sprint list (auto-discovers scrum board for project) +jr sprint list --project FOO +``` + +- [ ] **Step 3: Run full test suite one final time** + +Run: `cargo test && cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: All pass, clean + +- [ ] **Step 4: Commit** + +```bash +git add README.md +git commit -m "docs: update README with board auto-resolve and --type flag (#70)" +``` diff --git a/docs/superpowers/plans/2026-03-27-board-view-limit.md b/docs/superpowers/plans/2026-03-27-board-view-limit.md new file mode 100644 index 0000000..13959a4 --- /dev/null +++ b/docs/superpowers/plans/2026-03-27-board-view-limit.md @@ -0,0 +1,603 @@ +# Board View --limit Flag Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. 
Steps use checkbox (`- [ ]`) syntax for tracking.
+
+**Goal:** Add `--limit` and `--all` flags to `jr board view` so output is bounded by default (30 issues), with both scrum and kanban paths respecting the limit via early-stop pagination.
+
+**Architecture:** Extract shared `resolve_effective_limit()` helper to `cli/mod.rs`. Add `limit` parameter to `get_sprint_issues()` with early-stop pagination returning `SprintIssuesResult`. Wire both paths in `board.rs` handler to pass effective limit and show truncation hints.
+
+**Tech Stack:** Rust, clap 4 (derive), wiremock (integration tests), anyhow
+
+---
+
+### Task 1: Extract shared `resolve_effective_limit` to `cli/mod.rs`
+
+**Files:**
+- Modify: `src/cli/mod.rs:433` (add helper at end of file)
+- Modify: `src/cli/issue/list.rs:310-318` (remove local copy, import shared)
+
+- [ ] **Step 1: Add the shared helper and its tests in `cli/mod.rs`**
+
+Add a `#[cfg(test)]` module at the bottom of `src/cli/mod.rs`:
+
+```rust
+pub(crate) const DEFAULT_LIMIT: u32 = 30;
+
+/// Resolve the effective limit from CLI flags.
+///
+/// Returns `None` when `--all` is set (no limit), otherwise returns the
+/// explicit `--limit` value or the default.
+pub(crate) fn resolve_effective_limit(limit: Option<u32>, all: bool) -> Option<u32> {
+    if all {
+        None
+    } else {
+        Some(limit.unwrap_or(DEFAULT_LIMIT))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn effective_limit_defaults_to_30() {
+        assert_eq!(resolve_effective_limit(None, false), Some(30));
+    }
+
+    #[test]
+    fn effective_limit_respects_explicit_limit() {
+        assert_eq!(resolve_effective_limit(Some(50), false), Some(50));
+    }
+
+    #[test]
+    fn effective_limit_all_returns_none() {
+        assert_eq!(resolve_effective_limit(None, true), None);
+    }
+}
+```
+
+- [ ] **Step 2: Run tests to verify they pass**
+
+Run: `cargo test --lib cli::tests`
+Expected: All 3 tests PASS.
+ +- [ ] **Step 3: Remove local copy from `issue/list.rs` and import shared version** + +In `src/cli/issue/list.rs`, remove lines 310-318 (the `const DEFAULT_LIMIT` and `fn resolve_effective_limit` definitions). Then update line 63 where `resolve_effective_limit` is called — it will now resolve through the parent module. Add this import near the top of the file: + +```rust +use crate::cli::resolve_effective_limit; +``` + +Also remove the 3 `resolve_effective_limit` tests from the `list.rs` test module (lines 692-705) since they now live in `cli/mod.rs`. + +- [ ] **Step 4: Run all tests to verify nothing broke** + +Run: `cargo test` +Expected: All tests pass. The moved tests pass from their new location, and `issue list` uses the shared function. + +- [ ] **Step 5: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Clean. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/list.rs +git commit -m "refactor: extract resolve_effective_limit to cli/mod.rs (#69)" +``` + +--- + +### Task 2: Add `SprintIssuesResult` and limit-aware `get_sprint_issues()` + +**Files:** +- Modify: `src/api/jira/sprints.rs:35-73` + +- [ ] **Step 1: Add `SprintIssuesResult` struct** + +Add to the bottom of `src/api/jira/sprints.rs` (after the `impl JiraClient` block's closing brace): + +```rust +/// Result of fetching sprint issues with optional limit. +pub struct SprintIssuesResult { + pub issues: Vec, + pub has_more: bool, +} +``` + +- [ ] **Step 2: Verify it compiles** + +Run: `cargo build` +Expected: Compiles (struct is defined but not yet used). + +- [ ] **Step 3: Add `limit` parameter and early-stop to `get_sprint_issues()`** + +Replace the current `get_sprint_issues` function (lines 35-72) with: + +```rust + /// Get issues in a specific sprint, with optional JQL filter and limit. 
+ pub async fn get_sprint_issues( + &self, + sprint_id: u64, + jql: Option<&str>, + limit: Option, + extra_fields: &[&str], + ) -> Result { + let mut all_issues: Vec = Vec::new(); + let mut start_at: u32 = 0; + let max_results: u32 = 50; + let mut result_has_more = false; + + loop { + let mut path = format!( + "/rest/agile/1.0/sprint/{}/issue?startAt={}&maxResults={}", + sprint_id, start_at, max_results + ); + let mut fields_str = "summary,status,issuetype,priority,assignee,project".to_string(); + for f in extra_fields { + fields_str.push(','); + fields_str.push_str(f); + } + path.push_str(&format!("&fields={}", fields_str)); + if let Some(q) = jql { + path.push_str(&format!("&jql={}", urlencoding::encode(q))); + } + let page: OffsetPage = self.get(&path).await?; + let page_has_more = page.has_more(); + let next = page.next_start(); + all_issues.extend(page.issues.unwrap_or_default()); + + // Early-stop: if we have enough issues, truncate and break + if let Some(max) = limit { + if all_issues.len() >= max as usize { + result_has_more = all_issues.len() > max as usize || page_has_more; + all_issues.truncate(max as usize); + break; + } + } + + if !page_has_more { + break; + } + start_at = next; + } + + Ok(SprintIssuesResult { + issues: all_issues, + has_more: result_has_more, + }) + } +``` + +- [ ] **Step 5: Fix compile errors in callers** + +Two callers need updating: + +**`src/cli/board.rs:70`** — change: +```rust +client.get_sprint_issues(sprint.id, None, &[]).await? +``` +to: +```rust +client.get_sprint_issues(sprint.id, None, None, &[]).await?.issues +``` + +**`src/cli/sprint.rs:123`** — change: +```rust +let issues = client.get_sprint_issues(sprint.id, None, &extra).await?; +``` +to: +```rust +let issues = client.get_sprint_issues(sprint.id, None, None, &extra).await?.issues; +``` + +Both pass `None` for limit (preserving current unbounded behavior) and extract `.issues` from the result. 
+ +- [ ] **Step 6: Run all tests** + +Run: `cargo test` +Expected: All tests pass. No behavior change — both callers pass `None` for limit. + +- [ ] **Step 7: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: Clean. + +- [ ] **Step 8: Commit** + +```bash +git add src/api/jira/sprints.rs src/cli/board.rs src/cli/sprint.rs +git commit -m "feat: add limit parameter to get_sprint_issues with early-stop (#69)" +``` + +--- + +### Task 3: Add `--limit`/`--all` flags and wire into both board view paths + +**Files:** +- Modify: `src/cli/mod.rs:360-370` (BoardCommand enum) +- Modify: `src/cli/board.rs:18, 46-93` (match arm + handle_view rewrite) + +This task combines the flag addition and wiring to avoid an intermediate commit with an unused variable (which would fail `cargo clippy -- -D warnings`). + +- [ ] **Step 1: Add flags to `BoardCommand::View` enum** + +In `src/cli/mod.rs`, replace lines 364-369: + +```rust + /// View current board issues + View { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option, + }, +``` + +with: + +```rust + /// View current board issues + View { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option, + /// Maximum number of issues to return + #[arg(long)] + limit: Option, + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, + }, +``` + +- [ ] **Step 2: Update match arm in `board.rs` handle function** + +In `src/cli/board.rs:18`, change: + +```rust + BoardCommand::View { board } => handle_view(config, client, output_format, board).await, +``` + +to: + +```rust + BoardCommand::View { board, limit, all } => { + handle_view(config, client, output_format, board, limit, all).await + } +``` + +- [ ] **Step 3: Rewrite `handle_view` with limit support and truncation hints** + +Replace the entire `handle_view` function (lines 46-93) with: + +```rust +async fn handle_view( + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + 
board_override: Option, + limit: Option, + all: bool, +) -> Result<()> { + let effective_limit = crate::cli::resolve_effective_limit(limit, all); + + let board_id = config.board_id(board_override).ok_or_else(|| { + JrError::ConfigError( + "No board configured. Use --board or set board_id in .jr.toml.\n\ + Run \"jr board list\" to see available boards." + .into(), + ) + })?; + + let board_config = client.get_board_config(board_id).await?; + let board_type = board_config.board_type.to_lowercase(); + + let (issues, has_more) = if board_type == "scrum" { + // For scrum boards, fetch the active sprint's issues + let sprints = client.list_sprints(board_id, Some("active")).await?; + if sprints.is_empty() { + bail!("No active sprint found for board {}.", board_id); + } + let sprint = &sprints[0]; + let result = client + .get_sprint_issues(sprint.id, None, effective_limit, &[]) + .await?; + (result.issues, result.has_more) + } else { + // Kanban: search for issues not in Done status category + let project_key = config.project_key(None); + if project_key.is_none() { + eprintln!( + "warning: no project configured for board. Showing issues across all projects. Set project in .jr.toml to scope results." + ); + } + let jql = build_kanban_jql(project_key.as_deref()); + let result = client.search_issues(&jql, effective_limit, &[]).await?; + (result.issues, result.has_more) + }; + + let rows = super::issue::format_issue_rows_public(&issues); + + output::print_output( + output_format, + &["Key", "Type", "Status", "Priority", "Assignee", "Summary"], + &rows, + &issues, + )?; + + if has_more && !all { + if board_type != "scrum" { + // Kanban: try to get approximate total via JQL count + let project_key = config.project_key(None); + let jql = build_kanban_jql(project_key.as_deref()); + let count_jql = crate::jql::strip_order_by(&jql); + match client.approximate_count(count_jql).await { + Ok(total) if total > 0 => { + eprintln!( + "Showing {} of ~{} results. 
Use --limit or --all to see more.", + issues.len(), + total + ); + } + Ok(_) | Err(_) => { + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issues.len() + ); + } + } + } else { + // Scrum: no reliable total count from Agile API + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issues.len() + ); + } + } + + Ok(()) +} +``` + +- [ ] **Step 4: Run all tests** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: Clean. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/mod.rs src/cli/board.rs +git commit -m "feat: add --limit/--all to board view with truncation hints (#69)" +``` + +--- + +### Task 4: Add integration tests for board view limit + +**Files:** +- Modify: `tests/common/fixtures.rs` (add board/sprint fixtures) +- Create: `tests/board_commands.rs` + +All existing integration tests use `JiraClient::new_for_test()` to test API methods directly (there is no `JR_AUTH_HEADER` env var — auth is loaded from keychain in `from_config()`). The flag conflict test can use `cargo_bin` since clap rejects args before auth is checked. + +- [ ] **Step 1: Add board and sprint fixture helpers to `tests/common/fixtures.rs`** + +Add these functions at the bottom of `tests/common/fixtures.rs`: + +```rust +/// Board configuration response. +pub fn board_config_response(board_type: &str) -> Value { + json!({ + "id": 382, + "name": "Test Board", + "type": board_type + }) +} + +/// Sprint list response (offset-paginated). +pub fn sprint_list_response(sprints: Vec) -> Value { + let total = sprints.len() as u32; + json!({ + "startAt": 0, + "maxResults": 50, + "total": total, + "values": sprints + }) +} + +/// Single sprint object. 
+pub fn sprint(id: u64, name: &str, state: &str) -> Value { + json!({ + "id": id, + "name": name, + "state": state, + "startDate": "2026-03-20T00:00:00.000Z", + "endDate": "2026-04-03T00:00:00.000Z" + }) +} + +/// Sprint issues response (offset-paginated). +pub fn sprint_issues_response(issues: Vec, total: u32) -> Value { + json!({ + "startAt": 0, + "maxResults": 50, + "total": total, + "issues": issues + }) +} +``` + +- [ ] **Step 2: Create `tests/board_commands.rs` with API-level tests** + +Create `tests/board_commands.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Helper: build N issues for testing. +fn make_issues(count: usize) -> Vec { + (1..=count) + .map(|i| { + common::fixtures::issue_response( + &format!("TEST-{}", i), + &format!("Issue {}", i), + "In Progress", + ) + }) + .collect() +} + +#[tokio::test] +async fn get_sprint_issues_with_limit() { + let server = MockServer::start().await; + + // Mock sprint issues — return 5 issues with total=5 + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(make_issues(5), 5)), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let result = client + .get_sprint_issues(100, None, Some(3), &[]) + .await + .unwrap(); + + assert_eq!(result.issues.len(), 3); + assert!(result.has_more); + assert_eq!(result.issues[0].key, "TEST-1"); + assert_eq!(result.issues[2].key, "TEST-3"); +} + +#[tokio::test] +async fn get_sprint_issues_no_limit() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(make_issues(5), 5)), + 
) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let result = client + .get_sprint_issues(100, None, None, &[]) + .await + .unwrap(); + + assert_eq!(result.issues.len(), 5); + assert!(!result.has_more); +} + +#[tokio::test] +async fn search_issues_with_limit() { + let server = MockServer::start().await; + + // Return 5 issues with a next page token (indicating more exist) + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response_with_next_page(make_issues(5)), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let result = client + .search_issues("statusCategory != Done ORDER BY rank ASC", Some(3), &[]) + .await + .unwrap(); + + assert_eq!(result.issues.len(), 3); + assert!(result.has_more); +} + +#[test] +fn board_view_limit_and_all_conflict() { + let mut cmd = Command::cargo_bin("jr").unwrap(); + cmd.arg("board") + .arg("view") + .arg("--limit") + .arg("3") + .arg("--all"); + + cmd.assert().failure().code(2); +} +``` + +- [ ] **Step 3: Run integration tests** + +Run: `cargo test --test board_commands` +Expected: All 4 tests pass. + +- [ ] **Step 4: Run all tests (unit + integration)** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: Clean. + +- [ ] **Step 6: Commit** + +```bash +git add tests/board_commands.rs tests/common/fixtures.rs +git commit -m "test: add integration tests for board view --limit (#69)" +``` + +--- + +### Task 5: Final verification and format + +**Files:** +- None (verification only) + +- [ ] **Step 1: Run full test suite** + +Run: `cargo test` +Expected: All tests pass (unit, integration, proptest, snapshots). 
+ +- [ ] **Step 2: Run clippy with strict warnings** + +Run: `cargo clippy -- -D warnings` +Expected: Zero warnings. + +- [ ] **Step 3: Run format check** + +Run: `cargo fmt --all -- --check` +Expected: Clean. + +- [ ] **Step 4: Verify the feature works manually (optional)** + +Test the new flags parse correctly: + +```bash +cargo run -- board view --help +``` + +Expected output includes `--limit ` and `--all` in the help text, with `--all` noted as conflicting with `--limit`. diff --git a/docs/superpowers/plans/2026-03-28-input-validation.md b/docs/superpowers/plans/2026-03-28-input-validation.md new file mode 100644 index 0000000..b348ace --- /dev/null +++ b/docs/superpowers/plans/2026-03-28-input-validation.md @@ -0,0 +1,619 @@ +# Input Validation Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Validate `--project` and `--status` flags in `jr issue list` before building JQL, so invalid values produce actionable errors instead of silent empty results. + +**Architecture:** Two pre-flight validation checks in `handle_list`: (1) project existence via `GET /rest/api/3/project/{key}`, (2) status name via partial_match against project-scoped or global status lists. When both flags are set, a single API call validates both. A new `resolved_status` variable carries the matched name into `build_filter_clauses`. 
+ +**Tech Stack:** Rust, serde, wiremock (tests), existing `partial_match` module + +--- + +### Task 1: Add `project_exists()` API method + +**Files:** +- Modify: `src/api/jira/projects.rs` + +- [ ] **Step 1: Write the integration test** + +Create `tests/input_validation.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn project_exists_returns_true_for_valid_project() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": "10000", + "key": "PROJ", + "name": "My Project" + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + assert!(client.project_exists("PROJ").await.unwrap()); +} + +#[tokio::test] +async fn project_exists_returns_false_for_invalid_project() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/NOPE")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["No project could be found with key 'NOPE'."], + "errors": {} + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + assert!(!client.project_exists("NOPE").await.unwrap()); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test input_validation -- --nocapture` +Expected: FAIL — `project_exists` doesn't exist + +- [ ] **Step 3: Implement `project_exists()`** + +Add to `src/api/jira/projects.rs`, inside the `impl JiraClient` block (after `get_project_statuses`): + +```rust + /// Check whether a project with the given key exists. 
+    ///
+    /// Returns `Ok(true)` if the project is accessible, `Ok(false)` if the API
+    /// returns 404, and propagates any other error (auth, network, etc.).
+    pub async fn project_exists(&self, key: &str) -> Result<bool> {
+        let path = format!("/rest/api/3/project/{}", urlencoding::encode(key));
+        match self.get::<serde_json::Value>(&path).await {
+            Ok(_) => Ok(true),
+            Err(e) => {
+                if let Some(crate::error::JrError::ApiError { status: 404, .. }) =
+                    e.downcast_ref::<crate::error::JrError>()
+                {
+                    Ok(false)
+                } else {
+                    Err(e)
+                }
+            }
+        }
+    }
+```
+
+- [ ] **Step 4: Run tests**
+
+Run: `cargo test --test input_validation -- --nocapture && cargo test --lib`
+Expected: All pass
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add src/api/jira/projects.rs tests/input_validation.rs
+git commit -m "feat: add project_exists API method (#71)"
+```
+
+---
+
+### Task 2: Add `get_all_statuses()` API method
+
+**Files:**
+- Create: `src/api/jira/statuses.rs`
+- Modify: `src/api/jira/mod.rs`
+
+- [ ] **Step 1: Write the integration test**
+
+Add to `tests/input_validation.rs`:
+
+```rust
+#[tokio::test]
+async fn get_all_statuses_returns_status_names() {
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .and(path("/rest/api/3/status"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([
+            {"id": "1", "name": "To Do", "statusCategory": {"key": "new"}},
+            {"id": "2", "name": "In Progress", "statusCategory": {"key": "indeterminate"}},
+            {"id": "3", "name": "Done", "statusCategory": {"key": "done"}}
+        ])))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
+    let statuses = client.get_all_statuses().await.unwrap();
+    assert_eq!(statuses.len(), 3);
+    assert!(statuses.contains(&"To Do".to_string()));
+    assert!(statuses.contains(&"In Progress".to_string()));
+    assert!(statuses.contains(&"Done".to_string()));
+}
+```
+
+- [ ] **Step 2: Run test to verify it fails**
+
+Run: `cargo test --test 
input_validation get_all_statuses -- --nocapture`
+Expected: FAIL — `get_all_statuses` doesn't exist
+
+- [ ] **Step 3: Create `src/api/jira/statuses.rs`**
+
+```rust
+use crate::api::client::JiraClient;
+use anyhow::Result;
+use serde::Deserialize;
+
+#[derive(Deserialize)]
+struct StatusEntry {
+    name: String,
+}
+
+impl JiraClient {
+    /// Fetch all statuses from active workflows (global, not project-scoped).
+    ///
+    /// Returns a flat list of unique status names. The endpoint is not paginated.
+    pub async fn get_all_statuses(&self) -> Result<Vec<String>> {
+        let entries: Vec<StatusEntry> = self.get("/rest/api/3/status").await?;
+        let names: Vec<String> = entries.into_iter().map(|e| e.name).collect();
+        Ok(names)
+    }
+}
+```
+
+- [ ] **Step 4: Register the module in `src/api/jira/mod.rs`**
+
+Add `pub mod statuses;` to `src/api/jira/mod.rs` (after `pub mod sprints;`).
+
+- [ ] **Step 5: Run tests**
+
+Run: `cargo test --test input_validation -- --nocapture && cargo test --lib`
+Expected: All pass
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/api/jira/statuses.rs src/api/jira/mod.rs tests/input_validation.rs
+git commit -m "feat: add get_all_statuses API method (#71)"
+```
+
+---
+
+### Task 3: Add `extract_unique_status_names()` helper and wire validation into `handle_list`
+
+This is the core task — adds the validation logic, reorders `handle_list`, and introduces `resolved_status`. 
+ +**Files:** +- Modify: `src/cli/issue/list.rs` + +- [ ] **Step 1: Write the unit test for `extract_unique_status_names`** + +Add to the `#[cfg(test)] mod tests` block at the bottom of `src/cli/issue/list.rs`: + +```rust + #[test] + fn extract_unique_status_names_deduplicates_and_sorts() { + use crate::api::jira::projects::{IssueTypeWithStatuses, StatusMetadata}; + let issue_types = vec![ + IssueTypeWithStatuses { + id: "1".into(), + name: "Task".into(), + subtask: None, + statuses: vec![ + StatusMetadata { id: "10".into(), name: "To Do".into(), description: None }, + StatusMetadata { id: "20".into(), name: "In Progress".into(), description: None }, + StatusMetadata { id: "30".into(), name: "Done".into(), description: None }, + ], + }, + IssueTypeWithStatuses { + id: "2".into(), + name: "Bug".into(), + subtask: None, + statuses: vec![ + StatusMetadata { id: "10".into(), name: "To Do".into(), description: None }, + StatusMetadata { id: "30".into(), name: "Done".into(), description: None }, + ], + }, + ]; + let names = extract_unique_status_names(&issue_types); + assert_eq!(names, vec!["Done", "In Progress", "To Do"]); + } + + #[test] + fn extract_unique_status_names_empty() { + let names = extract_unique_status_names(&[]); + assert!(names.is_empty()); + } +``` + +- [ ] **Step 2: Write `extract_unique_status_names`** + +Add this function in `src/cli/issue/list.rs` (before `handle_list`, after the imports): + +```rust +use crate::api::jira::projects::IssueTypeWithStatuses; + +/// Extract unique status names from project-scoped statuses response (deduplicated, sorted). 
+fn extract_unique_status_names(issue_types: &[IssueTypeWithStatuses]) -> Vec { + let mut seen = std::collections::HashSet::new(); + let mut names = Vec::new(); + for it in issue_types { + for s in &it.statuses { + if seen.insert(s.name.clone()) { + names.push(s.name.clone()); + } + } + } + names.sort(); + names +} +``` + +- [ ] **Step 3: Run unit tests** + +Run: `cargo test --lib issue::list::tests -- --nocapture` +Expected: All pass (including the 2 new tests) + +- [ ] **Step 4: Write integration tests for validation** + +Add to `tests/input_validation.rs`: + +```rust +fn project_statuses_response(statuses: Vec<&str>) -> serde_json::Value { + let status_objects: Vec = statuses + .iter() + .enumerate() + .map(|(i, name)| { + serde_json::json!({ + "id": format!("{}", i + 1), + "name": name, + "description": null + }) + }) + .collect(); + serde_json::json!([{ + "id": "1", + "name": "Task", + "subtask": false, + "statuses": status_objects + }]) +} + +fn global_statuses_response(statuses: Vec<&str>) -> serde_json::Value { + let entries: Vec = statuses + .iter() + .enumerate() + .map(|(i, name)| { + serde_json::json!({ + "id": format!("{}", i + 1), + "name": name, + "statusCategory": {"key": "new"} + }) + }) + .collect(); + serde_json::json!(entries) +} + +#[tokio::test] +async fn invalid_status_with_project_returns_error() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ/statuses")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(project_statuses_response(vec!["To Do", "In Progress", "Done"])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses_response = client.get_project_statuses("PROJ").await.unwrap(); + + // Extract unique names and test partial match + let names: Vec = { + let mut seen = std::collections::HashSet::new(); + let mut n = Vec::new(); + for it in &statuses_response { + 
for s in &it.statuses { + if seen.insert(s.name.clone()) { + n.push(s.name.clone()); + } + } + } + n.sort(); + n + }; + + let result = jr::partial_match::partial_match("Nonexistant", &names); + assert!(matches!(result, jr::partial_match::MatchResult::None(_))); +} + +#[tokio::test] +async fn valid_status_partial_match_resolves() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ/statuses")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(project_statuses_response(vec!["To Do", "In Progress", "Done"])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses_response = client.get_project_statuses("PROJ").await.unwrap(); + let names: Vec = { + let mut seen = std::collections::HashSet::new(); + let mut n = Vec::new(); + for it in &statuses_response { + for s in &it.statuses { + if seen.insert(s.name.clone()) { + n.push(s.name.clone()); + } + } + } + n.sort(); + n + }; + + let result = jr::partial_match::partial_match("in prog", &names); + match result { + jr::partial_match::MatchResult::Exact(name) => assert_eq!(name, "In Progress"), + other => panic!("Expected Exact, got {:?}", std::mem::discriminant(&other)), + } +} + +#[tokio::test] +async fn ambiguous_status_returns_multiple_matches() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ/statuses")) + .respond_with(ResponseTemplate::new(200).set_body_json( + project_statuses_response(vec!["In Progress", "In Review", "Done"]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses_response = client.get_project_statuses("PROJ").await.unwrap(); + let names: Vec = { + let mut seen = std::collections::HashSet::new(); + let mut n = Vec::new(); + for it in &statuses_response { + for s in 
&it.statuses { + if seen.insert(s.name.clone()) { + n.push(s.name.clone()); + } + } + } + n.sort(); + n + }; + + let result = jr::partial_match::partial_match("in", &names); + match result { + jr::partial_match::MatchResult::Ambiguous(matches) => { + assert!(matches.contains(&"In Progress".to_string())); + assert!(matches.contains(&"In Review".to_string())); + } + other => panic!("Expected Ambiguous, got {:?}", std::mem::discriminant(&other)), + } +} + +#[tokio::test] +async fn status_validation_with_global_statuses() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/status")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + {"id": "1", "name": "Open", "statusCategory": {"key": "new"}}, + {"id": "2", "name": "Closed", "statusCategory": {"key": "done"}} + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses = client.get_all_statuses().await.unwrap(); + + let result = jr::partial_match::partial_match("Nonexistant", &statuses); + assert!(matches!(result, jr::partial_match::MatchResult::None(_))); +} + +#[tokio::test] +async fn project_statuses_404_means_project_not_found() { + // Spec test 7: when both --project and --status are set, project statuses 404 + // should surface as a project-not-found error + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/NOPE/statuses")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["No project could be found with key 'NOPE'."], + "errors": {} + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.get_project_statuses("NOPE").await; + assert!(result.is_err()); + let err = result.unwrap_err(); + // The error should be a 404 ApiError + assert!(err 
+ .downcast_ref::() + .is_some_and(|e| matches!(e, jr::error::JrError::ApiError { status: 404, .. }))); +} +``` + +- [ ] **Step 5: Wire validation into `handle_list`** + +In `src/cli/issue/list.rs`, make the following changes to `handle_list`: + +**5a.** Add imports at the top of the file (after existing imports): + +```rust +use crate::partial_match::{self, MatchResult}; +``` + +**5b.** Move `project_key` resolution up. Cut this line from its current position (line 108): +```rust +let project_key = config.project_key(project_override); +``` +And paste it right after the `team_clause` block (after line 95), before the current `build_filter_clauses` call. + +**5c.** Replace the `build_filter_clauses` call (currently lines 98-105) with the validation block + new `build_filter_clauses` call. The old code: +```rust + // Build filter clauses from all flag values + let filter_parts = build_filter_clauses( + assignee_jql.as_deref(), + reporter_jql.as_deref(), + status.as_deref(), + team_clause.as_deref(), + recent.as_deref(), + open, + ); +``` + +Replace with: + +```rust + // Validate --project exists + if let Some(ref pk) = project_key { + // Skip if --status is set (project will be validated via statuses endpoint below) + if status.is_none() && !client.project_exists(pk).await? { + return Err(JrError::UserError(format!( + "Project \"{}\" not found. Run \"jr project list\" to see available projects.", + pk + )) + .into()); + } + } + + // Validate --status and resolve to exact name + let resolved_status: Option = if let Some(ref status_input) = status { + let valid_statuses = if let Some(ref pk) = project_key { + // Project-scoped: also validates project existence (404 = not found) + match client.get_project_statuses(pk).await { + Ok(issue_types) => extract_unique_status_names(&issue_types), + Err(e) => { + if let Some(JrError::ApiError { status: 404, .. }) = + e.downcast_ref::() + { + return Err(JrError::UserError(format!( + "Project \"{}\" not found. 
Run \"jr project list\" to see available projects.", + pk + )) + .into()); + } + return Err(e); + } + } + } else { + client.get_all_statuses().await? + }; + + match partial_match::partial_match(status_input, &valid_statuses) { + MatchResult::Exact(name) => Some(name), + MatchResult::Ambiguous(matches) => { + return Err(JrError::UserError(format!( + "Ambiguous status \"{}\". Matches: {}", + status_input, + matches.join(", ") + )) + .into()); + } + MatchResult::None(all) => { + let available = all.join(", "); + let scope = if let Some(ref pk) = project_key { + format!(" for project {}", pk) + } else { + String::new() + }; + return Err(JrError::UserError(format!( + "No status matching \"{}\"{scope}. Available: {available}", + status_input, + )) + .into()); + } + } + } else { + None + }; + + // Build filter clauses from all flag values + let filter_parts = build_filter_clauses( + assignee_jql.as_deref(), + reporter_jql.as_deref(), + resolved_status.as_deref(), + team_clause.as_deref(), + recent.as_deref(), + open, + ); +``` + +**5d.** Remove the now-duplicate `let project_key = ...` line from its old position (which was after the `build_filter_clauses` call). It was already moved up in step 5b. + +- [ ] **Step 6: Run all tests** + +Run: `cargo test` +Expected: All pass + +- [ ] **Step 7: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: Clean + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/issue/list.rs tests/input_validation.rs +git commit -m "feat: validate --project and --status in issue list (#71)" +``` + +--- + +### Task 4: Update README + +**Files:** +- Modify: `README.md` + +- [ ] **Step 1: Update the issue list command description** + +In the Commands table, find: +``` +| `jr issue list` | List issues (`--assignee`, `--reporter`, `--recent`, `--status`, `--open`, `--team`, `--jql`, `--limit`/`--all`, `--points`, `--assets`) | +``` + +The description already mentions `--status`. 
No change needed to the flag list — the validation is transparent to the user. The behavior change (errors instead of silent empty results) doesn't need a README flag update. + +**Instead, add an example to Quick Start** after the `jr issue list --assignee me --open` line: + +```bash +# Issues in a specific status +jr issue list --project FOO --status "In Progress" +``` + +- [ ] **Step 2: Run full test suite** + +Run: `cargo test && cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: All pass, clean + +- [ ] **Step 3: Commit** + +```bash +git add README.md +git commit -m "docs: add status filter example to README (#71)" +``` diff --git a/docs/superpowers/plans/2026-03-28-sprint-current-limit.md b/docs/superpowers/plans/2026-03-28-sprint-current-limit.md new file mode 100644 index 0000000..7da8910 --- /dev/null +++ b/docs/superpowers/plans/2026-03-28-sprint-current-limit.md @@ -0,0 +1,503 @@ +# Sprint Current --limit Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `--limit` and `--all` flags to `jr sprint current` so output size is bounded by default (30 issues), matching `issue list` and `board view`. + +**Architecture:** Pure wiring change — `SprintCommand::Current` gains two clap fields, `handle_current` computes `effective_limit` via existing `resolve_effective_limit()`, and passes it to the existing `get_sprint_issues()` which already supports limits. A "more results" hint prints to stderr when results are truncated. 
+
+**Tech Stack:** Rust, clap (derive), wiremock (integration tests), assert_cmd
+
+---
+
+### File Structure
+
+| Action | File | Responsibility |
+|--------|------|----------------|
+| Modify | `src/cli/mod.rs:382-396` | Add `limit` and `all` fields to `SprintCommand::Current` |
+| Modify | `src/cli/sprint.rs:17-44` | Update both match sites in `handle`, add params to `handle_current` |
+| Modify | `src/cli/sprint.rs:106-178` | Wire `effective_limit` into `handle_current`, add "more results" hint |
+| Create | `tests/sprint_commands.rs` | Integration tests for limit behavior |
+
+No new API methods, no new utility functions, no new fixture helpers needed.
+
+---
+
+### Task 1: Add --limit/--all flags to SprintCommand::Current
+
+**Files:**
+- Modify: `src/cli/mod.rs:382-396`
+
+- [ ] **Step 1: Add `limit` and `all` fields to `SprintCommand::Current`**
+
+In `src/cli/mod.rs`, change `SprintCommand::Current` from:
+
+```rust
+    /// Show current sprint issues
+    Current {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+```
+
+To:
+
+```rust
+    /// Show current sprint issues
+    Current {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+        /// Maximum number of issues to return
+        #[arg(long)]
+        limit: Option<u32>,
+        /// Fetch all results (no default limit)
+        #[arg(long, conflicts_with = "limit")]
+        all: bool,
+    },
+```
+
+This matches `BoardCommand::View` exactly (lines 370-379 in the same file).
+
+- [ ] **Step 2: Verify it compiles**
+
+Run: `cargo build 2>&1 | head -20`
+
+Expected: Compile errors in `src/cli/sprint.rs` because the match arms don't account for the new fields yet. That's expected — we'll fix them in Task 2. 
+
+- [ ] **Step 3: Commit**
+
+```bash
+git add src/cli/mod.rs
+git commit -m "feat: add --limit and --all flags to sprint current CLI definition (#72)"
+```
+
+---
+
+### Task 2: Wire limit into handle_current
+
+**Files:**
+- Modify: `src/cli/sprint.rs:1-178`
+
+- [ ] **Step 1: Update the board_override extraction match**
+
+In `src/cli/sprint.rs`, change line 19 from:
+
+```rust
+    SprintCommand::Current { board } => *board,
+```
+
+To:
+
+```rust
+    SprintCommand::Current { board, .. } => *board,
+```
+
+The `..` ignores the new `limit` and `all` fields when we only need `board`.
+
+- [ ] **Step 2: Update the dispatch match to extract and pass limit/all**
+
+In `src/cli/sprint.rs`, change lines 41-43 from:
+
+```rust
+    SprintCommand::Current { .. } => {
+        handle_current(board_id, client, output_format, config).await
+    }
+```
+
+To:
+
+```rust
+    SprintCommand::Current { limit, all, .. } => {
+        handle_current(board_id, client, output_format, config, limit, all).await
+    }
+```
+
+- [ ] **Step 3: Update handle_current signature and wire effective_limit**
+
+Change the `handle_current` function signature (line 106) from:
+
+```rust
+async fn handle_current(
+    board_id: u64,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    config: &Config,
+) -> Result<()> {
+```
+
+To:
+
+```rust
+async fn handle_current(
+    board_id: u64,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    config: &Config,
+    limit: Option<u32>,
+    all: bool,
+) -> Result<()> {
+    let effective_limit = crate::cli::resolve_effective_limit(limit, all);
+```
+
+Then change the `get_sprint_issues` call (lines 121-124) from:
+
+```rust
+    let issues = client
+        .get_sprint_issues(sprint.id, None, None, &extra)
+        .await? 
+ .issues; +``` + +To: + +```rust + let result = client + .get_sprint_issues(sprint.id, None, effective_limit, &extra) + .await?; + let issues = result.issues; + let has_more = result.has_more; +``` + +- [ ] **Step 4: Capture issue count before the match block, add "more results" hint after it** + +The `OutputFormat::Json` branch moves `issues` into `serde_json::json!` (`"issues": issues`), so `issues.len()` would be a use-after-move if placed after the match. Capture the count first. + +Add this line immediately after `let has_more = result.has_more;`: + +```rust + let issue_count = issues.len(); +``` + +Then after the closing brace of `match output_format { ... }` (line 175), add before the final `Ok(())`: + +```rust + if has_more && !all { + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issue_count + ); + } +``` + +- [ ] **Step 5: Verify it compiles and existing tests pass** + +Run: `cargo build && cargo test --lib` + +Expected: Build succeeds. All existing unit tests pass (the `compute_sprint_summary` tests don't touch `handle_current`). + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/sprint.rs +git commit -m "feat: wire --limit/--all into sprint current handler (#72)" +``` + +--- + +### Task 3: Integration tests + +**Files:** +- Create: `tests/sprint_commands.rs` + +- [ ] **Step 1: Write integration tests** + +Create `tests/sprint_commands.rs` with the following content: + +```rust +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Helper: build N issues for testing. +fn make_issues(count: usize) -> Vec { + (1..=count) + .map(|i| { + common::fixtures::issue_response( + &format!("TEST-{}", i), + &format!("Issue {}", i), + "In Progress", + ) + }) + .collect() +} + +/// Mount prereq mocks (board list, board config, active sprint) on the server. 
+async fn mount_prereqs(server: &MockServer) { + // Board auto-resolve: list boards for project PROJ, type=scrum → 1 board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::board_list_response(vec![common::fixtures::board_response( + 42, + "PROJ Scrum Board", + "scrum", + "PROJ", + )]), + )) + .mount(server) + .await; + + // Board config → scrum + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::board_config_response("scrum")), + ) + .mount(server) + .await; + + // Active sprint list → one sprint + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/sprint")) + .and(query_param("state", "active")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::sprint_list_response(vec![common::fixtures::sprint( + 100, + "Sprint 1", + "active", + )]), + )) + .mount(server) + .await; +} + +#[tokio::test] +async fn sprint_current_default_limit_caps_at_30() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + // Sprint issues: 35 results in one page + let issues = make_issues(35); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 35)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + // Should show exactly 30 issues (default limit) + let issue_count = 
stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 30, "Expected 30 issues, got {issue_count}"); + + // Should show "more results" hint + assert!( + stderr.contains("Showing 30 results"), + "Expected 'Showing 30 results' in stderr, got: {stderr}" + ); +} + +#[tokio::test] +async fn sprint_current_limit_flag() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(20); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 20)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .arg("--limit") + .arg("5") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 5, "Expected 5 issues, got {issue_count}"); + + assert!( + stderr.contains("Showing 5 results"), + "Expected 'Showing 5 results' in stderr, got: {stderr}" + ); +} + +#[tokio::test] +async fn sprint_current_all_flag_returns_everything() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(35); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 35)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .arg("--all") + .output() + .unwrap(); + + let stdout = 
String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 35, "Expected 35 issues, got {issue_count}"); + + assert!( + !stderr.contains("Showing"), + "Should NOT show 'Showing' hint with --all, got: {stderr}" + ); +} + +#[tokio::test] +async fn sprint_current_under_limit_no_hint() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(10); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 10)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 10, "Expected 10 issues, got {issue_count}"); + + assert!( + !stderr.contains("Showing"), + "Should NOT show hint when under limit, got: {stderr}" + ); +} + +#[test] +fn sprint_current_limit_and_all_conflict() { + let mut cmd = Command::cargo_bin("jr").unwrap(); + cmd.arg("sprint") + .arg("current") + .arg("--limit") + .arg("3") + .arg("--all"); + + cmd.assert().failure().code(2); +} +``` + +- [ ] **Step 2: Run the tests** + +Run: `cargo test --test sprint_commands` + +Expected: All 5 tests pass (the CLI changes from Tasks 1-2 are already in place). + +If any test fails, fix the implementation in `sprint.rs` — not the tests. The tests encode the spec requirements. 
+ +**TDD note:** Tests come after implementation here because the integration tests invoke the binary with `--limit`/`--all` flags via `assert_cmd`. Clap rejects unknown flags with exit 2 before any handler code runs, so writing these tests before the flag definition gives false failures (clap parse error), not meaningful red-green-refactor. The test-first discipline applies at the unit level; at the CLI integration level, the flag must exist first. + +- [ ] **Step 3: Run the full test suite to verify nothing is broken** + +Run: `cargo test` + +Expected: All tests pass (unit + integration). + +- [ ] **Step 4: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: Zero warnings. + +- [ ] **Step 5: Run formatter** + +Run: `cargo fmt --all -- --check` + +Expected: No formatting changes needed. + +- [ ] **Step 6: Commit** + +```bash +git add tests/sprint_commands.rs +git commit -m "test: add integration tests for sprint current --limit (#72)" +``` + +--- + +### Task 4: Format and final verification + +- [ ] **Step 1: Run full test suite one final time** + +Run: `cargo test` + +Expected: All tests pass. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: Zero warnings. + +- [ ] **Step 3: Run formatter and commit if needed** + +Run: `cargo fmt --all` + +If any files changed: + +```bash +git add -A +git commit -m "style: format code" +``` diff --git a/docs/superpowers/plans/2026-04-01-assets-schema-discovery.md b/docs/superpowers/plans/2026-04-01-assets-schema-discovery.md new file mode 100644 index 0000000..03db91f --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-assets-schema-discovery.md @@ -0,0 +1,1665 @@ +# Assets Schema Discovery Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. 
+ +**Goal:** Add `jr assets schemas`, `jr assets types`, and `jr assets schema` commands so users and AI agents can discover the Assets data model without guessing. + +**Architecture:** Three new CLI subcommands backed by two new API methods (`list_object_schemas`, `list_object_types`) and an extended existing method (`get_object_type_attributes`). New serde types for schema/object-type responses in `src/types/assets/schema.rs`. Type resolution uses the existing `partial_match` module. The existing `AssetsPage` pagination struct is reused for the schema list endpoint. + +**Tech Stack:** Rust, clap (CLI), serde (JSON), wiremock (integration tests), assert_cmd (CLI smoke tests), comfy-table (table output) + +--- + +## File Structure + +| File | Responsibility | +|------|---------------| +| `src/types/assets/schema.rs` | **New** — `ObjectSchema`, `ObjectTypeEntry` serde structs | +| `src/types/assets/object.rs` | Extend `ObjectTypeAttributeDef` with `default_type`, `reference_type`, `reference_object_type`, cardinality, `editable`, `description`, `options`; add `DefaultType`, `ReferenceType`, `ReferenceObjectType` structs | +| `src/types/assets/mod.rs` | Register new `schema` module | +| `src/api/assets/schemas.rs` | **New** — `list_object_schemas` (paginated), `list_object_types` (flat) | +| `src/api/assets/mod.rs` | Register new `schemas` module | +| `src/cli/mod.rs` | Add `Schemas`, `Types`, `Schema` variants to `AssetsCommand` | +| `src/cli/assets.rs` | Add `handle_schemas`, `handle_types`, `handle_schema` handlers + type display helper | +| `tests/assets.rs` | Integration tests for new API methods and CLI commands | +| `tests/cli_smoke.rs` | Smoke tests for new subcommands | +| `CLAUDE.md` | Update `assets.rs` description | +| `README.md` | Add new commands to table | + +--- + +### Task 1: New serde types for schemas and object type entries + +**Files:** +- Create: `src/types/assets/schema.rs` +- Modify: `src/types/assets/mod.rs:1-7` + +- [ ] **Step 1: Write the 
failing test for ObjectSchema deserialization** + +Add to the bottom of the new file `src/types/assets/schema.rs`: + +```rust +use serde::{Deserialize, Serialize}; + +/// Object schema from GET /objectschema/list. +#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectSchema { + pub id: String, + pub name: String, + #[serde(rename = "objectSchemaKey")] + pub object_schema_key: String, + pub description: Option<String>, + #[serde(rename = "objectCount", default)] + pub object_count: i64, + #[serde(rename = "objectTypeCount", default)] + pub object_type_count: i64, +} + +/// Object type entry from GET /objectschema/{id}/objecttypes/flat. +#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectTypeEntry { + pub id: String, + pub name: String, + pub description: Option<String>, + #[serde(default)] + pub position: i32, + #[serde(rename = "objectCount", default)] + pub object_count: i64, + #[serde(rename = "objectSchemaId")] + pub object_schema_id: String, + #[serde(default)] + pub inherited: bool, + #[serde(rename = "abstractObjectType", default)] + pub abstract_object_type: bool, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_object_schema_full() { + let json = r#"{ + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "description": "IT assets schema", + "objectCount": 95, + "objectTypeCount": 34 + }"#; + let schema: ObjectSchema = serde_json::from_str(json).unwrap(); + assert_eq!(schema.id, "6"); + assert_eq!(schema.name, "ITSM"); + assert_eq!(schema.object_schema_key, "ITSM"); + assert_eq!(schema.description.as_deref(), Some("IT assets schema")); + assert_eq!(schema.object_count, 95); + assert_eq!(schema.object_type_count, 34); + } + + #[test] + fn deserialize_object_schema_minimal() { + let json = r#"{ + "id": "1", + "name": "HR", + "objectSchemaKey": "HR" + }"#; + let schema: ObjectSchema = serde_json::from_str(json).unwrap(); + assert_eq!(schema.id, "1"); + assert_eq!(schema.name, "HR"); + 
assert!(schema.description.is_none()); + assert_eq!(schema.object_count, 0); + assert_eq!(schema.object_type_count, 0); + } + + #[test] + fn deserialize_object_type_entry() { + let json = r#"{ + "id": "19", + "name": "Employee", + "position": 0, + "objectCount": 42, + "objectSchemaId": "1", + "inherited": false, + "abstractObjectType": false, + "parentObjectTypeInherited": false + }"#; + let entry: ObjectTypeEntry = serde_json::from_str(json).unwrap(); + assert_eq!(entry.id, "19"); + assert_eq!(entry.name, "Employee"); + assert_eq!(entry.position, 0); + assert_eq!(entry.object_count, 42); + assert_eq!(entry.object_schema_id, "1"); + assert!(!entry.inherited); + assert!(!entry.abstract_object_type); + assert!(entry.description.is_none()); + } + + #[test] + fn deserialize_object_type_entry_with_description() { + let json = r#"{ + "id": "23", + "name": "Office", + "description": "Physical office or site.", + "position": 2, + "objectCount": 0, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + }"#; + let entry: ObjectTypeEntry = serde_json::from_str(json).unwrap(); + assert_eq!(entry.description.as_deref(), Some("Physical office or site.")); + assert_eq!(entry.position, 2); + } +} +``` + +- [ ] **Step 2: Register the module** + +In `src/types/assets/mod.rs`, add the `schema` module. 
The file should become: + +```rust +pub mod linked; +pub mod object; +pub mod schema; +pub mod ticket; + +pub use linked::*; +pub use object::*; +pub use schema::*; +pub use ticket::*; +``` + +- [ ] **Step 3: Run tests to verify they pass** + +Run: `cargo test --lib types::assets::schema` +Expected: 4 tests PASS + +- [ ] **Step 4: Commit** + +```bash +git add src/types/assets/schema.rs src/types/assets/mod.rs +git commit -m "feat(types): add ObjectSchema and ObjectTypeEntry serde structs (#87)" +``` + +--- + +### Task 2: Extend ObjectTypeAttributeDef with new fields + +**Files:** +- Modify: `src/types/assets/object.rs:54-67` + +- [ ] **Step 1: Write the failing test for DefaultType deserialization** + +Add to `src/types/assets/object.rs`, inside the existing `mod tests` block, after the last test: + +```rust + #[test] + fn deserialize_attribute_def_with_default_type() { + let json = r#"{ + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, + "maximumCardinality": 1, + "editable": true, + "description": "The name of the object" + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + assert_eq!(def.name, "Name"); + assert!(def.label); + let dt = def.default_type.unwrap(); + assert_eq!(dt.id, 0); + assert_eq!(dt.name, "Text"); + assert_eq!(def.minimum_cardinality, 1); + assert!(def.editable); + assert_eq!(def.description.as_deref(), Some("The name of the object")); + assert!(def.reference_type.is_none()); + assert!(def.reference_object_type.is_none()); + } + + #[test] + fn deserialize_attribute_def_with_reference() { + let json = r#"{ + "id": "869", + "name": "Service relationships", + "system": false, + "hidden": false, + "label": false, + "position": 6, + "referenceType": { "id": "36", "name": "Depends on" }, + "referenceObjectTypeId": "122", + "referenceObjectType": { "id": "122", "name": "Service" }, + 
"minimumCardinality": 0, + "maximumCardinality": -1, + "editable": true + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + assert_eq!(def.name, "Service relationships"); + assert!(def.default_type.is_none()); + let rt = def.reference_type.unwrap(); + assert_eq!(rt.name, "Depends on"); + let rot = def.reference_object_type.unwrap(); + assert_eq!(rot.name, "Service"); + assert_eq!(def.minimum_cardinality, 0); + assert_eq!(def.maximum_cardinality, -1); + } + + #[test] + fn deserialize_attribute_def_select_with_options() { + let json = r#"{ + "id": "868", + "name": "Tier", + "system": false, + "hidden": false, + "label": false, + "position": 5, + "defaultType": { "id": 10, "name": "Select" }, + "minimumCardinality": 1, + "maximumCardinality": 1, + "editable": true, + "options": "Tier 1,Tier 2,Tier 3" + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + let dt = def.default_type.unwrap(); + assert_eq!(dt.name, "Select"); + assert_eq!(def.options.as_deref(), Some("Tier 1,Tier 2,Tier 3")); + assert_eq!(def.minimum_cardinality, 1); + } + + #[test] + fn deserialize_attribute_def_backward_compat() { + // Existing JSON without the new fields — must still deserialize + let json = r#"{ + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + assert_eq!(def.id, "134"); + assert!(def.system); + assert!(def.default_type.is_none()); + assert!(def.reference_type.is_none()); + assert!(def.reference_object_type.is_none()); + assert_eq!(def.minimum_cardinality, 0); + assert_eq!(def.maximum_cardinality, 0); + assert!(!def.editable); + assert!(def.description.is_none()); + assert!(def.options.is_none()); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib types::assets::object::tests::deserialize_attribute_def_with_default_type` +Expected: FAIL — 
`ObjectTypeAttributeDef` does not have field `default_type` + +- [ ] **Step 3: Add new structs and extend ObjectTypeAttributeDef** + +In `src/types/assets/object.rs`, add after the existing `ObjectTypeAttributeDef` struct. The full struct becomes: + +```rust +/// Attribute definition from the object type schema. +#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectTypeAttributeDef { + pub id: String, + pub name: String, + #[serde(default)] + pub system: bool, + #[serde(default)] + pub hidden: bool, + #[serde(default)] + pub label: bool, + #[serde(default)] + pub position: i32, + #[serde(rename = "defaultType")] + pub default_type: Option<DefaultType>, + #[serde(rename = "referenceType")] + pub reference_type: Option<ReferenceType>, + #[serde(rename = "referenceObjectType")] + pub reference_object_type: Option<ReferenceObjectType>, + #[serde(rename = "minimumCardinality", default)] + pub minimum_cardinality: i32, + #[serde(rename = "maximumCardinality", default)] + pub maximum_cardinality: i32, + #[serde(default)] + pub editable: bool, + pub description: Option<String>, + pub options: Option<String>, +} + +/// Attribute data type (e.g., Text, DateTime, Select). +#[derive(Debug, Deserialize, Serialize)] +pub struct DefaultType { + pub id: i32, + pub name: String, +} + +/// Reference link type (e.g., "Depends on", "References"). +#[derive(Debug, Deserialize, Serialize)] +pub struct ReferenceType { + pub id: String, + pub name: String, +} + +/// Target object type for a reference attribute (e.g., "Service", "Employee"). 
+#[derive(Debug, Deserialize, Serialize)] +pub struct ReferenceObjectType { + pub id: String, + pub name: String, +} +``` + +- [ ] **Step 4: Run all tests to verify they pass** + +Run: `cargo test --lib types::assets::object` +Expected: All tests PASS (both new and existing) + +- [ ] **Step 5: Commit** + +```bash +git add src/types/assets/object.rs +git commit -m "feat(types): extend ObjectTypeAttributeDef with defaultType, reference, cardinality (#87)" +``` + +--- + +### Task 3: New API methods for schema listing + +**Files:** +- Create: `src/api/assets/schemas.rs` +- Modify: `src/api/assets/mod.rs:1-4` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the integration test for list_object_schemas** + +Add to the bottom of `tests/assets.rs`: + +```rust +#[tokio::test] +async fn list_object_schemas_returns_schemas() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/list", + )) + .and(query_param("startAt", "0")) + .and(query_param("maxResults", "25")) + .and(query_param("includeCounts", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 34 + }, + { + "id": "1", + "name": "Human Resources", + "objectSchemaKey": "HR", + "description": "HR schema", + "status": "Ok", + "objectCount": 1023, + "objectTypeCount": 14 + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let schemas = client.list_object_schemas("ws-123").await.unwrap(); + assert_eq!(schemas.len(), 2); + assert_eq!(schemas[0].name, "ITSM"); + assert_eq!(schemas[0].object_schema_key, "ITSM"); + assert_eq!(schemas[0].object_type_count, 34); + assert_eq!(schemas[1].name, "Human Resources"); + 
assert_eq!(schemas[1].description.as_deref(), Some("HR schema")); +} + +#[tokio::test] +async fn list_object_types_returns_flat_array() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .and(query_param("includeObjectCounts", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "19", + "name": "Employee", + "position": 0, + "objectCount": 42, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + }, + { + "id": "23", + "name": "Office", + "description": "Physical office or site.", + "position": 2, + "objectCount": 5, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let types = client.list_object_types("ws-123", "6").await.unwrap(); + assert_eq!(types.len(), 2); + assert_eq!(types[0].name, "Employee"); + assert_eq!(types[0].object_count, 42); + assert_eq!(types[1].name, "Office"); + assert_eq!( + types[1].description.as_deref(), + Some("Physical office or site.") + ); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --test assets list_object_schemas_returns_schemas` +Expected: FAIL — `list_object_schemas` method does not exist + +- [ ] **Step 3: Implement list_object_schemas and list_object_types** + +Create `src/api/assets/schemas.rs`: + +```rust +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::AssetsPage; +use crate::types::assets::{ObjectSchema, ObjectTypeEntry}; + +impl JiraClient { + /// List all object schemas in the workspace with auto-pagination. 
+ pub async fn list_object_schemas( + &self, + workspace_id: &str, + ) -> Result<Vec<ObjectSchema>> { + let mut all = Vec::new(); + let mut start_at = 0u32; + let page_size = 25u32; + + loop { + let path = format!( + "objectschema/list?startAt={}&maxResults={}&includeCounts=true", + start_at, page_size + ); + let page: AssetsPage<ObjectSchema> = + self.get_assets(workspace_id, &path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + + if !has_more { + break; + } + start_at = next; + } + Ok(all) + } + + /// List all object types for a given schema (flat, no pagination). + pub async fn list_object_types( + &self, + workspace_id: &str, + schema_id: &str, + ) -> Result<Vec<ObjectTypeEntry>> { + let path = format!( + "objectschema/{}/objecttypes/flat?includeObjectCounts=true", + urlencoding::encode(schema_id) + ); + self.get_assets(workspace_id, &path).await + } +} +``` + +- [ ] **Step 4: Register the module** + +In `src/api/assets/mod.rs`, add `pub mod schemas;`. The file should become: + +```rust +pub mod linked; +pub mod objects; +pub mod schemas; +pub mod tickets; +pub mod workspace; +``` + +- [ ] **Step 5: Run tests to verify they pass** + +Run: `cargo test --test assets list_object_schemas_returns_schemas list_object_types_returns_flat_array` +Expected: 2 tests PASS + +- [ ] **Step 6: Commit** + +```bash +git add src/api/assets/schemas.rs src/api/assets/mod.rs tests/assets.rs +git commit -m "feat(api): add list_object_schemas and list_object_types methods (#87)" +``` + +--- + +### Task 4: CLI subcommand definitions + +**Files:** +- Modify: `src/cli/mod.rs:107-142` +- Test: `tests/cli_smoke.rs` + +- [ ] **Step 1: Write the smoke tests** + +Add to the bottom of `tests/cli_smoke.rs`: + +```rust +#[test] +fn test_assets_schemas_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "schemas", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("List object schemas")); +} + +#[test] +fn test_assets_types_help() { + 
Command::cargo_bin("jr") + .unwrap() + .args(["assets", "types", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("List object types")) + .stdout(predicate::str::contains("--schema")); +} + +#[test] +fn test_assets_schema_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "schema", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("Show attributes")) + .stdout(predicate::str::contains("--schema")); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --test cli_smoke test_assets_schemas_help` +Expected: FAIL — no `schemas` subcommand + +- [ ] **Step 3: Add the three new variants to AssetsCommand** + +In `src/cli/mod.rs`, add inside `pub enum AssetsCommand` (after the `Tickets` variant): + +```rust + /// List object schemas in the workspace + Schemas, + /// List object types (all schemas or filtered) + Types { + /// Filter by schema (partial name match or exact ID) + #[arg(long)] + schema: Option<String>, + }, + /// Show attributes for an object type + Schema { + /// Object type name (partial match supported) + name: String, + /// Filter by schema (partial name match or exact ID) + #[arg(long)] + schema: Option<String>, + }, +``` + +- [ ] **Step 4: Add stub match arms in handle()** + +In `src/cli/assets.rs`, add match arms inside the `match command` block in `handle()`. 
Add them after the `AssetsCommand::Tickets` arm: + +```rust + AssetsCommand::Schemas => { + handle_schemas(&workspace_id, output_format, client).await + } + AssetsCommand::Types { schema } => { + handle_types(&workspace_id, schema, output_format, client).await + } + AssetsCommand::Schema { name, schema } => { + handle_schema(&workspace_id, &name, schema, output_format, client).await + } +``` + +Add stub handler functions at the bottom of the file (before `#[cfg(test)]`): + +```rust +async fn handle_schemas( + _workspace_id: &str, + _output_format: &OutputFormat, + _client: &JiraClient, +) -> Result<()> { + todo!("handle_schemas") +} + +async fn handle_types( + _workspace_id: &str, + _schema: Option<String>, + _output_format: &OutputFormat, + _client: &JiraClient, +) -> Result<()> { + todo!("handle_types") +} + +async fn handle_schema( + _workspace_id: &str, + _name: &str, + _schema: Option<String>, + _output_format: &OutputFormat, + _client: &JiraClient, +) -> Result<()> { + todo!("handle_schema") +} +``` + +- [ ] **Step 5: Run smoke tests to verify they pass** + +Run: `cargo test --test cli_smoke test_assets_schemas_help test_assets_types_help test_assets_schema_help` +Expected: 3 tests PASS + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/mod.rs src/cli/assets.rs tests/cli_smoke.rs +git commit -m "feat(cli): add schemas, types, schema subcommand definitions (#87)" +``` + +--- + +### Task 5: Implement handle_schemas + +**Files:** +- Modify: `src/cli/assets.rs` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the integration test for schemas JSON output** + +Add to `tests/assets.rs`: + +```rust +#[tokio::test] +async fn schemas_json_lists_all_schemas() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/list", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "6", + "name": 
"ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 34 + }, + { + "id": "1", + "name": "Human Resources", + "objectSchemaKey": "HR", + "status": "Ok", + "objectCount": 1023, + "objectTypeCount": 14 + } + ] + }))) + .mount(&server) + .await; + + // Mock workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + let _guard = set_cache_dir(&tempfile::tempdir().unwrap().into_path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "schemas", "--output", "json"]) + .output() + .unwrap(); + + assert!(output.status.success(), "stderr: {}", String::from_utf8_lossy(&output.stderr)); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let arr = json.as_array().unwrap(); + assert_eq!(arr.len(), 2); + assert_eq!(arr[0]["name"], "ITSM"); + assert_eq!(arr[0]["objectSchemaKey"], "ITSM"); + assert_eq!(arr[1]["name"], "Human Resources"); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test assets schemas_json_lists_all_schemas` +Expected: FAIL — `todo!("handle_schemas")` panics + +- [ ] **Step 3: Implement handle_schemas** + +In `src/cli/assets.rs`, replace the `handle_schemas` stub: + +```rust +async fn handle_schemas( + workspace_id: &str, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let schemas = client.list_object_schemas(workspace_id).await?; + + let rows: Vec<Vec<String>> = schemas + .iter() + .map(|s| { + vec![ + s.id.clone(), + s.object_schema_key.clone(), + s.name.clone(), + s.description.clone().unwrap_or_else(|| "\u{2014}".into()), + s.object_type_count.to_string(), + 
s.object_count.to_string(), + ] + }) + .collect(); + + output::print_output( + output_format, + &["ID", "Key", "Name", "Description", "Types", "Objects"], + &rows, + &schemas, + ) +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `cargo test --test assets schemas_json_lists_all_schemas` +Expected: PASS + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/assets.rs tests/assets.rs +git commit -m "feat(cli): implement handle_schemas for assets schemas command (#87)" +``` + +--- + +### Task 6: Implement handle_types + +**Files:** +- Modify: `src/cli/assets.rs` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the integration test for types JSON output** + +Add to `tests/assets.rs`. This test needs mocks for workspace discovery, schema list, and objecttypes/flat for each schema: + +```rust +#[tokio::test] +async fn types_json_lists_all_types() { + let server = MockServer::start().await; + + // Mock workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + // Mock schema list + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/list", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [{ + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 2 + }] + }))) + .mount(&server) + .await; + + // Mock object types for schema 6 + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "19", + "name": "Employee", + "position": 0, + "objectCount": 42, + "objectSchemaId": "6", + 
"inherited": false, + "abstractObjectType": false + }, + { + "id": "23", + "name": "Office", + "description": "Physical office.", + "position": 2, + "objectCount": 5, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + let _guard = set_cache_dir(&tempfile::tempdir().unwrap().into_path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "types", "--output", "json"]) + .output() + .unwrap(); + + assert!(output.status.success(), "stderr: {}", String::from_utf8_lossy(&output.stderr)); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let arr = json.as_array().unwrap(); + assert_eq!(arr.len(), 2); + assert_eq!(arr[0]["name"], "Employee"); + assert_eq!(arr[0]["schemaName"], "ITSM"); + assert_eq!(arr[1]["name"], "Office"); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test assets types_json_lists_all_types` +Expected: FAIL — `todo!("handle_types")` panics + +- [ ] **Step 3: Implement handle_types** + +In `src/cli/assets.rs`, replace the `handle_types` stub. Also add the `resolve_schema` helper that will be reused by `handle_schema`: + +```rust +/// Resolve a --schema flag to a single schema, matching by ID (exact) or name (partial). 
+fn resolve_schema<'a>( + input: &str, + schemas: &'a [crate::types::assets::ObjectSchema], +) -> Result<&'a crate::types::assets::ObjectSchema> { + // Try exact ID match first + if let Some(s) = schemas.iter().find(|s| s.id == input) { + return Ok(s); + } + // Partial match on name + let names: Vec<String> = schemas.iter().map(|s| s.name.clone()).collect(); + match partial_match::partial_match(input, &names) { + partial_match::MatchResult::Exact(name) => { + Ok(schemas.iter().find(|s| s.name == name).unwrap()) + } + partial_match::MatchResult::Ambiguous(matches) => Err(JrError::UserError(format!( + "Ambiguous schema \"{}\". Matches: {}", + input, + matches.join(", ") + )) + .into()), + partial_match::MatchResult::None(all) => { + let available = if all.is_empty() { + "none".to_string() + } else { + all.join(", ") + }; + Err(JrError::UserError(format!( + "No schema matching \"{}\". Available: {}", + input, available + )) + .into()) + } + } +} + +async fn handle_types( + workspace_id: &str, + schema_filter: Option<String>, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let schemas = client.list_object_schemas(workspace_id).await?; + if schemas.is_empty() { + return Err( + JrError::UserError("No asset schemas found in this workspace.".into()).into(), + ); + } + + let target_schemas: Vec<&crate::types::assets::ObjectSchema> = match &schema_filter { + Some(input) => vec![resolve_schema(input, &schemas)?], + None => schemas.iter().collect(), + }; + + // Build a map of schema_id → schema_name for injection + let schema_names: std::collections::HashMap<&str, &str> = schemas + .iter() + .map(|s| (s.id.as_str(), s.name.as_str())) + .collect(); + + let mut all_types = Vec::new(); + for schema in &target_schemas { + let types = client + .list_object_types(workspace_id, &schema.id) + .await?; + all_types.extend(types); + } + + match output_format { + OutputFormat::Json => { + // Inject schemaName into each entry + let mut json_types: Vec<serde_json::Value> = Vec::new(); + for t in 
&all_types { + let mut val = serde_json::to_value(t)?; + if let Some(map) = val.as_object_mut() { + let schema_name = schema_names + .get(t.object_schema_id.as_str()) + .unwrap_or(&""); + map.insert( + "schemaName".to_string(), + serde_json::Value::String(schema_name.to_string()), + ); + } + json_types.push(val); + } + println!("{}", output::render_json(&json_types)?); + } + OutputFormat::Table => { + let rows: Vec<Vec<String>> = all_types + .iter() + .map(|t| { + let schema_name = schema_names + .get(t.object_schema_id.as_str()) + .unwrap_or(&"\u{2014}"); + vec![ + t.id.clone(), + t.name.clone(), + schema_name.to_string(), + t.description + .clone() + .unwrap_or_else(|| "\u{2014}".into()), + t.object_count.to_string(), + ] + }) + .collect(); + + output::print_output( + output_format, + &["ID", "Name", "Schema", "Description", "Objects"], + &rows, + &all_types, + )?; + } + } + Ok(()) +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `cargo test --test assets types_json_lists_all_types` +Expected: PASS + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/assets.rs tests/assets.rs +git commit -m "feat(cli): implement handle_types for assets types command (#87)" +``` + +--- + +### Task 7: Implement handle_schema (attribute display) + +**Files:** +- Modify: `src/cli/assets.rs` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the integration test for schema JSON output** + +Add to `tests/assets.rs`: + +```rust +#[tokio::test] +async fn schema_json_shows_attributes() { + let server = MockServer::start().await; + + // Mock workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + // Mock schema list + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/list", + )) + 
.respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [{ + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 2 + }] + }))) + .mount(&server) + .await; + + // Mock object types for schema 6 + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "23", + "name": "Office", + "position": 2, + "objectCount": 5, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + // Mock object type attributes + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/23/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, + "maximumCardinality": 1, + "editable": false + }, + { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, + "maximumCardinality": 1, + "editable": true, + "description": "The name of the object" + }, + { + "id": "869", + "name": "Service relationships", + "system": false, + "hidden": false, + "label": false, + "position": 6, + "referenceType": { "id": "36", "name": "Depends on" }, + "referenceObjectType": { "id": "122", "name": "Service" }, + "minimumCardinality": 0, + "maximumCardinality": -1, + "editable": true + } + ]))) + .mount(&server) + .await; + + let _guard = set_cache_dir(&tempfile::tempdir().unwrap().into_path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + 
.env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "schema", "Office", "--output", "json"]) + .output() + .unwrap(); + + assert!(output.status.success(), "stderr: {}", String::from_utf8_lossy(&output.stderr)); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let arr = json.as_array().unwrap(); + // JSON includes all attributes (including system) + assert_eq!(arr.len(), 3); + assert_eq!(arr[0]["name"], "Key"); + assert_eq!(arr[0]["system"], true); + assert_eq!(arr[2]["name"], "Service relationships"); + assert!(arr[2].get("referenceObjectType").is_some()); +} + +#[tokio::test] +async fn schema_table_filters_system_attrs() { + let server = MockServer::start().await; + + // Mock workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + // Mock schema list + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/list", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [{ + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 1 + }] + }))) + .mount(&server) + .await; + + // Mock object types for schema 6 + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "23", + "name": "Office", + "position": 2, + "objectCount": 5, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + // Mock object type attributes — includes system "Key" and "Created" + Mock::given(method("GET")) + 
.and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/23/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, + "editable": false + }, + { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, + "editable": true + }, + { + "id": "136", + "name": "Created", + "system": true, + "hidden": false, + "label": false, + "position": 2, + "defaultType": { "id": 6, "name": "DateTime" }, + "minimumCardinality": 1, + "editable": false + } + ]))) + .mount(&server) + .await; + + let _guard = set_cache_dir(&tempfile::tempdir().unwrap().into_path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "schema", "Office"]) + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(output.status.success(), "stderr: {}", String::from_utf8_lossy(&output.stderr)); + // Table output should contain the header and "Name" but not "Key" or "Created" (system) + assert!(stdout.contains("Object Type: Office")); + assert!(stdout.contains("Name")); + assert!(!stdout.contains("Created")); + // "Key" appears in the header row (column name), so check for the system attribute row + // by checking it only appears once (as header, not as data) + let key_count = stdout.matches("Key").count(); + // Should not appear as a data row — only zero times or once in a non-data context + assert!(key_count <= 1, "System attribute 'Key' should be filtered from table, but found {} occurrences", key_count); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test assets schema_json_shows_attributes` 
+Expected: FAIL — `todo!("handle_schema")` panics + +- [ ] **Step 3: Implement the attribute type display helper** + +In `src/cli/assets.rs`, add this helper function (above `handle_schema`): + +```rust +/// Format the Type column for an attribute definition. +fn format_attribute_type(attr: &crate::types::assets::ObjectTypeAttributeDef) -> String { + if let Some(ref dt) = attr.default_type { + return dt.name.clone(); + } + if let Some(ref rot) = attr.reference_object_type { + return format!("Reference \u{2192} {}", rot.name); + } + "Unknown".to_string() +} +``` + +- [ ] **Step 4: Implement handle_schema** + +In `src/cli/assets.rs`, replace the `handle_schema` stub: + +```rust +async fn handle_schema( + workspace_id: &str, + type_name: &str, + schema_filter: Option<String>, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let schemas = client.list_object_schemas(workspace_id).await?; + if schemas.is_empty() { + return Err( + JrError::UserError("No asset schemas found in this workspace.".into()).into(), + ); + } + + let target_schemas: Vec<&crate::types::assets::ObjectSchema> = match &schema_filter { + Some(input) => vec![resolve_schema(input, &schemas)?], + None => schemas.iter().collect(), + }; + + // Collect all object types with their schema name + let mut candidates: Vec<(crate::types::assets::ObjectTypeEntry, String)> = Vec::new(); + for schema in &target_schemas { + let types = client + .list_object_types(workspace_id, &schema.id) + .await?; + for t in types { + candidates.push((t, schema.name.clone())); + } + } + + if candidates.is_empty() { + return Err(JrError::UserError( + "No object types found. Run \"jr assets schemas\" to verify your workspace has schemas." 
+ .into(), + ) + .into()); + } + + // Partial match on type name + let type_names: Vec<String> = candidates.iter().map(|(t, _)| t.name.clone()).collect(); + let matched_name = match partial_match::partial_match(type_name, &type_names) { + partial_match::MatchResult::Exact(name) => name, + partial_match::MatchResult::Ambiguous(matches) => { + // Include schema name for disambiguation + let labeled: Vec<String> = matches + .iter() + .filter_map(|m| { + candidates + .iter() + .find(|(t, _)| t.name == *m) + .map(|(t, s)| format!("{} ({})", t.name, s)) + }) + .collect(); + return Err(JrError::UserError(format!( + "Ambiguous type \"{}\". Matches: {}. Use --schema to narrow results.", + type_name, + labeled.join(", ") + )) + .into()); + } + partial_match::MatchResult::None(_) => { + return Err(JrError::UserError(format!( + "No object type matching \"{}\". Run \"jr assets types\" to see available types.", + type_name + )) + .into()); + } + }; + + let (matched_type, schema_name) = candidates + .iter() + .find(|(t, _)| t.name == matched_name) + .unwrap(); + + // Fetch attributes + let attrs = client + .get_object_type_attributes(workspace_id, &matched_type.id) + .await?; + + match output_format { + OutputFormat::Json => { + println!("{}", output::render_json(&attrs)?); + } + OutputFormat::Table => { + println!( + "Object Type: {} (Schema: {})\n", + matched_type.name, schema_name + ); + + let mut visible: Vec<&crate::types::assets::ObjectTypeAttributeDef> = attrs + .iter() + .filter(|a| !a.system && !a.hidden) + .collect(); + visible.sort_by_key(|a| a.position); + + let rows: Vec<Vec<String>> = visible + .iter() + .map(|a| { + vec![ + a.position.to_string(), + a.name.clone(), + format_attribute_type(a), + if a.minimum_cardinality >= 1 { + "Yes".into() + } else { + "No".into() + }, + if a.editable { "Yes".into() } else { "No".into() }, + ] + }) + .collect(); + + if rows.is_empty() { + println!("No user-defined attributes."); + } else { + println!( + "{}", + output::render_table( + &["Pos", "Name", 
"Type", "Required", "Editable"], + &rows + ) + ); + } + } + } + Ok(()) +} +``` + +- [ ] **Step 5: Run tests to verify they pass** + +Run: `cargo test --test assets schema_json_shows_attributes schema_table_filters_system_attrs` +Expected: 2 tests PASS + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/assets.rs tests/assets.rs +git commit -m "feat(cli): implement handle_schema for assets schema command (#87)" +``` + +--- + +### Task 8: Unit tests for format_attribute_type and resolve_schema + +**Files:** +- Modify: `src/cli/assets.rs` (add to existing `#[cfg(test)] mod tests` block) + +- [ ] **Step 1: Write unit tests** + +Add inside the `mod tests` block in `src/cli/assets.rs`: + +```rust + use crate::types::assets::{DefaultType, ObjectTypeAttributeDef, ReferenceObjectType, ReferenceType}; + + fn make_attr_def( + default_type: Option<DefaultType>, + reference_object_type: Option<ReferenceObjectType>, + ) -> ObjectTypeAttributeDef { + ObjectTypeAttributeDef { + id: "1".into(), + name: "test".into(), + system: false, + hidden: false, + label: false, + position: 0, + default_type, + reference_type: None, + reference_object_type, + minimum_cardinality: 0, + maximum_cardinality: 1, + editable: true, + description: None, + options: None, + } + } + + #[test] + fn format_attr_type_default_type() { + let attr = make_attr_def( + Some(DefaultType { id: 0, name: "Text".into() }), + None, + ); + assert_eq!(super::format_attribute_type(&attr), "Text"); + } + + #[test] + fn format_attr_type_reference() { + let attr = make_attr_def( + None, + Some(ReferenceObjectType { id: "122".into(), name: "Service".into() }), + ); + assert_eq!( + super::format_attribute_type(&attr), + "Reference \u{2192} Service" + ); + } + + #[test] + fn format_attr_type_unknown() { + let attr = make_attr_def(None, None); + assert_eq!(super::format_attribute_type(&attr), "Unknown"); + } + + #[test] + fn format_attr_type_default_takes_precedence() { + let attr = make_attr_def( + Some(DefaultType { id: 0, name: "Text".into() }), + 
Some(ReferenceObjectType { id: "1".into(), name: "Svc".into() }), + ); + assert_eq!(super::format_attribute_type(&attr), "Text"); + } +``` + +- [ ] **Step 2: Run tests to verify they pass** + +Run: `cargo test --lib cli::assets::tests::format_attr_type` +Expected: 4 tests PASS + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/assets.rs +git commit -m "test: add unit tests for format_attribute_type (#87)" +``` + +--- + +### Task 9: Documentation updates + +**Files:** +- Modify: `CLAUDE.md` +- Modify: `README.md` + +- [ ] **Step 1: Update CLAUDE.md** + +In `CLAUDE.md`, find the line describing `assets.rs` in the Architecture section: + +``` +│ ├── assets.rs # assets search/view/tickets (--open/--status client-side filtering, search attribute enrichment) +``` + +Replace with: + +``` +│ ├── assets.rs # assets search/view/tickets/schemas/types/schema (search enrichment, schema discovery) +``` + +- [ ] **Step 2: Update README.md** + +In `README.md`, find the commands table section with `jr assets search`. 
Add three new rows after the `jr assets tickets` row: + +```markdown +| `jr assets schemas` | List object schemas in the workspace | +| `jr assets types [--schema]` | List object types (all or filtered by schema) | +| `jr assets schema <type>` | Show attributes for an object type (partial match) | +``` + +- [ ] **Step 3: Run full test suite** + +Run: `cargo test` +Expected: All tests PASS + +Run: `cargo clippy -- -D warnings` +Expected: No warnings + +Run: `cargo fmt --all -- --check` +Expected: No formatting issues + +- [ ] **Step 4: Commit** + +```bash +git add CLAUDE.md README.md +git commit -m "docs: add assets schema discovery commands to CLAUDE.md and README (#87)" +``` + +--- + +## Self-Review + +**Spec coverage check:** +- `jr assets schemas` — Task 5 ✓ +- `jr assets types [--schema]` — Task 6 ✓ +- `jr assets schema <type> [--schema]` — Task 7 ✓ +- `ObjectSchema`, `ObjectTypeEntry` types — Task 1 ✓ +- Extend `ObjectTypeAttributeDef` — Task 2 ✓ +- `list_object_schemas` (paginated), `list_object_types` (flat) — Task 3 ✓ +- CLI subcommand definitions — Task 4 ✓ +- Type display logic (defaultType, referenceObjectType, Unknown) — Task 7 + Task 8 ✓ +- Error handling (no schemas, no match, ambiguous) — Task 6 + Task 7 ✓ +- JSON output (pass-through + schemaName injection) — Task 5 + Task 6 + Task 7 ✓ +- Table output (filtering system/hidden) — Task 7 ✓ +- Smoke tests — Task 4 ✓ +- Integration tests — Tasks 3, 5, 6, 7 ✓ +- Unit tests — Tasks 1, 2, 8 ✓ +- Docs — Task 9 ✓ + +**Placeholder scan:** No TBD, TODO, or "implement later" found. + +**Type consistency:** `ObjectSchema`, `ObjectTypeEntry`, `ObjectTypeAttributeDef`, `DefaultType`, `ReferenceType`, `ReferenceObjectType` used consistently across all tasks. `resolve_schema` signature matches between Task 6 (definition) and Task 7 (reuse). `format_attribute_type` signature matches between Task 7 (definition) and Task 8 (unit tests). 
diff --git a/docs/superpowers/plans/2026-04-01-assets-search-attribute-names.md b/docs/superpowers/plans/2026-04-01-assets-search-attribute-names.md new file mode 100644 index 0000000..b142846 --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-assets-search-attribute-names.md @@ -0,0 +1,1066 @@ +# Assets Search Attribute Names Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Enrich `assets search --attributes` output with human-readable attribute names via per-object-type definitions, cached locally. + +**Architecture:** Fetch attribute definitions from `GET /objecttype/{id}/attributes` (one call per unique object type, cached 7 days). Build a HashMap mapping `objectTypeAttributeId` → definition. Inject `objectTypeAttribute` with `name` and `position` into each search result attribute. Table output gains an "Attributes" column with inline `Name: Value` pairs. 
+ +**Tech Stack:** Rust, serde_json::Value manipulation, wiremock for integration tests, existing XDG cache infrastructure + +--- + +### Task 1: Cache Layer — `ObjectTypeAttrCache` Read/Write + +**Files:** +- Modify: `src/cache.rs` + +- [ ] **Step 1: Write the failing tests for object type attribute cache** + +Add these tests to the existing `mod tests` block at the bottom of `src/cache.rs` (after the `expired_cmdb_fields_cache_returns_none` test at line ~442): + +```rust +#[test] +fn read_missing_object_type_attr_cache_returns_none() { + with_temp_cache(|| { + let result = read_object_type_attr_cache("23").unwrap(); + assert!(result.is_none()); + }); +} + +#[test] +fn write_then_read_object_type_attr_cache() { + with_temp_cache(|| { + let attrs = vec![ + CachedObjectTypeAttr { + id: "134".into(), + name: "Key".into(), + system: true, + hidden: false, + label: false, + position: 0, + }, + CachedObjectTypeAttr { + id: "135".into(), + name: "Name".into(), + system: false, + hidden: false, + label: true, + position: 1, + }, + ]; + write_object_type_attr_cache("23", &attrs).unwrap(); + + let loaded = read_object_type_attr_cache("23") + .unwrap() + .expect("should exist"); + assert_eq!(loaded.len(), 2); + assert_eq!(loaded[0].name, "Key"); + assert!(loaded[0].system); + assert_eq!(loaded[1].name, "Name"); + assert!(loaded[1].label); + }); +} + +#[test] +fn expired_object_type_attr_cache_returns_none() { + with_temp_cache(|| { + let expired = ObjectTypeAttrCache { + fetched_at: Utc::now() - chrono::Duration::days(8), + types: { + let mut m = HashMap::new(); + m.insert( + "23".to_string(), + vec![CachedObjectTypeAttr { + id: "134".into(), + name: "Key".into(), + system: true, + hidden: false, + label: false, + position: 0, + }], + ); + m + }, + }; + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + let content = serde_json::to_string_pretty(&expired).unwrap(); + std::fs::write(dir.join("object_type_attrs.json"), content).unwrap(); + + let result = 
read_object_type_attr_cache("23").unwrap(); + assert!(result.is_none(), "expired cache should return None"); + }); +} + +#[test] +fn object_type_attr_cache_multiple_types() { + with_temp_cache(|| { + let attrs_a = vec![CachedObjectTypeAttr { + id: "134".into(), + name: "Key".into(), + system: true, + hidden: false, + label: false, + position: 0, + }]; + let attrs_b = vec![CachedObjectTypeAttr { + id: "200".into(), + name: "Hostname".into(), + system: false, + hidden: false, + label: false, + position: 3, + }]; + write_object_type_attr_cache("23", &attrs_a).unwrap(); + write_object_type_attr_cache("45", &attrs_b).unwrap(); + + let loaded_a = read_object_type_attr_cache("23") + .unwrap() + .expect("type 23 should exist"); + assert_eq!(loaded_a[0].name, "Key"); + + let loaded_b = read_object_type_attr_cache("45") + .unwrap() + .expect("type 45 should exist"); + assert_eq!(loaded_b[0].name, "Hostname"); + }); +} + +#[test] +fn object_type_attr_cache_corrupt_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + std::fs::write(dir.join("object_type_attrs.json"), "not json").unwrap(); + + let result = read_object_type_attr_cache("23").unwrap(); + assert!(result.is_none(), "corrupt cache should return None"); + }); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --lib cache::tests -- object_type_attr` +Expected: FAIL — `read_object_type_attr_cache`, `write_object_type_attr_cache`, `ObjectTypeAttrCache`, `CachedObjectTypeAttr` not found. 
+ +- [ ] **Step 3: Implement the cache types and read/write functions** + +Add these types and functions to `src/cache.rs`, after the existing `CmdbFieldsCache` block (after line ~190, before `#[cfg(test)]`): + +```rust +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CachedObjectTypeAttr { + pub id: String, + pub name: String, + #[serde(default)] + pub system: bool, + #[serde(default)] + pub hidden: bool, + #[serde(default)] + pub label: bool, + #[serde(default)] + pub position: i32, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ObjectTypeAttrCache { + pub fetched_at: DateTime<Utc>, + pub types: HashMap<String, Vec<CachedObjectTypeAttr>>, +} + +pub fn read_object_type_attr_cache(object_type_id: &str) -> Result<Option<Vec<CachedObjectTypeAttr>>> { + let path = cache_dir().join("object_type_attrs.json"); + if !path.exists() { + return Ok(None); + } + + let content = std::fs::read_to_string(&path)?; + let cache: ObjectTypeAttrCache = match serde_json::from_str(&content) { + Ok(c) => c, + Err(_) => return Ok(None), + }; + + let age = Utc::now() - cache.fetched_at; + if age.num_days() >= CACHE_TTL_DAYS { + return Ok(None); + } + + Ok(cache.types.get(object_type_id).cloned()) +} + +pub fn write_object_type_attr_cache(object_type_id: &str, attrs: &[CachedObjectTypeAttr]) -> Result<()> { + let dir = cache_dir(); + std::fs::create_dir_all(&dir)?; + + let path = dir.join("object_type_attrs.json"); + + let mut cache: ObjectTypeAttrCache = if path.exists() { + let content = std::fs::read_to_string(&path)?; + serde_json::from_str(&content).unwrap_or(ObjectTypeAttrCache { + fetched_at: Utc::now(), + types: HashMap::new(), + }) + } else { + ObjectTypeAttrCache { + fetched_at: Utc::now(), + types: HashMap::new(), + } + }; + + cache.types.insert(object_type_id.to_string(), attrs.to_vec()); + cache.fetched_at = Utc::now(); + + let content = serde_json::to_string_pretty(&cache)?; + std::fs::write(&path, content)?; + Ok(()) +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test --lib cache::tests -- 
object_type_attr` +Expected: All 5 new tests PASS. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no formatting issues. + +- [ ] **Step 6: Commit** + +```bash +git add src/cache.rs +git commit -m "feat: add object type attribute cache for search enrichment (#86)" +``` + +--- + +### Task 2: API Method — `get_object_type_attributes` + +**Files:** +- Modify: `src/api/assets/objects.rs` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the failing integration test** + +Add this test at the end of `tests/assets.rs`: + +```rust +#[tokio::test] +async fn get_object_type_attributes_returns_definitions() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/23/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0, + "editable": false, + "sortable": true + }, + { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1, + "editable": true, + "sortable": true + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5, + "editable": true, + "sortable": true + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let attrs = client + .get_object_type_attributes("ws-123", "23") + .await + .unwrap(); + + assert_eq!(attrs.len(), 3); + assert_eq!(attrs[0].id, "134"); + assert_eq!(attrs[0].name, "Key"); + assert!(attrs[0].system); + assert_eq!(attrs[1].id, "135"); + assert_eq!(attrs[1].name, "Name"); + assert!(attrs[1].label); + assert_eq!(attrs[2].id, "140"); + assert_eq!(attrs[2].name, "Location"); + assert_eq!(attrs[2].position, 5); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + 
+Run: `cargo test --test assets get_object_type_attributes_returns_definitions` +Expected: FAIL — `get_object_type_attributes` method not found on `JiraClient`. + +- [ ] **Step 3: Implement the API method** + +Add this method to the `impl JiraClient` block in `src/api/assets/objects.rs`, after the existing `get_object_attributes` method (after line ~86): + +```rust +/// Get all attribute definitions for an object type. +/// +/// Returns schema-level metadata (name, system, hidden, label, position) +/// for every attribute defined on the type. Used to enrich search results +/// where only `objectTypeAttributeId` is present. +pub async fn get_object_type_attributes( + &self, + workspace_id: &str, + object_type_id: &str, +) -> Result<Vec<ObjectTypeAttributeDef>> { + let path = format!("objecttype/{}/attributes", urlencoding::encode(object_type_id)); + self.get_assets(workspace_id, &path).await +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `cargo test --test assets get_object_type_attributes_returns_definitions` +Expected: PASS. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no formatting issues. 
+ +- [ ] **Step 6: Commit** + +```bash +git add src/api/assets/objects.rs tests/assets.rs +git commit -m "feat: add get_object_type_attributes API method (#86)" +``` + +--- + +### Task 3: Enrichment Function — `enrich_search_attributes` + +**Files:** +- Modify: `src/api/assets/objects.rs` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the failing integration test for enrichment** + +Add this test at the end of `tests/assets.rs`: + +```rust +#[tokio::test] +async fn enrich_search_attributes_injects_names() { + let server = MockServer::start().await; + + // Mock: object type 13 attribute definitions + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/13/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + }, + { + "id": "141", + "name": "Secret", + "system": false, + "hidden": true, + "label": false, + "position": 6 + } + ]))) + .expect(1) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + // Simulate search results with inline attributes (no names) + let mut objects = vec![jr::types::assets::AssetObject { + id: "70".into(), + label: "Acme Corp".into(), + object_key: "OBJ-70".into(), + object_type: jr::types::assets::ObjectType { + id: "13".into(), + name: "Client".into(), + description: None, + }, + created: None, + updated: None, + attributes: vec![ + jr::types::assets::AssetAttribute { + id: "637".into(), + object_type_attribute_id: "140".into(), + values: vec![jr::types::assets::ObjectAttributeValue { + value: Some("New York".into()), + display_value: Some("New York".into()), + }], + }, + jr::types::assets::AssetAttribute { + id: "638".into(), + object_type_attribute_id: "141".into(), + 
values: vec![jr::types::assets::ObjectAttributeValue { + value: Some("secret".into()), + display_value: Some("secret".into()), + }], + }, + ], + }]; + + let enriched = jr::api::assets::objects::enrich_search_attributes( + &client, "ws-123", &mut objects, + ) + .await + .unwrap(); + + // Returns the attribute definition map for use in output formatting + assert!(enriched.contains_key("140")); + assert_eq!(enriched["140"].name, "Location"); + assert!(enriched.contains_key("141")); + assert_eq!(enriched["141"].name, "Secret"); + assert!(enriched["141"].hidden); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test assets enrich_search_attributes_injects_names` +Expected: FAIL — `enrich_search_attributes` function not found. + +- [ ] **Step 3: Implement the enrichment function** + +Add this function to `src/api/assets/objects.rs`, outside the `impl JiraClient` block (after the `resolve_object_key` function, before `#[cfg(test)]`): + +```rust +use std::collections::HashMap; +use crate::cache::{self, CachedObjectTypeAttr}; +use crate::types::assets::ObjectTypeAttributeDef; + +/// Enrich search results by resolving attribute definitions for each unique object type. +/// +/// Returns a HashMap mapping `objectTypeAttributeId` → `CachedObjectTypeAttr` for use +/// in output formatting (filtering system/hidden, sorting by position, displaying names). +/// +/// Fetches definitions from cache first, falling back to the API. Results are cached +/// for 7 days per object type. 
+pub async fn enrich_search_attributes( + client: &JiraClient, + workspace_id: &str, + objects: &mut [AssetObject], +) -> Result<HashMap<String, CachedObjectTypeAttr>> { + // Collect unique object type IDs + let mut type_ids: Vec<String> = objects + .iter() + .map(|o| o.object_type.id.clone()) + .collect(); + type_ids.sort(); + type_ids.dedup(); + + let mut attr_map: HashMap<String, CachedObjectTypeAttr> = HashMap::new(); + + for type_id in &type_ids { + // Try cache first + let attrs = match cache::read_object_type_attr_cache(type_id) { + Ok(Some(cached)) => cached, + _ => { + // Cache miss — fetch from API + match client.get_object_type_attributes(workspace_id, type_id).await { + Ok(defs) => { + let cached: Vec<CachedObjectTypeAttr> = defs + .iter() + .map(|d| CachedObjectTypeAttr { + id: d.id.clone(), + name: d.name.clone(), + system: d.system, + hidden: d.hidden, + label: d.label, + position: d.position, + }) + .collect(); + // Best-effort cache write + let _ = cache::write_object_type_attr_cache(type_id, &cached); + cached + } + Err(_) => { + // Graceful degradation: skip this type + eprintln!( + "Warning: could not fetch attribute definitions for object type {}", + type_id + ); + continue; + } + } + } + }; + + for attr in attrs { + attr_map.insert(attr.id.clone(), attr); + } + } + + Ok(attr_map) +} +``` + +- [ ] **Step 4: Make the function public in lib.rs** + +Check that `src/api/assets/objects.rs` module and the function are accessible from integration tests. The function is already `pub` and the module path `jr::api::assets::objects` should be accessible via `src/lib.rs`. Verify by checking that `src/api/mod.rs` has `pub mod assets;` and `src/api/assets/mod.rs` has `pub mod objects;`. + +Run: `cargo test --test assets enrich_search_attributes_injects_names` +Expected: PASS. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no formatting issues. 
+ +- [ ] **Step 6: Commit** + +```bash +git add src/api/assets/objects.rs tests/assets.rs +git commit -m "feat: add enrich_search_attributes function (#86)" +``` + +--- + +### Task 4: Update `handle_search` — JSON Enrichment + +**Files:** +- Modify: `src/cli/assets.rs` +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the failing integration test for enriched JSON output** + +Add this test at the end of `tests/assets.rs`: + +```rust +#[tokio::test] +async fn search_attributes_json_includes_names() { + let server = MockServer::start().await; + + // Mock: AQL search with attributes + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("includeAttributes", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" }, + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "OBJ-70", "displayValue": "OBJ-70" } + ] + }, + { + "id": "638", + "objectTypeAttributeId": "140", + "objectAttributeValues": [ + { "value": "New York", "displayValue": "New York" } + ] + } + ] + } + ] + }))) + .mount(&server) + .await; + + // Mock: object type attribute definitions + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/13/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + } + ]))) + .mount(&server) + .await; + + // Mock: workspace discovery (needed for CLI command) + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + 
.respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "--output", "json", + "assets", "search", "--attributes", + "objectType = Client", + ]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout).expect("valid JSON"); + let objects = parsed.as_array().expect("array of objects"); + assert_eq!(objects.len(), 1); + + let attrs = objects[0]["attributes"].as_array().expect("attributes array"); + // System attribute (Key) should be filtered out + // Only Location should remain + assert_eq!(attrs.len(), 1); + assert_eq!(attrs[0]["objectTypeAttribute"]["name"], "Location"); + assert_eq!(attrs[0]["objectTypeAttribute"]["position"], 5); + assert_eq!(attrs[0]["objectAttributeValues"][0]["displayValue"], "New York"); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test assets search_attributes_json_includes_names` +Expected: FAIL — current JSON output doesn't contain `objectTypeAttribute.name` and doesn't filter system attributes. 
+
+- [ ] **Step 3: Update `handle_search` in `src/cli/assets.rs`**
+
+Replace the `handle_search` function (lines 57-100) with:
+
+```rust
+async fn handle_search(
+    workspace_id: &str,
+    query: &str,
+    limit: Option<u32>,
+    attributes: bool,
+    output_format: &OutputFormat,
+    client: &JiraClient,
+) -> Result<()> {
+    let mut objects = client
+        .search_assets(workspace_id, query, limit, attributes)
+        .await?;
+
+    if attributes {
+        let attr_map =
+            crate::api::assets::objects::enrich_search_attributes(client, workspace_id, &mut objects)
+                .await?;
+
+        match output_format {
+            OutputFormat::Json => {
+                // Serialize to Value, inject objectTypeAttribute, filter system/hidden
+                let mut json_objects: Vec<serde_json::Value> = Vec::new();
+                for obj in &objects {
+                    let mut obj_value = serde_json::to_value(obj)?;
+                    if let Some(attrs_array) = obj_value
+                        .get_mut("attributes")
+                        .and_then(|a| a.as_array_mut())
+                    {
+                        // Inject objectTypeAttribute into each attribute
+                        for attr_value in attrs_array.iter_mut() {
+                            if let Some(attr_id) = attr_value
+                                .get("objectTypeAttributeId")
+                                .and_then(|v| v.as_str())
+                            {
+                                if let Some(def) = attr_map.get(attr_id) {
+                                    if let Some(map) = attr_value.as_object_mut() {
+                                        map.insert(
+                                            "objectTypeAttribute".to_string(),
+                                            serde_json::json!({
+                                                "name": def.name,
+                                                "position": def.position,
+                                            }),
+                                        );
+                                    }
+                                }
+                            }
+                        }
+                        // Filter out system and hidden attributes
+                        attrs_array.retain(|attr| {
+                            let attr_id = attr
+                                .get("objectTypeAttributeId")
+                                .and_then(|v| v.as_str())
+                                .unwrap_or("");
+                            match attr_map.get(attr_id) {
+                                Some(def) => !def.system && !def.hidden,
+                                None => true, // keep unknown attributes
+                            }
+                        });
+                        // Sort by position
+                        attrs_array.sort_by_key(|attr| {
+                            let attr_id = attr
+                                .get("objectTypeAttributeId")
+                                .and_then(|v| v.as_str())
+                                .unwrap_or("");
+                            attr_map.get(attr_id).map(|d| d.position).unwrap_or(i32::MAX)
+                        });
+                    }
+                    json_objects.push(obj_value);
+                }
+                println!("{}", output::render_json(&json_objects)?);
+            }
+            OutputFormat::Table => {
+                let rows: Vec<Vec<String>> = objects
+                    .iter()
+                    .map(|o| {
+                        let attr_str = format_inline_attributes(&o.attributes, &attr_map);
+                        vec![
+                            o.object_key.clone(),
+                            o.object_type.name.clone(),
+                            o.label.clone(),
+                            attr_str,
+                        ]
+                    })
+                    .collect();
+                output::print_output(
+                    output_format,
+                    &["Key", "Type", "Name", "Attributes"],
+                    &rows,
+                    &objects,
+                )?;
+            }
+        }
+        Ok(())
+    } else {
+        let rows: Vec<Vec<String>> = objects
+            .iter()
+            .map(|o| {
+                vec![
+                    o.object_key.clone(),
+                    o.object_type.name.clone(),
+                    o.label.clone(),
+                ]
+            })
+            .collect();
+        output::print_output(output_format, &["Key", "Type", "Name"], &rows, &objects)
+    }
+}
+```
+
+- [ ] **Step 4: Add the `format_inline_attributes` helper**
+
+Add this function to `src/cli/assets.rs`, before `handle_search` (or after it, before `handle_view`):
+
+```rust
+use crate::cache::CachedObjectTypeAttr;
+use crate::types::assets::AssetAttribute;
+
+/// Format attributes as inline `Name: Value` pairs for table display.
+///
+/// Filters out system, hidden, and label attributes. Sorts by position.
+/// Multi-value attributes use the first displayValue (or value as fallback).
+fn format_inline_attributes(
+    attributes: &[AssetAttribute],
+    attr_map: &std::collections::HashMap<String, CachedObjectTypeAttr>,
+) -> String {
+    let mut displayable: Vec<(&AssetAttribute, &CachedObjectTypeAttr)> = attributes
+        .iter()
+        .filter_map(|a| {
+            attr_map.get(&a.object_type_attribute_id).and_then(|def| {
+                if def.system || def.hidden || def.label {
+                    None
+                } else {
+                    Some((a, def))
+                }
+            })
+        })
+        .collect();
+    displayable.sort_by_key(|(_, def)| def.position);
+
+    displayable
+        .iter()
+        .filter_map(|(attr, def)| {
+            let value = attr.values.first().and_then(|v| {
+                v.display_value
+                    .as_deref()
+                    .or(v.value.as_deref())
+            });
+            value.map(|v| format!("{}: {}", def.name, v))
+        })
+        .collect::<Vec<_>>()
+        .join(" | ")
+}
+```
+
+- [ ] **Step 5: Run test to verify it passes**
+
+Run: `cargo test --test assets search_attributes_json_includes_names`
+Expected: PASS. 
+ +- [ ] **Step 6: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no formatting issues. + +- [ ] **Step 7: Commit** + +```bash +git add src/cli/assets.rs tests/assets.rs +git commit -m "feat: enrich assets search JSON with attribute names (#86)" +``` + +--- + +### Task 5: Table Output Integration Test + +**Files:** +- Test: `tests/assets.rs` + +- [ ] **Step 1: Write the integration test for enriched table output** + +Add this test at the end of `tests/assets.rs`: + +```rust +#[tokio::test] +async fn search_attributes_table_shows_inline_values() { + let server = MockServer::start().await; + + // Mock: AQL search with attributes + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("includeAttributes", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" }, + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "OBJ-70", "displayValue": "OBJ-70" } + ] + }, + { + "id": "639", + "objectTypeAttributeId": "142", + "objectAttributeValues": [ + { "value": "10", "displayValue": "10" } + ] + }, + { + "id": "638", + "objectTypeAttributeId": "140", + "objectAttributeValues": [ + { "value": "New York", "displayValue": "New York" } + ] + } + ] + } + ] + }))) + .mount(&server) + .await; + + // Mock: object type attribute definitions + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/13/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + { + "id": "142", + "name": "Seats", + "system": false, + "hidden": false, + 
"label": false, + "position": 4 + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + } + ]))) + .mount(&server) + .await; + + // Mock: workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "assets", "search", "--attributes", + "objectType = Client", + ]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + // Table should contain the Attributes column with inline values + // Seats (position 4) comes before Location (position 5) + assert!( + stdout.contains("Seats: 10"), + "Expected 'Seats: 10' in table, got: {stdout}" + ); + assert!( + stdout.contains("Location: New York"), + "Expected 'Location: New York' in table, got: {stdout}" + ); + // System attribute Key should NOT appear + assert!( + !stdout.contains("Key: OBJ-70"), + "System attribute Key should be filtered, got: {stdout}" + ); + // Should have Attributes header instead of Created/Updated + assert!( + stdout.contains("Attributes"), + "Expected 'Attributes' header in table, got: {stdout}" + ); + assert!( + !stdout.contains("Created"), + "Should not have Created column, got: {stdout}" + ); +} +``` + +- [ ] **Step 2: Run test to verify it passes** + +Run: `cargo test --test assets search_attributes_table_shows_inline_values` +Expected: PASS (the handler was already updated in Task 4). + +- [ ] **Step 3: Run the full test suite** + +Run: `cargo test` +Expected: All tests pass, including existing tests that should be unaffected. 
+ +- [ ] **Step 4: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no formatting issues. + +- [ ] **Step 5: Commit** + +```bash +git add tests/assets.rs +git commit -m "test: add integration test for enriched table output (#86)" +``` + +--- + +### Task 6: Documentation Updates + +**Files:** +- Modify: `CLAUDE.md` +- Modify: `README.md` + +- [ ] **Step 1: Update CLAUDE.md architecture comment** + +In `CLAUDE.md`, find the line describing `assets.rs`: +``` +│ ├── assets.rs # assets search/view/tickets (--open/--status client-side filtering) +``` + +Change it to: +``` +│ ├── assets.rs # assets search/view/tickets (--open/--status client-side filtering, search attribute enrichment) +``` + +- [ ] **Step 2: Update README.md assets search description** + +In `README.md`, find the command table row for `jr assets search`: +``` +| `jr assets search ` | Search assets via AQL query | +``` + +Change it to: +``` +| `jr assets search ` | Search assets via AQL query (`--attributes` resolves names) | +``` + +- [ ] **Step 3: Commit** + +```bash +git add CLAUDE.md README.md +git commit -m "docs: update CLAUDE.md and README for search attribute enrichment (#86)" +``` diff --git a/docs/superpowers/plans/2026-04-01-assets-tickets-status-filter.md b/docs/superpowers/plans/2026-04-01-assets-tickets-status-filter.md new file mode 100644 index 0000000..847d843 --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-assets-tickets-status-filter.md @@ -0,0 +1,459 @@ +# Assets Tickets Status Filtering Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `--open` and `--status` client-side filtering flags to `jr assets tickets`. + +**Architecture:** The Assets connected-tickets API has no server-side filtering. 
After fetching all tickets, filter client-side by status category color (`--open`) or status name (`--status`) before applying `--limit` truncation and display. Reuses the existing `partial_match` module for `--status` disambiguation.
+
+**Tech Stack:** Rust, clap, serde_json (tests)
+
+**Spec:** `docs/specs/assets-tickets-status-filter.md`
+
+---
+
+### Task 1: Add `--open` and `--status` CLI flags
+
+**Files:**
+- Modify: `src/cli/mod.rs:128-135` (AssetsCommand::Tickets)
+
+- [ ] **Step 1: Add the flags**
+
+In `src/cli/mod.rs`, update `AssetsCommand::Tickets` to add `--open` and `--status` with `conflicts_with`:
+
+```rust
+    /// Show Jira issues connected to an asset
+    Tickets {
+        /// Object key (e.g. OBJ-1) or numeric ID
+        key: String,
+        /// Maximum number of tickets to show
+        #[arg(long)]
+        limit: Option<u32>,
+        /// Show only open tickets (excludes Done status category)
+        #[arg(long, conflicts_with = "status")]
+        open: bool,
+        /// Filter by status (partial match supported)
+        #[arg(long, conflicts_with = "open")]
+        status: Option<String>,
+    },
+```
+
+- [ ] **Step 2: Verify compilation**
+
+Run: `cargo build`
+Expected: Compilation error at the `handle_tickets` call site (it doesn't accept the new fields yet — that's Task 2).
+
+Note: This will cause a compilation error because `handle_tickets` in `assets.rs` doesn't accept the new fields yet. If it fails, that's expected — Task 2 fixes it. 
+
+- [ ] **Step 3: Commit**
+
+```bash
+git add src/cli/mod.rs
+git commit -m "feat: add --open and --status flags to assets tickets (#89)"
+```
+
+---
+
+### Task 2: Wire flags into `handle_tickets` and add filtering
+
+**Files:**
+- Modify: `src/cli/assets.rs:8-38` (handle dispatch) and `src/cli/assets.rs:171-225` (handle_tickets)
+
+- [ ] **Step 1: Update the dispatch in `handle`**
+
+In `src/cli/assets.rs`, update the `Tickets` match arm to pass the new fields:
+
+```rust
+        AssetsCommand::Tickets {
+            key,
+            limit,
+            open,
+            status,
+        } => handle_tickets(&workspace_id, &key, limit, open, status, output_format, client).await,
+```
+
+- [ ] **Step 2: Update `handle_tickets` signature and add filtering**
+
+Replace the entire `handle_tickets` function:
+
+```rust
+async fn handle_tickets(
+    workspace_id: &str,
+    key: &str,
+    limit: Option<u32>,
+    open: bool,
+    status: Option<String>,
+    output_format: &OutputFormat,
+    client: &JiraClient,
+) -> Result<()> {
+    let object_id = objects::resolve_object_key(client, workspace_id, key).await?;
+    let resp = client
+        .get_connected_tickets(workspace_id, &object_id)
+        .await?;
+
+    // Apply status filtering before limit
+    let filtered = filter_tickets(resp.tickets, open, status.as_deref())?;
+
+    // Apply limit
+    let tickets: Vec<_> = match limit {
+        Some(n) => filtered.into_iter().take(n as usize).collect(),
+        None => filtered,
+    };
+
+    match output_format {
+        OutputFormat::Json => {
+            println!("{}", output::render_json(&tickets)?);
+        }
+        OutputFormat::Table => {
+            let rows: Vec<Vec<String>> = tickets
+                .iter()
+                .map(|t| {
+                    vec![
+                        t.key.clone(),
+                        t.issue_type
+                            .as_ref()
+                            .map(|it| it.name.clone())
+                            .unwrap_or_else(|| "\u{2014}".into()),
+                        t.title.clone(),
+                        t.status
+                            .as_ref()
+                            .map(|s| s.name.clone())
+                            .unwrap_or_else(|| "\u{2014}".into()),
+                        t.priority
+                            .as_ref()
+                            .map(|p| p.name.clone())
+                            .unwrap_or_else(|| "\u{2014}".into()),
+                    ]
+                })
+                .collect();
+
+            output::print_output(
+                output_format,
+                &["Key", "Type", "Title", "Status", "Priority"],
+                &rows,
+                &tickets,
+            )?;
+        }
+    }
+    Ok(())
+}
+```
+
+Note: JSON output now returns the filtered `tickets` array (not the full `ConnectedTicketsResponse` with `allTicketsQuery`). This is consistent — when you filter, the `allTicketsQuery` JQL no longer represents what's shown.
+
+- [ ] **Step 3: Add the `filter_tickets` function**
+
+Add above `handle_tickets` in `src/cli/assets.rs`:
+
+```rust
+use crate::error::JrError;
+use crate::partial_match::{self, MatchResult};
+use crate::types::assets::ConnectedTicket;
+
+/// Filter connected tickets by status. Returns the filtered list.
+///
+/// `--open`: exclude tickets where status.colorName == "green" (Done category).
+/// `--status`: partial match on status.name.
+/// Tickets with no status are included by --open, excluded by --status.
+fn filter_tickets(
+    tickets: Vec<ConnectedTicket>,
+    open: bool,
+    status: Option<&str>,
+) -> Result<Vec<ConnectedTicket>> {
+    if open {
+        return Ok(tickets
+            .into_iter()
+            .filter(|t| {
+                t.status
+                    .as_ref()
+                    .and_then(|s| s.color_name.as_deref())
+                    .map(|c| c != "green")
+                    .unwrap_or(true) // Include tickets with unknown status
+            })
+            .collect());
+    }
+
+    if let Some(status_input) = status {
+        // Collect unique status names from the response for disambiguation
+        let mut seen = std::collections::HashSet::new();
+        let status_names: Vec<String> = tickets
+            .iter()
+            .filter_map(|t| t.status.as_ref().map(|s| s.name.clone()))
+            .filter(|name| seen.insert(name.clone()))
+            .collect();
+
+        let matched = match partial_match::partial_match(status_input, &status_names) {
+            MatchResult::Exact(name) => name,
+            MatchResult::Ambiguous(matches) => {
+                return Err(JrError::UserError(format!(
+                    "Ambiguous status \"{}\". Matches: {}",
+                    status_input,
+                    matches.join(", ")
+                ))
+                .into());
+            }
+            MatchResult::None(all) => {
+                let available = if all.is_empty() {
+                    "none".to_string()
+                } else {
+                    all.join(", ")
+                };
+                return Err(JrError::UserError(format!(
+                    "No status matching \"{}\". 
Available: {}", + status_input, available + )) + .into()); + } + }; + + return Ok(tickets + .into_iter() + .filter(|t| { + t.status + .as_ref() + .map(|s| s.name == matched) + .unwrap_or(false) + }) + .collect()); + } + + // No filter + Ok(tickets) +} +``` + +- [ ] **Step 4: Verify compilation** + +Run: `cargo build` +Expected: Compiles without errors. + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/assets.rs +git commit -m "feat: wire --open and --status filtering into assets tickets (#89)" +``` + +--- + +### Task 3: Add unit tests for `filter_tickets` + +**Files:** +- Modify: `src/cli/assets.rs` (add `#[cfg(test)] mod tests` block) + +- [ ] **Step 1: Add test helpers and tests** + +Add at the bottom of `src/cli/assets.rs`: + +```rust +#[cfg(test)] +mod tests { + use super::*; + use crate::types::assets::{ConnectedTicket, TicketPriority, TicketStatus, TicketType}; + + fn make_ticket(key: &str, status_name: &str, color: &str) -> ConnectedTicket { + ConnectedTicket { + key: key.to_string(), + id: "1".to_string(), + title: format!("Ticket {}", key), + reporter: None, + created: None, + updated: None, + status: Some(TicketStatus { + name: status_name.to_string(), + color_name: Some(color.to_string()), + }), + issue_type: Some(TicketType { + name: "Task".to_string(), + }), + priority: Some(TicketPriority { + name: "Medium".to_string(), + }), + } + } + + fn make_ticket_no_status(key: &str) -> ConnectedTicket { + ConnectedTicket { + key: key.to_string(), + id: "1".to_string(), + title: format!("Ticket {}", key), + reporter: None, + created: None, + updated: None, + status: None, + issue_type: None, + priority: None, + } + } + + #[test] + fn filter_open_excludes_done() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + make_ticket("A-3", "To Do", "blue-gray"), + ]; + let result = filter_tickets(tickets, true, None).unwrap(); + assert_eq!(result.len(), 2); + assert_eq!(result[0].key, "A-1"); + 
assert_eq!(result[1].key, "A-3"); + } + + #[test] + fn filter_open_includes_no_status() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket_no_status("A-2"), + ]; + let result = filter_tickets(tickets, true, None).unwrap(); + assert_eq!(result.len(), 2); + } + + #[test] + fn filter_status_exact_match() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + make_ticket("A-3", "To Do", "blue-gray"), + ]; + let result = filter_tickets(tickets, false, Some("Done")).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].key, "A-2"); + } + + #[test] + fn filter_status_partial_match() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + ]; + let result = filter_tickets(tickets, false, Some("prog")).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].key, "A-1"); + } + + #[test] + fn filter_status_no_match() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + ]; + let result = filter_tickets(tickets, false, Some("Blocked")); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("No status matching")); + assert!(err.contains("In Progress")); + } + + #[test] + fn filter_status_ambiguous() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "In Review", "yellow"), + ]; + let result = filter_tickets(tickets, false, Some("In")); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("Ambiguous")); + } + + #[test] + fn filter_status_excludes_no_status() { + let tickets = vec![ + make_ticket("A-1", "Done", "green"), + make_ticket_no_status("A-2"), + ]; + let result = filter_tickets(tickets, false, Some("Done")).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].key, "A-1"); + } + + #[test] + fn no_filter_returns_all() { + let tickets = vec![ + 
make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + ]; + let result = filter_tickets(tickets, false, None).unwrap(); + assert_eq!(result.len(), 2); + } +} +``` + +- [ ] **Step 2: Run tests** + +Run: `cargo test filter_ --lib -- --nocapture` +Expected: All 8 tests pass. + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/assets.rs +git commit -m "test: add unit tests for assets tickets status filtering (#89)" +``` + +--- + +### Task 4: Add CLI smoke test for --open/--status conflict + +**Files:** +- Modify: `tests/cli_smoke.rs` + +- [ ] **Step 1: Add conflict test** + +Add to `tests/cli_smoke.rs`: + +```rust +#[test] +fn test_assets_tickets_open_and_status_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "assets", + "tickets", + "OBJ-1", + "--open", + "--status", + "Done", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} +``` + +- [ ] **Step 2: Run test** + +Run: `cargo test test_assets_tickets_open_and_status_conflict -- --nocapture` +Expected: PASS + +- [ ] **Step 3: Commit** + +```bash +git add tests/cli_smoke.rs +git commit -m "test: add CLI smoke test for --open/--status conflict (#89)" +``` + +--- + +### Task 5: Run full test suite and lint + +**Files:** None (verification only) + +- [ ] **Step 1: Run all tests** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: No warnings. + +- [ ] **Step 3: Run format check** + +Run: `cargo fmt --all -- --check` +Expected: No formatting issues. If any, run `cargo fmt --all` to fix. + +- [ ] **Step 4: If any issues, fix and commit** + +Fix any issues and commit. 
diff --git a/docs/superpowers/plans/2026-04-01-assets-view-default-attributes.md b/docs/superpowers/plans/2026-04-01-assets-view-default-attributes.md new file mode 100644 index 0000000..f22393c --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-assets-view-default-attributes.md @@ -0,0 +1,324 @@ +# Assets View Default Attributes Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Make `assets view` include attributes by default, replacing `--attributes` (opt-in) with `--no-attributes` (opt-out). + +**Architecture:** Change the CLI flag from `--attributes` to `--no-attributes` on `AssetsCommand::View`, invert the condition in `handle_view`, and update tests. No API or type changes. + +**Tech Stack:** Rust, clap (derive API), wiremock, assert_cmd + +--- + +## File Map + +| File | Action | Responsibility | +|------|--------|----------------| +| `src/cli/mod.rs:120-127` | Modify | Change `View` variant: `attributes: bool` → `no_attributes: bool` | +| `src/cli/assets.rs:34-35` | Modify | Update dispatch: pass `no_attributes` instead of `attributes` | +| `src/cli/assets.rs:103-185` | Modify | Invert condition in `handle_view`: `!no_attributes` replaces `attributes` | +| `tests/cli_smoke.rs` | Modify | Add `assets view --help` smoke test | +| `tests/assets.rs` | Modify | Add integration test for default attributes fetch | + +--- + +### Task 1: Change CLI flag from `--attributes` to `--no-attributes` + +**Files:** +- Modify: `src/cli/mod.rs:120-127` +- Modify: `src/cli/assets.rs:34-35` +- Modify: `src/cli/assets.rs:103-185` + +- [ ] **Step 1: Update the `AssetsCommand::View` variant in `src/cli/mod.rs`** + +Change lines 120-127 from: + +```rust + /// View asset details + View { + /// Object key (e.g. 
OBJ-1) or numeric ID + key: String, + /// Include object attributes in output + #[arg(long)] + attributes: bool, + }, +``` + +to: + +```rust + /// View asset details + View { + /// Object key (e.g. OBJ-1) or numeric ID + key: String, + /// Omit object attributes from output + #[arg(long)] + no_attributes: bool, + }, +``` + +- [ ] **Step 2: Update the dispatch in `src/cli/assets.rs`** + +Change lines 34-35 from: + +```rust + AssetsCommand::View { key, attributes } => { + handle_view(&workspace_id, &key, attributes, output_format, client).await +``` + +to: + +```rust + AssetsCommand::View { key, no_attributes } => { + handle_view(&workspace_id, &key, no_attributes, output_format, client).await +``` + +- [ ] **Step 3: Invert the condition in `handle_view` in `src/cli/assets.rs`** + +Change the function signature at line 103 from: + +```rust +async fn handle_view( + workspace_id: &str, + key: &str, + attributes: bool, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { +``` + +to: + +```rust +async fn handle_view( + workspace_id: &str, + key: &str, + no_attributes: bool, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { +``` + +Then change the two `if attributes {` guards (lines 115 and 150) to `if !no_attributes {`: + +Line 115: +```rust + if !no_attributes { +``` + +Line 150: +```rust + if !no_attributes { +``` + +- [ ] **Step 4: Verify it compiles** + +Run: `cargo build 2>&1 | head -20` +Expected: Build succeeds with no errors. + +- [ ] **Step 5: Run existing tests to verify nothing breaks** + +Run: `cargo test --lib -- assets 2>&1 | tail -20` +Expected: All existing `assets` unit tests pass (filter_tickets tests are unaffected). + +Run: `cargo test --test assets 2>&1 | tail -20` +Expected: All existing integration tests pass (they test API methods, not the CLI flag). 
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/cli/mod.rs src/cli/assets.rs
+git commit -m "$(cat <<'EOF'
+fix: show attributes by default in assets view (#85)
+
+Replace --attributes (opt-in) with --no-attributes (opt-out) on
+assets view. The default output now includes the attributes table
+and populated attributes in JSON, matching user expectations.
+EOF
+)"
+```
+
+---
+
+### Task 2: Add CLI smoke test for `assets view --help`
+
+**Files:**
+- Modify: `tests/cli_smoke.rs`
+
+- [ ] **Step 1: Write the smoke test**
+
+Add the following test to `tests/cli_smoke.rs`:
+
+```rust
+#[test]
+fn test_assets_view_help() {
+    Command::cargo_bin("jr")
+        .unwrap()
+        .args(["assets", "view", "--help"])
+        .assert()
+        .success()
+        .stdout(predicates::str::contains("--no-attributes"));
+}
+```
+
+- [ ] **Step 2: Run the test to verify it passes**
+
+Run: `cargo test --test cli_smoke test_assets_view_help 2>&1 | tail -10`
+Expected: PASS. The `--no-attributes` flag appears in help output.
+
+- [ ] **Step 3: Commit**
+
+```bash
+git add tests/cli_smoke.rs
+git commit -m "$(cat <<'EOF'
+test: add CLI smoke test for assets view --no-attributes (#85)
+EOF
+)"
+```
+
+---
+
+### Task 3: Add integration test verifying default attributes fetch
+
+**Files:**
+- Modify: `tests/assets.rs`
+
+- [ ] **Step 1: Write the integration test**
+
+Add the following test to `tests/assets.rs`. This tests that `get_object_attributes` is called and returns named attributes — validating the API layer that the default view path now exercises. 
+ +```rust +#[tokio::test] +async fn get_object_attributes_filters_system_and_hidden() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/88/attributes")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectTypeAttribute": { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + "objectAttributeValues": [ + { "value": "OBJ-88", "displayValue": "OBJ-88" } + ] + }, + { + "id": "640", + "objectTypeAttributeId": "135", + "objectTypeAttribute": { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1 + }, + "objectAttributeValues": [ + { "value": "Acme Corp", "displayValue": "Acme Corp" } + ] + }, + { + "id": "641", + "objectTypeAttributeId": "140", + "objectTypeAttribute": { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + }, + "objectAttributeValues": [ + { "value": "New York, NY", "displayValue": "New York, NY" } + ] + }, + { + "id": "642", + "objectTypeAttributeId": "141", + "objectTypeAttribute": { + "id": "141", + "name": "Internal Notes", + "system": false, + "hidden": true, + "label": false, + "position": 6 + }, + "objectAttributeValues": [ + { "value": "secret", "displayValue": "secret" } + ] + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let mut attrs = client.get_object_attributes("ws-123", "88").await.unwrap(); + + // Apply the same filter used by handle_view for JSON output + attrs.retain(|a| !a.object_type_attribute.system && !a.object_type_attribute.hidden); + attrs.sort_by_key(|a| a.object_type_attribute.position); + + // System (Key) and hidden (Internal Notes) are excluded + assert_eq!(attrs.len(), 2); + 
assert_eq!(attrs[0].object_type_attribute.name, "Name"); + assert_eq!(attrs[1].object_type_attribute.name, "Location"); + assert_eq!( + attrs[1].values[0].display_value.as_deref(), + Some("New York, NY") + ); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test assets get_object_attributes_filters 2>&1 | tail -10` +Expected: PASS. + +- [ ] **Step 3: Commit** + +```bash +git add tests/assets.rs +git commit -m "$(cat <<'EOF' +test: add integration test for attributes filtering (#85) +EOF +)" +``` + +--- + +### Task 4: Run full test suite and lint + +**Files:** None (verification only) + +- [ ] **Step 1: Run clippy** + +Run: `cargo clippy -- -D warnings 2>&1 | tail -20` +Expected: No warnings or errors. + +- [ ] **Step 2: Run format check** + +Run: `cargo fmt --all -- --check 2>&1 | tail -10` +Expected: No formatting issues. + +- [ ] **Step 3: Run full test suite** + +Run: `cargo test 2>&1 | tail -30` +Expected: All tests pass. Key tests to verify: +- `tests/cli_smoke.rs::test_assets_view_help` — PASS +- `tests/assets.rs::get_object_attributes_filters_system_and_hidden` — PASS +- `tests/assets.rs::get_object_attributes_returns_named_attributes` — PASS (existing, unchanged) +- All `cli::assets::tests::filter_*` unit tests — PASS (unchanged) diff --git a/docs/superpowers/plans/2026-04-01-issue-edit-description.md b/docs/superpowers/plans/2026-04-01-issue-edit-description.md new file mode 100644 index 0000000..a65181f --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-issue-edit-description.md @@ -0,0 +1,427 @@ +# Issue Edit Description Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. 
+
+**Goal:** Add `--description`, `--description-stdin`, and `--markdown` flags to `jr issue edit` so users can update issue descriptions without leaving the CLI.
+
+**Architecture:** Three new fields on the `Edit` clap variant, description-to-ADF conversion in `handle_edit` (same pattern as `handle_create`), integration tests verifying the PUT body sent to Jira.
+
+**Tech Stack:** Rust, clap (derive), serde_json, wiremock, assert_cmd
+
+---
+
+## File Map
+
+| File | Action | Responsibility |
+|------|--------|----------------|
+| `src/cli/mod.rs` | Modify (lines 230-257) | Add 3 fields to `IssueCommand::Edit` |
+| `src/cli/issue/create.rs` | Modify (lines 140-265) | Add description handling to `handle_edit` |
+| `tests/issue_commands.rs` | Modify (append) | Integration tests for edit + description |
+
+---
+
+### Task 1: Add CLI flags to `IssueCommand::Edit`
+
+**Files:**
+- Modify: `src/cli/mod.rs:230-257`
+
+- [ ] **Step 1: Add the three new fields to the `Edit` variant**
+
+In `src/cli/mod.rs`, add `description`, `description_stdin`, and `markdown` fields to `IssueCommand::Edit`, after the existing `parent` field:
+
+```rust
+    /// Edit issue fields
+    Edit {
+        /// Issue key
+        key: String,
+        /// New summary
+        #[arg(long)]
+        summary: Option<String>,
+        /// New issue type
+        #[arg(long = "type")]
+        issue_type: Option<String>,
+        /// New priority
+        #[arg(long)]
+        priority: Option<String>,
+        /// Add or remove labels (e.g., --label add:backend --label remove:frontend)
+        #[arg(long)]
+        label: Vec<String>,
+        /// Team assignment
+        #[arg(long)]
+        team: Option<String>,
+        /// Story points
+        #[arg(long, conflicts_with = "no_points")]
+        points: Option<f64>,
+        /// Clear story points
+        #[arg(long, conflicts_with = "points")]
+        no_points: bool,
+        /// Parent issue key
+        #[arg(long)]
+        parent: Option<String>,
+        /// Description
+        #[arg(short, long, conflicts_with = "description_stdin")]
+        description: Option<String>,
+        /// Read description from stdin (for piping)
+        #[arg(long, conflicts_with = "description")]
+        description_stdin: bool,
+        /// 
Interpret description as Markdown + #[arg(long)] + markdown: bool, + }, +``` + +- [ ] **Step 2: Verify it compiles** + +Run: `cargo build 2>&1 | head -30` + +Expected: Compilation error in `create.rs` because `handle_edit` destructuring doesn't include the new fields yet. This is expected — we'll fix it in Task 2. + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/mod.rs +git commit -m "feat: add --description, --description-stdin, --markdown flags to issue edit CLI (#82)" +``` + +--- + +### Task 2: Wire description handling into `handle_edit` + +**Files:** +- Modify: `src/cli/issue/create.rs:140-265` + +- [ ] **Step 1: Update the destructuring in `handle_edit` to include the new fields** + +In `src/cli/issue/create.rs`, change the `let IssueCommand::Edit { ... }` destructuring (around line 147) to include the three new fields: + +```rust + let IssueCommand::Edit { + key, + summary, + issue_type, + priority, + label: labels, + team, + points, + no_points, + parent, + description, + description_stdin, + markdown, + } = command + else { + unreachable!() + }; +``` + +- [ ] **Step 2: Add description resolution and ADF conversion** + +Insert the following block after `let mut has_updates = false;` (line 163) and before the `if let Some(ref s) = summary {` block (line 165): + +```rust + // Resolve description + let desc_text = if description_stdin { + let mut buf = String::new(); + std::io::Read::read_to_string(&mut std::io::stdin(), &mut buf)?; + Some(buf) + } else { + description + }; + + if let Some(ref text) = desc_text { + let adf_body = if markdown { + adf::markdown_to_adf(text) + } else { + adf::text_to_adf(text) + }; + fields["description"] = adf_body; + has_updates = true; + } +``` + +- [ ] **Step 3: Update the "no fields specified" error message** + +Change the bail message (around line 246) from: + +```rust + bail!( + "No fields specified to update. Use --summary, --type, --priority, --label, --team, --points, --no-points, or --parent." 
+ ); +``` + +To: + +```rust + bail!( + "No fields specified to update. Use --summary, --type, --priority, --label, --team, --points, --no-points, --parent, --description, or --description-stdin." + ); +``` + +- [ ] **Step 4: Verify it compiles and existing tests pass** + +Run: `cargo build && cargo test --lib 2>&1 | tail -5` + +Expected: Build succeeds, all existing tests pass. + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/issue/create.rs +git commit -m "feat: wire description handling into issue edit handler (#82)" +``` + +--- + +### Task 3: Integration test — edit with plain text description + +**Files:** +- Modify: `tests/issue_commands.rs` (append) + +- [ ] **Step 1: Write the integration test** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_edit_issue_with_description() { + let server = MockServer::start().await; + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-10")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "description": { + "version": 1, + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { "type": "text", "text": "Updated description" } + ] + } + ] + } + } + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + client + .edit_issue( + "FOO-10", + serde_json::json!({ + "description": jr::adf::text_to_adf("Updated description") + }), + ) + .await + .unwrap(); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test issue_commands test_edit_issue_with_description -- --exact 2>&1 | tail -5` + +Expected: PASS — this validates that `edit_issue` sends the correct PUT body with ADF description. 
+ +- [ ] **Step 3: Commit** + +```bash +git add tests/issue_commands.rs +git commit -m "test: add integration test for edit issue with description (#82)" +``` + +--- + +### Task 4: Integration test — edit with markdown description + +**Files:** +- Modify: `tests/issue_commands.rs` (append) + +- [ ] **Step 1: Write the integration test** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_edit_issue_with_markdown_description() { + let server = MockServer::start().await; + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-11")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "description": { + "version": 1, + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { + "type": "text", + "text": "bold text", + "marks": [{"type": "strong"}] + } + ] + } + ] + } + } + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + client + .edit_issue( + "FOO-11", + serde_json::json!({ + "description": jr::adf::markdown_to_adf("**bold text**") + }), + ) + .await + .unwrap(); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test issue_commands test_edit_issue_with_markdown_description -- --exact 2>&1 | tail -5` + +Expected: PASS — validates that markdown-to-ADF produces the expected bold markup in the PUT body. 
+ +- [ ] **Step 3: Commit** + +```bash +git add tests/issue_commands.rs +git commit -m "test: add integration test for edit issue with markdown description (#82)" +``` + +--- + +### Task 5: Integration test — edit with description combined with other fields + +**Files:** +- Modify: `tests/issue_commands.rs` (append) + +- [ ] **Step 1: Write the integration test** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_edit_issue_description_with_other_fields() { + let server = MockServer::start().await; + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-12")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "summary": "New summary", + "description": { + "version": 1, + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { "type": "text", "text": "New description" } + ] + } + ] + } + } + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + client + .edit_issue( + "FOO-12", + serde_json::json!({ + "summary": "New summary", + "description": jr::adf::text_to_adf("New description") + }), + ) + .await + .unwrap(); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test issue_commands test_edit_issue_description_with_other_fields -- --exact 2>&1 | tail -5` + +Expected: PASS — validates that description and summary can coexist in the same PUT body. 
+ +- [ ] **Step 3: Commit** + +```bash +git add tests/issue_commands.rs +git commit -m "test: add integration test for edit issue description with other fields (#82)" +``` + +--- + +### Task 6: CLI-level test — clap rejects conflicting flags + +**Files:** +- Modify: `tests/cli_smoke.rs` (append) + +- [ ] **Step 1: Write the CLI-level test for conflicting flags** + +Append to `tests/cli_smoke.rs`: + +```rust +#[test] +fn test_edit_description_and_description_stdin_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["issue", "edit", "FOO-1", "--description", "text", "--description-stdin"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test cli_smoke test_edit_description_and_description_stdin_conflict -- --exact 2>&1 | tail -5` + +Expected: PASS — clap rejects the conflicting flags at parse time. + +- [ ] **Step 3: Commit** + +```bash +git add tests/cli_smoke.rs +git commit -m "test: add CLI test for --description and --description-stdin conflict (#82)" +``` + +--- + +### Task 7: Final verification + +- [ ] **Step 1: Run all tests** + +Run: `cargo test 2>&1 | tail -10` + +Expected: All tests pass. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings 2>&1 | tail -10` + +Expected: Zero warnings. + +- [ ] **Step 3: Run format check** + +Run: `cargo fmt --all -- --check 2>&1 | tail -5` + +Expected: No formatting issues. diff --git a/docs/superpowers/plans/2026-04-01-issue-list-asset-filter.md b/docs/superpowers/plans/2026-04-01-issue-list-asset-filter.md new file mode 100644 index 0000000..f4f47b8 --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-issue-list-asset-filter.md @@ -0,0 +1,977 @@ +# Issue List `--asset` Filter Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. 
Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `--asset ` filter to `jr issue list` that generates an `aqlFunction()` JQL clause, composable with all existing filters. + +**Architecture:** CMDB field discovery is widened from ID-only to (ID, name) pairs so the JQL builder can reference fields by name (required by `aqlFunction()`). A new `build_asset_clause` function produces the AQL JQL fragment. The `--asset` flag auto-enables the existing `--assets` display column. + +**Tech Stack:** Rust, clap, reqwest, wiremock (tests), serde + +**Spec:** `docs/specs/issue-list-asset-filter.md` + +--- + +### Task 1: Widen `filter_cmdb_fields` to return (id, name) pairs + +**Files:** +- Modify: `src/api/jira/fields.rs:85-98` (filter_cmdb_fields) +- Modify: `src/api/jira/fields.rs:39-42` (find_cmdb_field_ids) + +- [ ] **Step 1: Update existing unit tests for new return type** + +In `src/api/jira/fields.rs`, update the four `filter_cmdb_fields_*` tests to expect `Vec<(String, String)>` tuples: + +```rust +#[test] +fn filter_cmdb_fields_finds_assets_type() { + let fields = vec![make_field( + "customfield_10191", + "Client", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + )]; + let result = filter_cmdb_fields(&fields); + assert_eq!( + result, + vec![("customfield_10191".to_string(), "Client".to_string())] + ); +} + +#[test] +fn filter_cmdb_fields_ignores_non_cmdb() { + let fields = vec![ + make_field( + "customfield_10031", + "Story Points", + true, + "number", + "com.atlassian.jira.plugin.system.customfieldtypes:float", + ), + make_field( + "customfield_10191", + "Client", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + ), + ]; + let result = filter_cmdb_fields(&fields); + assert_eq!( + result, + vec![("customfield_10191".to_string(), "Client".to_string())] + ); +} + +#[test] +fn filter_cmdb_fields_empty_when_no_cmdb() { + let fields = vec![make_field( + "customfield_10031", + "Story Points", + true, + 
"number",
+        "com.atlassian.jira.plugin.system.customfieldtypes:float",
+    )];
+    let result: Vec<(String, String)> = filter_cmdb_fields(&fields);
+    assert!(result.is_empty());
+}
+
+#[test]
+fn filter_cmdb_fields_multiple() {
+    let fields = vec![
+        make_field(
+            "customfield_10191",
+            "Client",
+            true,
+            "any",
+            "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype",
+        ),
+        make_field(
+            "customfield_10245",
+            "Server",
+            true,
+            "any",
+            "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype",
+        ),
+    ];
+    let result = filter_cmdb_fields(&fields);
+    assert_eq!(
+        result,
+        vec![
+            ("customfield_10191".to_string(), "Client".to_string()),
+            ("customfield_10245".to_string(), "Server".to_string()),
+        ]
+    );
+}
+```
+
+- [ ] **Step 2: Run tests to verify they fail**
+
+Run: `cargo test filter_cmdb_fields -- --nocapture`
+Expected: FAIL — return type mismatch.
+
+- [ ] **Step 3: Update `filter_cmdb_fields` to return `(id, name)` tuples**
+
+In `src/api/jira/fields.rs`, change the function:
+
+```rust
+pub fn filter_cmdb_fields(fields: &[Field]) -> Vec<(String, String)> {
+    fields
+        .iter()
+        .filter(|f| {
+            f.custom == Some(true)
+                && f.schema
+                    .as_ref()
+                    .and_then(|s| s.custom.as_deref())
+                    .map(|c| c == CMDB_SCHEMA_TYPE)
+                    .unwrap_or(false)
+        })
+        .map(|f| (f.id.clone(), f.name.clone()))
+        .collect()
+}
+```
+
+Also update `find_cmdb_field_ids` to match:
+
+```rust
+pub async fn find_cmdb_field_ids(&self) -> Result<Vec<(String, String)>> {
+    let fields = self.list_fields().await?;
+    Ok(filter_cmdb_fields(&fields))
+}
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+Run: `cargo test filter_cmdb_fields -- --nocapture`
+Expected: PASS
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add src/api/jira/fields.rs
+git commit -m "refactor: widen filter_cmdb_fields to return (id, name) pairs (#88)"
+```
+
+---
+
+### Task 2: Update cache to store (id, name) pairs
+
+**Files:**
+- Modify: `src/cache.rs:152-187` (CmdbFieldsCache, read/write functions)
+
+- [ ] **Step 1: Update cache unit tests for
new type** + +In `src/cache.rs`, update the `write_then_read_cmdb_fields_cache` test: + +```rust +#[test] +fn write_then_read_cmdb_fields_cache() { + with_temp_cache(|| { + write_cmdb_fields_cache(&[ + ("customfield_10191".into(), "Client".into()), + ("customfield_10245".into(), "Server".into()), + ]) + .unwrap(); + + let cache = read_cmdb_fields_cache().unwrap().expect("should exist"); + assert_eq!( + cache.fields, + vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ("customfield_10245".to_string(), "Server".to_string()), + ] + ); + }); +} +``` + +And the `expired_cmdb_fields_cache_returns_none` test: + +```rust +#[test] +fn expired_cmdb_fields_cache_returns_none() { + with_temp_cache(|| { + let expired = CmdbFieldsCache { + fields: vec![("customfield_10191".into(), "Client".into())], + fetched_at: Utc::now() - chrono::Duration::days(8), + }; + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + let content = serde_json::to_string_pretty(&expired).unwrap(); + std::fs::write(dir.join("cmdb_fields.json"), content).unwrap(); + + let result = read_cmdb_fields_cache().unwrap(); + assert!( + result.is_none(), + "expired cmdb fields cache should return None" + ); + }); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test cmdb_fields_cache -- --nocapture` +Expected: FAIL — `field_ids` does not exist, `fields` does not exist. 
+
+- [ ] **Step 3: Update `CmdbFieldsCache` and read/write functions**
+
+In `src/cache.rs`:
+
+```rust
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CmdbFieldsCache {
+    pub fields: Vec<(String, String)>,
+    pub fetched_at: DateTime<Utc>,
+}
+
+pub fn read_cmdb_fields_cache() -> Result<Option<CmdbFieldsCache>> {
+    let path = cache_dir().join("cmdb_fields.json");
+    if !path.exists() {
+        return Ok(None);
+    }
+
+    let content = std::fs::read_to_string(&path)?;
+    let cache: CmdbFieldsCache = serde_json::from_str(&content)?;
+
+    let age = Utc::now() - cache.fetched_at;
+    if age.num_days() >= CACHE_TTL_DAYS {
+        return Ok(None);
+    }
+
+    Ok(Some(cache))
+}
+
+pub fn write_cmdb_fields_cache(fields: &[(String, String)]) -> Result<()> {
+    let dir = cache_dir();
+    std::fs::create_dir_all(&dir)?;
+
+    let cache = CmdbFieldsCache {
+        fields: fields.to_vec(),
+        fetched_at: Utc::now(),
+    };
+
+    let content = serde_json::to_string_pretty(&cache)?;
+    std::fs::write(dir.join("cmdb_fields.json"), content)?;
+    Ok(())
+}
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+Run: `cargo test cmdb_fields_cache -- --nocapture`
+Expected: PASS
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add src/cache.rs
+git commit -m "refactor: update CmdbFieldsCache to store (id, name) pairs (#88)"
+```
+
+---
+
+### Task 3: Update `get_or_fetch_cmdb_field_ids` and callers
+
+**Files:**
+- Modify: `src/api/assets/linked.rs:12-19` (get_or_fetch_cmdb_field_ids)
+- Modify: `src/cli/issue/list.rs:261-276` (show_assets block)
+- Modify: `src/cli/issue/list.rs:507-509` (handle_view)
+- Modify: `src/cli/issue/assets.rs:16` (handle_issue_assets)
+
+- [ ] **Step 1: Update `get_or_fetch_cmdb_field_ids` return type**
+
+In `src/api/assets/linked.rs`, change:
+
+```rust
+/// Get CMDB fields (id, name pairs), using cache when available.
+pub async fn get_or_fetch_cmdb_fields(client: &JiraClient) -> Result<Vec<(String, String)>> {
+    if let Some(cached) = cache::read_cmdb_fields_cache()?
{
+        return Ok(cached.fields);
+    }
+
+    let fields = client.find_cmdb_field_ids().await?;
+    let _ = cache::write_cmdb_fields_cache(&fields);
+    Ok(fields)
+}
+
+/// Convenience: extract just the field IDs from CMDB fields.
+pub fn cmdb_field_ids(fields: &[(String, String)]) -> Vec<String> {
+    fields.iter().map(|(id, _)| id.clone()).collect()
+}
+```
+
+- [ ] **Step 2: Update callers in `list.rs` (show_assets block around line 261)**
+
+In `src/cli/issue/list.rs`, update the `cmdb_field_ids` variable block:
+
+```rust
+    let cmdb_fields = if show_assets {
+        let fields = get_or_fetch_cmdb_fields(client)
+            .await
+            .unwrap_or_default();
+        if fields.is_empty() {
+            eprintln!(
+                "warning: --assets ignored. No Assets custom fields found on this Jira instance."
+            );
+        }
+        fields
+    } else {
+        Vec::new()
+    };
+    let cmdb_field_ids = cmdb_field_ids(&cmdb_fields);
+    for f in &cmdb_field_ids {
+        extra.push(f.as_str());
+    }
+```
+
+Also update the import at the top of `list.rs` — change `get_or_fetch_cmdb_field_ids` to `get_or_fetch_cmdb_fields` and add `cmdb_field_ids`:
+
+```rust
+use crate::api::assets::linked::{
+    cmdb_field_ids, enrich_assets, extract_linked_assets, get_or_fetch_cmdb_fields,
+};
+```
+
+- [ ] **Step 3: Update `handle_view` in `list.rs` (around line 507)**
+
+Change the `cmdb_field_ids` call in `handle_view`:
+
+```rust
+    let cmdb_fields = get_or_fetch_cmdb_fields(client)
+        .await
+        .unwrap_or_default();
+    let cmdb_field_id_list = cmdb_field_ids(&cmdb_fields);
+    let mut extra: Vec<&str> = sp_field_id.iter().copied().collect();
+    for f in &cmdb_field_id_list {
+        extra.push(f.as_str());
+    }
+```
+
+And update the later reference to `cmdb_field_ids` in `handle_view` (around line 675):
+
+```rust
+    if !cmdb_field_id_list.is_empty() {
+        let mut linked = extract_linked_assets(&issue.fields.extra, &cmdb_field_id_list);
+```
+
+- [ ] **Step 4: Update `handle_issue_assets` in `assets.rs`**
+
+In `src/cli/issue/assets.rs`, update the import and the call:
+
+```rust
+use
crate::api::assets::linked::{ + cmdb_field_ids as get_cmdb_ids, enrich_assets, extract_linked_assets, get_or_fetch_cmdb_fields, +}; +``` + +Then in the function body: + +```rust + let cmdb_fields = get_or_fetch_cmdb_fields(client).await?; + let cmdb_field_ids = get_cmdb_ids(&cmdb_fields); + + if cmdb_field_ids.is_empty() { +``` + +And update the rest of the function to use `cmdb_field_ids` (the local variable, which is already `Vec` of IDs). + +- [ ] **Step 5: Verify the project compiles** + +Run: `cargo build` +Expected: Compiles without errors. + +- [ ] **Step 6: Run all tests** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 7: Commit** + +```bash +git add src/api/assets/linked.rs src/cli/issue/list.rs src/cli/issue/assets.rs +git commit -m "refactor: update CMDB field callers for (id, name) pairs (#88)" +``` + +--- + +### Task 4: Update integration tests for new CMDB field discovery + +**Files:** +- Modify: `tests/cmdb_fields.rs:51-63` (discover_cmdb_field_ids test) + +- [ ] **Step 1: Update integration test to expect (id, name) tuples** + +In `tests/cmdb_fields.rs`, update `discover_cmdb_field_ids`: + +```rust +#[tokio::test] +async fn discover_cmdb_field_ids() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/field")) + .respond_with(ResponseTemplate::new(200).set_body_json(fields_response_with_cmdb())) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let fields = client.find_cmdb_field_ids().await.unwrap(); + assert_eq!( + fields, + vec![("customfield_10191".to_string(), "Client".to_string())] + ); +} +``` + +And `discover_cmdb_field_ids_empty`: + +```rust +#[tokio::test] +async fn discover_cmdb_field_ids_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/field")) + .respond_with(ResponseTemplate::new(200).set_body_json(fields_response_no_cmdb())) + 
.mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let fields: Vec<(String, String)> = client.find_cmdb_field_ids().await.unwrap(); + assert!(fields.is_empty()); +} +``` + +- [ ] **Step 2: Run tests to verify they pass** + +Run: `cargo test --test cmdb_fields -- --nocapture` +Expected: PASS + +- [ ] **Step 3: Commit** + +```bash +git add tests/cmdb_fields.rs +git commit -m "test: update CMDB integration tests for (id, name) pairs (#88)" +``` + +--- + +### Task 5: Add asset key validation and `build_asset_clause` + +**Files:** +- Modify: `src/jql.rs` (add `validate_asset_key` and `build_asset_clause`) + +- [ ] **Step 1: Write failing tests for `validate_asset_key`** + +Add to the `mod tests` block in `src/jql.rs`: + +```rust +#[test] +fn validate_asset_key_valid_simple() { + assert!(validate_asset_key("CUST-5").is_ok()); +} + +#[test] +fn validate_asset_key_valid_long() { + assert!(validate_asset_key("SRV-42").is_ok()); +} + +#[test] +fn validate_asset_key_valid_itsm() { + assert!(validate_asset_key("ITSM-123").is_ok()); +} + +#[test] +fn validate_asset_key_invalid_no_number() { + assert!(validate_asset_key("CUST-").is_err()); +} + +#[test] +fn validate_asset_key_invalid_no_prefix() { + assert!(validate_asset_key("-5").is_err()); +} + +#[test] +fn validate_asset_key_invalid_no_hyphen() { + assert!(validate_asset_key("foo").is_err()); +} + +#[test] +fn validate_asset_key_invalid_empty() { + assert!(validate_asset_key("").is_err()); +} + +#[test] +fn validate_asset_key_invalid_spaces() { + assert!(validate_asset_key("CU ST-5").is_err()); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test validate_asset_key -- --nocapture` +Expected: FAIL — function not defined. + +- [ ] **Step 3: Implement `validate_asset_key`** + +Add to `src/jql.rs`: + +```rust +/// Validate an asset object key matches the SCHEMA-NUMBER format. 
+/// +/// Asset keys are always `-` (e.g., CUST-5, SRV-42, ITSM-123). +pub fn validate_asset_key(key: &str) -> Result<(), String> { + let Some((prefix, number)) = key.split_once('-') else { + return Err(format!( + "Invalid asset key \"{key}\". Expected format: SCHEMA-NUMBER (e.g., CUST-5, SRV-42)." + )); + }; + if prefix.is_empty() + || !prefix.chars().all(|c| c.is_ascii_alphanumeric()) + || number.is_empty() + || !number.chars().all(|c| c.is_ascii_digit()) + { + return Err(format!( + "Invalid asset key \"{key}\". Expected format: SCHEMA-NUMBER (e.g., CUST-5, SRV-42)." + )); + } + Ok(()) +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test validate_asset_key -- --nocapture` +Expected: PASS + +- [ ] **Step 5: Write failing tests for `build_asset_clause`** + +Add to the `mod tests` block in `src/jql.rs`: + +```rust +#[test] +fn build_asset_clause_single_field() { + let fields = vec![("customfield_10191".to_string(), "Client".to_string())]; + let clause = build_asset_clause("CUST-5", &fields); + assert_eq!( + clause, + r#""Client" IN aqlFunction("Key = \"CUST-5\"")"# + ); +} + +#[test] +fn build_asset_clause_multiple_fields() { + let fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ("customfield_10245".to_string(), "Server".to_string()), + ]; + let clause = build_asset_clause("SRV-42", &fields); + assert_eq!( + clause, + r#"("Client" IN aqlFunction("Key = \"SRV-42\"") OR "Server" IN aqlFunction("Key = \"SRV-42\""))"# + ); +} + +#[test] +fn build_asset_clause_field_name_with_quotes() { + let fields = vec![("customfield_10191".to_string(), r#"My "Assets""#.to_string())]; + let clause = build_asset_clause("OBJ-1", &fields); + assert_eq!( + clause, + r#""My \"Assets\"" IN aqlFunction("Key = \"OBJ-1\"")"# + ); +} +``` + +- [ ] **Step 6: Run tests to verify they fail** + +Run: `cargo test build_asset_clause -- --nocapture` +Expected: FAIL — function not defined. 
+
+- [ ] **Step 7: Implement `build_asset_clause`**
+
+Add to `src/jql.rs`:
+
+```rust
+/// Build a JQL clause that filters issues by a linked asset object key.
+///
+/// Uses `aqlFunction()` with the human-readable field name (required by Jira Cloud).
+/// When multiple CMDB fields exist, OR them together and wrap in parentheses.
+pub fn build_asset_clause(asset_key: &str, cmdb_fields: &[(String, String)]) -> String {
+    let clauses: Vec<String> = cmdb_fields
+        .iter()
+        .map(|(_, name)| {
+            format!(
+                "\"{}\" IN aqlFunction(\"Key = \\\"{}\\\"\")",
+                escape_value(name),
+                escape_value(asset_key),
+            )
+        })
+        .collect();
+
+    if clauses.len() == 1 {
+        clauses.into_iter().next().unwrap()
+    } else {
+        format!("({})", clauses.join(" OR "))
+    }
+}
+```
+
+- [ ] **Step 8: Run tests to verify they pass**
+
+Run: `cargo test build_asset_clause -- --nocapture`
+Expected: PASS
+
+- [ ] **Step 9: Commit**
+
+```bash
+git add src/jql.rs
+git commit -m "feat: add validate_asset_key and build_asset_clause (#88)"
+```
+
+---
+
+### Task 6: Add `--asset` CLI flag and wire into `handle_list`
+
+**Files:**
+- Modify: `src/cli/mod.rs:153-187` (IssueCommand::List)
+- Modify: `src/cli/issue/list.rs:56-86` (handle_list destructure)
+- Modify: `src/cli/issue/list.rs:180-188` (build_filter_clauses call)
+- Modify: `src/cli/issue/list.rs:244-276` (filter guard + cmdb block)
+
+- [ ] **Step 1: Add `--asset` field to `IssueCommand::List`**
+
+In `src/cli/mod.rs`, add after the `assets: bool` field in `IssueCommand::List`:
+
+```rust
+    /// Show linked assets column
+    #[arg(long)]
+    assets: bool,
+    /// Filter by linked asset object key (e.g., CUST-5)
+    #[arg(long)]
+    asset: Option<String>,
+```
+
+- [ ] **Step 2: Update the destructure in `handle_list`**
+
+In `src/cli/issue/list.rs`, update the destructure:
+
+```rust
+    let IssueCommand::List {
+        jql,
+        status,
+        team,
+        limit,
+        all,
+        assignee,
+        reporter,
+        recent,
+        open,
+        points: show_points,
+        assets: show_assets,
+        asset: asset_key,
+    } = command
+
else {
+        unreachable!()
+    };
+```
+
+- [ ] **Step 3: Add asset key validation, clause building, and auto-enable assets column**
+
+In `src/cli/issue/list.rs`, after the `--recent` validation block (around line 86) add:
+
+```rust
+    // Validate --asset key format early
+    if let Some(ref key) = asset_key {
+        crate::jql::validate_asset_key(key).map_err(JrError::UserError)?;
+    }
+```
+
+Then, after the team clause resolution block (around line 113), add the CMDB field resolution for `--asset`:
+
+```rust
+    // Resolve CMDB fields for --asset filter (needs field names for aqlFunction)
+    let asset_clause = if let Some(ref key) = asset_key {
+        let cmdb_fields = get_or_fetch_cmdb_fields(client).await?;
+        if cmdb_fields.is_empty() {
+            return Err(JrError::UserError(
+                "--asset requires Assets custom fields on this Jira instance. \
+                 Assets requires Jira Service Management Premium or Enterprise."
+                    .into(),
+            )
+            .into());
+        }
+        Some(crate::jql::build_asset_clause(key, &cmdb_fields))
+    } else {
+        None
+    };
+```
+
+- [ ] **Step 4: Pass asset clause into filter building**
+
+Update the `build_filter_clauses` call to include the asset clause:
+
+```rust
+    let filter_parts = build_filter_clauses(
+        assignee_jql.as_deref(),
+        reporter_jql.as_deref(),
+        resolved_status.as_deref(),
+        team_clause.as_deref(),
+        recent.as_deref(),
+        open,
+        asset_clause.as_deref(),
+    );
+```
+
+Update the `build_filter_clauses` function signature and body:
+
+```rust
+fn build_filter_clauses(
+    assignee_jql: Option<&str>,
+    reporter_jql: Option<&str>,
+    status: Option<&str>,
+    team_clause: Option<&str>,
+    recent: Option<&str>,
+    open: bool,
+    asset_clause: Option<&str>,
+) -> Vec<String> {
+    let mut parts = Vec::new();
+    if let Some(a) = assignee_jql {
+        parts.push(format!("assignee = {a}"));
+    }
+    if let Some(r) = reporter_jql {
+        parts.push(format!("reporter = {r}"));
+    }
+    if let Some(s) = status {
+        parts.push(format!("status = \"{}\"", crate::jql::escape_value(s)));
+    }
+    if open {
+
parts.push("statusCategory != Done".to_string()); + } + if let Some(t) = team_clause { + parts.push(t.to_string()); + } + if let Some(d) = recent { + parts.push(format!("created >= -{d}")); + } + if let Some(a) = asset_clause { + parts.push(a.to_string()); + } + parts +} +``` + +- [ ] **Step 5: Auto-enable `--assets` display column when `--asset` is set** + +In `handle_list`, after the destructure, add: + +```rust + // Auto-enable assets display column when filtering by asset + let show_assets = show_assets || asset_key.is_some(); +``` + +- [ ] **Step 6: Update the "no scope" guard error message** + +Update the guard at line 251 to mention `--asset`: + +```rust + return Err(JrError::UserError( + "No project or filters specified. Use --project, --assignee, --reporter, --status, --open, --team, --recent, --asset, or --jql. \ + You can also set a default project in .jr.toml or run \"jr init\"." + .into(), + ) +``` + +- [ ] **Step 7: Verify compilation** + +Run: `cargo build` +Expected: Compiles without errors. + +- [ ] **Step 8: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/list.rs +git commit -m "feat: add --asset filter to issue list (#88)" +``` + +--- + +### Task 7: Update `build_filter_clauses` unit tests + +**Files:** +- Modify: `src/cli/issue/list.rs` (existing tests in `mod tests`) + +- [ ] **Step 1: Update all existing `build_filter_clauses` tests** + +Every existing call to `build_filter_clauses` needs the new `asset_clause` parameter (pass `None`). 
Update each test: + +```rust +#[test] +fn build_jql_parts_assignee_me() { + let parts = build_filter_clauses(Some("currentUser()"), None, None, None, None, false, None); + assert_eq!(parts, vec!["assignee = currentUser()"]); +} + +#[test] +fn build_jql_parts_reporter_account_id() { + let parts = build_filter_clauses( + None, + Some("5b10ac8d82e05b22cc7d4ef5"), + None, + None, + None, + false, + None, + ); + assert_eq!(parts, vec!["reporter = 5b10ac8d82e05b22cc7d4ef5"]); +} + +#[test] +fn build_jql_parts_recent() { + let parts = build_filter_clauses(None, None, None, None, Some("7d"), false, None); + assert_eq!(parts, vec!["created >= -7d"]); +} + +#[test] +fn build_jql_parts_all_filters() { + let parts = build_filter_clauses( + Some("currentUser()"), + Some("currentUser()"), + Some("In Progress"), + Some(r#"customfield_10001 = "uuid-123""#), + Some("30d"), + false, + None, + ); + assert_eq!(parts.len(), 5); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"reporter = currentUser()".to_string())); + assert!(parts.contains(&"status = \"In Progress\"".to_string())); + assert!(parts.contains(&r#"customfield_10001 = "uuid-123""#.to_string())); + assert!(parts.contains(&"created >= -30d".to_string())); +} + +#[test] +fn build_jql_parts_empty() { + let parts = build_filter_clauses(None, None, None, None, None, false, None); + assert!(parts.is_empty()); +} + +#[test] +fn build_jql_parts_jql_plus_status_compose() { + let filter = build_filter_clauses(None, None, Some("Done"), None, None, false, None); + let mut all_parts = vec!["type = Bug".to_string()]; + all_parts.extend(filter); + let jql = all_parts.join(" AND "); + assert_eq!(jql, r#"type = Bug AND status = "Done""#); +} + +#[test] +fn build_jql_parts_status_escaping() { + let parts = + build_filter_clauses(None, None, Some(r#"He said "hi" \o/"#), None, None, false, None); + assert_eq!(parts, vec![r#"status = "He said \"hi\" \\o/""#.to_string()]); +} + +#[test] +fn 
build_jql_parts_open() { + let parts = build_filter_clauses(None, None, None, None, None, true, None); + assert_eq!(parts, vec!["statusCategory != Done"]); +} + +#[test] +fn build_jql_parts_open_with_assignee() { + let parts = build_filter_clauses(Some("currentUser()"), None, None, None, None, true, None); + assert_eq!(parts.len(), 2); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"statusCategory != Done".to_string())); +} + +#[test] +fn build_jql_parts_all_filters_with_open() { + let parts = build_filter_clauses( + Some("currentUser()"), + Some("currentUser()"), + None, + Some(r#"customfield_10001 = "uuid-123""#), + Some("30d"), + true, + None, + ); + assert_eq!(parts.len(), 5); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"reporter = currentUser()".to_string())); + assert!(parts.contains(&"statusCategory != Done".to_string())); + assert!(parts.contains(&r#"customfield_10001 = "uuid-123""#.to_string())); + assert!(parts.contains(&"created >= -30d".to_string())); +} +``` + +- [ ] **Step 2: Add new test for asset clause** + +```rust +#[test] +fn build_jql_parts_asset_clause() { + let clause = r#""Client" IN aqlFunction("Key = \"CUST-5\"")"#; + let parts = build_filter_clauses(None, None, None, None, None, false, Some(clause)); + assert_eq!(parts, vec![clause.to_string()]); +} + +#[test] +fn build_jql_parts_asset_with_assignee() { + let clause = r#""Client" IN aqlFunction("Key = \"CUST-5\"")"#; + let parts = build_filter_clauses( + Some("currentUser()"), + None, + None, + None, + None, + false, + Some(clause), + ); + assert_eq!(parts.len(), 2); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&clause.to_string())); +} +``` + +- [ ] **Step 3: Run tests to verify they pass** + +Run: `cargo test build_jql_parts -- --nocapture` +Expected: PASS + +- [ ] **Step 4: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m 
"test: update build_filter_clauses tests for --asset parameter (#88)" +``` + +--- + +### Task 8: Run full test suite and lint + +**Files:** None (verification only) + +- [ ] **Step 1: Run all tests** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: No warnings. + +- [ ] **Step 3: Run format check** + +Run: `cargo fmt --all -- --check` +Expected: No formatting issues. + +- [ ] **Step 4: If any issues, fix and commit** + +Fix any issues found in steps 1-3 and commit the fixes. diff --git a/docs/superpowers/plans/2026-04-01-resolve-asset-custom-fields.md b/docs/superpowers/plans/2026-04-01-resolve-asset-custom-fields.md new file mode 100644 index 0000000..2bf9d4d --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-resolve-asset-custom-fields.md @@ -0,0 +1,743 @@ +# Resolve Asset-Typed Custom Fields Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Enrich CMDB custom fields in JSON output, show per-field asset rows in `issue view`, and add CMDB fields to `project fields`. + +**Architecture:** All three features build on the existing CMDB field discovery (`get_or_fetch_cmdb_fields` returning `(id, name)` pairs) and the `extract_linked_assets` + `enrich_assets` pipeline. JSON enrichment mutates the `fields.extra` HashMap entries in-place before serialization. Per-field rows iterate CMDB fields individually instead of lumping them into one "Assets" row. `project fields` appends a section using the cached field metadata. 
+ +**Tech Stack:** Rust, serde_json, wiremock (tests) + +**Spec:** `docs/specs/resolve-asset-custom-fields.md` + +**Dependency:** This plan assumes PR #88 (`feat/issue-list-asset-filter`) is merged to `develop` first — it provides `get_or_fetch_cmdb_fields` returning `Vec<(String, String)>` and the `cmdb_field_ids` helper. + +--- + +### Task 1: Add `extract_linked_assets_per_field` to `linked.rs` + +**Files:** +- Modify: `src/api/assets/linked.rs` + +- [ ] **Step 1: Write failing tests** + +Add to the `mod tests` block in `src/api/assets/linked.rs`: + +```rust +#[test] +fn extract_per_field_single_field() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".into(), + json!([{"label": "Acme Corp", "objectKey": "OBJ-1"}]), + ); + let cmdb_fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert_eq!(result.len(), 1); + assert_eq!(result[0].0, "Client"); + assert_eq!(result[0].1.len(), 1); + assert_eq!(result[0].1[0].key.as_deref(), Some("OBJ-1")); +} + +#[test] +fn extract_per_field_multiple_fields() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".into(), + json!([{"label": "Acme Corp", "objectKey": "OBJ-1"}]), + ); + extra.insert( + "customfield_10245".into(), + json!([{"label": "Email Server", "objectKey": "SRV-42"}]), + ); + let cmdb_fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ("customfield_10245".to_string(), "Affected Service".to_string()), + ]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, "Client"); + assert_eq!(result[1].0, "Affected Service"); +} + +#[test] +fn extract_per_field_skips_empty() { + let mut extra = HashMap::new(); + extra.insert("customfield_10191".into(), json!(null)); + extra.insert( + "customfield_10245".into(), + json!([{"label": "Email Server", "objectKey": "SRV-42"}]), + ); + let 
cmdb_fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ("customfield_10245".to_string(), "Affected Service".to_string()), + ]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert_eq!(result.len(), 1); + assert_eq!(result[0].0, "Affected Service"); +} + +#[test] +fn extract_per_field_missing_field() { + let extra = HashMap::new(); + let cmdb_fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert!(result.is_empty()); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test extract_per_field -- --nocapture` +Expected: FAIL — function not defined. + +- [ ] **Step 3: Implement `extract_linked_assets_per_field`** + +Add to `src/api/assets/linked.rs`: + +```rust +/// Extract linked assets grouped by CMDB field, returning (field_name, assets) pairs. +/// Skips fields that have no linked assets on the issue. +pub fn extract_linked_assets_per_field( + extra: &HashMap<String, Value>, + cmdb_fields: &[(String, String)], +) -> Vec<(String, Vec<LinkedAsset>)> { + let mut result = Vec::new(); + for (field_id, field_name) in cmdb_fields { + let assets = extract_linked_assets(extra, &[field_id.clone()]); + if !assets.is_empty() { + result.push((field_name.clone(), assets)); + } + } + result +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test extract_per_field -- --nocapture` +Expected: PASS + +- [ ] **Step 5: Commit** + +```bash +git add src/api/assets/linked.rs +git commit -m "feat: add extract_linked_assets_per_field for per-field asset display (#90)" +``` + +--- + +### Task 2: Add `enrich_json_assets` to `linked.rs` + +**Files:** +- Modify: `src/api/assets/linked.rs` + +- [ ] **Step 1: Write failing tests** + +Add to the `mod tests` block in `src/api/assets/linked.rs`: + +```rust +#[test] +fn enrich_json_injects_resolved_fields() { + let mut extra = HashMap::new(); + extra.insert(
"customfield_10191".to_string(), + json!([{"objectId": "88", "workspaceId": "ws-1"}]), + ); + + let per_field = vec![( + "customfield_10191".to_string(), + vec![LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-1".into()), + key: Some("OBJ-88".into()), + name: Some("Acme Corp".into()), + asset_type: Some("Client".into()), + }], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let enriched = &extra["customfield_10191"]; + let arr = enriched.as_array().unwrap(); + assert_eq!(arr.len(), 1); + // Original fields preserved + assert_eq!(arr[0]["objectId"], "88"); + assert_eq!(arr[0]["workspaceId"], "ws-1"); + // Enriched fields injected + assert_eq!(arr[0]["objectKey"], "OBJ-88"); + assert_eq!(arr[0]["label"], "Acme Corp"); + assert_eq!(arr[0]["objectType"], "Client"); +} + +#[test] +fn enrich_json_preserves_already_enriched() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".to_string(), + json!([{"objectKey": "OBJ-1", "label": "Already There"}]), + ); + + let per_field = vec![( + "customfield_10191".to_string(), + vec![LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Already There".into()), + ..Default::default() + }], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let enriched = &extra["customfield_10191"]; + let arr = enriched.as_array().unwrap(); + assert_eq!(arr[0]["objectKey"], "OBJ-1"); + assert_eq!(arr[0]["label"], "Already There"); +} + +#[test] +fn enrich_json_partial_enrichment() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".to_string(), + json!([ + {"objectId": "88", "workspaceId": "ws-1"}, + {"objectId": "99", "workspaceId": "ws-1"} + ]), + ); + + // Only first asset was resolved + let per_field = vec![( + "customfield_10191".to_string(), + vec![ + LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-1".into()), + key: Some("OBJ-88".into()), + name: Some("Acme".into()), + asset_type: Some("Client".into()), + }, + LinkedAsset { + id: Some("99".into()), + 
workspace_id: Some("ws-1".into()), + key: None, + name: None, + asset_type: None, + }, + ], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let arr = extra["customfield_10191"].as_array().unwrap(); + // First asset enriched + assert_eq!(arr[0]["objectKey"], "OBJ-88"); + // Second asset: no enrichment injected (key/name were None) + assert!(arr[1].get("objectKey").is_none()); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test enrich_json -- --nocapture` +Expected: FAIL — function not defined. + +- [ ] **Step 3: Implement `enrich_json_assets`** + +Add to `src/api/assets/linked.rs`: + +```rust +/// Inject enriched asset data back into the issue's `fields.extra` HashMap. +/// +/// For each CMDB field, matches enriched `LinkedAsset` entries by position to the +/// original JSON array elements and injects `objectKey`, `label`, and `objectType` +/// as additional fields (additive, does not remove existing fields). +pub fn enrich_json_assets( + extra: &mut HashMap<String, Value>, + per_field: &[(String, Vec<LinkedAsset>)], +) { + for (field_id, assets) in per_field { + let Some(value) = extra.get_mut(field_id) else { + continue; + }; + let Some(arr) = value.as_array_mut() else { + continue; + }; + + for (i, asset) in assets.iter().enumerate() { + if i >= arr.len() { + break; + } + let Some(obj) = arr[i].as_object_mut() else { + continue; + }; + if let Some(ref key) = asset.key { + obj.insert("objectKey".to_string(), Value::String(key.clone())); + } + if let Some(ref name) = asset.name { + obj.insert("label".to_string(), Value::String(name.clone())); + } + if let Some(ref asset_type) = asset.asset_type { + obj.insert("objectType".to_string(), Value::String(asset_type.clone())); + } + } + } +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cargo test enrich_json -- --nocapture` +Expected: PASS + +- [ ] **Step 5: Commit** + +```bash +git add src/api/assets/linked.rs +git commit -m "feat: add enrich_json_assets for JSON output enrichment (#90)" +```
+ +--- + +### Task 3: Update `handle_view` — per-field rows + JSON enrichment + +**Files:** +- Modify: `src/cli/issue/list.rs` (`handle_view` function, around lines 496-702) + +This task depends on PR #88 being merged. The code references `get_or_fetch_cmdb_fields` (returning `Vec<(String, String)>`), `cmdb_field_ids`, and other changes from that PR. + +- [ ] **Step 1: Update imports** + +In `src/cli/issue/list.rs`, update the `linked` import to include the new functions: + +```rust +use crate::api::assets::linked::{ + cmdb_field_ids, enrich_assets, enrich_json_assets, extract_linked_assets_per_field, + get_or_fetch_cmdb_fields, +}; +``` + +Remove `extract_linked_assets` and `format_linked_assets` from the import if they are no longer used in this file (check `handle_list` — it still uses `extract_linked_assets` for the list table; keep it if so). + +- [ ] **Step 2: Rewrite the `handle_view` function** + +Replace the body of `handle_view` with: + +```rust +pub(super) async fn handle_view( + command: IssueCommand, + output_format: &OutputFormat, + config: &Config, + client: &JiraClient, +) -> Result<()> { + let IssueCommand::View { key } = command else { + unreachable!() + }; + + let sp_field_id = config.global.fields.story_points_field_id.as_deref(); + let cmdb_fields = get_or_fetch_cmdb_fields(client) + .await + .unwrap_or_default(); + let cmdb_field_id_list = cmdb_field_ids(&cmdb_fields); + let mut extra: Vec<&str> = sp_field_id.iter().copied().collect(); + for f in &cmdb_field_id_list { + extra.push(f.as_str()); + } + let mut issue = client.get_issue(&key, &extra).await?; + + // Extract and enrich assets per-field (shared by both JSON and table paths) + let mut per_field_assets = if !cmdb_fields.is_empty() { + let per_field = extract_linked_assets_per_field(&issue.fields.extra, &cmdb_fields); + // Collect all assets for batch enrichment + let mut all_assets: Vec<LinkedAsset> = per_field + .iter() + .flat_map(|(_, assets)| assets.clone()) + .collect(); + enrich_assets(client,
&mut all_assets).await; + + // Redistribute enriched assets back to per-field structure + let mut enriched_per_field = Vec::new(); + let mut offset = 0; + for (field_name, assets) in &per_field { + let enriched = all_assets[offset..offset + assets.len()].to_vec(); + offset += assets.len(); + enriched_per_field.push((field_name.clone(), enriched)); + } + enriched_per_field + } else { + Vec::new() + }; + + match output_format { + OutputFormat::Json => { + // Build (field_id, enriched_assets) for JSON injection + if !per_field_assets.is_empty() { + let per_field_by_id: Vec<(String, Vec<LinkedAsset>)> = cmdb_fields + .iter() + .filter_map(|(id, name)| { + per_field_assets + .iter() + .find(|(n, _)| n == name) + .map(|(_, assets)| (id.clone(), assets.clone())) + }) + .collect(); + enrich_json_assets(&mut issue.fields.extra, &per_field_by_id); + } + println!("{}", output::render_json(&issue)?); + } + OutputFormat::Table => { + let desc_text = issue + .fields + .description + .as_ref() + .map(adf::adf_to_text) + .unwrap_or_else(|| "(no description)".into()); + + let mut rows = vec![ + vec!["Key".into(), issue.key.clone()], + vec!["Summary".into(), issue.fields.summary.clone()], + vec![ + "Type".into(), + issue + .fields + .issue_type + .as_ref() + .map(|t| t.name.clone()) + .unwrap_or_default(), + ], + vec![ + "Status".into(), + issue + .fields + .status + .as_ref() + .map(|s| s.name.clone()) + .unwrap_or_default(), + ], + vec![ + "Priority".into(), + issue + .fields + .priority + .as_ref() + .map(|p| p.name.clone()) + .unwrap_or_default(), + ], + vec![ + "Assignee".into(), + issue + .fields + .assignee + .as_ref() + .map(|a| a.display_name.clone()) + .unwrap_or_else(|| "Unassigned".into()), + ], + vec![ + "Reporter".into(), + issue + .fields + .reporter + .as_ref() + .map(|r| r.display_name.clone()) + .unwrap_or_else(|| "(none)".into()), + ], + vec![ + "Created".into(), + issue + .fields + .created + .as_deref() + .map(format_comment_date) + .unwrap_or_else(|| "-".into()), + ],
vec![ + "Updated".into(), + issue + .fields + .updated + .as_deref() + .map(format_comment_date) + .unwrap_or_else(|| "-".into()), + ], + vec![ + "Project".into(), + issue + .fields + .project + .as_ref() + .map(|p| format!("{} ({})", p.name.as_deref().unwrap_or(""), p.key)) + .unwrap_or_default(), + ], + vec![ + "Labels".into(), + issue + .fields + .labels + .as_ref() + .filter(|l| !l.is_empty()) + .map(|l| l.join(", ")) + .unwrap_or_else(|| "(none)".into()), + ], + ]; + + rows.push(vec![ + "Parent".into(), + issue + .fields + .parent + .as_ref() + .map(|p| { + let summary = p + .fields + .as_ref() + .and_then(|f| f.summary.as_deref()) + .unwrap_or(""); + format!("{} ({})", p.key, summary) + }) + .unwrap_or_else(|| "(none)".into()), + ]); + + let links_display = issue + .fields + .issuelinks + .as_ref() + .filter(|links| !links.is_empty()) + .map(|links| { + links + .iter() + .map(|link| { + if let Some(ref outward) = link.outward_issue { + let desc = link + .link_type + .outward + .as_deref() + .unwrap_or(&link.link_type.name); + let summary = outward + .fields + .as_ref() + .and_then(|f| f.summary.as_deref()) + .unwrap_or(""); + format!("{} {} ({})", desc, outward.key, summary) + } else if let Some(ref inward) = link.inward_issue { + let desc = link + .link_type + .inward + .as_deref() + .unwrap_or(&link.link_type.name); + let summary = inward + .fields + .as_ref() + .and_then(|f| f.summary.as_deref()) + .unwrap_or(""); + format!("{} {} ({})", desc, inward.key, summary) + } else { + link.link_type.name.clone() + } + }) + .collect::<Vec<_>>() + .join("\n") + }) + .unwrap_or_else(|| "(none)".into()); + rows.push(vec!["Links".into(), links_display]); + + // Per-field asset rows (replaces the old single "Assets" row) + for (field_name, assets) in &per_field_assets { + let display = format_linked_assets(assets); + rows.push(vec![field_name.clone(), display]); + } + + if let Some(field_id) = sp_field_id { + let points_display = issue + .fields + .story_points(field_id)
.map(format::format_points) + .unwrap_or_else(|| "(none)".into()); + rows.push(vec!["Points".into(), points_display]); + } + + rows.push(vec!["Description".into(), desc_text]); + + println!("{}", output::render_table(&["Field", "Value"], &rows)); + } + } + + Ok(()) +} +``` + +Note: the `issue` binding must become `let mut issue = ...` because we mutate `issue.fields.extra` for JSON enrichment. + +- [ ] **Step 3: Verify compilation** + +Run: `cargo build` +Expected: Compiles without errors. + +- [ ] **Step 4: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "feat: per-field asset rows and JSON enrichment in issue view (#90)" +``` + +--- + +### Task 4: Update `handle_list` — JSON enrichment when `--assets` active + +**Files:** +- Modify: `src/cli/issue/list.rs` (`handle_list` function, around lines 280-366) + +Currently the `issue list` enrichment block resolves assets for table mode but doesn't inject them into JSON. The change: after enrichment, if output is JSON and `--assets` is active, inject the enriched data into each issue's `fields.extra`. + +The `issue_assets` vec is a flat list per issue, not grouped by field. Rather than complex offset tracking, use a simple approach: for each issue, build a per-field-ID mapping using the already-enriched `issue_assets` data. + +- [ ] **Step 1: Make `issues` mutable** + +Change `let issues = search_result.issues;` to `let mut issues = search_result.issues;`. + +- [ ] **Step 2: Add JSON enrichment after the existing enrichment block** + +In `handle_list`, after the closing `}` of the `if show_assets_col { ... 
}` block and before the `let rows: Vec<Vec<String>> = ...` line, add: + +```rust + // For JSON output with --assets, inject enriched data back into issue JSON + if show_assets_col && matches!(output_format, OutputFormat::Json) { + for (i, issue) in issues.iter_mut().enumerate() { + // Re-extract per field to get field_id grouping, then match by position + // to the enriched issue_assets[i] which has the same total ordering + let mut per_field_by_id: Vec<(String, Vec<LinkedAsset>)> = Vec::new(); + let mut offset = 0; + for field_id in &cmdb_field_id_list { + let count = extract_linked_assets( + &issue.fields.extra, + &[field_id.clone()], + ) + .len(); + if count > 0 && offset + count <= issue_assets[i].len() { + let enriched = issue_assets[i][offset..offset + count].to_vec(); + per_field_by_id.push((field_id.clone(), enriched)); + } + offset += count; + } + enrich_json_assets(&mut issue.fields.extra, &per_field_by_id); + } + } +``` + +Add `enrich_json_assets` to the imports at the top of the file. + +- [ ] **Step 3: Verify compilation** + +Run: `cargo build` +Expected: Compiles without errors. + +- [ ] **Step 4: Run all tests** + +Run: `cargo test` +Expected: All tests pass.
+ +- [ ] **Step 5: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "feat: enrich CMDB fields in issue list JSON output (#90)" +``` + +--- + +### Task 5: Add CMDB fields to `project fields` + +**Files:** +- Modify: `src/cli/project.rs` (`handle_fields` function) + +- [ ] **Step 1: Add import** + +Add to the top of `src/cli/project.rs`: + +```rust +use crate::api::assets::linked::get_or_fetch_cmdb_fields; +``` + +- [ ] **Step 2: Fetch CMDB fields in `handle_fields`** + +After the existing `let statuses = ...` line, add: + +```rust + let cmdb_fields = get_or_fetch_cmdb_fields(client) + .await + .unwrap_or_default(); +``` + +- [ ] **Step 3: Add CMDB fields to JSON output** + +In the `OutputFormat::Json` branch, add `asset_fields` to the JSON object: + +```rust + OutputFormat::Json => { + println!( + "{}", + serde_json::json!({ + "project": project_key, + "issue_types": issue_types, + "priorities": priorities, + "statuses_by_issue_type": statuses, + "asset_fields": cmdb_fields.iter().map(|(id, name)| { + serde_json::json!({"id": id, "name": name}) + }).collect::<Vec<_>>(), + }) + ); + } +``` + +- [ ] **Step 4: Add CMDB fields to table output** + +In the `OutputFormat::Table` branch, after the statuses block (after the closing `}` of `if has_statuses { ... }`), add: + +```rust + if !cmdb_fields.is_empty() { + println!("\nCustom Fields (Assets) \u{2014} instance-wide:"); + for (id, name) in &cmdb_fields { + println!(" - {} ({})", name, id); + } + } +``` + +- [ ] **Step 5: Verify compilation** + +Run: `cargo build` +Expected: Compiles without errors. + +- [ ] **Step 6: Commit** + +```bash +git add src/cli/project.rs +git commit -m "feat: show CMDB custom fields in project fields output (#90)" +``` + +--- + +### Task 6: Run full test suite and lint + +**Files:** None (verification only) + +- [ ] **Step 1: Run all tests** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 2: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: No warnings.
+ +- [ ] **Step 3: Run format check** + +Run: `cargo fmt --all -- --check` +Expected: No formatting issues. If any, run `cargo fmt --all` to fix. + +- [ ] **Step 4: If any issues, fix and commit** + +Fix any issues found and commit the fixes. diff --git a/docs/superpowers/plans/2026-04-01-sprint-issue-management.md b/docs/superpowers/plans/2026-04-01-sprint-issue-management.md new file mode 100644 index 0000000..88cb60a --- /dev/null +++ b/docs/superpowers/plans/2026-04-01-sprint-issue-management.md @@ -0,0 +1,709 @@ +# Sprint Issue Management Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `sprint add` and `sprint remove` subcommands so users can add issues to a sprint and move issues to the backlog. + +**Architecture:** Two new API methods (`add_issues_to_sprint`, `move_issues_to_backlog`) using the existing `post_no_content` client method. Two new `SprintCommand` variants with handlers following the existing state-change output pattern. `--current` resolves the active sprint by reusing the existing board resolution + sprint listing code. 
+ +**Tech Stack:** Rust, clap 4 (derive API), wiremock, assert_cmd, serde_json + +--- + +## File Map + +| File | Action | Responsibility | +|------|--------|----------------| +| `src/api/jira/sprints.rs` | Modify | Add `add_issues_to_sprint` and `move_issues_to_backlog` methods | +| `src/cli/mod.rs:400-420` | Modify | Add `Add` and `Remove` variants to `SprintCommand` enum | +| `src/cli/sprint.rs:1-45` | Modify | Update `handle` dispatch, add `handle_add` and `handle_remove` functions | +| `tests/cli_smoke.rs` | Modify | Add smoke tests for `sprint add --help` and `sprint remove --help` | +| `tests/sprint_commands.rs` | Modify | Add integration tests for add and remove commands | + +--- + +### Task 1: Add API methods for sprint issue management + +**Files:** +- Modify: `src/api/jira/sprints.rs` + +- [ ] **Step 1: Add `add_issues_to_sprint` method** + +Append the following methods after the closing `}` of `get_sprint_issues` (after line 87) but before the closing `}` of the `impl JiraClient` block: + +```rust + /// Add issues to a sprint. Max 50 issues per call. + /// POST /rest/agile/1.0/sprint/{sprintId}/issue → 204 No Content + pub async fn add_issues_to_sprint( + &self, + sprint_id: u64, + issues: &[String], + ) -> Result<()> { + let path = format!("/rest/agile/1.0/sprint/{}/issue", sprint_id); + let body = serde_json::json!({ "issues": issues }); + self.post_no_content(&path, &body).await + } + + /// Move issues to the backlog (removes from all sprints). Max 50 issues per call. + /// POST /rest/agile/1.0/backlog/issue → 204 No Content + pub async fn move_issues_to_backlog(&self, issues: &[String]) -> Result<()> { + let path = "/rest/agile/1.0/backlog/issue"; + let body = serde_json::json!({ "issues": issues }); + self.post_no_content(path, &body).await + } +``` + +- [ ] **Step 2: Verify it compiles** + +Run: `cargo build 2>&1 | head -20` +Expected: Build succeeds with no errors. 
+ +- [ ] **Step 3: Commit** + +```bash +git add src/api/jira/sprints.rs +git commit -m "$(cat <<'EOF' +feat: add API methods for sprint issue management (#83) + +Add add_issues_to_sprint and move_issues_to_backlog methods using +the existing post_no_content client method for 204 responses. +EOF +)" +``` + +--- + +### Task 2: Add `Add` and `Remove` variants to `SprintCommand` + +**Files:** +- Modify: `src/cli/mod.rs:400-420` + +- [ ] **Step 1: Add the new variants to `SprintCommand`** + +In `src/cli/mod.rs`, change lines 400-420 from: + +```rust +#[derive(Subcommand)] +pub enum SprintCommand { + /// List sprints + List { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option<u64>, + }, + /// Show current sprint issues + Current { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option<u64>, + /// Maximum number of issues to return + #[arg(long)] + limit: Option<u32>, + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, + }, +} +``` + +to: + +```rust +#[derive(Subcommand)] +pub enum SprintCommand { + /// List sprints + List { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option<u64>, + }, + /// Show current sprint issues + Current { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option<u64>, + /// Maximum number of issues to return + #[arg(long)] + limit: Option<u32>, + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, + }, + /// Add issues to a sprint + Add { + /// Sprint ID (from `jr sprint list`) + #[arg(long, required_unless_present = "current")] + sprint: Option<u64>, + /// Use the active sprint instead of specifying an ID + #[arg(long, conflicts_with = "sprint")] + current: bool, + /// Issue keys to add (e.g.
FOO-1 FOO-2) + #[arg(required = true, num_args = 1..)] + issues: Vec<String>, + /// Board ID (used with --current to resolve the active sprint) + #[arg(long)] + board: Option<u64>, + }, + /// Remove issues from sprint (moves to backlog) + Remove { + /// Issue keys to remove (e.g. FOO-1 FOO-2) + #[arg(required = true, num_args = 1..)] + issues: Vec<String>, + }, +} +``` + +- [ ] **Step 2: Verify it compiles** + +Run: `cargo build 2>&1 | head -20` +Expected: Build fails with non-exhaustive match errors in `src/cli/sprint.rs` (the `handle` function doesn't cover `Add` and `Remove` yet). This is expected — Task 3 fixes it. + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/mod.rs +git commit -m "$(cat <<'EOF' +feat: add Add and Remove variants to SprintCommand (#83) + +Add clap definitions for sprint add (--sprint/--current + variadic +issues) and sprint remove (variadic issues). Handler not yet wired. +EOF +)" +``` + +--- + +### Task 3: Implement `handle_add` and `handle_remove` in sprint.rs + +**Files:** +- Modify: `src/cli/sprint.rs:1-45` + +- [ ] **Step 1: Add `serde_json` import** + +In `src/cli/sprint.rs`, change line 1 from: + +```rust +use anyhow::{Result, bail}; +``` + +to: + +```rust +use anyhow::{Result, bail}; +use serde_json::json; +``` + +- [ ] **Step 2: Update the `handle` function dispatch** + +Replace the `handle` function (lines 10-45) with: + +```rust +/// Handle all sprint subcommands. +pub async fn handle( + command: SprintCommand, + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, +) -> Result<()> { + match command { + SprintCommand::List { board } => { + let board_id = resolve_scrum_board(config, client, board, project_override).await?; + handle_list(board_id, client, output_format).await + } + SprintCommand::Current { + board, limit, all, ..
+ } => { + let board_id = resolve_scrum_board(config, client, board, project_override).await?; + handle_current(board_id, client, output_format, config, limit, all).await + } + SprintCommand::Add { + sprint, + current, + issues, + board, + } => { + handle_add(sprint, current, board, issues, config, client, output_format, project_override) + .await + } + SprintCommand::Remove { issues } => { + handle_remove(issues, output_format, client).await + } + } +} + +/// Resolve board ID and verify it's a scrum board. +async fn resolve_scrum_board( + config: &Config, + client: &JiraClient, + board: Option<u64>, + project_override: Option<&str>, +) -> Result<u64> { + let board_id = + crate::cli::board::resolve_board_id(config, client, board, project_override, true) + .await?; + + let board_config = client.get_board_config(board_id).await?; + let board_type = board_config.board_type.to_lowercase(); + if board_type != "scrum" { + bail!( + "Sprint commands are only available for scrum boards. Board {} is a {} board.", + board_id, + board_config.board_type + ); + } + + Ok(board_id) +} +``` + +- [ ] **Step 3: Add the `handle_add` function** + +Append after the `handle` function (before `handle_list`): + +```rust +const MAX_SPRINT_ISSUES: usize = 50; + +async fn handle_add( + sprint: Option<u64>, + current: bool, + board: Option<u64>, + issues: Vec<String>, + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, +) -> Result<()> { + if issues.len() > MAX_SPRINT_ISSUES { + bail!( + "Too many issues (got {}).
Maximum is {} per operation.", + issues.len(), + MAX_SPRINT_ISSUES + ); + } + + let sprint_id = if current { + let board_id = resolve_scrum_board(config, client, board, project_override).await?; + let sprints = client.list_sprints(board_id, Some("active")).await?; + if sprints.is_empty() { + bail!("No active sprint found for board {}.", board_id); + } + sprints[0].id + } else { + sprint.expect("clap enforces --sprint when --current is absent") + }; + + client.add_issues_to_sprint(sprint_id, &issues).await?; + + match output_format { + OutputFormat::Json => { + println!( + "{}", + serde_json::to_string_pretty(&json!({ + "sprint_id": sprint_id, + "issues": issues, + "added": true + }))? + ); + } + OutputFormat::Table => { + output::print_success(&format!( + "Added {} issue(s) to sprint {}", + issues.len(), + sprint_id + )); + } + } + + Ok(()) +} +``` + +- [ ] **Step 4: Add the `handle_remove` function** + +Append after `handle_add`: + +```rust +async fn handle_remove( + issues: Vec<String>, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + if issues.len() > MAX_SPRINT_ISSUES { + bail!( + "Too many issues (got {}). Maximum is {} per operation.", + issues.len(), + MAX_SPRINT_ISSUES + ); + } + + client.move_issues_to_backlog(&issues).await?; + + match output_format { + OutputFormat::Json => { + println!( + "{}", + serde_json::to_string_pretty(&json!({ + "issues": issues, + "removed": true + }))? + ); + } + OutputFormat::Table => { + output::print_success(&format!("Moved {} issue(s) to backlog", issues.len())); + } + } + + Ok(()) +} +``` + +- [ ] **Step 5: Verify it compiles** + +Run: `cargo build 2>&1 | head -20` +Expected: Build succeeds with no errors. + +- [ ] **Step 6: Run existing sprint tests to verify nothing breaks** + +Run: `cargo test --lib -- sprint 2>&1 | tail -20` +Expected: All existing `sprint` unit tests pass (sprint_summary tests are unaffected).
+ +Run: `cargo test --test sprint_commands 2>&1 | tail -20` +Expected: All existing integration tests pass (they test `list`/`current`, not `add`/`remove`). + +- [ ] **Step 7: Commit** + +```bash +git add src/cli/sprint.rs +git commit -m "$(cat <<'EOF' +feat: implement sprint add and remove handlers (#83) + +Wire handle_add (--sprint/--current with board resolution) and +handle_remove (move to backlog) with state-change output pattern. +Validates max 50 issues per operation. +EOF +)" +``` + +--- + +### Task 4: Add CLI smoke tests + +**Files:** +- Modify: `tests/cli_smoke.rs` + +- [ ] **Step 1: Add smoke test for `sprint add --help`** + +Append the following test to `tests/cli_smoke.rs`: + +```rust +#[test] +fn test_sprint_add_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "add", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("Add issues to a sprint")) + .stdout(predicate::str::contains("--sprint")) + .stdout(predicate::str::contains("--current")) + .stdout(predicate::str::contains("--board")); +} +``` + +- [ ] **Step 2: Add smoke test for `sprint remove --help`** + +Append the following test to `tests/cli_smoke.rs`: + +```rust +#[test] +fn test_sprint_remove_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "remove", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("Remove issues from sprint")) + .stdout(predicate::str::contains("ISSUES")); +} +``` + +- [ ] **Step 3: Add conflict test for `--sprint` and `--current`** + +Append the following test to `tests/cli_smoke.rs`: + +```rust +#[test] +fn test_sprint_add_sprint_and_current_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "add", "--sprint", "100", "--current", "FOO-1"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} +``` + +- [ ] **Step 4: Add test that `add` requires `--sprint` or `--current`** + +Append the following test to `tests/cli_smoke.rs`: + +```rust +#[test] 
+fn test_sprint_add_requires_sprint_or_current() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "add", "FOO-1"]) + .assert() + .failure() + .stderr(predicate::str::contains("--sprint")); +} +``` + +- [ ] **Step 5: Run the smoke tests** + +Run: `cargo test --test cli_smoke 2>&1 | tail -20` +Expected: All tests pass, including the 4 new ones. + +- [ ] **Step 6: Commit** + +```bash +git add tests/cli_smoke.rs +git commit -m "$(cat <<'EOF' +test: add CLI smoke tests for sprint add and remove (#83) + +Verify help output, --sprint/--current conflict, and required +flag enforcement. +EOF +)" +``` + +--- + +### Task 5: Add integration tests for sprint add and remove + +**Files:** +- Modify: `tests/sprint_commands.rs` + +- [ ] **Step 1: Add integration test for `sprint add --sprint`** + +Append the following test to `tests/sprint_commands.rs`: + +```rust +#[tokio::test] +async fn sprint_add_with_sprint_id() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["sprint", "add", "--sprint", "100", "FOO-1", "FOO-2"]) + .output() + .unwrap(); + + assert!(output.status.success(), "Expected success, got: {:?}", output); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + stdout.contains("Added 2 issue(s) to sprint 100"), + "Expected success message, got: {stdout}" + ); +} +``` + +- [ ] **Step 2: Add integration test for `sprint add --sprint` with JSON output** + +Append the following test to `tests/sprint_commands.rs`: + +```rust +#[tokio::test] +async fn sprint_add_json_output() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/sprint/200/issue")) + .respond_with(ResponseTemplate::new(204)) + 
.expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "--output", "json", + "sprint", "add", "--sprint", "200", "BAR-1", + ]) + .output() + .unwrap(); + + assert!(output.status.success(), "Expected success, got: {:?}", output); + let stdout = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout).expect("valid JSON"); + assert_eq!(parsed["sprint_id"], 200); + assert_eq!(parsed["issues"], serde_json::json!(["BAR-1"])); + assert_eq!(parsed["added"], true); +} +``` + +- [ ] **Step 3: Add integration test for `sprint remove`** + +Append the following test to `tests/sprint_commands.rs`: + +```rust +#[tokio::test] +async fn sprint_remove_moves_to_backlog() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/backlog/issue")) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["sprint", "remove", "FOO-1", "FOO-3"]) + .output() + .unwrap(); + + assert!(output.status.success(), "Expected success, got: {:?}", output); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + stdout.contains("Moved 2 issue(s) to backlog"), + "Expected success message, got: {stdout}" + ); +} +``` + +- [ ] **Step 4: Add integration test for `sprint remove` with JSON output** + +Append the following test to `tests/sprint_commands.rs`: + +```rust +#[tokio::test] +async fn sprint_remove_json_output() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/backlog/issue")) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", 
server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "--output", "json", + "sprint", "remove", "QUX-5", + ]) + .output() + .unwrap(); + + assert!(output.status.success(), "Expected success, got: {:?}", output); + let stdout = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout).expect("valid JSON"); + assert_eq!(parsed["issues"], serde_json::json!(["QUX-5"])); + assert_eq!(parsed["removed"], true); +} +``` + +- [ ] **Step 5: Add integration test for `sprint add --current`** + +Append the following test to `tests/sprint_commands.rs`: + +```rust +#[tokio::test] +async fn sprint_add_with_current_flag() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .args(["sprint", "add", "--current", "TEST-1", "TEST-2"]) + .output() + .unwrap(); + + assert!(output.status.success(), "Expected success, got: {:?}", output); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + stdout.contains("Added 2 issue(s) to sprint 100"), + "Expected success message, got: {stdout}" + ); +} +``` + +- [ ] **Step 6: Run the integration tests** + +Run: `cargo test --test sprint_commands 2>&1 | tail -30` +Expected: All tests pass, including the 5 new ones and the 5 existing ones. + +- [ ] **Step 7: Commit** + +```bash +git add tests/sprint_commands.rs +git commit -m "$(cat <<'EOF' +test: add integration tests for sprint add and remove (#83) + +Test add with --sprint flag (table + JSON), remove (table + JSON), +and add with --current flag using board resolution prereqs. 
+EOF +)" +``` + +--- + +### Task 6: Run full test suite and lint + +**Files:** None (verification only) + +- [ ] **Step 1: Run clippy** + +Run: `cargo clippy -- -D warnings 2>&1 | tail -20` +Expected: No warnings or errors. + +- [ ] **Step 2: Run format check** + +Run: `cargo fmt --all -- --check 2>&1 | tail -10` +Expected: No formatting issues. + +- [ ] **Step 3: Run full test suite** + +Run: `cargo test 2>&1 | tail -30` +Expected: All tests pass. Key tests to verify: +- `tests/cli_smoke.rs::test_sprint_add_help` — PASS +- `tests/cli_smoke.rs::test_sprint_remove_help` — PASS +- `tests/cli_smoke.rs::test_sprint_add_sprint_and_current_conflict` — PASS +- `tests/cli_smoke.rs::test_sprint_add_requires_sprint_or_current` — PASS +- `tests/sprint_commands.rs::sprint_add_with_sprint_id` — PASS +- `tests/sprint_commands.rs::sprint_add_json_output` — PASS +- `tests/sprint_commands.rs::sprint_remove_moves_to_backlog` — PASS +- `tests/sprint_commands.rs::sprint_remove_json_output` — PASS +- `tests/sprint_commands.rs::sprint_add_with_current_flag` — PASS +- All existing sprint tests — PASS (unchanged) +- All existing unit tests — PASS (unchanged) diff --git a/docs/superpowers/plans/2026-04-02-cache-dedup.md b/docs/superpowers/plans/2026-04-02-cache-dedup.md new file mode 100644 index 0000000..fb01314 --- /dev/null +++ b/docs/superpowers/plans/2026-04-02-cache-dedup.md @@ -0,0 +1,473 @@ +# Cache Deduplication Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Deduplicate 5 repetitive cache read/write function pairs in `src/cache.rs` using generic free functions and a minimal `Expiring` trait, normalizing corrupt-file handling to `Ok(None)` across all caches. + +**Architecture:** Extract `read_cache` and `write_cache` generic functions plus a one-method `Expiring` trait. 
Apply to 3 whole-file caches (teams, workspace, cmdb_fields). Keep 2 keyed caches (project_meta, object_type_attrs) as explicit functions with corrupt-handling fix only. Public API unchanged. + +**Tech Stack:** Rust, serde, serde_json, chrono, anyhow + +**Spec:** `docs/superpowers/specs/2026-04-02-cache-dedup-design.md` +**Issue:** #104 + +--- + +### Task 1: Add corrupt-file tests for caches that currently lack them + +Before refactoring, establish the new behavior as tests. Currently `read_team_cache`, `read_workspace_cache`, and `read_project_meta` propagate deserialization errors. These tests will initially fail (they expect `Ok(None)` but get `Err`). We write them first so the refactor in later tasks makes them pass. + +**Files:** +- Modify: `src/cache.rs:266-636` (test module) + +- [ ] **Step 1: Add `corrupt_team_cache_returns_none` test** + +Add this test at the end of the `mod tests` block (before the closing `}`), after the `object_type_attr_cache_corrupt_returns_none` test: + +```rust + #[test] + fn corrupt_team_cache_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + + // Garbage data + std::fs::write(dir.join("teams.json"), "not json").unwrap(); + let result = read_team_cache().unwrap(); + assert!(result.is_none(), "garbage data should return None"); + + // Valid JSON, wrong shape + std::fs::write(dir.join("teams.json"), r#"{"unexpected": true}"#).unwrap(); + let result = read_team_cache().unwrap(); + assert!(result.is_none(), "wrong-shape JSON should return None"); + }); + } +``` + +- [ ] **Step 2: Add `corrupt_workspace_cache_returns_none` test** + +Add immediately after the previous test: + +```rust + #[test] + fn corrupt_workspace_cache_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + + // Garbage data + std::fs::write(dir.join("workspace.json"), "not json").unwrap(); + let result = read_workspace_cache().unwrap(); + 
assert!(result.is_none(), "garbage data should return None"); + + // Valid JSON, wrong shape + std::fs::write(dir.join("workspace.json"), r#"{"unexpected": true}"#).unwrap(); + let result = read_workspace_cache().unwrap(); + assert!(result.is_none(), "wrong-shape JSON should return None"); + }); + } +``` + +- [ ] **Step 3: Add `corrupt_project_meta_returns_none` test** + +Add immediately after the previous test: + +```rust + #[test] + fn corrupt_project_meta_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + + // Garbage data + std::fs::write(dir.join("project_meta.json"), "not json").unwrap(); + let result = read_project_meta("ANY").unwrap(); + assert!(result.is_none(), "garbage data should return None"); + + // Valid JSON, wrong shape + std::fs::write(dir.join("project_meta.json"), r#"{"unexpected": true}"#).unwrap(); + let result = read_project_meta("ANY").unwrap(); + assert!(result.is_none(), "wrong-shape JSON should return None"); + }); + } +``` + +- [ ] **Step 4: Run the new tests to verify they fail** + +Run: `~/.cargo/bin/cargo test --lib -- cache::tests::corrupt_team_cache_returns_none cache::tests::corrupt_workspace_cache_returns_none cache::tests::corrupt_project_meta_returns_none 2>&1` + +Expected: 3 FAILURES. `corrupt_team_cache_returns_none` and `corrupt_workspace_cache_returns_none` fail because `read_team_cache` / `read_workspace_cache` use `?` on `serde_json::from_str`, propagating the error instead of returning `Ok(None)`. `corrupt_project_meta_returns_none` fails for the same reason on `read_project_meta`. + +- [ ] **Step 5: Commit the failing tests** + +```bash +git add src/cache.rs +git commit -m "test: add corrupt-file tests for team, workspace, project_meta caches (#104)" +``` + +--- + +### Task 2: Add `Expiring` trait and generic `read_cache` / `write_cache` functions + +Introduce the core abstractions. No callers changed yet — this is additive only. 
+
+**Files:**
+- Modify: `src/cache.rs:1-10` (imports and top of file)
+
+- [ ] **Step 1: Add the `Expiring` trait after the `CACHE_TTL_DAYS` constant**
+
+Insert after line 7 (`const CACHE_TTL_DAYS: i64 = 7;`):
+
+```rust
+/// Implemented by cache structs that carry a timestamp for TTL checks.
+pub(crate) trait Expiring {
+    fn fetched_at(&self) -> DateTime<Utc>;
+}
+```
+
+- [ ] **Step 2: Add `DeserializeOwned` to imports**
+
+Change the serde import line from:
+
+```rust
+use serde::{Deserialize, Serialize};
+```
+
+to:
+
+```rust
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+```
+
+- [ ] **Step 3: Add `read_cache` generic function after the `Expiring` trait**
+
+Insert after the `Expiring` trait definition:
+
+```rust
+/// Read a whole-file cache. Returns `Ok(None)` on missing, expired, or corrupt files.
+fn read_cache<T: DeserializeOwned + Expiring>(filename: &str) -> Result<Option<T>> {
+    let path = cache_dir().join(filename);
+    if !path.exists() {
+        return Ok(None);
+    }
+    let content = std::fs::read_to_string(&path)?;
+    let cache: T = match serde_json::from_str(&content) {
+        Ok(c) => c,
+        Err(_) => return Ok(None),
+    };
+    if (Utc::now() - cache.fetched_at()).num_days() >= CACHE_TTL_DAYS {
+        return Ok(None);
+    }
+    Ok(Some(cache))
+}
+
+/// Write a whole-file cache. Creates the cache directory if needed.
+fn write_cache<T: Serialize>(filename: &str, data: &T) -> Result<()> {
+    let dir = cache_dir();
+    std::fs::create_dir_all(&dir)?;
+    let content = serde_json::to_string_pretty(data)?;
+    std::fs::write(dir.join(filename), content)?;
+    Ok(())
+}
+```
+
+- [ ] **Step 4: Verify existing tests still pass (no regressions)**
+
+Run: `~/.cargo/bin/cargo test --lib -- cache::tests 2>&1`
+
+Expected: The 19 existing tests pass (the 3 new corrupt tests still fail — that's expected). The new trait and functions are unused so far, which is fine — no dead-code warning because they'll be used in the next task. 
+
+- [ ] **Step 5: Verify it compiles cleanly with clippy**
+
+Run: `~/.cargo/bin/cargo clippy --lib -- -D warnings 2>&1`
+
+Expected: Clean (possible dead-code warning for `read_cache`/`write_cache` — if so, add `#[allow(dead_code)]` temporarily and remove in Task 3).
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add src/cache.rs
+git commit -m "refactor: add Expiring trait and generic read_cache/write_cache (#104)"
+```
+
+---
+
+### Task 3: Migrate 3 whole-file caches to use generics
+
+Replace the bodies of the 6 whole-file read/write functions with calls to `read_cache`/`write_cache`. Add `Expiring` impls for each struct.
+
+**Files:**
+- Modify: `src/cache.rs:9-190` (struct definitions and function bodies)
+
+- [ ] **Step 1: Add `Expiring` impl for `TeamCache`**
+
+Insert immediately after the `TeamCache` struct definition (after line 19):
+
+```rust
+impl Expiring for TeamCache {
+    fn fetched_at(&self) -> DateTime<Utc> {
+        self.fetched_at
+    }
+}
+```
+
+- [ ] **Step 2: Replace `read_team_cache` body**
+
+Replace the entire `read_team_cache` function:
+
+```rust
+pub fn read_team_cache() -> Result<Option<TeamCache>> {
+    read_cache("teams.json")
+}
+```
+
+- [ ] **Step 3: Replace `write_team_cache` body**
+
+Replace the entire `write_team_cache` function:
+
+```rust
+pub fn write_team_cache(teams: &[CachedTeam]) -> Result<()> {
+    write_cache(
+        "teams.json",
+        &TeamCache {
+            fetched_at: Utc::now(),
+            teams: teams.to_vec(),
+        },
+    )
+}
+```
+
+- [ ] **Step 4: Add `Expiring` impl for `WorkspaceCache`**
+
+Insert immediately after the `WorkspaceCache` struct definition:
+
+```rust
+impl Expiring for WorkspaceCache {
+    fn fetched_at(&self) -> DateTime<Utc> {
+        self.fetched_at
+    }
+}
+```
+
+- [ ] **Step 5: Replace `read_workspace_cache` body**
+
+Replace the entire `read_workspace_cache` function:
+
+```rust
+pub fn read_workspace_cache() -> Result<Option<WorkspaceCache>> {
+    read_cache("workspace.json")
+}
+```
+
+- [ ] **Step 6: Replace `write_workspace_cache` body**
+
+Replace the entire `write_workspace_cache` 
function:
+
+```rust
+pub fn write_workspace_cache(workspace_id: &str) -> Result<()> {
+    write_cache(
+        "workspace.json",
+        &WorkspaceCache {
+            workspace_id: workspace_id.to_string(),
+            fetched_at: Utc::now(),
+        },
+    )
+}
+```
+
+- [ ] **Step 7: Add `Expiring` impl for `CmdbFieldsCache`**
+
+Insert immediately after the `CmdbFieldsCache` struct definition:
+
+```rust
+impl Expiring for CmdbFieldsCache {
+    fn fetched_at(&self) -> DateTime<Utc> {
+        self.fetched_at
+    }
+}
+```
+
+- [ ] **Step 8: Replace `read_cmdb_fields_cache` body**
+
+Replace the entire `read_cmdb_fields_cache` function:
+
+```rust
+pub fn read_cmdb_fields_cache() -> Result<Option<CmdbFieldsCache>> {
+    read_cache("cmdb_fields.json")
+}
+```
+
+- [ ] **Step 9: Replace `write_cmdb_fields_cache` body**
+
+Replace the entire `write_cmdb_fields_cache` function:
+
+```rust
+pub fn write_cmdb_fields_cache(fields: &[(String, String)]) -> Result<()> {
+    write_cache(
+        "cmdb_fields.json",
+        &CmdbFieldsCache {
+            fields: fields.to_vec(),
+            fetched_at: Utc::now(),
+        },
+    )
+}
+```
+
+- [ ] **Step 10: Run all cache tests**
+
+Run: `~/.cargo/bin/cargo test --lib -- cache::tests 2>&1`
+
+Expected: All 19 original tests PASS. The 3 new corrupt tests from Task 1 now also PASS (22 total pass). The `read_cache` generic function handles corrupt files as `Ok(None)`, which is exactly what those tests assert.
+
+- [ ] **Step 11: Run clippy**
+
+Run: `~/.cargo/bin/cargo clippy --lib -- -D warnings 2>&1`
+
+Expected: Clean. Remove any `#[allow(dead_code)]` added in Task 2 if present.
+
+- [ ] **Step 12: Commit**
+
+```bash
+git add src/cache.rs
+git commit -m "refactor: migrate 3 whole-file caches to generic read_cache/write_cache (#104)"
+```
+
+---
+
+### Task 4: Normalize corrupt-file handling in keyed caches
+
+Fix `read_project_meta` to treat deserialization errors as cache misses. `read_object_type_attr_cache` already does this. Add doc comments explaining why these functions are not genericized. 
+
+**Files:**
+- Modify: `src/cache.rs` (keyed cache functions)
+
+- [ ] **Step 1: Fix `read_project_meta` corrupt-file handling and add doc comment**
+
+Replace the entire `read_project_meta` function with:
+
+```rust
+/// Read cached project metadata for a specific project key.
+///
+/// Keyed cache — not genericized because TTL is checked per-entry
+/// (`ProjectMeta.fetched_at`), unlike whole-file caches.
+pub fn read_project_meta(project_key: &str) -> Result<Option<ProjectMeta>> {
+    let path = cache_dir().join("project_meta.json");
+    if !path.exists() {
+        return Ok(None);
+    }
+
+    let content = std::fs::read_to_string(&path)?;
+    let map: HashMap<String, ProjectMeta> = match serde_json::from_str(&content) {
+        Ok(m) => m,
+        Err(_) => return Ok(None),
+    };
+
+    match map.get(project_key) {
+        Some(meta) => {
+            let age = Utc::now() - meta.fetched_at;
+            if age.num_days() >= CACHE_TTL_DAYS {
+                Ok(None)
+            } else {
+                Ok(Some(meta.clone()))
+            }
+        }
+        None => Ok(None),
+    }
+}
+```
+
+The key changes from the original:
+- `serde_json::from_str(&content)?` becomes `match serde_json::from_str(...) { Ok(m) => m, Err(_) => return Ok(None) }`
+- Doc comment added explaining why this is not genericized
+
+- [ ] **Step 2: Add doc comment to `write_project_meta`**
+
+Add a doc comment above the existing `write_project_meta` function:
+
+```rust
+/// Write cached project metadata for a specific project key.
+///
+/// Merges into the existing map file, preserving entries for other projects.
+pub fn write_project_meta(project_key: &str, meta: &ProjectMeta) -> Result<()> {
+```
+
+The function body stays unchanged.
+
+- [ ] **Step 3: Add doc comment to `read_object_type_attr_cache`**
+
+Add a doc comment above the existing `read_object_type_attr_cache` function:
+
+```rust
+/// Read cached attributes for a specific object type. 
+/// +/// Keyed cache — not genericized because TTL is checked per-file +/// (`ObjectTypeAttrCache.fetched_at`) but lookup is per-key, with a different +/// return type (`Vec`) than the stored wrapper struct. +pub fn read_object_type_attr_cache( +``` + +The function body stays unchanged. + +- [ ] **Step 4: Add doc comment to `write_object_type_attr_cache`** + +Add a doc comment above the existing `write_object_type_attr_cache` function: + +```rust +/// Write cached attributes for a specific object type. +/// +/// Merges into the existing map file, preserving entries for other object types. +pub fn write_object_type_attr_cache( +``` + +The function body stays unchanged. + +- [ ] **Step 5: Run all cache tests** + +Run: `~/.cargo/bin/cargo test --lib -- cache::tests 2>&1` + +Expected: All 22 tests PASS. The `corrupt_project_meta_returns_none` test from Task 1 now passes because `read_project_meta` returns `Ok(None)` on corrupt data. + +- [ ] **Step 6: Run clippy** + +Run: `~/.cargo/bin/cargo clippy -- -D warnings 2>&1` + +Expected: Clean. + +- [ ] **Step 7: Commit** + +```bash +git add src/cache.rs +git commit -m "fix: normalize corrupt-file handling in keyed caches, add doc comments (#104)" +``` + +--- + +### Task 5: Final validation + +Run the full test suite and clippy to ensure nothing is broken across the entire crate. + +**Files:** None (read-only verification) + +- [ ] **Step 1: Run the full test suite** + +Run: `~/.cargo/bin/cargo test 2>&1` + +Expected: All tests pass — unit tests, integration tests, proptests, snapshot tests. + +- [ ] **Step 2: Run clippy on the full crate** + +Run: `~/.cargo/bin/cargo clippy -- -D warnings 2>&1` + +Expected: Clean, zero warnings. + +- [ ] **Step 3: Run format check** + +Run: `~/.cargo/bin/cargo fmt --all -- --check 2>&1` + +Expected: Clean, no formatting issues. + +- [ ] **Step 4: Verify line count reduction** + +Run: `wc -l src/cache.rs` + +Expected: Roughly 550-580 lines total (down from 637). 
Production code portion should be ~190 lines (down from ~265). diff --git a/docs/superpowers/plans/2026-04-02-issue-move-status-name.md b/docs/superpowers/plans/2026-04-02-issue-move-status-name.md new file mode 100644 index 0000000..628b639 --- /dev/null +++ b/docs/superpowers/plans/2026-04-02-issue-move-status-name.md @@ -0,0 +1,556 @@ +# Issue Move: Accept Target Status Name — Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Allow `jr issue move KEY "Completed"` to match on target status name (not just transition name), and improve the error message to show both. + +**Architecture:** Replace the single-pass transition-name matching in `handle_move` with a unified candidate pool that includes both transition names and target status names, deduplicated case-insensitively. Update the error message to show `"TransitionName (→ StatusName)"` format. Update the `transitions_response` fixture to include `to` status objects. + +**Tech Stack:** Rust, wiremock, assert_cmd, serde_json + +--- + +### Task 1: Update the transitions fixture to include target status + +The existing `transitions_response` fixture in `tests/common/fixtures.rs` only includes `id` and `name` — no `to` status object. The new matching logic needs `to.name`, so the fixture must be updated. A new fixture variant is needed that accepts `(id, transition_name, status_name)` triples. + +**Files:** +- Modify: `tests/common/fixtures.rs:39-43` + +- [ ] **Step 1: Add the new fixture function** + +Add `transitions_response_with_status` below the existing `transitions_response` function (after line 43) in `tests/common/fixtures.rs`: + +```rust +/// Transitions response with target status names. +/// Each tuple is (transition_id, transition_name, target_status_name). 
+pub fn transitions_response_with_status(transitions: Vec<(&str, &str, &str)>) -> Value {
+    json!({
+        "transitions": transitions.iter().map(|(id, name, status_name)| json!({
+            "id": id,
+            "name": name,
+            "to": {"name": status_name}
+        })).collect::<Vec<_>>()
+    })
+}
+```
+
+- [ ] **Step 2: Verify it compiles**
+
+Run: `cargo test --test issue_commands --no-run`
+Expected: Compiles successfully (new function is unused but that's fine — `fixtures.rs` has `#[allow(dead_code)]` via the `mod common` declaration).
+
+- [ ] **Step 3: Commit**
+
+```bash
+git add tests/common/fixtures.rs
+git commit -m "test: add transitions_response_with_status fixture (#108)"
+```
+
+---
+
+### Task 2: Implement unified candidate pool matching in handle_move
+
+Replace the transition-name-only matching logic in `handle_move` (`src/cli/issue/workflow.rs` lines 98–139) with a unified candidate pool that includes both transition names and target status names, deduplicated case-insensitively.
+
+**Files:**
+- Modify: `src/cli/issue/workflow.rs:96-139`
+
+- [ ] **Step 1: Replace the matching block**
+
+In `src/cli/issue/workflow.rs`, replace lines 96–139 (the `let selected_transition = if let Some(t) ...` block through the closing `};`) with:
+
+```rust
+    let selected_transition = if let Some(t) = selected_transition {
+        t
+    } else {
+        // Build unified candidate pool: transition names + target status names.
+        // Each candidate maps to its transition index. 
+        let mut candidates: Vec<(String, usize)> = Vec::new();
+        let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
+        for (i, t) in transitions.iter().enumerate() {
+            let t_lower = t.name.to_lowercase();
+            if seen.insert(t_lower) {
+                candidates.push((t.name.clone(), i));
+            }
+            if let Some(ref status) = t.to {
+                let s_lower = status.name.to_lowercase();
+                if seen.insert(s_lower) {
+                    candidates.push((status.name.clone(), i));
+                }
+            }
+        }
+
+        let candidate_names: Vec<String> = candidates.iter().map(|(name, _)| name.clone()).collect();
+        match partial_match::partial_match(&target_status, &candidate_names) {
+            MatchResult::Exact(name) => {
+                let idx = candidates
+                    .iter()
+                    .find(|(n, _)| n == &name)
+                    .map(|(_, i)| *i)
+                    .unwrap();
+                &transitions[idx]
+            }
+            MatchResult::Ambiguous(matches) => {
+                if no_input {
+                    bail!(
+                        "Ambiguous transition \"{}\". Matches: {}",
+                        target_status,
+                        matches.join(", ")
+                    );
+                }
+                // Interactive disambiguation
+                eprintln!(
+                    "Ambiguous match for \"{}\". Did you mean one of:",
+                    target_status
+                );
+                for (i, m) in matches.iter().enumerate() {
+                    eprintln!("  {}. {}", i + 1, m);
+                }
+                let choice = helpers::prompt_input("Select (number)")?;
+                let idx: usize = choice
+                    .parse()
+                    .map_err(|_| JrError::UserError("Invalid selection".into()))?;
+                if idx < 1 || idx > matches.len() {
+                    return Err(JrError::UserError("Selection out of range".into()).into());
+                }
+                let selected_name = &matches[idx - 1];
+                let tidx = candidates
+                    .iter()
+                    .find(|(n, _)| n == selected_name)
+                    .map(|(_, i)| *i)
+                    .unwrap();
+                &transitions[tidx]
+            }
+            MatchResult::None(_) => {
+                let labels: Vec<String> = transitions
+                    .iter()
+                    .map(|t| {
+                        match t.to.as_ref() {
+                            Some(status) => format!("{} (→ {})", t.name, status.name),
+                            None => t.name.clone(),
+                        }
+                    })
+                    .collect();
+                bail!(
+                    "No transition matching \"{}\". 
Available: {}", + target_status, + labels.join(", ") + ); + } + } + }; +``` + +- [ ] **Step 2: Verify it compiles** + +Run: `cargo build` +Expected: Compiles successfully. + +- [ ] **Step 3: Run existing tests** + +Run: `cargo test` +Expected: All tests pass. The existing `test_get_transitions` test doesn't exercise `handle_move` directly, so it's unaffected. + +- [ ] **Step 4: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: No warnings. + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/issue/workflow.rs +git commit -m "feat: accept target status name in issue move matching (#108)" +``` + +--- + +### Task 3: Add integration tests for the new matching behavior + +Add end-to-end integration tests in `tests/issue_commands.rs` that exercise `handle_move` via the CLI binary. These tests use `assert_cmd::Command::cargo_bin("jr")` with `JR_BASE_URL` and `JR_AUTH_HEADER` env vars against wiremock. + +Each test needs two mocks: GET transitions (to list available transitions) and GET issue (for the idempotency check / current status). A POST transitions mock is needed for tests that succeed. 
+ +**Files:** +- Modify: `tests/issue_commands.rs` (append new tests at end of file) + +- [ ] **Step 1: Add test for matching by transition name (existing behavior preserved)** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_move_by_transition_name() { + let server = MockServer::start().await; + + // Mock transitions: "Complete" → "Completed", "Review" → "In Review" + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + // Mock get issue (current status: "To Do") + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response("FOO-1", "Test issue", "To Do")), + ) + .mount(&server) + .await; + + // Mock POST transition + Mock::given(method("POST")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .and(body_partial_json(serde_json::json!({"transition": {"id": "21"}}))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Complete") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(output.status.success(), "Expected success, stderr: {stderr}"); + assert!( + stderr.contains("Moved FOO-1"), + "Expected move confirmation in stderr: {stderr}" + ); +} +``` + +- [ ] **Step 2: Run test to verify it passes** + +Run: `cargo test test_move_by_transition_name -- --nocapture` +Expected: PASS + +- [ ] **Step 3: Add test for matching by status name (new behavior)** + +Append to `tests/issue_commands.rs`: + 
+```rust +#[tokio::test] +async fn test_move_by_status_name() { + let server = MockServer::start().await; + + // Transition name "Complete" differs from status name "Completed" + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response("FOO-1", "Test issue", "To Do")), + ) + .mount(&server) + .await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .and(body_partial_json(serde_json::json!({"transition": {"id": "21"}}))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Completed") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(output.status.success(), "Expected success, stderr: {stderr}"); + assert!( + stderr.contains("Moved FOO-1"), + "Expected move confirmation in stderr: {stderr}" + ); +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `cargo test test_move_by_status_name -- --nocapture` +Expected: PASS + +- [ ] **Step 5: Add test for deduplication (transition name == status name)** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_move_dedup_same_transition_and_status_name() { + let server = MockServer::start().await; + + // Transition name matches status name (default Jira workflow pattern) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + 
.respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "In Progress", "In Progress"), + ("31", "Done", "Done"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response("FOO-1", "Test issue", "To Do")), + ) + .mount(&server) + .await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .and(body_partial_json(serde_json::json!({"transition": {"id": "31"}}))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Done") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(output.status.success(), "Expected success, stderr: {stderr}"); + assert!( + stderr.contains("Moved FOO-1"), + "Expected move confirmation in stderr: {stderr}" + ); +} +``` + +- [ ] **Step 6: Run test to verify it passes** + +Run: `cargo test test_move_dedup_same_transition_and_status_name -- --nocapture` +Expected: PASS + +- [ ] **Step 7: Add test for ambiguous match across transition and status names** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_move_ambiguous_across_transition_and_status_names() { + let server = MockServer::start().await; + + // "Re" partially matches both "Reopen" (transition) and "Review" (transition) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Reopen", "Open"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + 
Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response("FOO-1", "Test issue", "Closed"), + )) + .mount(&server) + .await; + + // No POST mock — should not reach transition + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Re") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(!output.status.success(), "Expected failure, stderr: {stderr}"); + assert!( + stderr.contains("Ambiguous"), + "Expected ambiguity error in stderr: {stderr}" + ); +} +``` + +- [ ] **Step 8: Run test to verify it passes** + +Run: `cargo test test_move_ambiguous_across_transition_and_status_names -- --nocapture` +Expected: PASS — exits non-zero with "Ambiguous" error. + +- [ ] **Step 9: Add test for error message format** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_move_no_match_shows_status_names() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response("FOO-1", "Test issue", "To Do")), + ) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Nonexistent") + .output() + .unwrap(); + + let 
stderr = String::from_utf8_lossy(&output.stderr); + assert!(!output.status.success(), "Expected failure, stderr: {stderr}"); + assert!( + stderr.contains("Complete (→ Completed)"), + "Expected enriched error format in stderr: {stderr}" + ); + assert!( + stderr.contains("Review (→ In Review)"), + "Expected enriched error format in stderr: {stderr}" + ); +} +``` + +- [ ] **Step 10: Run test to verify it passes** + +Run: `cargo test test_move_no_match_shows_status_names -- --nocapture` +Expected: PASS — exits non-zero with enriched error message. + +- [ ] **Step 11: Add test for idempotent move with status name input** + +Append to `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_move_idempotent_with_status_name() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ]), + )) + .mount(&server) + .await; + + // Issue is already in "Completed" status + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response("FOO-1", "Test issue", "Completed"), + )) + .mount(&server) + .await; + + // No POST mock — should not reach transition + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Completed") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(output.status.success(), "Expected success (idempotent), stderr: {stderr}"); + assert!( + stderr.contains("already in status"), + "Expected idempotent message in stderr: {stderr}" + ); +} +``` + +- [ ] **Step 12: Run test to verify it passes** + +Run: `cargo test 
test_move_idempotent_with_status_name -- --nocapture` +Expected: PASS — exits 0 with "already in status" message. + +- [ ] **Step 13: Run the full test suite** + +Run: `cargo test` +Expected: All tests pass. + +- [ ] **Step 14: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` +Expected: No warnings, no formatting issues. + +- [ ] **Step 15: Commit** + +```bash +git add tests/issue_commands.rs +git commit -m "test: add integration tests for issue move status name matching (#108)" +``` diff --git a/docs/superpowers/plans/2026-04-03-handle-list-error-propagation.md b/docs/superpowers/plans/2026-04-03-handle-list-error-propagation.md new file mode 100644 index 0000000..fe02f68 --- /dev/null +++ b/docs/superpowers/plans/2026-04-03-handle-list-error-propagation.md @@ -0,0 +1,430 @@ +# handle_list Error Propagation Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Propagate board/sprint API errors in `handle_list` instead of silently swallowing them, with actionable error messages. + +**Architecture:** Replace two error-swallowing match arms in `handle_list` with proper error propagation. 404 from `get_board_config` becomes a `JrError::UserError` (exit code 64). Other errors propagate with `anyhow::Context`. No changes to the API layer. 
+ +**Tech Stack:** Rust, anyhow, thiserror (JrError), wiremock + assert_cmd (tests) + +--- + +### Task 1: Fix `get_board_config` error handling + +**Files:** +- Modify: `src/cli/issue/list.rs:255-261` + +- [ ] **Step 1: Write the failing integration test for board config 404** + +Create test in `tests/issue_list_errors.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn issue_list_board_config_404_reports_error() { + let server = MockServer::start().await; + + // Board config returns 404 (board deleted or no access) + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["Board does not exist or you do not have permission to see it."] + }))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on board config 404, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("Board 42 not found or not accessible"), + "Should mention board ID and accessibility, got: {stderr}" + ); + assert!( + stderr.contains("board_id"), + "Should suggest removing board_id from config, got: {stderr}" + ); + assert!( + stderr.contains("--jql"), + "Should suggest --jql as alternative, got: {stderr}" + ); +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --test issue_list_errors 
issue_list_board_config_404_reports_error -- --nocapture` + +Expected: FAIL — the current code silently swallows the 404 and falls back to project JQL, so the command either succeeds or fails with a misleading "No project or filters specified" message. + +- [ ] **Step 3: Write the failing integration test for board config non-404 error** + +Add to `tests/issue_list_errors.rs`: + +```rust +#[tokio::test] +async fn issue_list_board_config_server_error_propagates() { + let server = MockServer::start().await; + + // Board config returns 500 + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with(ResponseTemplate::new(500).set_body_json(serde_json::json!({ + "errorMessages": ["Internal server error"] + }))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on board config 500, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("Failed to fetch config for board 42"), + "Should include board ID and context, got: {stderr}" + ); + assert!( + stderr.contains("--jql"), + "Should suggest --jql as alternative, got: {stderr}" + ); +} +``` + +- [ ] **Step 4: Run test to verify it fails** + +Run: `cargo test --test issue_list_errors issue_list_board_config_server_error_propagates -- --nocapture` + +Expected: FAIL — same silent swallowing behavior as step 2. 
+ +- [ ] **Step 5: Implement `get_board_config` error propagation** + +In `src/cli/issue/list.rs`, replace lines 255-261: + +```rust +// BEFORE (lines 255-261): +Err(_) => { + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") +} +``` + +With: + +```rust +Err(e) => { + if let Some(JrError::ApiError { status: 404, .. }) = + e.downcast_ref::<JrError>() + { + return Err(JrError::UserError(format!( + "Board {} not found or not accessible. \ + Verify the board exists and you have permission, \ + or remove board_id from .jr.toml. \ + Use --jql to query directly.", + bid + )).into()); + } + return Err(e.context(format!( + "Failed to fetch config for board {}. \ + Remove board_id from .jr.toml or use --jql to query directly", + bid + ))); +} +``` + +Note: `JrError` is already imported at the top of `list.rs` (`use crate::error::JrError;`). + +- [ ] **Step 6: Run both board config tests to verify they pass** + +Run: `cargo test --test issue_list_errors -- --nocapture` + +Expected: Both `issue_list_board_config_404_reports_error` and `issue_list_board_config_server_error_propagates` PASS. + +- [ ] **Step 7: Run full test suite to check for regressions** + +Run: `cargo test` + +Expected: All tests pass. No regressions — the happy path and the "no board_id configured" path are unchanged. + +- [ ] **Step 8: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: No warnings. 
+ +- [ ] **Step 9: Commit** + +```bash +git add src/cli/issue/list.rs tests/issue_list_errors.rs +git commit -m "fix: propagate get_board_config errors in handle_list (#32)" +``` + +### Task 2: Fix `list_sprints` error handling + +**Files:** +- Modify: `src/cli/issue/list.rs:234-243` + +- [ ] **Step 1: Write the failing integration test for sprint list error** + +Add to `tests/issue_list_errors.rs`: + +```rust +#[tokio::test] +async fn issue_list_sprint_error_propagates() { + let server = MockServer::start().await; + + // Board config succeeds → scrum board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::board_config_response("scrum")), + ) + .mount(&server) + .await; + + // Sprint list returns 500 + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/sprint")) + .respond_with(ResponseTemplate::new(500).set_body_json(serde_json::json!({ + "errorMessages": ["Internal server error"] + }))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on sprint list error, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("Failed to list sprints for board 42"), + "Should mention board ID and sprints, got: {stderr}" + ); + assert!( + stderr.contains("--jql"), + "Should suggest --jql as alternative, got: {stderr}" + ); +} +``` + +- [ ] **Step 2: Write the test for no-active-sprint fallback (existing behavior)** + +Add to 
`tests/issue_list_errors.rs`: + +```rust +use wiremock::matchers::query_param; + +#[tokio::test] +async fn issue_list_no_active_sprint_falls_back_to_project_jql() { + let server = MockServer::start().await; + + // Board config succeeds → scrum board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::board_config_response("scrum")), + ) + .mount(&server) + .await; + + // Sprint list returns empty (no active sprint) + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/sprint")) + .and(query_param("state", "active")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_list_response(vec![])), + ) + .mount(&server) + .await; + + // Search endpoint returns issues (fallback JQL works) + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_search_response(vec![ + common::fixtures::issue_response("PROJ-1", "Test Issue", "To Do"), + ])), + ) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Should succeed with fallback JQL, stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + assert!( + stdout.contains("PROJ-1"), + "Should show fallback results, got: {stdout}" + ); +} +``` + +- [ ] **Step 3: Run the sprint error test to verify it fails** + +Run: `cargo test --test issue_list_errors issue_list_sprint_error_propagates -- --nocapture` + +Expected: 
FAIL — current code swallows the error via `_ =>` catch-all. + +- [ ] **Step 4: Run the no-active-sprint test to verify it already passes** + +Run: `cargo test --test issue_list_errors issue_list_no_active_sprint_falls_back -- --nocapture` + +Expected: PASS — this is existing behavior that must be preserved. + +- [ ] **Step 5: Implement `list_sprints` error propagation** + +In `src/cli/issue/list.rs`, replace lines 234-243: + +```rust +// BEFORE (lines 234-243): +_ => { + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") +} +``` + +With two explicit match arms: + +```rust +Ok(_) => { + // No active sprint — fall back to project-scoped JQL + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) + )); + } + (parts, "updated DESC") +} +Err(e) => { + return Err(e.context(format!( + "Failed to list sprints for board {}. \ + Use --jql to query directly", + bid + ))); +} +``` + +- [ ] **Step 6: Run all issue_list_errors tests** + +Run: `cargo test --test issue_list_errors -- --nocapture` + +Expected: All 4 tests pass: +- `issue_list_board_config_404_reports_error` — PASS +- `issue_list_board_config_server_error_propagates` — PASS +- `issue_list_sprint_error_propagates` — PASS +- `issue_list_no_active_sprint_falls_back_to_project_jql` — PASS + +- [ ] **Step 7: Run full test suite** + +Run: `cargo test` + +Expected: All tests pass, no regressions. + +- [ ] **Step 8: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: No warnings. 
+ +- [ ] **Step 9: Commit** + +```bash +git add src/cli/issue/list.rs tests/issue_list_errors.rs +git commit -m "fix: propagate list_sprints errors in handle_list (#32)" +``` diff --git a/docs/superpowers/plans/2026-04-03-issue-create-url.md b/docs/superpowers/plans/2026-04-03-issue-create-url.md new file mode 100644 index 0000000..cc80e5a --- /dev/null +++ b/docs/superpowers/plans/2026-04-03-issue-create-url.md @@ -0,0 +1,135 @@ +# Issue Create Browse URL Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add the Jira browse URL to `issue create` table and JSON output, matching the `gh` CLI pattern. + +**Architecture:** Construct the browse URL from `client.instance_url()` + `/browse/` + key in `handle_create`. Table output gets the URL on a second line (plain text, not green). JSON output gets a `url` field alongside `key`. Single file change in `src/cli/issue/create.rs`, one integration test added to `tests/issue_commands.rs`. 
+ +**Tech Stack:** Rust + +**Spec:** `docs/superpowers/specs/2026-04-03-issue-create-url-design.md` + +--- + +## File Map + +| File | Change type | What changes | +|------|-------------|--------------| +| `src/cli/issue/create.rs` | Modify | Update table and JSON output arms in `handle_create` to include browse URL | +| `tests/issue_commands.rs` | Modify | Add integration test verifying browse URL in create response | + +--- + +### Task 1: Add browse URL to `issue create` output and test it + +**Files:** +- Modify: `src/cli/issue/create.rs:136-143` +- Modify: `tests/issue_commands.rs` (append new test) + +- [ ] **Step 1: Write the integration test for URL construction building blocks** + +Add this test at the end of `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_create_issue_response_includes_browse_url() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("URL-1")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + let response = client.create_issue(serde_json::json!({ + "project": {"key": "URL"}, + "issuetype": {"name": "Task"}, + "summary": "Test browse URL", + })).await.unwrap(); + + // Verify the key is returned + assert_eq!(response.key, "URL-1"); + + // Verify browse URL can be constructed from instance_url + let browse_url = format!( + "{}/browse/{}", + client.instance_url().trim_end_matches('/'), + response.key + ); + assert!( + browse_url.contains("/browse/URL-1"), + "Expected browse URL to contain /browse/URL-1, got: {browse_url}" + ); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test issue_commands test_create_issue_response_includes_browse_url` + +Expected: PASS. 
This test validates the building blocks (`create_issue` API response + `instance_url()`) that the production code depends on for URL construction. + +- [ ] **Step 3: Implement the table output change** + +In `src/cli/issue/create.rs`, change lines 136-143 from: + +```rust + match output_format { + OutputFormat::Json => { + println!("{}", serde_json::to_string_pretty(&response)?); + } + OutputFormat::Table => { + output::print_success(&format!("Created issue {}", response.key)); + } + } +``` + +To: + +```rust + let browse_url = format!( + "{}/browse/{}", + client.instance_url().trim_end_matches('/'), + response.key + ); + + match output_format { + OutputFormat::Json => { + let json_response = json!({ + "key": response.key, + "url": browse_url, + }); + println!("{}", serde_json::to_string_pretty(&json_response)?); + } + OutputFormat::Table => { + output::print_success(&format!("Created issue {}", response.key)); + println!("{}", browse_url); + } + } +``` + +- [ ] **Step 4: Run all tests to verify no regressions** + +Run: `cargo test` + +Expected: All tests pass. The existing create tests (`test_create_issue_with_assignee`, `test_create_issue_without_assignee`, etc.) still pass because they assert on `response.key`, not on stdout content. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` + +Expected: No warnings, no format violations. 
+ +- [ ] **Step 6: Commit** + +```bash +git add src/cli/issue/create.rs tests/issue_commands.rs +git commit -m "feat: add browse URL to issue create output (#112)" +``` diff --git a/docs/superpowers/plans/2026-04-03-partial-match-duplicate-names.md b/docs/superpowers/plans/2026-04-03-partial-match-duplicate-names.md new file mode 100644 index 0000000..f824ef8 --- /dev/null +++ b/docs/superpowers/plans/2026-04-03-partial-match-duplicate-names.md @@ -0,0 +1,1033 @@ +# partial_match Duplicate Name Disambiguation Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Detect and surface duplicate exact matches in `partial_match` so callers — especially user-resolution functions — can disambiguate instead of silently picking the first match. + +**Architecture:** Add `ExactMultiple(Vec<String>)` variant to `MatchResult`. Update `partial_match()` to collect all exact matches. Non-user callers add a trivial match arm. User-resolution callers (`resolve_user`, `resolve_assignee`, `resolve_assignee_by_project`, `resolve_team_field`) handle duplicates with interactive disambiguation or `--no-input` error. Fix index-based mapping in all existing branches. 
+ +**Tech Stack:** Rust, partial_match module, dialoguer (interactive prompts), wiremock + assert_cmd (integration tests), proptest (property tests) + +--- + +### Task 1: Add `ExactMultiple` variant and update `partial_match()` logic + +**Files:** +- Modify: `src/partial_match.rs:1-34` (enum + function) + +- [ ] **Step 1: Write the failing unit test for duplicate exact matches** + +Add to the `tests` module in `src/partial_match.rs`: + +```rust +#[test] +fn test_exact_match_duplicate_returns_exact_multiple() { + let candidates = vec![ + "John Smith".into(), + "Jane Doe".into(), + "John Smith".into(), + ]; + match partial_match("John Smith", &candidates) { + MatchResult::ExactMultiple(names) => { + assert_eq!(names.len(), 2); + assert!(names.iter().all(|n| n == "John Smith")); + } + other => panic!("Expected ExactMultiple, got {:?}", other), + } +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cargo test --lib partial_match::tests::test_exact_match_duplicate_returns_exact_multiple -- --nocapture` + +Expected: FAIL — `ExactMultiple` variant does not exist yet. + +- [ ] **Step 3: Add `ExactMultiple` variant to `MatchResult`** + +In `src/partial_match.rs`, replace lines 1-9: + +```rust +/// Result of attempting a partial match against a list of candidates. +#[derive(Debug)] +pub enum MatchResult { + /// Exactly one match found + Exact(String), + /// Multiple candidates share the same exact (case-insensitive) name + ExactMultiple(Vec<String>), + /// Multiple matches — caller should prompt for disambiguation + Ambiguous(Vec<String>), + /// No matches + None(Vec<String>), +} +``` + +Note: `#[derive(Debug)]` is added so tests can use `{:?}` formatting in panic messages. + +- [ ] **Step 4: Update `partial_match()` to collect all exact matches** + +In `src/partial_match.rs`, replace lines 12-34 (the entire `partial_match` function body): + +```rust +/// Case-insensitive substring match against candidates. 
+pub fn partial_match(input: &str, candidates: &[String]) -> MatchResult { + let lower_input = input.to_lowercase(); + + // Collect all exact matches (case-insensitive) + let exact_matches: Vec<String> = candidates + .iter() + .filter(|c| c.to_lowercase() == lower_input) + .cloned() + .collect(); + + match exact_matches.len() { + 1 => return MatchResult::Exact(exact_matches.into_iter().next().unwrap()), + n if n > 1 => return MatchResult::ExactMultiple(exact_matches), + _ => {} + } + + // Try substring match + let matches: Vec<String> = candidates + .iter() + .filter(|c| c.to_lowercase().contains(&lower_input)) + .cloned() + .collect(); + + match matches.len() { + 0 => MatchResult::None(candidates.to_vec()), + 1 => MatchResult::Exact(matches.into_iter().next().unwrap()), + _ => MatchResult::Ambiguous(matches), + } +} +``` + +- [ ] **Step 5: Run the duplicate test to verify it passes** + +Run: `cargo test --lib partial_match::tests::test_exact_match_duplicate_returns_exact_multiple -- --nocapture` + +Expected: PASS. 
+ +- [ ] **Step 6: Write additional unit tests** + +Add to the `tests` module in `src/partial_match.rs`: + +```rust +#[test] +fn test_exact_match_duplicate_case_insensitive() { + let candidates = vec![ + "John Smith".into(), + "john smith".into(), + ]; + match partial_match("john smith", &candidates) { + MatchResult::ExactMultiple(names) => { + assert_eq!(names.len(), 2); + // Preserves original casing + assert_eq!(names[0], "John Smith"); + assert_eq!(names[1], "john smith"); + } + other => panic!("Expected ExactMultiple, got {:?}", other), + } +} + +#[test] +fn test_exact_match_three_duplicates() { + let candidates = vec![ + "John Smith".into(), + "Jane Doe".into(), + "John Smith".into(), + "John Smith".into(), + ]; + match partial_match("John Smith", &candidates) { + MatchResult::ExactMultiple(names) => { + assert_eq!(names.len(), 3); + } + other => panic!("Expected ExactMultiple, got {:?}", other), + } +} +``` + +- [ ] **Step 7: Write proptest for duplicate candidates** + +Add to the `proptests` module in `src/partial_match.rs`: + +```rust +#[test] +fn duplicate_candidates_yield_exact_multiple(idx in 0usize..4) { + let base: Vec<String> = vec![ + "In Progress".into(), "In Review".into(), + "Blocked".into(), "Done".into(), + ]; + // Duplicate one candidate + let mut candidates = base.clone(); + candidates.push(base[idx].clone()); + let input = base[idx].clone(); + match partial_match(&input, &candidates) { + MatchResult::ExactMultiple(names) => { + prop_assert!(names.len() >= 2); + for name in &names { + prop_assert_eq!(name.to_lowercase(), input.to_lowercase()); + } + } + _ => prop_assert!(false, "Expected ExactMultiple for duplicated '{}'", input), + } +} +``` + +- [ ] **Step 8: Run all partial_match tests** + +Run: `cargo test --lib partial_match -- --nocapture` + +Expected: All tests pass including existing ones (no regressions). 
+ +- [ ] **Step 9: Commit** + +```bash +git add src/partial_match.rs +git commit -m "fix: detect duplicate exact matches in partial_match (#117)" +``` + +--- + +### Task 2: Update non-user callers with trivial `ExactMultiple` arm + +**Files:** +- Modify: `src/cli/issue/workflow.rs:129-142` +- Modify: `src/cli/issue/list.rs:179-180` +- Modify: `src/cli/issue/links.rs:62-63` and `130-131` +- Modify: `src/cli/assets.rs:332-333`, `453-454`, `649-650` +- Modify: `src/cli/queue.rs:147-163` and `202-216` + +- [ ] **Step 1: Verify the build fails** + +Run: `cargo build 2>&1 | head -40` + +Expected: FAIL — non-exhaustive match errors at every `MatchResult` match site because `ExactMultiple` is not handled. + +- [ ] **Step 2: Update `src/cli/issue/workflow.rs`** + +Add the `ExactMultiple` arm after the `Exact` arm at line 130. Insert between the `Exact(name) => { ... }` arm (lines 130-142) and the `Ambiguous` arm (line 143): + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate transition names not expected; take first + let name = names.into_iter().next().unwrap(); + let idx = candidates + .iter() + .find(|(n, _)| *n == name) + .map(|(_, i)| *i) + .ok_or_else(|| { + anyhow::anyhow!( + "Internal error: matched candidate \"{}\" not found. Please report this as a bug.", + name + ) + })?; + &transitions[idx] + } +``` + +- [ ] **Step 3: Update `src/cli/issue/list.rs`** + +Add the `ExactMultiple` arm after line 180 (`MatchResult::Exact(name) => Some(name),`). Insert between `Exact` and `Ambiguous`: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate status names not expected; take first + Some(names.into_iter().next().unwrap()) + } +``` + +- [ ] **Step 4: Update `src/cli/issue/links.rs` — first call site (handle_link)** + +Add the `ExactMultiple` arm after line 63 (`MatchResult::Exact(name) => name,`). 
Insert between `Exact` and `Ambiguous`: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate link type names not expected; take first + names.into_iter().next().unwrap() + } +``` + +- [ ] **Step 5: Update `src/cli/issue/links.rs` — second call site (handle_unlink)** + +Add the `ExactMultiple` arm after line 131 (`MatchResult::Exact(name) => name,`). Insert between `Exact` and `Ambiguous`: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate link type names not expected; take first + names.into_iter().next().unwrap() + } +``` + +- [ ] **Step 6: Update `src/cli/assets.rs` — first call site (ticket status filter)** + +Add the `ExactMultiple` arm after line 333 (`MatchResult::Exact(name) => name,`). Insert between `Exact` and `Ambiguous`: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate status names not expected; take first + names.into_iter().next().unwrap() + } +``` + +- [ ] **Step 7: Update `src/cli/assets.rs` — second call site (resolve_schema)** + +Add the `ExactMultiple` arm after line 454 (`MatchResult::Exact(name) => Ok(schemas.iter().find(|s| s.name == name).unwrap()),`). Insert between `Exact` and `Ambiguous`: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate schema names not expected; take first + let name = names.into_iter().next().unwrap(); + Ok(schemas.iter().find(|s| s.name == name).unwrap()) + } +``` + +- [ ] **Step 8: Update `src/cli/assets.rs` — third call site (object type resolution)** + +Add the `ExactMultiple` arm after line 650 (`MatchResult::Exact(name) => name,`). Insert between `Exact` and `Ambiguous`: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate type names not expected after dedup; take first + names.into_iter().next().unwrap() + } +``` + +- [ ] **Step 9: Update `src/cli/queue.rs` — first call site (resolve_queue_id)** + +The queue code at lines 148-162 already handles duplicates within the `Exact` arm. 
Add `ExactMultiple` arm after the `Exact` arm (after line 163, before `Ambiguous`): + +```rust + MatchResult::ExactMultiple(names) => { + // ExactMultiple means partial_match found duplicate candidate strings. + // Collect all queues matching any of these names and report as duplicates. + let matching: Vec<&crate::types::jsm::Queue> = queues + .iter() + .filter(|q| names.contains(&q.name)) + .collect(); + let ids: Vec<String> = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). Use --id {} to specify.", + names[0], + ids.join(", "), + ids[0] + )) + .into()) + } +``` + +- [ ] **Step 10: Update `src/cli/queue.rs` — second call site (test helper `find_queue_id`)** + +Add the `ExactMultiple` arm after line 211 (after the `Exact` arm, before `Ambiguous`). This is in the `#[cfg(test)]` module: + +```rust + crate::partial_match::MatchResult::ExactMultiple(names) => { + Err(format!("duplicate: {}", names.len())) + } +``` + +- [ ] **Step 11: Verify the build compiles** + +Run: `cargo build 2>&1 | tail -5` + +Expected: Build succeeds. + +- [ ] **Step 12: Run all tests to verify no regressions** + +Run: `cargo test` + +Expected: All tests pass. + +- [ ] **Step 13: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: No warnings. 
+ +- [ ] **Step 14: Commit** + +```bash +git add src/cli/issue/workflow.rs src/cli/issue/list.rs src/cli/issue/links.rs src/cli/assets.rs src/cli/queue.rs +git commit -m "fix: add ExactMultiple match arms to all partial_match callers (#117)" +``` + +--- + +### Task 3: Fix user-resolution callers with duplicate disambiguation + +**Files:** +- Modify: `src/cli/issue/helpers.rs:7-70` (resolve_team_field) +- Modify: `src/cli/issue/helpers.rs:103-168` (resolve_user) +- Modify: `src/cli/issue/helpers.rs:177-240` (resolve_assignee) +- Modify: `src/cli/issue/helpers.rs:250-318` (resolve_assignee_by_project) + +- [ ] **Step 1: Write the failing integration test for duplicate user names in `--no-input` mode** + +Create test file `tests/duplicate_user_disambiguation.rs`: + +```rust +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Helper: build a user JSON object for wiremock responses. 
+fn user_json(account_id: &str, display_name: &str, email: Option<&str>) -> serde_json::Value { + let mut obj = serde_json::json!({ + "accountId": account_id, + "displayName": display_name, + "active": true, + }); + if let Some(e) = email { + obj["emailAddress"] = serde_json::json!(e); + } + obj +} + +#[tokio::test] +async fn issue_list_assignee_duplicate_names_no_input_errors() { + let server = MockServer::start().await; + + // User search returns two users with same display name + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list", "--assignee", "John Smith", "--no-input"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("acc-john-1"), + "Should list first accountId, got: {stderr}" + ); + assert!( + stderr.contains("acc-john-2"), + "Should list second accountId, got: {stderr}" + ); + assert!( + stderr.contains("John Smith"), + "Should mention the duplicate name, got: {stderr}" + ); +} + +#[tokio::test] +async fn issue_assign_duplicate_names_no_input_errors() { + let server = MockServer::start().await; + + // Assignable user search returns two users with same display name + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + 
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + // Mock get issue (needed for assign flow) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("FOO-1", "Test issue", None), + )) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "assign", "FOO-1", "--to", "John Smith", "--no-input"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("acc-john-1"), + "Should list first accountId, got: {stderr}" + ); + assert!( + stderr.contains("acc-john-2"), + "Should list second accountId, got: {stderr}" + ); +} +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `cargo test --test duplicate_user_disambiguation -- --nocapture` + +Expected: FAIL — current code does not detect duplicates, either succeeds silently (picking first user) or fails with a different error. 
+
+- [ ] **Step 3: Implement `ExactMultiple` handling in `resolve_user`**
+
+In `src/cli/issue/helpers.rs`, add the `ExactMultiple` arm to `resolve_user` (between the `Exact` arm ending at line 141 and the `Ambiguous` arm at line 142):
+
+```rust
+    crate::partial_match::MatchResult::ExactMultiple(_) => {
+        // Multiple users share the same display name — disambiguate
+        let name_lower = name.to_lowercase();
+        let duplicates: Vec<&crate::types::jira::User> = active_users
+            .iter()
+            .filter(|u| u.display_name.to_lowercase() == name_lower)
+            .collect();
+
+        if no_input {
+            let lines: Vec<String> = duplicates
+                .iter()
+                .map(|u| {
+                    let label = u
+                        .email_address
+                        .as_deref()
+                        .unwrap_or(&u.account_id);
+                    format!(" {} (account: {})", u.display_name, label)
+                })
+                .collect();
+            anyhow::bail!(
+                "Multiple users named \"{}\" found:\n{}\nSpecify the accountId directly or use a more specific name.",
+                name,
+                lines.join("\n")
+            );
+        }
+
+        // Interactive: show disambiguation prompt with email or accountId
+        let labels: Vec<String> = duplicates
+            .iter()
+            .map(|u| {
+                match &u.email_address {
+                    Some(email) => format!("{} ({})", u.display_name, email),
+                    None => format!("{} ({})", u.display_name, u.account_id),
+                }
+            })
+            .collect();
+        let selection = dialoguer::Select::new()
+            .with_prompt(format!("Multiple users named \"{}\"", name))
+            .items(&labels)
+            .interact()?;
+        Ok(duplicates[selection].account_id.clone())
+    }
+```
+
+- [ ] **Step 4: Implement `ExactMultiple` handling in `resolve_assignee`**
+
+In `src/cli/issue/helpers.rs`, add the `ExactMultiple` arm to `resolve_assignee` (between the `Exact` arm ending at line 211 and the `Ambiguous` arm at line 212):
+
+```rust
+    crate::partial_match::MatchResult::ExactMultiple(_) => {
+        let name_lower = name.to_lowercase();
+        let duplicates: Vec<&crate::types::jira::User> = users
+            .iter()
+            .filter(|u| u.display_name.to_lowercase() == name_lower)
+            .collect();
+
+        if no_input {
+            let lines: Vec<String> = duplicates
+                .iter()
+                .map(|u| {
+                    let label
= u
+                        .email_address
+                        .as_deref()
+                        .unwrap_or(&u.account_id);
+                    format!(" {} (account: {})", u.display_name, label)
+                })
+                .collect();
+            anyhow::bail!(
+                "Multiple users named \"{}\" found:\n{}\nSpecify the accountId directly or use a more specific name.",
+                name,
+                lines.join("\n")
+            );
+        }
+
+        let labels: Vec<String> = duplicates
+            .iter()
+            .map(|u| {
+                match &u.email_address {
+                    Some(email) => format!("{} ({})", u.display_name, email),
+                    None => format!("{} ({})", u.display_name, u.account_id),
+                }
+            })
+            .collect();
+        let selection = dialoguer::Select::new()
+            .with_prompt(format!("Multiple users named \"{}\"", name))
+            .items(&labels)
+            .interact()?;
+        Ok((
+            duplicates[selection].account_id.clone(),
+            duplicates[selection].display_name.clone(),
+        ))
+    }
+```
+
+- [ ] **Step 5: Implement `ExactMultiple` handling in `resolve_assignee_by_project`**
+
+In `src/cli/issue/helpers.rs`, add the `ExactMultiple` arm to `resolve_assignee_by_project` (between the `Exact` arm ending at line 289 and the `Ambiguous` arm at line 290):
+
+```rust
+    crate::partial_match::MatchResult::ExactMultiple(_) => {
+        let name_lower = name.to_lowercase();
+        let duplicates: Vec<&crate::types::jira::User> = users
+            .iter()
+            .filter(|u| u.display_name.to_lowercase() == name_lower)
+            .collect();
+
+        if no_input {
+            let lines: Vec<String> = duplicates
+                .iter()
+                .map(|u| {
+                    let label = u
+                        .email_address
+                        .as_deref()
+                        .unwrap_or(&u.account_id);
+                    format!(" {} (account: {})", u.display_name, label)
+                })
+                .collect();
+            anyhow::bail!(
+                "Multiple users named \"{}\" found:\n{}\nSpecify the accountId directly or use a more specific name.",
+                name,
+                lines.join("\n")
+            );
+        }
+
+        let labels: Vec<String> = duplicates
+            .iter()
+            .map(|u| {
+                match &u.email_address {
+                    Some(email) => format!("{} ({})", u.display_name, email),
+                    None => format!("{} ({})", u.display_name, u.account_id),
+                }
+            })
+            .collect();
+        let selection = dialoguer::Select::new()
+            .with_prompt(format!("Multiple users named \"{}\"", name))
+            
.items(&labels)
+            .interact()?;
+        Ok((
+            duplicates[selection].account_id.clone(),
+            duplicates[selection].display_name.clone(),
+        ))
+    }
+```
+
+- [ ] **Step 6: Implement `ExactMultiple` handling in `resolve_team_field`**
+
+In `src/cli/issue/helpers.rs`, add the `ExactMultiple` arm to `resolve_team_field` (between the `Exact` arm ending at line 42 and the `Ambiguous` arm at line 43):
+
+```rust
+    crate::partial_match::MatchResult::ExactMultiple(_) => {
+        let name_lower = team_name.to_lowercase();
+        let duplicates: Vec<&crate::cache::CachedTeam> = teams
+            .iter()
+            .filter(|t| t.name.to_lowercase() == name_lower)
+            .collect();
+
+        if no_input {
+            let lines: Vec<String> = duplicates
+                .iter()
+                .map(|t| format!(" {} (id: {})", t.name, t.id))
+                .collect();
+            anyhow::bail!(
+                "Multiple teams named \"{}\" found:\n{}\nUse a more specific name.",
+                team_name,
+                lines.join("\n")
+            );
+        }
+
+        let labels: Vec<String> = duplicates
+            .iter()
+            .map(|t| format!("{} ({})", t.name, t.id))
+            .collect();
+        let selection = dialoguer::Select::new()
+            .with_prompt(format!("Multiple teams named \"{}\"", team_name))
+            .items(&labels)
+            .interact()?;
+        Ok((field_id, duplicates[selection].id.clone()))
+    }
+```
+
+- [ ] **Step 7: Run the integration tests**
+
+Run: `cargo test --test duplicate_user_disambiguation -- --nocapture`
+
+Expected: Both `issue_list_assignee_duplicate_names_no_input_errors` and `issue_assign_duplicate_names_no_input_errors` PASS.
+
+- [ ] **Step 8: Run full test suite**
+
+Run: `cargo test`
+
+Expected: All tests pass, no regressions.
+
+- [ ] **Step 9: Run clippy**
+
+Run: `cargo clippy -- -D warnings`
+
+Expected: No warnings.
+ +- [ ] **Step 10: Commit** + +```bash +git add src/cli/issue/helpers.rs tests/duplicate_user_disambiguation.rs +git commit -m "fix: disambiguate users and teams with duplicate display names (#117, #122)" +``` + +--- + +### Task 4: Fix index-based mapping in existing `Exact` and `Ambiguous` branches + +**Files:** +- Modify: `src/cli/issue/helpers.rs` — all four resolve functions, `Exact` and `Ambiguous` arms + +This task fixes the secondary bug: `.find(|u| u.display_name == name)` in the `Exact` and `Ambiguous` branches can return the wrong user when display names collide. We replace name-based `.find()` with index-aware lookup. + +- [ ] **Step 1: Write a failing integration test for `Exact` match with duplicate display names** + +Add to `tests/duplicate_user_disambiguation.rs`: + +```rust +#[tokio::test] +async fn issue_list_assignee_exact_match_among_multiple_results_no_input_errors() { + let server = MockServer::start().await; + + // User search returns three users: two share "John Smith", one is "John Smithson" + // partial_match("John Smith") → ExactMultiple (the two John Smiths) + // This test verifies that even when the API returns a superset, + // the ExactMultiple path catches the duplicate and errors in --no-input mode. 
+ Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-smithson", "John Smithson", None), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list", "--assignee", "John Smith", "--no-input"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names even with extra results, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("acc-john-1") && stderr.contains("acc-john-2"), + "Should list both duplicate accountIds, got: {stderr}" + ); + // Should NOT contain the non-duplicate user + assert!( + !stderr.contains("acc-smithson"), + "Should not mention non-duplicate user, got: {stderr}" + ); +} +``` + +- [ ] **Step 2: Run the test to verify it passes (already handled by ExactMultiple)** + +Run: `cargo test --test duplicate_user_disambiguation issue_list_assignee_exact_match_among_multiple_results -- --nocapture` + +Expected: PASS — the `ExactMultiple` arm from Task 3 already handles this case. This test serves as a regression guard. 
+ +- [ ] **Step 3: Fix `resolve_user` `Exact` branch — use position-based lookup** + +In `src/cli/issue/helpers.rs`, replace the `Exact` arm in `resolve_user` (lines 135-141): + +```rust + crate::partial_match::MatchResult::Exact(matched_name) => { + let user = active_users + .iter() + .find(|u| u.display_name == matched_name) + .expect("matched name must exist in active_users"); + Ok(user.account_id.clone()) + } +``` + +With: + +```rust + crate::partial_match::MatchResult::Exact(ref matched_name) => { + let idx = active_users + .iter() + .position(|u| u.display_name == *matched_name) + .expect("matched name must exist in active_users"); + Ok(active_users[idx].account_id.clone()) + } +``` + +Note: Using `position` + index is functionally identical to `find` for the single-match case, but establishes the index-based pattern consistently. + +- [ ] **Step 4: Fix `resolve_user` `Ambiguous` interactive branch — use index-based lookup** + +In `src/cli/issue/helpers.rs`, replace the `Ambiguous` arm's interactive section in `resolve_user` (lines 150-159): + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let user = active_users + .iter() + .find(|u| &u.display_name == selected_name) + .expect("selected name must exist in active_users"); + Ok(user.account_id.clone()) +``` + +With: + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let idx = active_users + .iter() + .position(|u| u.display_name == *selected_name) + .expect("selected name must exist in active_users"); + Ok(active_users[idx].account_id.clone()) +``` + +- [ ] **Step 5: Fix `resolve_assignee` `Exact` branch** + +In `src/cli/issue/helpers.rs`, replace the `Exact` arm in `resolve_assignee` (lines 205-211): + +```rust + 
crate::partial_match::MatchResult::Exact(matched_name) => { + let user = users + .iter() + .find(|u| u.display_name == matched_name) + .expect("matched name must exist in users"); + Ok((user.account_id.clone(), user.display_name.clone())) + } +``` + +With: + +```rust + crate::partial_match::MatchResult::Exact(ref matched_name) => { + let idx = users + .iter() + .position(|u| u.display_name == *matched_name) + .expect("matched name must exist in users"); + Ok((users[idx].account_id.clone(), users[idx].display_name.clone())) + } +``` + +- [ ] **Step 6: Fix `resolve_assignee` `Ambiguous` interactive branch** + +In `src/cli/issue/helpers.rs`, replace the `Ambiguous` arm's interactive section in `resolve_assignee` (lines 220-229): + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let user = users + .iter() + .find(|u| &u.display_name == selected_name) + .expect("selected name must exist in users"); + Ok((user.account_id.clone(), user.display_name.clone())) +``` + +With: + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let idx = users + .iter() + .position(|u| u.display_name == *selected_name) + .expect("selected name must exist in users"); + Ok((users[idx].account_id.clone(), users[idx].display_name.clone())) +``` + +- [ ] **Step 7: Fix `resolve_assignee_by_project` `Exact` branch** + +In `src/cli/issue/helpers.rs`, replace the `Exact` arm in `resolve_assignee_by_project` (lines 283-289): + +```rust + crate::partial_match::MatchResult::Exact(matched_name) => { + let user = users + .iter() + .find(|u| u.display_name == matched_name) + .expect("matched name must exist in users"); + Ok((user.account_id.clone(), user.display_name.clone())) + } +``` + +With: + +```rust + 
crate::partial_match::MatchResult::Exact(ref matched_name) => { + let idx = users + .iter() + .position(|u| u.display_name == *matched_name) + .expect("matched name must exist in users"); + Ok((users[idx].account_id.clone(), users[idx].display_name.clone())) + } +``` + +- [ ] **Step 8: Fix `resolve_assignee_by_project` `Ambiguous` interactive branch** + +In `src/cli/issue/helpers.rs`, replace the `Ambiguous` arm's interactive section in `resolve_assignee_by_project` (lines 298-307): + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let user = users + .iter() + .find(|u| &u.display_name == selected_name) + .expect("selected name must exist in users"); + Ok((user.account_id.clone(), user.display_name.clone())) +``` + +With: + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let idx = users + .iter() + .position(|u| u.display_name == *selected_name) + .expect("selected name must exist in users"); + Ok((users[idx].account_id.clone(), users[idx].display_name.clone())) +``` + +- [ ] **Step 9: Fix `resolve_team_field` `Exact` branch** + +In `src/cli/issue/helpers.rs`, replace the `Exact` arm in `resolve_team_field` (lines 36-42): + +```rust + crate::partial_match::MatchResult::Exact(matched_name) => { + let team = teams + .iter() + .find(|t| t.name == matched_name) + .expect("matched name must exist in teams"); + Ok((field_id, team.id.clone())) + } +``` + +With: + +```rust + crate::partial_match::MatchResult::Exact(ref matched_name) => { + let idx = teams + .iter() + .position(|t| t.name == *matched_name) + .expect("matched name must exist in teams"); + Ok((field_id, teams[idx].id.clone())) + } +``` + +- [ ] **Step 10: Fix `resolve_team_field` `Ambiguous` interactive branch** + +In 
`src/cli/issue/helpers.rs`, replace the `Ambiguous` arm's interactive section in `resolve_team_field` (lines 52-61): + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple teams match \"{team_name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let team = teams + .iter() + .find(|t| &t.name == selected_name) + .expect("selected name must exist in teams"); + Ok((field_id, team.id.clone())) +``` + +With: + +```rust + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple teams match \"{team_name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let idx = teams + .iter() + .position(|t| t.name == *selected_name) + .expect("selected name must exist in teams"); + Ok((field_id, teams[idx].id.clone())) +``` + +- [ ] **Step 11: Run all tests** + +Run: `cargo test` + +Expected: All tests pass. + +- [ ] **Step 12: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: No warnings. + +- [ ] **Step 13: Commit** + +```bash +git add src/cli/issue/helpers.rs +git commit -m "fix: use index-based lookup in Exact and Ambiguous branches (#117)" +``` diff --git a/docs/superpowers/plans/2026-04-03-queue-case-insensitive-test.md b/docs/superpowers/plans/2026-04-03-queue-case-insensitive-test.md new file mode 100644 index 0000000..1ea673f --- /dev/null +++ b/docs/superpowers/plans/2026-04-03-queue-case-insensitive-test.md @@ -0,0 +1,108 @@ +# Queue Case-Insensitive Duplicate Name Test Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add an integration test that exercises the case-insensitive `to_lowercase()` filter in `resolve_queue_by_name` using mixed-case queue names. 
+ +**Architecture:** One wiremock-backed integration test appended to `tests/queue.rs`. Mocks two queues with different casing ("Triage" and "TRIAGE"), calls `resolve_queue_by_name` with lowercase input ("triage"), and asserts on the error message. No production code changes. + +**Tech Stack:** Rust, wiremock, tokio + +**Spec:** `docs/superpowers/specs/2026-04-03-queue-case-insensitive-test-design.md` + +--- + +## File Map + +| File | Change type | What changes | +|------|-------------|--------------| +| `tests/queue.rs` | Modify | Append one integration test | + +--- + +### Task 1: Add case-insensitive duplicate queue name integration test + +**Files:** +- Modify: `tests/queue.rs:229` (append after existing test) + +- [ ] **Step 1: Write the integration test** + +Add this test at the end of `tests/queue.rs`: + +```rust +#[tokio::test] +async fn resolve_queue_mixed_case_duplicate_names_error_message() { + let server = MockServer::start().await; + + // Two queues with the same name but different casing + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + .and(query_param("includeCount", "true")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { "id": "30", "name": "Triage", "issueCount": 5 }, + { "id": "40", "name": "TRIAGE", "issueCount": 3 } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + // Lowercase input — matches neither stored name exactly, + // forcing both sides of to_lowercase() to do work + let result = jr::cli::queue::resolve_queue_by_name("15", "triage", &client).await; + + let err = result.unwrap_err(); + let msg = err.to_string(); + assert!( + msg.contains("Multiple queues named \"Triage\""), + "Expected queue name in error, got: {msg}" + ); + assert!( + 
msg.contains("30, 40"), + "Expected both queue IDs in error, got: {msg}" + ); + assert!( + msg.contains("Use --id 30 to specify"), + "Expected --id suggestion in error, got: {msg}" + ); +} +``` + +**Why these assertions work:** `partial_match("triage", &["Triage", "TRIAGE"])` returns `ExactMultiple("Triage")` — the first candidate whose `to_lowercase()` matches the input. The error message uses this matched name. Then `resolve_queue_by_name` filters queues via `q.name.to_lowercase() == "triage"`, collecting both IDs `["30", "40"]`. + +- [ ] **Step 2: Run the new test to verify it passes** + +Run: `cargo test --test queue resolve_queue_mixed_case_duplicate_names_error_message` + +Expected: PASS. The production code at `src/cli/queue.rs:155-158` already handles case-insensitive filtering correctly. + +- [ ] **Step 3: Verify the test would fail without `to_lowercase()`** + +This is a mental verification, not a code change. If `resolve_queue_by_name` used `q.name == name` instead of `q.name.to_lowercase() == name_lower`, the filter at line 158 would match zero queues when called with `"triage"` against `"Triage"` and `"TRIAGE"`, causing a panic on `ids[0]` (empty vector). This confirms the test exercises the `to_lowercase()` logic. + +- [ ] **Step 4: Run all tests to verify no regressions** + +Run: `cargo test` + +Expected: All tests pass. The existing `resolve_queue_duplicate_names_error_message` test is unchanged. + +- [ ] **Step 5: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` + +Expected: No warnings, no format violations. 
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add tests/queue.rs
+git commit -m "test: add case-insensitive duplicate queue name integration test (#131)"
+```
diff --git a/docs/superpowers/plans/2026-04-03-simplify-exact-multiple.md b/docs/superpowers/plans/2026-04-03-simplify-exact-multiple.md
new file mode 100644
index 0000000..511e599
--- /dev/null
+++ b/docs/superpowers/plans/2026-04-03-simplify-exact-multiple.md
@@ -0,0 +1,475 @@
+# Simplify ExactMultiple Variant Implementation Plan
+
+> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
+
+**Goal:** Simplify `ExactMultiple(Vec<String>)` to `ExactMultiple(String)` and replace 6 provably-unreachable match arms with `unreachable!()`.
+
+**Architecture:** Change the enum variant in `partial_match.rs`, then update all 12 call sites across 7 files. Six arms become `unreachable!()`, two filtering callers switch from `names.contains()` to lowercased comparison, four user-resolution callers stay unchanged (already ignore payload). 
+
+**Tech Stack:** Rust, no new dependencies
+
+**Spec:** `docs/superpowers/specs/2026-04-03-simplify-exact-multiple-design.md`
+
+---
+
+## File Map
+
+| File | Change type | What changes |
+|------|-------------|--------------|
+| `src/partial_match.rs` | Modify | Variant `Vec<String>` → `String`, construction, 3 unit tests, 1 proptest |
+| `src/cli/issue/workflow.rs` | Modify | ExactMultiple arm → `unreachable!()` |
+| `src/cli/issue/list.rs` | Modify | ExactMultiple arm → `unreachable!()` |
+| `src/cli/issue/links.rs` | Modify | 2 ExactMultiple arms → `unreachable!()` |
+| `src/cli/assets.rs` | Modify | 2 ExactMultiple arms → `unreachable!()`, 1 ExactMultiple filtering caller updated |
+| `src/cli/queue.rs` | Modify | ExactMultiple filtering caller updated, dead code removed from Exact branch, test helper updated |
+| `src/cli/issue/helpers.rs` | No change | Already uses `ExactMultiple(_)` — compiles as-is with `String` |
+
+---
+
+### Task 1: Change ExactMultiple variant and update tests in partial_match.rs
+
+**Files:**
+- Modify: `src/partial_match.rs:1-200`
+
+- [ ] **Step 1: Update the enum variant from `Vec<String>` to `String`**
+
+In `src/partial_match.rs`, change line 7 from:
+
+```rust
+    /// Multiple candidates share the same exact (case-insensitive) name
+    ExactMultiple(Vec<String>),
+```
+
+To:
+
+```rust
+    /// Multiple candidates share the same exact (case-insensitive) name — carries one representative
+    ExactMultiple(String),
+```
+
+- [ ] **Step 2: Update the construction in `partial_match()`**
+
+In `src/partial_match.rs`, change line 27 from:
+
+```rust
+        n if n > 1 => return MatchResult::ExactMultiple(exact_matches),
+```
+
+To:
+
+```rust
+        n if n > 1 => return MatchResult::ExactMultiple(exact_matches.into_iter().next().unwrap()),
+```
+
+- [ ] **Step 3: Update `test_exact_match_duplicate_returns_exact_multiple`**
+
+In `src/partial_match.rs`, replace the test (lines 103-112) with:
+
+```rust
+    #[test]
+    fn test_exact_match_duplicate_returns_exact_multiple() {
+        let 
candidates = vec!["John Smith".into(), "Jane Doe".into(), "John Smith".into()];
+        match partial_match("John Smith", &candidates) {
+            MatchResult::ExactMultiple(name) => {
+                assert_eq!(name, "John Smith");
+            }
+            other => panic!("Expected ExactMultiple, got {:?}", other),
+        }
+    }
+```
+
+- [ ] **Step 4: Update `test_exact_match_duplicate_case_insensitive`**
+
+In `src/partial_match.rs`, replace the test (lines 114-126) with:
+
+```rust
+    #[test]
+    fn test_exact_match_duplicate_case_insensitive() {
+        let candidates = vec!["John Smith".into(), "john smith".into()];
+        match partial_match("john smith", &candidates) {
+            MatchResult::ExactMultiple(name) => {
+                // Preserves casing of the first match
+                assert_eq!(name, "John Smith");
+            }
+            other => panic!("Expected ExactMultiple, got {:?}", other),
+        }
+    }
+```
+
+- [ ] **Step 5: Update `test_exact_match_three_duplicates`**
+
+In `src/partial_match.rs`, replace the test (lines 128-142) with:
+
+```rust
+    #[test]
+    fn test_exact_match_three_duplicates() {
+        let candidates = vec![
+            "John Smith".into(),
+            "Jane Doe".into(),
+            "John Smith".into(),
+            "John Smith".into(),
+        ];
+        match partial_match("John Smith", &candidates) {
+            MatchResult::ExactMultiple(name) => {
+                assert_eq!(name, "John Smith");
+            }
+            other => panic!("Expected ExactMultiple, got {:?}", other),
+        }
+    }
+```
+
+- [ ] **Step 6: Update the `duplicate_candidates_yield_exact_multiple` proptest**
+
+In `src/partial_match.rs`, replace the proptest (lines 180-198) with:
+
+```rust
+    #[test]
+    fn duplicate_candidates_yield_exact_multiple(idx in 0usize..4) {
+        let base: Vec<String> = vec![
+            "In Progress".into(), "In Review".into(),
+            "Blocked".into(), "Done".into(),
+        ];
+        // Duplicate one candidate
+        let mut candidates = base.clone();
+        candidates.push(base[idx].clone());
+        let input = base[idx].clone();
+        match partial_match(&input, &candidates) {
+            MatchResult::ExactMultiple(name) => {
+                prop_assert_eq!(name.to_lowercase(), input.to_lowercase());
+            }
+            _ => 
prop_assert!(false, "Expected ExactMultiple for duplicated '{}'", input), + } + } +``` + +- [ ] **Step 7: Run partial_match tests to verify** + +Run: `cargo test --lib partial_match` + +Expected: All tests pass (unit tests + proptests). + +- [ ] **Step 8: Commit** + +```bash +git add src/partial_match.rs +git commit -m "refactor: simplify ExactMultiple variant from Vec to String (#127)" +``` + +--- + +### Task 2: Replace 6 unreachable ExactMultiple arms and update 2 filtering callers + +> **Implementation note:** During PR review, it was identified that 5 of the 6 dedup +> sites use case-sensitive dedup while `partial_match` operates case-insensitively. +> Only `workflow.rs` (which uses `to_lowercase()` keys in its HashSet) is truly +> unreachable. The other 5 sites were changed to graceful fallback +> (`MatchResult::ExactMultiple(name) => name`) instead of `unreachable!()`. +> Steps 2-5 and 7 below show the original plan; the actual implementation differs. + +**Files:** +- Modify: `src/cli/issue/workflow.rs:143-157` +- Modify: `src/cli/issue/list.rs:181-184` +- Modify: `src/cli/issue/links.rs:64-67, 136-139` +- Modify: `src/cli/assets.rs:334-337, 459-471, 668-671` +- Modify: `src/cli/queue.rs:148-176, 213-232` + +- [ ] **Step 1: Replace ExactMultiple arm in `workflow.rs`** + +In `src/cli/issue/workflow.rs`, replace lines 143-157: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate transition names not expected; take first + let name = names.into_iter().next().unwrap(); + let idx = candidates + .iter() + .find(|(n, _)| *n == name) + .map(|(_, i)| *i) + .ok_or_else(|| { + anyhow::anyhow!( + "Internal error: matched candidate \"{}\" not found. 
Please report this as a bug.", + name + ) + })?; + &transitions[idx] + } +``` + +With: + +```rust + MatchResult::ExactMultiple(_) => { + unreachable!("ExactMultiple should not occur: candidates are deduplicated") + } +``` + +- [ ] **Step 2: Replace ExactMultiple arm in `list.rs`** + +In `src/cli/issue/list.rs`, replace lines 181-184: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate status names not expected; take first + Some(names.into_iter().next().unwrap()) + } +``` + +With: + +```rust + MatchResult::ExactMultiple(_) => { + unreachable!("ExactMultiple should not occur: statuses are unique") + } +``` + +- [ ] **Step 3: Replace first ExactMultiple arm in `links.rs` (handle_link)** + +In `src/cli/issue/links.rs`, replace lines 64-67: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate link type names not expected; take first + names.into_iter().next().unwrap() + } +``` + +With: + +```rust + MatchResult::ExactMultiple(_) => { + unreachable!("ExactMultiple should not occur: link types are unique") + } +``` + +- [ ] **Step 4: Replace second ExactMultiple arm in `links.rs` (handle_unlink)** + +In `src/cli/issue/links.rs`, replace lines 136-139: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate link type names not expected; take first + names.into_iter().next().unwrap() + } +``` + +With: + +```rust + MatchResult::ExactMultiple(_) => { + unreachable!("ExactMultiple should not occur: link types are unique") + } +``` + +- [ ] **Step 5: Replace ExactMultiple arm in `assets.rs` ticket status filter** + +In `src/cli/assets.rs`, replace lines 334-337: + +```rust + MatchResult::ExactMultiple(names) => { + // Duplicate status names not expected; take first + names.into_iter().next().unwrap() + } +``` + +With: + +```rust + MatchResult::ExactMultiple(_) => { + unreachable!("ExactMultiple should not occur: statuses are deduplicated") + } +``` + +- [ ] **Step 6: Update ExactMultiple arm in `assets.rs` resolve_schema** + +In 
`src/cli/assets.rs`, replace lines 459-471:
+
+```rust
+    MatchResult::ExactMultiple(names) => {
+        let duplicates: Vec<String> = schemas
+            .iter()
+            .filter(|s| names.contains(&s.name))
+            .map(|s| format!("{} (id: {})", s.name, s.id))
+            .collect();
+        Err(JrError::UserError(format!(
+            "Multiple schemas named \"{}\": {}. Use the schema ID instead.",
+            input,
+            duplicates.join(", ")
+        ))
+        .into())
+    }
+```
+
+With:
+
+```rust
+    MatchResult::ExactMultiple(_) => {
+        let input_lower = input.to_lowercase();
+        let duplicates: Vec<String> = schemas
+            .iter()
+            .filter(|s| s.name.to_lowercase() == input_lower)
+            .map(|s| format!("{} (id: {})", s.name, s.id))
+            .collect();
+        Err(JrError::UserError(format!(
+            "Multiple schemas named \"{}\": {}. Use the schema ID instead.",
+            input,
+            duplicates.join(", ")
+        ))
+        .into())
+    }
+```
+
+- [ ] **Step 7: Replace ExactMultiple arm in `assets.rs` object type**
+
+In `src/cli/assets.rs`, replace lines 668-671:
+
+```rust
+    MatchResult::ExactMultiple(names) => {
+        // Duplicate type names not expected after dedup; take first
+        names.into_iter().next().unwrap()
+    }
+```
+
+With:
+
+```rust
+    MatchResult::ExactMultiple(_) => {
+        unreachable!("ExactMultiple should not occur: type names are deduplicated")
+    }
+```
+
+- [ ] **Step 8: Update ExactMultiple arm and remove dead code in `queue.rs` production code**
+
+In `src/cli/queue.rs`, replace the entire `Exact` and `ExactMultiple` arms (lines 148-177):
+
+```rust
+    MatchResult::Exact(matched_name) => {
+        let matching: Vec<&crate::types::jsm::Queue> =
+            queues.iter().filter(|q| q.name == matched_name).collect();
+
+        if matching.len() > 1 {
+            let ids: Vec<String> = matching.iter().map(|q| q.id.clone()).collect();
+            Err(JrError::UserError(format!(
+                "Multiple queues named \"{}\" found (IDs: {}). 
Use --id {} to specify.", + matched_name, + ids.join(", "), + ids[0] + )) + .into()) + } else { + Ok(matching[0].id.clone()) + } + } + MatchResult::ExactMultiple(names) => { + // ExactMultiple means partial_match found duplicate candidate strings. + let matching: Vec<&crate::types::jsm::Queue> = + queues.iter().filter(|q| names.contains(&q.name)).collect(); + let ids: Vec<String> = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). Use --id {} to specify.", + names[0], + ids.join(", "), + ids[0] + )) + .into()) + } +``` + +With: + +```rust + MatchResult::Exact(matched_name) => { + Ok(queues + .iter() + .find(|q| q.name == matched_name) + .expect("matched name must exist in queues") + .id + .clone()) + } + MatchResult::ExactMultiple(matched_name) => { + let name_lower = name.to_lowercase(); + let matching: Vec<&crate::types::jsm::Queue> = queues + .iter() + .filter(|q| q.name.to_lowercase() == name_lower) + .collect(); + let ids: Vec<String> = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). 
Use --id {} to specify.", + matched_name, + ids.join(", "), + ids[0] + )) + .into()) + } +``` + +- [ ] **Step 9: Update the `find_queue_id` test helper in `queue.rs`** + +In `src/cli/queue.rs`, replace the test helper (lines 213-233): + +```rust + fn find_queue_id(name: &str, queues: &[Queue]) -> Result<String, String> { + let names: Vec<String> = queues.iter().map(|q| q.name.clone()).collect(); + match crate::partial_match::partial_match(name, &names) { + crate::partial_match::MatchResult::Exact(matched_name) => { + let matching: Vec<&Queue> = + queues.iter().filter(|q| q.name == matched_name).collect(); + if matching.len() > 1 { + Err(format!("duplicate: {}", matching.len())) + } else { + Ok(matching[0].id.clone()) + } + } + crate::partial_match::MatchResult::ExactMultiple(names) => { + Err(format!("duplicate: {}", names.len())) + } + crate::partial_match::MatchResult::Ambiguous(m) => { + Err(format!("ambiguous: {}", m.len())) + } + crate::partial_match::MatchResult::None(_) => Err("none".into()), + } + } +``` + +With: + +```rust + fn find_queue_id(name: &str, queues: &[Queue]) -> Result<String, String> { + let names: Vec<String> = queues.iter().map(|q| q.name.clone()).collect(); + match crate::partial_match::partial_match(name, &names) { + crate::partial_match::MatchResult::Exact(matched_name) => { + Ok(queues + .iter() + .find(|q| q.name == matched_name) + .expect("matched name must exist in queues") + .id + .clone()) + } + crate::partial_match::MatchResult::ExactMultiple(_) => { + Err("duplicate".into()) + } + crate::partial_match::MatchResult::Ambiguous(m) => { + Err(format!("ambiguous: {}", m.len())) + } + crate::partial_match::MatchResult::None(_) => Err("none".into()), + } + } +``` + +- [ ] **Step 10: Run all tests to verify** + +Run: `cargo test` + +Expected: All tests pass. The 4 helpers.rs callers use `ExactMultiple(_)` which compiles with both `Vec<String>` and `String`. 
+ +- [ ] **Step 11: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` + +Expected: No warnings, no format violations. + +- [ ] **Step 12: Commit** + +```bash +git add src/cli/issue/workflow.rs src/cli/issue/list.rs src/cli/issue/links.rs src/cli/assets.rs src/cli/queue.rs +git commit -m "refactor: replace unreachable ExactMultiple arms and update filtering callers (#126)" +``` diff --git a/docs/superpowers/plans/2026-04-04-issue-assign-account-id.md b/docs/superpowers/plans/2026-04-04-issue-assign-account-id.md new file mode 100644 index 0000000..edc8e5f --- /dev/null +++ b/docs/superpowers/plans/2026-04-04-issue-assign-account-id.md @@ -0,0 +1,443 @@ +# Issue Assign/Create --account-id Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `--account-id` flag to `issue assign` and `issue create` as a mutually exclusive alternative to `--to` that bypasses name search and passes the accountId directly to the Jira API. + +**Architecture:** Add an `account_id: Option<String>` field with `conflicts_with` to both CLI enum variants. In the handlers, branch on `account_id` before the existing `to` branch — if present, skip all user resolution and use the ID directly. Also fix the `issue create` assignee field from `{"id": ...}` to `{"accountId": ...}` (the documented Jira Cloud v3 format). 
+ +**Tech Stack:** Rust, clap (derive), wiremock, serde_json + +**Spec:** `docs/superpowers/specs/2026-04-04-issue-assign-account-id-design.md` + +--- + +## File Map + +| File | Change type | What changes | +|------|-------------|--------------| +| `src/cli/mod.rs` | Modify | Add `account_id` field to `Assign` and `Create` enum variants | +| `src/cli/issue/workflow.rs` | Modify | Branch on `account_id` in `handle_assign`, update destructuring | +| `src/cli/issue/create.rs` | Modify | Branch on `account_id` in `handle_create`, fix `id` → `accountId`, update destructuring | +| `tests/issue_commands.rs` | Modify | Update two existing tests (`{"id":...}` → `{"accountId":...}`), add two new integration tests | + +--- + +### Task 1: Add `--account-id` to `issue assign` with integration test + +**Files:** +- Modify: `src/cli/mod.rs:308-317` +- Modify: `src/cli/issue/workflow.rs:282,307-312` +- Modify: `tests/issue_commands.rs` (append new test) + +- [ ] **Step 1: Add `account_id` field to the `Assign` variant** + +In `src/cli/mod.rs`, change lines 308-317 from: + +```rust + Assign { + /// Issue key + key: String, + /// Assign to this user (omit to assign to self) + #[arg(long)] + to: Option<String>, + /// Remove assignee + #[arg(long)] + unassign: bool, + }, +``` + +To: + +```rust + Assign { + /// Issue key + key: String, + /// Assign to this user (omit to assign to self) + #[arg(long, conflicts_with = "account_id")] + to: Option<String>, + /// Assign to this Jira accountId directly (bypasses name search) + #[arg(long, conflicts_with_all = ["to", "unassign"])] + account_id: Option<String>, + /// Remove assignee + #[arg(long)] + unassign: bool, + }, +``` + +- [ ] **Step 2: Update destructuring and add `account_id` branch in `handle_assign`** + +In `src/cli/issue/workflow.rs`, change line 282 from: + +```rust + let IssueCommand::Assign { key, to, unassign } = command else { +``` + +To: + +```rust + let IssueCommand::Assign { key, to, account_id, unassign } = command else { +``` + +Then change 
lines 307-312 from: + +```rust + // Resolve account ID and display name + let (account_id, display_name) = if let Some(ref user_query) = to { + helpers::resolve_assignee(client, user_query, &key, no_input).await? + } else { + let me = client.get_myself().await?; + (me.account_id, me.display_name) + }; +``` + +To: + +```rust + // Resolve account ID and display name + let (account_id, display_name) = if let Some(ref id) = account_id { + (id.clone(), id.clone()) + } else if let Some(ref user_query) = to { + helpers::resolve_assignee(client, user_query, &key, no_input).await? + } else { + let me = client.get_myself().await?; + (me.account_id, me.display_name) + }; +``` + +- [ ] **Step 3: Verify it compiles** + +Run: `cargo build 2>&1 | head -20` + +Expected: Build succeeds (or only warnings, no errors). + +- [ ] **Step 4: Write integration test for assign with `--account-id`** + +Append this test at the end of `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_assign_issue_with_account_id() { + let server = MockServer::start().await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/ACC-1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response_with_assignee( + "ACC-1", + "Test assign by accountId", + None, + )), + ) + .mount(&server) + .await; + + // Mock PUT assignee — verify accountId in request body + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/ACC-1/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "direct-account-id-456" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + // Assign directly by accountId — no user search mock needed + client + .assign_issue("ACC-1", Some("direct-account-id-456")) + .await + .unwrap(); + + // Verify idempotent check works: mock issue as already 
assigned + let server2 = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/ACC-2")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::issue_response_with_assignee( + "ACC-2", + "Already assigned", + Some(("direct-account-id-456", "direct-account-id-456")), + )), + ) + .mount(&server2) + .await; + + let client2 = + jr::api::client::JiraClient::new_for_test(server2.uri(), "Basic dGVzdDp0ZXN0".into()); + + let issue = client2.get_issue("ACC-2", &[]).await.unwrap(); + let assignee = issue.fields.assignee.unwrap(); + assert_eq!(assignee.account_id, "direct-account-id-456"); +} +``` + +- [ ] **Step 5: Run the new test** + +Run: `cargo test --test issue_commands test_assign_issue_with_account_id` + +Expected: PASS. The test exercises the API layer directly (same pattern as existing create-with-assignee tests). No user search mocks are registered, confirming the accountId path bypasses name resolution. + +- [ ] **Step 6: Run all tests to verify no regressions** + +Run: `cargo test` + +Expected: All tests pass. + +- [ ] **Step 7: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` + +Expected: No warnings, no format violations. 
+ +- [ ] **Step 8: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/workflow.rs tests/issue_commands.rs +git commit -m "feat: add --account-id flag to issue assign (#115)" +``` + +--- + +### Task 2: Add `--account-id` to `issue create`, fix `id` → `accountId`, and update existing tests + +**Files:** +- Modify: `src/cli/mod.rs:248-250` +- Modify: `src/cli/issue/create.rs:21-34,127-131` +- Modify: `tests/issue_commands.rs:994-999,1047-1049,1027` (update existing tests, append new test) + +- [ ] **Step 1: Add `account_id` field to the `Create` variant** + +In `src/cli/mod.rs`, change lines 248-250 from: + +```rust + /// Assign to user (name/email, or "me" for self) + #[arg(long)] + to: Option<String>, + }, +``` + +To: + +```rust + /// Assign to user (name/email, or "me" for self) + #[arg(long, conflicts_with = "account_id")] + to: Option<String>, + /// Assign to this Jira accountId directly (bypasses name search) + #[arg(long, conflicts_with = "to")] + account_id: Option<String>, + }, +``` + +- [ ] **Step 2: Update destructuring and add `account_id` branch in `handle_create`** + +In `src/cli/issue/create.rs`, change lines 21-34 from: + +```rust + let IssueCommand::Create { + project, + issue_type, + summary, + description, + description_stdin, + priority, + label: labels, + team, + points, + markdown, + parent, + to, + } = command +``` + +To: + +```rust + let IssueCommand::Create { + project, + issue_type, + summary, + description, + description_stdin, + priority, + label: labels, + team, + points, + markdown, + parent, + to, + account_id, + } = command +``` + +Then change lines 127-131 from: + +```rust + if let Some(ref user_query) = to { + let (account_id, _display_name) = + helpers::resolve_assignee_by_project(client, user_query, &project_key, no_input) + .await?; + fields["assignee"] = json!({"id": account_id}); + } +``` + +To: + +```rust + if let Some(ref id) = account_id { + fields["assignee"] = json!({"accountId": id}); + } else if let Some(ref user_query) = to { + let 
(acct_id, _display_name) = + helpers::resolve_assignee_by_project(client, user_query, &project_key, no_input) + .await?; + fields["assignee"] = json!({"accountId": acct_id}); + } +``` + +- [ ] **Step 3: Update existing test `test_create_issue_with_assignee`** + +In `tests/issue_commands.rs`, change the body matcher at line 994-999 from: + +```rust + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Test with assignee", + "assignee": {"id": "acc-jane-123"} + } + }))) +``` + +To: + +```rust + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Test with assignee", + "assignee": {"accountId": "acc-jane-123"} + } + }))) +``` + +And change the field assignment at line 1027 from: + +```rust + fields["assignee"] = serde_json::json!({"id": users[0].account_id}); +``` + +To: + +```rust + fields["assignee"] = serde_json::json!({"accountId": users[0].account_id}); +``` + +- [ ] **Step 4: Update existing test `test_create_issue_with_assignee_me`** + +In `tests/issue_commands.rs`, change the body matcher at line 1047-1049 from: + +```rust + .and(body_partial_json(serde_json::json!({ + "fields": { + "assignee": {"id": "abc123"} + } + }))) +``` + +To: + +```rust + .and(body_partial_json(serde_json::json!({ + "fields": { + "assignee": {"accountId": "abc123"} + } + }))) +``` + +And change the field assignment at line 1072 from: + +```rust + fields["assignee"] = serde_json::json!({"id": me.account_id}); +``` + +To: + +```rust + fields["assignee"] = serde_json::json!({"accountId": me.account_id}); +``` + +- [ ] **Step 5: Write integration test for create with `--account-id`** + +Append this test at the end of `tests/issue_commands.rs`: + +```rust +#[tokio::test] +async fn test_create_issue_with_account_id() { + let server = MockServer::start().await; + + // Mock create issue — verify assignee uses accountId format + 
Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Assigned by accountId", + "assignee": {"accountId": "direct-acct-789"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("FOO-200")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // Build fields with accountId directly — no user search mock needed + let mut fields = serde_json::json!({ + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Assigned by accountId", + }); + fields["assignee"] = serde_json::json!({"accountId": "direct-acct-789"}); + + let response = client.create_issue(fields).await.unwrap(); + assert_eq!(response.key, "FOO-200"); +} +``` + +- [ ] **Step 6: Run the new test** + +Run: `cargo test --test issue_commands test_create_issue_with_account_id` + +Expected: PASS. + +- [ ] **Step 7: Run all tests to verify no regressions** + +Run: `cargo test` + +Expected: All tests pass. The updated existing tests (`test_create_issue_with_assignee`, `test_create_issue_with_assignee_me`) pass because both the production code and test assertions now use `{"accountId": ...}`. + +- [ ] **Step 8: Run clippy and fmt** + +Run: `cargo clippy -- -D warnings && cargo fmt --all -- --check` + +Expected: No warnings, no format violations. 
+ +- [ ] **Step 9: Commit** + +```bash +git add src/cli/mod.rs src/cli/issue/create.rs tests/issue_commands.rs +git commit -m "feat: add --account-id flag to issue create, fix assignee field to accountId (#115)" +``` diff --git a/docs/superpowers/plans/2026-04-05-date-filters.md b/docs/superpowers/plans/2026-04-05-date-filters.md new file mode 100644 index 0000000..8764b74 --- /dev/null +++ b/docs/superpowers/plans/2026-04-05-date-filters.md @@ -0,0 +1,489 @@ +# Date Filter Flags Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add `--created-after`, `--created-before`, `--updated-after`, `--updated-before` date filter flags to `jr issue list`. + +**Architecture:** Four new clap args on `IssueCommand::List` that generate JQL date clauses via `build_filter_clauses`. Date validation uses chrono in `jql.rs`. The `--before` flags add +1 day and use `<` to handle JQL's midnight semantics correctly. 
+ +**Tech Stack:** Rust, clap (derive), chrono::NaiveDate, wiremock (tests), assert_cmd/predicates (smoke tests) + +--- + +## File Structure + +| File | Responsibility | Change type | +|------|---------------|-------------| +| `src/jql.rs` | Date validation (`validate_date`) | Add function + unit tests | +| `src/cli/mod.rs` | CLI arg definitions for `IssueCommand::List` | Add 4 args | +| `src/cli/issue/list.rs` | Early validation, JQL clause generation | Modify `handle_list` + `build_filter_clauses` | +| `tests/cli_smoke.rs` | Clap conflict smoke tests | Add 1 test | +| `tests/cli_handler.rs` | Handler-level tests with wiremock | Add 2 tests | + +--- + +### Task 1: Date Validation in `jql.rs` + +**Files:** +- Modify: `src/jql.rs` + +- [ ] **Step 1: Write failing unit tests for `validate_date`** + +Add these tests to the existing `#[cfg(test)] mod tests` block at the bottom of `src/jql.rs` (after the last existing test, before the closing `}`): + +```rust + #[test] + fn validate_date_valid_simple() { + let d = validate_date("2026-03-18").unwrap(); + assert_eq!(d.to_string(), "2026-03-18"); + } + + #[test] + fn validate_date_valid_leap_day() { + let d = validate_date("2024-02-29").unwrap(); + assert_eq!(d.to_string(), "2024-02-29"); + } + + #[test] + fn validate_date_invalid_format_slash() { + let err = validate_date("2026/03/18").unwrap_err(); + assert!(err.contains("Invalid date")); + assert!(err.contains("YYYY-MM-DD")); + } + + #[test] + fn validate_date_invalid_format_us() { + let err = validate_date("03-18-2026").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_impossible_feb30() { + let err = validate_date("2026-02-30").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_impossible_month13() { + let err = validate_date("2026-13-01").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_empty() { + let err = validate_date("").unwrap_err(); + 
assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_non_leap_feb29() { + let err = validate_date("2026-02-29").unwrap_err(); + assert!(err.contains("Invalid date")); + } +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo test --lib jql::tests::validate_date 2>&1 | tail -20` + +Expected: FAIL — `validate_date` does not exist yet. + +- [ ] **Step 3: Implement `validate_date`** + +Add this function to `src/jql.rs`, after the existing `validate_asset_key` function (before the `/// Strip ORDER BY` doc comment around line 84): + +```rust +/// Validate and parse an absolute date string in ISO 8601 format (YYYY-MM-DD). +/// +/// Returns the parsed `NaiveDate` on success. The caller needs the parsed date +/// to compute +1 day for `--before` flag JQL generation. +pub fn validate_date(s: &str) -> Result<chrono::NaiveDate, String> { + chrono::NaiveDate::parse_from_str(s, "%Y-%m-%d").map_err(|_| { + format!("Invalid date \"{s}\". Expected format: YYYY-MM-DD (e.g., 2026-03-18).") + }) +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo test --lib jql::tests::validate_date 2>&1 | tail -20` + +Expected: All 8 new tests PASS. 
+ +- [ ] **Step 5: Commit** + +```bash +git add src/jql.rs +git commit -m "feat: add validate_date function for date filter flags (#113)" +``` + +--- + +### Task 2: Add CLI Flags to `mod.rs` + +**Files:** +- Modify: `src/cli/mod.rs:173-212` (the `IssueCommand::List` variant) + +- [ ] **Step 1: Add the four date filter args** + +In `src/cli/mod.rs`, inside the `List` variant of `IssueCommand`, add these four fields after the `asset` field (before the closing `}` of the `List` variant, around line 211): + +```rust + /// Show issues created on or after this date (YYYY-MM-DD) + #[arg(long, conflicts_with = "recent")] + created_after: Option<String>, + /// Show issues created on or before this date (YYYY-MM-DD) + #[arg(long)] + created_before: Option<String>, + /// Show issues updated on or after this date (YYYY-MM-DD) + #[arg(long)] + updated_after: Option<String>, + /// Show issues updated on or before this date (YYYY-MM-DD) + #[arg(long)] + updated_before: Option<String>, +``` + +- [ ] **Step 2: Verify it compiles** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo check 2>&1 | tail -20` + +Expected: Compiler error about exhaustive pattern match in `list.rs` — the `IssueCommand::List` destructuring doesn't include the new fields yet. This confirms the fields were added. + +- [ ] **Step 3: Commit** + +```bash +git add src/cli/mod.rs +git commit -m "feat: add date filter CLI flags to issue list (#113)" +``` + +--- + +### Task 3: Wire Date Flags into `list.rs` + +**Files:** +- Modify: `src/cli/issue/list.rs:57-96` (destructuring + early validation) +- Modify: `src/cli/issue/list.rs:209-218` (`build_filter_clauses` call) +- Modify: `src/cli/issue/list.rs:291-305` (unbounded query error message) +- Modify: `src/cli/issue/list.rs:491-523` (`build_filter_clauses` function) + +- [ ] **Step 1: Update the `IssueCommand::List` destructuring** + +In `src/cli/issue/list.rs`, in the `handle_list` function, update the destructuring (around line 65-81) to include the new fields. 
Add after `asset: asset_key,`: + +```rust + created_after, + created_before, + updated_after, + updated_before, +``` + +- [ ] **Step 2: Add early date validation** + +In `src/cli/issue/list.rs`, after the `--asset` validation block (after line 96), add validation for all four date flags: + +```rust + // Validate date filter flags early + let created_after_date = if let Some(ref d) = created_after { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; + let created_before_date = if let Some(ref d) = created_before { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; + let updated_after_date = if let Some(ref d) = updated_after { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; + let updated_before_date = if let Some(ref d) = updated_before { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; +``` + +- [ ] **Step 3: Build date JQL clauses** + +After the date validation block (Step 2), compute the JQL clause strings. 
The `--after` flags use `>=` directly; the `--before` flags add +1 day and use `<`: + +```rust + // Build date filter JQL clauses + let created_after_clause = created_after_date.map(|d| format!("created >= \"{}\"", d)); + let created_before_clause = created_before_date.map(|d| { + let next_day = d + chrono::Days::new(1); + format!("created < \"{}\"", next_day) + }); + let updated_after_clause = updated_after_date.map(|d| format!("updated >= \"{}\"", d)); + let updated_before_clause = updated_before_date.map(|d| { + let next_day = d + chrono::Days::new(1); + format!("updated < \"{}\"", next_day) + }); +``` + +- [ ] **Step 4: Update `build_filter_clauses` signature and call** + +Update the `build_filter_clauses` function signature in `src/cli/issue/list.rs` (around line 491) to accept the four new clauses: + +```rust +fn build_filter_clauses( + assignee_jql: Option<&str>, + reporter_jql: Option<&str>, + status: Option<&str>, + team_clause: Option<&str>, + recent: Option<&str>, + open: bool, + asset_clause: Option<&str>, + created_after_clause: Option<&str>, + created_before_clause: Option<&str>, + updated_after_clause: Option<&str>, + updated_before_clause: Option<&str>, +) -> Vec<String> { +``` + +Add these lines at the end of the function body, before the `parts` return (after the `asset_clause` block): + +```rust + if let Some(c) = created_after_clause { + parts.push(c.to_string()); + } + if let Some(c) = created_before_clause { + parts.push(c.to_string()); + } + if let Some(c) = updated_after_clause { + parts.push(c.to_string()); + } + if let Some(c) = updated_before_clause { + parts.push(c.to_string()); + } +``` + +Update the call site (around line 210) to pass the new clauses: + +```rust + let filter_parts = build_filter_clauses( + assignee_jql.as_deref(), + reporter_jql.as_deref(), + resolved_status.as_deref(), + team_clause.as_deref(), + recent.as_deref(), + open, + asset_clause.as_deref(), + created_after_clause.as_deref(), + created_before_clause.as_deref(), + 
updated_after_clause.as_deref(), + updated_before_clause.as_deref(), + ); +``` + +- [ ] **Step 5: Update the unbounded query error message** + +In the guard against unbounded query (around line 298-305), update the error message to mention the new flags: + +```rust + if all_parts.is_empty() { + return Err(JrError::UserError( + "No project or filters specified. Use --project, --assignee, --reporter, --status, --open, --team, --recent, --created-after, --created-before, --updated-after, --updated-before, --asset, or --jql. \ + You can also set a default project in .jr.toml or run \"jr init\"." + .into(), + ) + .into()); + } +``` + +- [ ] **Step 6: Verify it compiles and existing tests pass** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo check 2>&1 | tail -10 && cargo test --lib 2>&1 | tail -10` + +Expected: Compiles. All existing tests pass. + +- [ ] **Step 7: Commit** + +```bash +git add src/cli/issue/list.rs +git commit -m "feat: wire date filter flags into JQL generation (#113)" +``` + +--- + +### Task 4: Smoke Test for `--created-after` / `--recent` Conflict + +**Files:** +- Modify: `tests/cli_smoke.rs` + +- [ ] **Step 1: Write the conflict smoke test** + +Add this test to `tests/cli_smoke.rs`, after the last existing test: + +```rust +#[test] +fn test_issue_list_created_after_and_recent_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "list", + "--created-after", + "2026-03-18", + "--recent", + "7d", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo test --test cli_smoke test_issue_list_created_after_and_recent_conflict 2>&1 | tail -10` + +Expected: PASS — clap enforces the conflict declared in Task 2. 
+ +- [ ] **Step 3: Commit** + +```bash +git add tests/cli_smoke.rs +git commit -m "test: add smoke test for --created-after/--recent conflict (#113)" +``` + +--- + +### Task 5: Handler Tests for Date Flags + +**Files:** +- Modify: `tests/cli_handler.rs` + +These tests verify that the date flags produce correct JQL when the handler runs against a wiremock server. + +- [ ] **Step 1: Write handler test for `--created-after`** + +Add this test to `tests/cli_handler.rs`, after the last existing test: + +```rust +#[tokio::test] +async fn test_handler_list_created_after() { + let server = MockServer::start().await; + + // The search endpoint should receive JQL with the date clause + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "jql": "project = \"PROJ\" AND created >= \"2026-03-18\" ORDER BY updated DESC" + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "Test issue", + "To Do", + )]), + )) + .expect(1) + .mount(&server) + .await; + + Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "issue", + "list", + "--project", + "PROJ", + "--created-after", + "2026-03-18", + "--no-input", + ]) + .assert() + .success(); +} +``` + +- [ ] **Step 2: Write handler test for `--created-before` (verifies +1 day)** + +```rust +#[tokio::test] +async fn test_handler_list_created_before() { + let server = MockServer::start().await; + + // --created-before 2026-03-18 should produce created < "2026-03-19" (next day) + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "jql": "project = \"PROJ\" AND created < \"2026-03-19\" ORDER BY updated DESC" + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + 
common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "Test issue", + "To Do", + )]), + )) + .expect(1) + .mount(&server) + .await; + + Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "issue", + "list", + "--project", + "PROJ", + "--created-before", + "2026-03-18", + "--no-input", + ]) + .assert() + .success(); +} +``` + +- [ ] **Step 3: Run handler tests** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo test --test cli_handler test_handler_list_created 2>&1 | tail -20` + +Expected: Both tests PASS. + +- [ ] **Step 4: Commit** + +```bash +git add tests/cli_handler.rs +git commit -m "test: add handler tests for date filter flags (#113)" +``` + +--- + +### Task 6: Format and Lint Check + +**Files:** (none — formatting/linting only) + +- [ ] **Step 1: Run formatter** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo fmt --all` + +- [ ] **Step 2: Run clippy** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo clippy -- -D warnings 2>&1 | tail -20` + +Expected: Zero warnings. + +- [ ] **Step 3: Run full test suite** + +Run: `export PATH="$HOME/.cargo/bin:$PATH" && cargo test 2>&1 | tail -20` + +Expected: All tests pass. + +- [ ] **Step 4: Commit if any formatting changes** + +```bash +git add -A +git commit -m "style: format date filter implementation (#113)" +``` + +(Skip commit if no changes.) diff --git a/docs/superpowers/plans/2026-04-05-handler-tests-me-keyword.md b/docs/superpowers/plans/2026-04-05-handler-tests-me-keyword.md new file mode 100644 index 0000000..d19271e --- /dev/null +++ b/docs/superpowers/plans/2026-04-05-handler-tests-me-keyword.md @@ -0,0 +1,210 @@ +# Handler-Level Tests for --to me and Idempotent Name Resolution Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. 
Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add 3 handler-level integration tests covering `--to me` keyword resolution in assign and create, plus idempotent assign when account ID comes from name search. + +**Architecture:** All tests follow the existing pattern in `tests/cli_handler.rs`: wiremock MockServer for API mocking, `jr_cmd()` helper for command execution, JSON output assertions. No production code changes — these are pure test additions covering untested sub-paths. + +**Tech Stack:** Rust, wiremock, assert_cmd, predicates, tokio (multi_thread) + +--- + +## File Structure + +| File | Role | Action | +|------|------|--------| +| `tests/cli_handler.rs` | Handler-level integration tests | Modify: add 3 new test functions after existing tests (line 323) | + +--- + +### Task 1: Add handler-level tests for --to me and idempotent name resolution + +**Files:** +- Modify: `tests/cli_handler.rs:323` (append after `test_handler_create_basic`) + +- [ ] **Step 1: Add `test_handler_assign_to_me`** + +Append this test after the closing `}` of `test_handler_create_basic` (line 323): + +```rust +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_to_me() { + let server = MockServer::start().await; + + // Mock GET myself — the "me" keyword resolves via get_myself() + Mock::given(method("GET")) + .and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + .mount(&server) + .await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-6")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("HDL-6", "Assign to me test", None), + )) + .mount(&server) + .await; + + // Mock PUT assignee + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-6/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "abc123" + }))) + 
.respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-6", "--to", "me"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": true")) + .stdout(predicate::str::contains("\"assignee\": \"Test User\"")) + .stdout(predicate::str::contains( + "\"assignee_account_id\": \"abc123\"", + )); +} +``` + +- [ ] **Step 2: Run the test to verify it passes** + +Run: `cargo test --test cli_handler test_handler_assign_to_me -- --exact` + +Expected: PASS. The test exercises the `--to me` → `resolve_assignee` → `is_me_keyword` → `get_myself()` code path, which is already implemented — we're just adding coverage. + +- [ ] **Step 3: Add `test_handler_create_to_me`** + +Append this test after `test_handler_assign_to_me`: + +```rust +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_create_to_me() { + let server = MockServer::start().await; + + // Mock GET myself — the "me" keyword resolves via get_myself() + Mock::given(method("GET")) + .and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + .mount(&server) + .await; + + // Mock POST create issue — verify assignee uses accountId from get_myself() + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "HDL"}, + "issuetype": {"name": "Task"}, + "summary": "Created with --to me", + "assignee": {"accountId": "abc123"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("HDL-200")), + ) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args([ + "issue", + "create", + "-p", + "HDL", + "-t", + "Task", + "-s", + "Created with --to me", + "--to", + "me", + ]) + .assert() + .success() + .stdout(predicate::str::contains("\"key\": \"HDL-200\"")); +} +``` + +- [ ] **Step 4: Run the test to verify 
it passes** + +Run: `cargo test --test cli_handler test_handler_create_to_me -- --exact` + +Expected: PASS. The test exercises `--to me` → `resolve_assignee_by_project` → `is_me_keyword` → `get_myself()` during issue creation. + +- [ ] **Step 5: Add `test_handler_assign_idempotent_with_name_search`** + +Append this test after `test_handler_create_to_me`: + +```rust +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_idempotent_with_name_search() { + let server = MockServer::start().await; + + // Mock assignable user search — returns Jane Doe + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .and(query_param("query", "Jane")) + .and(query_param("issueKey", "HDL-7")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![("acc-jane-456", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + // Mock GET issue — already assigned to Jane Doe (same account ID) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-7")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee( + "HDL-7", + "Already assigned to Jane", + Some(("acc-jane-456", "Jane Doe")), + ), + )) + .mount(&server) + .await; + + // PUT assignee should NOT be called — already assigned to target + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-7/assignee")) + .respond_with(ResponseTemplate::new(204)) + .expect(0) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-7", "--to", "Jane"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": false")); +} +``` + +- [ ] **Step 6: Run the test to verify it passes** + +Run: `cargo test --test cli_handler test_handler_assign_idempotent_with_name_search -- --exact` + +Expected: PASS. 
The test exercises `--to Jane` → `resolve_assignee` → user search → single result → idempotent check → already assigned → returns `changed: false` without calling PUT. + +- [ ] **Step 7: Run all handler tests together** + +Run: `cargo test --test cli_handler` + +Expected: All 11 tests pass (8 existing + 3 new). + +- [ ] **Step 8: Run clippy** + +Run: `cargo clippy --tests -- -D warnings` + +Expected: Zero warnings. + +- [ ] **Step 9: Commit** + +```bash +git add tests/cli_handler.rs +git commit -m "test: add handler-level tests for --to me keyword and idempotent name search (#148)" +``` diff --git a/docs/superpowers/plans/2026-04-05-snapshot-tests-json-output.md b/docs/superpowers/plans/2026-04-05-snapshot-tests-json-output.md new file mode 100644 index 0000000..197b5af --- /dev/null +++ b/docs/superpowers/plans/2026-04-05-snapshot-tests-json-output.md @@ -0,0 +1,552 @@ +# Snapshot Tests for Write Command JSON Output Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Extract inline `json!({...})` output construction from write command handlers into named builder functions and pin them with `insta::assert_json_snapshot!` tests to protect `--output json` schemas from accidental drift. + +**Architecture:** Pure builder functions in `src/cli/issue/json_output.rs` for issue commands (move, assign, edit, link, unlink) and private functions in `src/cli/sprint.rs` for sprint commands. Handlers call these builders instead of constructing JSON inline. Each builder gets a snapshot test using `insta::assert_json_snapshot!`. 
+ +**Tech Stack:** Rust, serde_json, insta (with `json` feature — already a dev-dependency) + +--- + +## File Structure + +| File | Role | Action | +|------|------|--------| +| `src/cli/issue/json_output.rs` | Builder functions for issue command JSON responses + snapshot tests | Create | +| `src/cli/issue/mod.rs` | Issue module declarations | Modify: add `mod json_output;` | +| `src/cli/issue/workflow.rs` | Move + assign handlers | Modify: replace inline `json!()` with builder calls | +| `src/cli/issue/create.rs` | Edit handler | Modify: replace inline `json!()` with builder call | +| `src/cli/issue/links.rs` | Link + unlink handlers | Modify: replace inline `json!()` with builder calls | +| `src/cli/sprint.rs` | Sprint add/remove handlers | Modify: extract builders, add snapshot tests | + +--- + +### Task 1: Create json_output.rs with builder functions and snapshot tests + +**Files:** +- Create: `src/cli/issue/json_output.rs` +- Modify: `src/cli/issue/mod.rs:1` + +- [ ] **Step 1: Create `src/cli/issue/json_output.rs` with all builder functions and snapshot tests** + +```rust +use serde_json::{Value, json}; + +/// JSON response for `issue move` — both changed and idempotent cases. +pub(crate) fn move_response(key: &str, status: &str, changed: bool) -> Value { + json!({ + "key": key, + "status": status, + "changed": changed + }) +} + +/// JSON response for `issue assign` when the assignment changed. +pub(crate) fn assign_changed_response(key: &str, display_name: &str, account_id: &str) -> Value { + json!({ + "key": key, + "assignee": display_name, + "assignee_account_id": account_id, + "changed": true + }) +} + +/// JSON response for `issue assign` when already assigned to the target user. +pub(crate) fn assign_unchanged_response( + key: &str, + display_name: &str, + account_id: &str, +) -> Value { + json!({ + "key": key, + "assignee": display_name, + "assignee_account_id": account_id, + "changed": false + }) +} + +/// JSON response for `issue assign --unassign`. 
+pub(crate) fn unassign_response(key: &str) -> Value { + json!({ + "key": key, + "assignee": null, + "changed": true + }) +} + +/// JSON response for `issue edit`. +pub(crate) fn edit_response(key: &str) -> Value { + json!({ + "key": key, + "updated": true + }) +} + +/// JSON response for `issue link`. +pub(crate) fn link_response(key1: &str, key2: &str, link_type: &str) -> Value { + json!({ + "key1": key1, + "key2": key2, + "type": link_type, + "linked": true + }) +} + +/// JSON response for `issue unlink` — covers both success and no-match cases. +pub(crate) fn unlink_response(unlinked: bool, count: usize) -> Value { + json!({ + "unlinked": unlinked, + "count": count + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use insta::assert_json_snapshot; + + #[test] + fn test_move_response_changed() { + assert_json_snapshot!(move_response("TEST-1", "In Progress", true)); + } + + #[test] + fn test_move_response_unchanged() { + assert_json_snapshot!(move_response("TEST-1", "Done", false)); + } + + #[test] + fn test_assign_changed() { + assert_json_snapshot!(assign_changed_response("TEST-1", "Jane Doe", "abc123")); + } + + #[test] + fn test_assign_unchanged() { + assert_json_snapshot!(assign_unchanged_response("TEST-1", "Jane Doe", "abc123")); + } + + #[test] + fn test_unassign() { + assert_json_snapshot!(unassign_response("TEST-1")); + } + + #[test] + fn test_edit() { + assert_json_snapshot!(edit_response("TEST-1")); + } + + #[test] + fn test_link() { + assert_json_snapshot!(link_response("TEST-1", "TEST-2", "Blocks")); + } + + #[test] + fn test_unlink_success() { + assert_json_snapshot!(unlink_response(true, 2)); + } + + #[test] + fn test_unlink_no_match() { + assert_json_snapshot!(unlink_response(false, 0)); + } +} +``` + +- [ ] **Step 2: Add the module declaration to `src/cli/issue/mod.rs`** + +In `src/cli/issue/mod.rs`, add `mod json_output;` after the existing module declarations. 
The top of the file should become: + +```rust +mod assets; +mod create; +mod format; +mod helpers; +mod json_output; +mod links; +mod list; +mod workflow; +``` + +- [ ] **Step 3: Run the snapshot tests to generate initial snapshot files** + +Run: `cargo test --lib json_output` + +Expected: All 9 tests **FAIL** — this is expected on first run. insta writes `.snap.new` files to `src/cli/issue/snapshots/` but does not auto-accept them. + +Accept the new snapshots (requires `cargo-insta` — install with `cargo install cargo-insta` if not present): + +Run: `cargo insta test --accept --lib -- json_output` + +This runs the tests and auto-accepts all new snapshots in one step. + +Verify snapshots exist: + +Run: `ls src/cli/issue/snapshots/` + +Expected: 9 `.snap` files like `jr__cli__issue__json_output__tests__move_response_changed.snap`. + +Confirm tests now pass cleanly: + +Run: `cargo test --lib json_output` + +Expected: All 9 tests PASS. + +- [ ] **Step 4: Run full test suite to verify no regressions** + +Run: `cargo test` + +Expected: All tests pass, including the 9 new snapshot tests. + +- [ ] **Step 5: Commit** + +```bash +git add src/cli/issue/json_output.rs src/cli/issue/mod.rs src/cli/issue/snapshots/ +git commit -m "feat: add JSON output builder functions with snapshot tests (#135)" +``` + +--- + +### Task 2: Wire issue command handlers to use builder functions + +**Files:** +- Modify: `src/cli/issue/workflow.rs:2,76-84,220-229,294-303,328-338,353-363` +- Modify: `src/cli/issue/create.rs:273-276,296-299` +- Modify: `src/cli/issue/links.rs:2,92-102,189-197,211-219` + +This task replaces all inline `json!({...})` output construction in the issue command handlers with calls to the builder functions from Task 1. + +- [ ] **Step 1: Update `src/cli/issue/workflow.rs`** + +**Replace the import** on line 2. Change: + +```rust +use serde_json::json; +``` + +to: + +```rust +use super::json_output; +``` + +**Replace the move idempotent JSON** (lines 79-83). 
Change: + +```rust + serde_json::to_string_pretty(&json!({ + "key": key, + "status": current_status, + "changed": false + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::move_response( + &key, + ¤t_status, + false, + ))? +``` + +**Replace the move changed JSON** (lines 224-228). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "key": key, + "status": new_status, + "changed": true + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::move_response( + &key, new_status, true, + ))? +``` + +**Replace the unassign JSON** (lines 298-302). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "key": key, + "assignee": null, + "changed": true + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::unassign_response(&key))? +``` + +**Replace the assign idempotent JSON** (lines 332-336). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "key": key, + "assignee": display_name, + "assignee_account_id": account_id, + "changed": false + }))? +``` + +to: + +```rust + serde_json::to_string_pretty( + &json_output::assign_unchanged_response( + &key, + &display_name, + &account_id, + ), + )? +``` + +**Replace the assign changed JSON** (lines 357-361). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "key": key, + "assignee": display_name, + "assignee_account_id": account_id, + "changed": true + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::assign_changed_response( + &key, + &display_name, + &account_id, + ))? +``` + +- [ ] **Step 2: Update `src/cli/issue/create.rs`** + +Add an import after line 11 (`use super::helpers;`): + +```rust +use super::json_output; +``` + +**Replace both `handle_edit` JSON outputs.** There are two identical occurrences (lines 275 and 298). In both places, change: + +```rust + serde_json::to_string_pretty(&json!({ "key": key, "updated": true }))? 
+``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::edit_response(&key))? +``` + +Note: `use serde_json::json;` on line 2 must stay — `handle_create` uses `json!()` extensively for building request bodies. + +- [ ] **Step 3: Update `src/cli/issue/links.rs`** + +**Replace the import** on line 2. Change: + +```rust +use serde_json::json; +``` + +to: + +```rust +use super::json_output; +``` + +**Replace the link JSON** (lines 96-101). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "key1": key1, + "key2": key2, + "type": resolved_name, + "linked": true + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::link_response( + &key1, + &key2, + &resolved_name, + ))? +``` + +**Replace the unlink no-match JSON** (lines 193-196). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "unlinked": false, + "count": 0 + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::unlink_response(false, 0))? +``` + +**Replace the unlink success JSON** (lines 215-218). Change: + +```rust + serde_json::to_string_pretty(&json!({ + "unlinked": true, + "count": count + }))? +``` + +to: + +```rust + serde_json::to_string_pretty(&json_output::unlink_response(true, count))? +``` + +- [ ] **Step 4: Run full test suite** + +Run: `cargo test` + +Expected: All tests pass — the existing handler-level tests in `cli_handler.rs` and `issue_commands.rs` validate that the refactored handlers produce identical output. + +- [ ] **Step 5: Run clippy** + +Run: `cargo clippy -- -D warnings` + +Expected: Zero warnings. Check that removed `json` imports don't trigger `unused_import` warnings and that new imports don't trigger `unused` warnings. 
+ +- [ ] **Step 6: Commit** + +```bash +git add src/cli/issue/workflow.rs src/cli/issue/create.rs src/cli/issue/links.rs +git commit -m "refactor: replace inline json!() with builder functions in issue handlers (#135)" +``` + +--- + +### Task 3: Add sprint response builders and snapshot tests + +**Files:** +- Modify: `src/cli/sprint.rs:1-2,100-110,131-140,295-367` + +- [ ] **Step 1: Add builder functions to `src/cli/sprint.rs`** + +Add two private functions right before the existing `const MAX_SPRINT_ISSUES` (line 90). Insert: + +```rust +fn sprint_add_response(sprint_id: u64, issues: &[String]) -> serde_json::Value { + json!({ + "sprint_id": sprint_id, + "issues": issues, + "added": true + }) +} + +fn sprint_remove_response(issues: &[String]) -> serde_json::Value { + json!({ + "issues": issues, + "removed": true + }) +} +``` + +- [ ] **Step 2: Wire `handle_add` to use the builder** + +In `handle_add` (around line 105 after the insertion), change the JSON output from: + +```rust + output::render_json(&json!({ + "sprint_id": sprint_id, + "issues": issues, + "added": true + }))? +``` + +to: + +```rust + output::render_json(&sprint_add_response(sprint_id, &issues))? +``` + +- [ ] **Step 3: Wire `handle_remove` to use the builder** + +In `handle_remove` (around line 144 after the insertion), change the JSON output from: + +```rust + output::render_json(&json!({ + "issues": issues, + "removed": true + }))? +``` + +to: + +```rust + output::render_json(&sprint_remove_response(&issues))? +``` + +- [ ] **Step 4: Add snapshot tests to the existing test module** + +At the end of `src/cli/sprint.rs`, inside the existing `#[cfg(test)] mod tests { ... 
}`, add before the closing `}`: + +```rust + #[test] + fn test_sprint_add_response() { + insta::assert_json_snapshot!(sprint_add_response( + 100, + &["TEST-1".to_string(), "TEST-2".to_string()] + )); + } + + #[test] + fn test_sprint_remove_response() { + insta::assert_json_snapshot!(sprint_remove_response(&[ + "TEST-1".to_string(), + "TEST-2".to_string() + ])); + } +``` + +- [ ] **Step 5: Run the snapshot tests and accept** + +Run: `cargo test --lib sprint` + +Expected: The 2 new snapshot tests **FAIL** (first run, no `.snap` files yet). Existing sprint tests pass. + +Accept the new snapshots: + +Run: `cargo insta test --accept --lib -- sprint` + +Verify: `ls src/cli/snapshots/` should show 2 new `.snap` files. + +Confirm tests pass cleanly: + +Run: `cargo test --lib sprint` + +Expected: All sprint tests PASS. + +- [ ] **Step 6: Run full test suite and clippy** + +Run: `cargo test && cargo clippy -- -D warnings` + +Expected: All tests pass, zero clippy warnings. + +- [ ] **Step 7: Commit** + +```bash +git add src/cli/sprint.rs src/cli/snapshots/ +git commit -m "refactor: extract sprint JSON output builders with snapshot tests (#135)" +``` diff --git a/docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md b/docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md index 23b110f..3ebd17b 100644 --- a/docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md +++ b/docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md @@ -34,7 +34,7 @@ jr auth login # Authenticate (OAuth 2.0 or API token) jr auth login --token # Authenticate with API token jr auth status # Show current auth state jr me # Show current user info -jr project fields FOO # List valid issue types, priorities, statuses for a project +jr project fields --project FOO # List valid issue types, priorities, statuses for a project ``` ### Issues @@ -406,7 +406,7 @@ This allows AI agents to capture created keys and IDs for follow-up commands. 
### Project Field Discovery -`jr project fields FOO --output json` returns valid issue types, priorities, and statuses for a project. AI agents use this to know what values are valid before creating or editing issues, avoiding trial-and-error guessing. +`jr project fields --project FOO --output json` returns valid issue types, priorities, and statuses for a project. AI agents use this to know what values are valid before creating or editing issues, avoiding trial-and-error guessing. ### Querying Transitions Without Moving diff --git a/docs/superpowers/specs/2026-03-24-assets-cmdb-design.md b/docs/superpowers/specs/2026-03-24-assets-cmdb-design.md new file mode 100644 index 0000000..1fce39b --- /dev/null +++ b/docs/superpowers/specs/2026-03-24-assets-cmdb-design.md @@ -0,0 +1,495 @@ +# Assets/CMDB Support — Design Spec + +## Goal + +Add standalone Assets/CMDB support to `jr`, enabling users to search assets via AQL, view asset details, and list connected Jira issues. Assets is workspace-scoped (not project-scoped) and works across all project types — this is Layer 1 of a two-layer design where Layer 2 (future) adds project-level integrations (e.g., filtering issues by asset custom fields). + +## Background + +Atlassian Assets (formerly Insight) is a CMDB that lives at the Jira Cloud site level. Asset objects (e.g., clients, servers, software) can be linked to issues across any Jira project type — Software, JSM, or Business. The Assets REST API is a separate API surface at `api.atlassian.com/ex/jira/{cloudId}/jsm/assets/workspace/{workspaceId}/v1/`, requiring a workspace ID that is site-wide and discovered via a dedicated endpoint. + +The `jr` codebase uses product-namespaced modules: `api/jira/`, `api/jsm/`, `types/jira/`, `types/jsm/`. Assets adds `api/assets/` and `types/assets/` as peers. + +## Architecture + +### Workspace ID Discovery + Cache + +Assets API calls require a `workspaceId`. This is site-wide (not per-project) and discovered once: + +1. 
`GET {instance_url}/rest/servicedeskapi/assets/workspace` → returns a paginated `ServiceDeskPage`, e.g. `{ "values": [{ "workspaceId": "..." }] }`. Uses `get_from_instance()` (same pattern as JSM endpoints).
+2. Cache in `~/.cache/jr/workspace.json` with 7-day TTL (separate from `project_meta.json` since workspace ID is site-wide).
+
+Cache structure:
+
+```json
+{
+  "workspace_id": "abc-123-def-456",
+  "fetched_at": "2026-03-24T12:00:00Z"
+}
+```
+
+Cache invalidation strategy (planned): on 404 from Assets endpoints, clear the workspace cache and re-fetch. The current implementation does not yet perform automatic invalidation and retry; it may be added in a future iteration.
+
+Cache functions in `src/cache.rs` (filesystem only):
+
+```rust
+pub fn read_workspace_cache() -> Result<Option<WorkspaceCache>>
+pub fn write_workspace_cache(workspace_id: &str) -> Result<()>
+```
+
+`write_workspace_cache` sets `fetched_at` internally (matching `write_team_cache` pattern).
+
+Orchestration function in `src/api/assets/workspace.rs`:
+
+```rust
+pub async fn get_or_fetch_workspace_id(client: &JiraClient) -> Result<String>
+```
+
+### API Layer
+
+Assets API calls live in `src/api/assets/`, peer to `api/jira/` and `api/jsm/`. Methods are implemented on the existing `JiraClient`.
+
+**URL construction:** Assets endpoints use a third base URL pattern:
+
+| API Surface | Base URL | Method |
+|-------------|----------|--------|
+| Platform (Jira) | `{instance_url}/rest/api/3/...` | `get()` |
+| JSM | `{instance_url}/rest/servicedeskapi/...` | `get_from_instance()` |
+| Assets | `{assets_base_url}/...` | new `get_assets()` / `post_assets()` |
+
+**`JiraClient` changes:** Add a new `assets_base_url` field to `JiraClient`:
+
+- In `from_config()`: If `cloud_id` is present, set `assets_base_url` to `https://api.atlassian.com/ex/jira/{cloud_id}/jsm/assets`. If `cloud_id` is absent, set to `None`.
+- In `new_for_test()`: Set `assets_base_url` to `Some("{base_url}/jsm/assets")` so wiremock intercepts assets calls on the same mock server. Test paths become `/jsm/assets/workspace/{workspaceId}/v1/object/...`.
+- `get_assets(workspace_id, path)` constructs: `{assets_base_url}/workspace/{workspace_id}/v1/{path}`.
+- `post_assets(workspace_id, path, body)` same pattern for POST.
+
+```rust
+// Added to JiraClient
+assets_base_url: Option<String>,
+
+pub async fn get_assets<T: serde::de::DeserializeOwned>(
+    &self,
+    workspace_id: &str,
+    path: &str,
+) -> anyhow::Result<T> {
+    let base = self.assets_base_url.as_ref().ok_or_else(|| {
+        JrError::ConfigError("Cloud ID not configured. Run \"jr init\" to set up your instance.".into())
+    })?;
+    let url = format!("{}/workspace/{}/v1/{}", base, workspace_id, path);
+    let request = self.client.get(&url);
+    let response = self.send(request).await?;
+    Ok(response.json::<T>().await?)
+}
+```
+
+This approach ensures:
+1. Production: routes to `api.atlassian.com` via `cloud_id`
+2. Tests: routes to wiremock via `base_url`
+3. Missing `cloud_id`: clear error message
+4. Same auth header used everywhere
+
+**Important:** The workspace discovery endpoint still uses `get_from_instance()` (hits the real instance URL), since the discovery endpoint is at `/rest/servicedeskapi/assets/workspace` on the instance, not the API gateway.
+
+### Pagination
+
+The `POST /object/aql` endpoint returns:
+
+```json
+{
+  "startAt": 0,
+  "maxResults": 25,
+  "total": 5,
+  "isLast": "false",
+  "values": [...]
+}
+```
+
+**Critical:** `isLast` may be returned as a string (`"false"`) or boolean (`false`) depending on context. The `AssetsPage` type needs a custom serde deserializer for this field.
+
+`total` is capped at 1000. A `hasMoreResults` field indicates if more exist. For pagination, use `isLast` as the stop condition (not computed from `total`).
+
+```rust
+#[derive(Debug, Deserialize)]
+pub struct AssetsPage<T> {
+    #[serde(rename = "startAt", default)]
+    pub start_at: u32,
+    #[serde(rename = "maxResults", default)]
+    pub max_results: u32,
+    #[serde(default)]
+    pub total: u32,
+    #[serde(rename = "isLast", deserialize_with = "deserialize_bool_or_string")]
+    pub is_last: bool,
+    #[serde(default)]
+    pub values: Vec<T>,
+}
+```
+
+The `deserialize_bool_or_string` function handles both `true`/`false` and `"true"`/`"false"`.
+
+## File Structure
+
+### New Files
+
+| File | Responsibility |
+|------|---------------|
+| `src/api/assets/mod.rs` | Re-exports workspace, objects, tickets |
+| `src/api/assets/workspace.rs` | Workspace ID discovery, `get_assets()` helper, `get_or_fetch_workspace_id()` |
+| `src/api/assets/objects.rs` | `search_assets()` (AQL), `get_asset()` (by ID), `resolve_object_key()` |
+| `src/api/assets/tickets.rs` | `get_connected_tickets()` |
+| `src/types/assets/mod.rs` | Re-exports object, ticket |
+| `src/types/assets/object.rs` | `AssetObject`, `ObjectType`, `AssetAttribute`, `ObjectAttributeValue` |
+| `src/types/assets/ticket.rs` | `ConnectedTicketsResponse`, `ConnectedTicket`, `TicketStatus`, `TicketType`, `TicketPriority` |
+| `src/cli/assets.rs` | `jr assets search`, `jr assets view`, `jr assets tickets` handlers |
+| `tests/assets.rs` | Integration tests with wiremock |
+
+### Modified Files
+
+| File | Change |
+|------|--------|
+| `src/api/client.rs` | Add `assets_base_url` field, `get_assets()`, `post_assets()`, update `from_config()` and `new_for_test()` |
+| `src/api/mod.rs` | Add `pub mod assets;` |
+| `src/types/mod.rs` | Add `pub mod assets;` |
+| `src/api/pagination.rs` | Add `AssetsPage` with `deserialize_bool_or_string` |
+| `src/cache.rs` | Add `WorkspaceCache`, `read_workspace_cache()`, `write_workspace_cache()` |
+| `src/cli/mod.rs` | Add `Assets` command variant and `AssetsCommand` enum |
+| `src/main.rs` | Add dispatch for `Command::Assets` |
+
+## Types
+
+### AssetsPage
+
+```rust
+// src/api/pagination.rs
+#[derive(Debug, Deserialize)]
+pub struct AssetsPage<T> {
+    #[serde(rename = "startAt", default)]
+    pub start_at: u32,
+    #[serde(rename = "maxResults", default)]
+    pub max_results: u32,
+    #[serde(default)]
+    pub total: u32,
+    #[serde(rename = "isLast", deserialize_with = "deserialize_bool_or_string")]
+    pub is_last: bool,
+    #[serde(default)]
+    pub values: Vec<T>,
+}
+
+impl<T> AssetsPage<T> {
+    pub fn has_more(&self) -> bool {
+        !self.is_last
+    }
+
+    pub fn next_start(&self) -> u32 {
+        self.start_at + self.max_results
+    }
+}
+```
+
+### WorkspaceCache
+
+```rust
+// src/cache.rs
+#[derive(Debug, Serialize, Deserialize)]
+pub struct WorkspaceCache {
+    pub workspace_id: String,
+    pub fetched_at: DateTime<Utc>,
+}
+```
+
+### AssetObject
+
+```rust
+// src/types/assets/object.rs
+#[derive(Debug, Deserialize, Serialize)]
+pub struct AssetObject {
+    pub id: String,
+    pub label: String,
+    #[serde(rename = "objectKey")]
+    pub object_key: String,
+    #[serde(rename = "objectType")]
+    pub object_type: ObjectType,
+    pub created: Option<String>,
+    pub updated: Option<String>,
+    #[serde(default)]
+    pub attributes: Vec<AssetAttribute>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectType {
+    pub id: String,
+    pub name: String,
+    pub description: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct AssetAttribute {
+    pub id: String,
+    #[serde(rename = "objectTypeAttributeId")]
+    pub object_type_attribute_id: String,
+    #[serde(rename = "objectAttributeValues", default)]
+    pub values: Vec<ObjectAttributeValue>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectAttributeValue {
+    pub value: Option<String>,
+    #[serde(rename = "displayValue")]
+    pub display_value: Option<String>,
+}
+```
+
+### ConnectedTicket
+
+Connected tickets have a different shape than standard Jira issues — `title` instead of `summary`, nested `status`/`type`/`priority` with different structures.
+
+```rust
+// src/types/assets/ticket.rs
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ConnectedTicketsResponse {
+    #[serde(default)]
+    pub tickets: Vec<ConnectedTicket>,
+    #[serde(rename = "allTicketsQuery")]
+    pub all_tickets_query: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ConnectedTicket {
+    pub key: String,
+    pub id: String,
+    pub title: String,
+    pub reporter: Option<String>,
+    pub created: Option<String>,
+    pub updated: Option<String>,
+    pub status: Option<TicketStatus>,
+    #[serde(rename = "type")]
+    pub issue_type: Option<TicketType>,
+    pub priority: Option<TicketPriority>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TicketStatus {
+    pub name: String,
+    #[serde(rename = "colorName")]
+    pub color_name: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TicketType {
+    pub name: String,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TicketPriority {
+    pub name: String,
+}
+```
+
+## API Methods
+
+All methods are implemented on `JiraClient` in `src/api/assets/`.
+
+### get_or_fetch_workspace_id
+
+```
+GET {instance_url}/rest/servicedeskapi/assets/workspace
+```
+
+Returns workspace ID from cache or API. Caches site-wide.
+
+### get_assets (helper)
+
+Constructs URL: `https://api.atlassian.com/ex/jira/{cloud_id}/jsm/assets/workspace/{workspace_id}/v1/{path}`. Sends GET request with same auth header.
+
+### post_assets (helper)
+
+Same URL construction as `get_assets` but for POST requests (AQL search).
+
+### search_assets
+
+```
+POST .../v1/object/aql?startAt={start}&maxResults={limit}&includeAttributes={attrs}
+Body: {"qlQuery": "<aql>"}
+```
+
+Auto-paginates using `AssetsPage`. Returns `Vec<AssetObject>`.
+
+### get_asset
+
+```
+GET .../v1/object/{id}?includeAttributes={attrs}
+```
+
+Returns single `AssetObject`.
+
+### resolve_object_key
+
+Resolves an object key (e.g., `OBJ-1`) to its numeric ID via AQL:
+
+```
+POST .../v1/object/aql?maxResults=1&includeAttributes=false
+Body: {"qlQuery": "Key = \"OBJ-1\""}
+```
+
+Returns the ID or errors with "No asset matching ...". 
If the input is purely numeric, treats it as an ID directly (skip AQL). + +### get_connected_tickets + +``` +GET .../v1/objectconnectedtickets/{objectId}/tickets +``` + +Returns `ConnectedTicketsResponse`. No pagination — returns all tickets in one response. The `--limit` flag on the CLI command performs client-side truncation only. + +## CLI Commands + +### jr assets search + +``` +$ jr assets search "objectType = Client" + + Key Type Name + ────────────────────────────────── + OBJ-1 Client Acme Corp + OBJ-2 Client Globex Inc + OBJ-3 Client Initech +``` + +With `--attributes` adds `Created` and `Updated` columns to the table output and passes `includeAttributes=true` to the API. + +JSON mode returns the full `AssetObject` array. The `--attributes` flag controls whether `includeAttributes=true` is passed to the API — this applies to both table and JSON output. Without `--attributes`, JSON output includes the object metadata but no attributes array. + +### jr assets view + +``` +$ jr assets view OBJ-1 + + Field Value + ────────────────────────── + Key OBJ-1 + Type Client + Name Acme Corp + Created 2025-12-17 14:58 + Updated 2026-01-29 19:52 +``` + +With `--attributes` adds an attributes section showing `displayValue` for each attribute. Note: attribute names are shown by their `objectTypeAttributeId` (opaque IDs) since resolving IDs to human-readable names requires additional API calls to object type attribute metadata — this is out of scope for Layer 1 but can be added later. + +Accepts either an object key (e.g., `OBJ-1`) or numeric ID. Object keys are resolved to IDs via AQL. 
+
+### jr assets tickets
+
+```
+$ jr assets tickets OBJ-1
+
+  Key      Type             Title                                 Status       Priority
+  ──────────────────────────────────────────────────────────────────────────────────────────────
+  PROJ-42  Service Request  VPN access not working after update   In Progress  High
+  PROJ-38  Change Issue     Update firewall rules for new subnet  Closed       Medium
+```
+
+JSON mode returns the full `ConnectedTicketsResponse` object (includes `tickets` array and `allTicketsQuery` JQL). The `--limit` flag only affects table output; JSON always returns the full response.
+
+### CLI Enum
+
+```rust
+// src/cli/mod.rs
+#[derive(Subcommand)]
+pub enum Command {
+    // ... existing commands ...
+
+    /// Manage Assets/CMDB objects
+    Assets {
+        #[command(subcommand)]
+        command: AssetsCommand,
+    },
+}
+
+#[derive(Subcommand)]
+pub enum AssetsCommand {
+    /// Search assets with AQL query
+    Search {
+        /// AQL query (e.g. "objectType = Client")
+        query: String,
+        /// Maximum number of results
+        #[arg(long)]
+        limit: Option<u32>,
+        /// Include object attributes in output
+        #[arg(long)]
+        attributes: bool,
+    },
+    /// View asset details
+    View {
+        /// Object key (e.g. OBJ-1) or numeric ID
+        key: String,
+        /// Include object attributes in output
+        #[arg(long)]
+        attributes: bool,
+    },
+    /// Show Jira issues connected to an asset
+    Tickets {
+        /// Object key (e.g. OBJ-1) or numeric ID
+        key: String,
+        /// Maximum number of tickets to show
+        #[arg(long)]
+        limit: Option<u32>,
+    },
+}
+```
+
+## Error Handling
+
+All errors follow the project convention: suggest what to do next.
+
+| Scenario | Error Message |
+|----------|--------------|
+| No Assets workspace | `Error: Assets is not available on this Jira site. Assets requires Jira Service Management Premium or Enterprise.` |
+| No cloud_id in config | `Error: Cloud ID not configured. Run "jr init" to set up your instance.` |
+| Object not found | `Error: No asset matching "OBJ-99" found. 
Check the object key and try again.` | +| Invalid AQL syntax | Pass through the API error message (Jira returns descriptive AQL parse errors) | +| No connected tickets | Display "No results found." (dimmed text, not an error — same pattern as `jr issue list` with no results) | +| Workspace cache miss + API failure | Falls through to standard network error handling in `JiraClient` | + +## Testing Strategy + +### Unit Tests + +| Test | Location | +|------|----------| +| `AssetsPage` deserialization — `isLast` as boolean and as string | `src/api/pagination.rs` | +| `AssetsPage` `has_more()` and `next_start()` | `src/api/pagination.rs` | +| `WorkspaceCache` read/write/TTL/expiry | `src/cache.rs` | +| `AssetObject` deserialization with/without attributes | `src/types/assets/object.rs` | +| `ConnectedTicketsResponse` deserialization | `src/types/assets/ticket.rs` | +| Object key detection (key vs numeric ID) | `src/api/assets/objects.rs` or `src/cli/assets.rs` | + +### Integration Tests (wiremock) + +| Test | File | +|------|------| +| `jr assets search` — returns objects | `tests/assets.rs` | +| `jr assets search` — empty results | `tests/assets.rs` | +| `jr assets search` — pagination across pages | `tests/assets.rs` | +| `jr assets view` — returns object details | `tests/assets.rs` | +| `jr assets view` — object not found | `tests/assets.rs` | +| `jr assets tickets` — returns connected tickets | `tests/assets.rs` | +| `jr assets tickets` — no connected tickets | `tests/assets.rs` | +| Workspace ID discovery — cache miss fetches from API | `tests/assets.rs` | + +## Out of Scope + +- Asset creation, update, or deletion (read-only for now) +- Object schema management (`/objectschema/`) +- Object type listing (`/objecttype/`) +- Layer 2 project integrations (`--client` flag on `jr issue list`, `jr queue view`) +- Attribute name resolution (would need object type attribute metadata to map IDs to names) +- Object references (showing linked assets from an asset) +- Assets 
import API + +## Dependencies + +No new crate dependencies. Uses existing `reqwest`, `serde`, `chrono`, `comfy-table`, `colored`. + +## Migration + +No migration needed. Purely additive — new commands, new API module, new types. Existing commands and behavior are unchanged. diff --git a/docs/superpowers/specs/2026-03-24-common-filter-flags-design.md b/docs/superpowers/specs/2026-03-24-common-filter-flags-design.md new file mode 100644 index 0000000..12f4585 --- /dev/null +++ b/docs/superpowers/specs/2026-03-24-common-filter-flags-design.md @@ -0,0 +1,236 @@ +# Common Filter Flags for Issue List — Design Spec + +**Goal:** Add `--assignee`, `--reporter`, and `--recent` shorthand flags to `jr issue list` so common queries don't require JQL knowledge. + +**Problem:** The most common queries — "show my tickets," "show what I reported," "show recent tickets" — require `--jql 'assignee = currentUser() AND ...'` syntax. This is a barrier for users unfamiliar with JQL. + +**Addresses:** [GitHub Issue #44](https://github.com/Zious11/jira-cli/issues/44) + +--- + +## Architecture + +Three new flags on `jr issue list` that generate JQL clauses under the hood. All flags compose additively with each other, with `--jql`, and with existing flags (`--status`, `--team`). The `me` keyword resolves to `currentUser()` server-side (no API call). Non-`me` values resolve via the Jira user search API with partial-match disambiguation. + +**Precedent:** GitHub CLI (`gh issue list`) composes `--search` with shorthand flags (`--assignee`, `--label`, `--state`) additively — they AND together. We follow the same pattern. + +--- + +## 1. New CLI Flags + +``` +--assignee Filter by assignee. "me" resolves to currentUser() +--reporter Filter by reporter. 
"me" resolves to currentUser() +--recent Show issues created within duration (e.g., 7d, 4w, 2M) +``` + +### Flag Definitions + +In `IssueCommand::List`: + +```rust +/// Filter by assignee ("me" for current user, or a name to search) +#[arg(long)] +assignee: Option, + +/// Filter by reporter ("me" for current user, or a name to search) +#[arg(long)] +reporter: Option, + +/// Show issues created within duration (e.g., 7d, 4w, 2M) +#[arg(long)] +recent: Option, +``` + +### Composition Rules + +- All flags AND together: `--assignee me --status "In Progress" --recent 7d` produces `assignee = currentUser() AND status = "In Progress" AND created >= -7d` +- Flags compose with `--jql`: `--jql "type = Bug" --assignee me` produces `type = Bug AND assignee = currentUser()` +- Flags compose with auto-detected board JQL (scrum sprint, kanban) +- Each flag can only be specified once (clap default for `Option`) + +### Duration Format + +JQL relative dates use the format `(+/-)nn(unit)` where units are case-sensitive: + +| Unit | Meaning | +|------|---------| +| `y` | years | +| `M` | months (uppercase) | +| `w` | weeks | +| `d` | days | +| `h` | hours | +| `m` | minutes (lowercase) | + +Combined units like `4w2d` are not supported by Jira. Client-side validation regex: `^\d+[yMwdhm]$`. + +--- + +## 2. User Resolution + +### The `me` Keyword + +`--assignee me` and `--reporter me` resolve to `currentUser()` in JQL — a server-side function requiring no API call. Case-insensitive match (`Me`, `ME`, `me` all work). + +### Name Resolution (non-`me` values) + +For non-`me` values like `--assignee "Jane"`: + +1. Call `GET /rest/api/3/user/search?query=` — prefix-matches displayName and email +2. Filter results to only active users (`active == true`) — the endpoint returns both active and inactive users +3. If exactly 1 active user matches → use their `accountId` in JQL: `assignee = ` +4. 
If multiple active users match → use `partial_match` module for disambiguation (same pattern as `--team`) + - Interactive mode: prompt user to pick + - `--no-input` mode: error listing the matches +5. If 0 active matches → error: `"No active user found matching '<name>'. The user may be deactivated."` + +**Note:** accountId is used without quotes in JQL: `assignee = 5b10ac8d82e05b22cc7d4ef5` + +### API Endpoint Details (validated via Perplexity) + +- **Endpoint:** `GET /rest/api/3/user/search?query=<name>` +- **Response:** Conflicting documentation — Perplexity sources disagree on whether this is a flat array `[User, ...]` or paginated `{ "values": [...] }`. The implementation should try deserializing as `Vec<User>` first (flat array) and fall back to extracting from a paginated wrapper if needed. Verify empirically during implementation. Each User object has `accountId`, `displayName`, `active`, etc. +- **Permission:** Requires "Browse users and groups" global permission (standard) +- **Behavior without permission:** May return empty results (200 OK) rather than 403 — indistinguishable from "no matches" +- **Prefix matching:** Matches start of displayName or emailAddress words + +--- + +## 3. JQL Construction Changes + +### Current Behavior + +- `--jql` bypasses all auto-detection; `--status` and `--team` are silently ignored +- Scrum path hardcodes `assignee = currentUser()` +- Kanban path hardcodes `assignee = currentUser()` + +### New Behavior + +All paths use a unified JQL assembly flow: + +1. **Start with base JQL parts** (`Vec<String>`): + - If `--jql` provided → push as first part + - If no `--jql` → auto-detect board context: + - Scrum: `sprint = {id}` (no implicit `assignee = currentUser()`) + - Kanban: `project = "KEY" AND statusCategory != Done` (no implicit `assignee = currentUser()`) + - Fallback: `project = "KEY"` (if available) +2. 
**Append filter flag clauses:** + - `--assignee me` → push `assignee = currentUser()` + - `--assignee "Jane"` → resolve → push `assignee = <accountId>` + - `--reporter me` → push `reporter = currentUser()` + - `--reporter "Jane"` → resolve → push `reporter = <accountId>` + - `--status "In Progress"` → push `status = "In Progress"` (unchanged) + - `--team "Alpha"` → push `<team field> = "<team value>"` (unchanged) + - `--recent 7d` → push `created >= -7d` +3. **Join** all parts with ` AND ` +4. **Append** `ORDER BY` clause (rank for board queries, updated DESC for fallback) + +**Unbounded query guard:** The current `build_fallback_jql` errors when no filters are provided (`project_key`, `status`, `resolved_team` all `None`). This guard must be updated to consider the new flags as well — a query like `--assignee me` with no project is valid JQL. The error should only fire when *all* filter sources (project, status, team, assignee, reporter, recent, jql) are empty. + +### Breaking Changes + +**1. Implicit `assignee = currentUser()` removed.** The scrum and kanban auto-detection paths currently hardcode `assignee = currentUser()`. This is removed. Users who want their own tickets use `--assignee me` explicitly. This makes the behavior consistent and predictable — no hidden filters. + +**Migration:** Users running `jr issue list` (no flags, board configured) will now see all sprint/board tickets instead of just their own. The `--assignee me` flag restores the previous behavior. + +**2. `--jql` now composes with filter flags.** Previously, `--jql` silently ignored `--status` and `--team`. Now all flags AND together with `--jql`. Users who relied on `--status` being silently dropped when `--jql` was present will get different (more specific) results. This is the correct behavior — the old silent-ignore was a bug, not a feature. + +--- + +## 4. 
Duration Validation + +Client-side validation gives better errors than Jira's generic 400: + +```rust +pub fn validate_duration(s: &str) -> Result<(), String> { + let re = regex or manual check: digits followed by one of [yMwdhm] + // Valid: "7d", "30d", "4w", "2M", "1y", "5h", "10m" + // Invalid: "7x", "d7", "", "4w2d" +} +``` + +No regex crate needed — a simple manual check (all chars except last are digits, last char is one of `yMwdhm`, at least 2 chars total) is cleaner. + +**Why `jql.rs` and not `duration.rs`:** The existing `src/duration.rs` handles worklog durations (`1h30m`, `2d`) which support combined units and a different format. JQL relative date durations (`7d`, `2M`) are a distinct format — single unit only, case-sensitive `M` for months. They belong in `jql.rs` alongside other JQL utilities (`escape_value`, `strip_order_by`). + +Error message: `"Invalid duration '7x'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)."` + +--- + +## 5. Error Handling + +| Scenario | Behavior | +|----------|----------| +| `--assignee "nonexistent"` → 0 matches | Error: `"No user found matching 'nonexistent'. Check the name and try again."` | +| `--assignee "J"` → multiple, interactive | Prompt to pick (same UX as `--team` disambiguation) | +| `--assignee "J"` → multiple, `--no-input` | Error: `"Multiple users match 'J': Jane Doe, John Smith. Use a more specific name."` | +| `--recent "7x"` → invalid duration | Error: `"Invalid duration '7x'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)."` | +| User search API returns empty (no permission) | Same as "no matches" — `"No user found matching 'X'."` | +| User search API fails (network/500) | Propagate error with context | + +--- + +## 6. 
File Structure + +### Modified Files + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Add `assignee`, `reporter`, `recent` flags to `IssueCommand::List` | +| `src/cli/issue/list.rs` | Refactor JQL construction to compose all flags additively; remove implicit `assignee = currentUser()` from scrum/kanban paths; add duration validation; call `resolve_user()` for assignee/reporter | +| `src/cli/issue/helpers.rs` | Add `resolve_user()` helper (user search + partial match disambiguation) | +| `src/api/jira/users.rs` | Add `search_users()` method | +| `src/jql.rs` | Add `validate_duration()` function | + +### Not Changed + +- `src/api/client.rs` — no new HTTP methods needed +- `src/config.rs` — no new config entries +- `src/cache.rs` — no caching for user search results (names change, no TTL benefit) + +--- + +## 7. Testing Strategy + +### Unit Tests + +- `validate_duration()` — valid formats (`7d`, `30d`, `4w`, `2M`, `1y`, `5h`, `10m`, `0d`), invalid formats (`7x`, `d7`, ``, `4w2d`) +- `resolve_user()` with `me`/`Me`/`ME` → returns `"currentUser()"` without API call +- JQL composition — all flag combinations produce correct JQL strings +- JQL composition with `--jql` base + filter flags + +### Integration Tests (wiremock) + +- User search returns 1 result → accountId used in JQL +- User search returns 0 results → error message +- User search returns multiple → disambiguation (may be hard to test interactively; test the non-interactive error path) +- `--recent 7d` → `created >= -7d` in JQL +- `--jql "type = Bug" --assignee me` → `type = Bug AND assignee = currentUser()` +- `--assignee me --status "Done" --recent 30d` → three AND clauses + +### Manual Testing + +- `jr issue list --project KEY --assignee me` +- `jr issue list --project KEY --reporter me --recent 7d` +- `jr issue list --project KEY --assignee "Jane" --status "In Progress"` +- `jr issue list --jql "type = Bug" --assignee me --recent 30d` +- `jr issue list` (board configured) — should show all 
tickets, not just own + +--- + +## Validation Sources + +| Decision | Validated by | +|----------|-------------| +| `gh issue list` composes `--search` with shorthand flags additively | Perplexity (GitHub CLI docs) | +| `assignee = currentUser()` and `reporter = currentUser()` are valid JQL | Perplexity (JQL reference) | +| `created >= -7d` is valid JQL for relative dates | Perplexity (Atlassian JQL docs) | +| Duration units: `y`, `M` (months), `w`, `d`, `h`, `m` (minutes) — case-sensitive | Perplexity (Atlassian JQL functions reference) | +| Combined units like `4w2d` are not supported | Perplexity | +| Display names don't work directly in JQL assignee/reporter fields | Perplexity (Atlassian community) | +| `~` (CONTAINS) operator doesn't work on assignee/reporter fields | Perplexity (Atlassian JQL operators) | +| `GET /rest/api/3/user/search?query=` for prefix-matching user lookup | Perplexity (Atlassian API docs) | +| User search response format conflicted — may be flat array or paginated; verify during implementation | Perplexity (conflicting Atlassian API docs) | +| accountId used without quotes in JQL: `assignee = accountId` | Perplexity (Atlassian community) | +| User search requires "Browse users and groups" permission; may return empty instead of 403 | Perplexity (Atlassian developer community) | +| `/user/search` is better than `/user/assignable/search` for reporter resolution | Perplexity (Atlassian API docs) | diff --git a/docs/superpowers/specs/2026-03-24-default-result-limit-design.md b/docs/superpowers/specs/2026-03-24-default-result-limit-design.md new file mode 100644 index 0000000..144abb1 --- /dev/null +++ b/docs/superpowers/specs/2026-03-24-default-result-limit-design.md @@ -0,0 +1,244 @@ +# Default Result Limit for Issue List — Design Spec + +**Goal:** Add a sensible default result limit to `jr issue list` so large projects don't dump thousands of rows into the terminal. 
+ +**Problem:** `jr issue list --project ` returns all issues with no default limit. On projects with thousands of issues, this produces megabytes of unusable output. The only way to control it is `--limit`, which is optional and defaults to unlimited. + +**Addresses:** [GitHub Issue #43](https://github.com/Zious11/jira-cli/issues/43) + +--- + +## Architecture + +The feature touches three layers: CLI argument parsing, the API search method's return type, and a new lightweight API call for approximate counts. No new code files or modules are introduced — all runtime changes fit within existing modules. + +**Precedent:** GitHub CLI (`gh issue list`) defaults to 30 results with `-L/--limit` to override. It has no `--all` flag and no truncation message. We improve on this by adding both `--all` and a truncation hint with approximate total count. + +--- + +## 1. CLI Changes + +### New Flag: `--all` + +Add `--all` boolean flag to `IssueCommand::List`, mutually exclusive with `--limit` via clap's `conflicts_with`: + +```rust +/// Fetch all results (no default limit) +#[arg(long, conflicts_with = "limit")] +all: bool, +``` + +Clap automatically rejects `--all --limit 50` with a clear error message. + +### Default Limit Constant + +```rust +const DEFAULT_LIMIT: u32 = 30; +``` + +### Effective Limit Resolution + +```rust +let effective_limit = if all { + None // unlimited — current behavior +} else { + Some(limit.unwrap_or(DEFAULT_LIMIT)) +}; +``` + +| User input | `effective_limit` | Behavior | +|------------|-------------------|----------| +| (nothing) | `Some(30)` | Default 30 results | +| `--limit 50` | `Some(50)` | Explicit 50 results | +| `--all` | `None` | Unlimited (current behavior) | +| `--all --limit 50` | N/A | Clap rejects with conflict error | + +### Scope + +Only `jr issue list` gets the default limit. Other list-like commands (`queue view`, `assets search`, `sprint current`) are unaffected — they can be standardized in a follow-up issue. + +--- + +## 2. 
API Layer Changes + +### New Return Type: `SearchResult` + +`search_issues()` currently returns `Result<Vec<Issue>>`. The `has_more` signal from the last page's `next_page_token` is computed but discarded. Change the return type to preserve it: + +```rust +pub struct SearchResult { + pub issues: Vec<Issue>, + pub has_more: bool, +} +``` + +The `has_more` field is `true` when either: (a) the API's `next_page_token` is present when we stop fetching, OR (b) we fetched more issues than the effective limit and truncated (i.e., `all_issues.len() > limit` before truncation). This covers two distinct break paths: the limit was reached mid-page (where the API may say no more pages, but we still truncated) and the API indicates more pages exist. + +All callers of `search_issues()` must be updated to destructure `SearchResult` instead of `Vec<Issue>`. + +### New Method: `approximate_count()` + +```rust +pub async fn approximate_count(&self, jql: &str) -> Result<u64> { + let body = serde_json::json!({ "jql": jql }); + let resp: ApproximateCountResponse = self + .post("/rest/api/3/search/approximate-count", &body) + .await?; + Ok(resp.count) +} +``` + +Response struct (file-private): + +```rust +#[derive(Deserialize)] +struct ApproximateCountResponse { + count: u64, +} +``` + +**JQL preparation:** Strip `ORDER BY` clauses from the JQL before passing to `approximate_count()` — ordering is meaningless for a count query and the endpoint requires bounded JQL. A simple `jql.split(" ORDER BY").next()` or regex suffices. + +**Endpoint details (validated via Perplexity):** +- `POST /rest/api/3/search/approximate-count` +- Request: `{"jql": "project = PROJ"}` +- Response: `{"count": 36}` +- Requires only `Browse projects` permission (standard, no special scopes) +- Returns `{"count": 0}` for zero matches (200 OK) +- Returns 400 for invalid JQL (won't happen — we use the same JQL that just succeeded) +- Count is approximate — recent updates may lag slightly +- Available on all Jira Cloud plans + +--- + +## 3. 
Truncation Message + +### When It Fires + +Only when **all three conditions** are met: +1. `has_more == true` (results were truncated) +2. `--all` was not passed +3. The search returned at least one issue + +### Flow + +1. `search_issues(jql, effective_limit)` → `SearchResult { issues, has_more }` +2. Render table/JSON to stdout +3. If `has_more` → call `approximate_count(jql)` +4. Print hint to **stderr** + +### Message Format + +``` +Showing 30 of ~1234 results. Use --limit or --all to see more. +``` + +The tilde (`~`) indicates the count is approximate. + +### Graceful Degradation + +If `approximate_count()` fails (network error, unexpected 403, etc.), fall back to a message without the total: + +``` +Showing 30 results. Use --limit or --all to see more. +``` + +### Why stderr + +The message goes to stderr so it doesn't pollute piped output (`jr issue list | grep`) or JSON output (`--output json`). This follows the CLI composability principle: stdout is for data, stderr is for humans. + +### JSON Output + +`--output json` continues to emit the issues array to stdout unchanged. Truncation metadata (e.g., `"truncated": true, "approximate_total": 1234`) in the JSON body is out of scope for this change — the stderr hint is sufficient for now. A structured JSON envelope can be added in a follow-up if scripting users need programmatic truncation detection. + +--- + +## 4. Error Handling & Edge Cases + +### approximate_count Fails + +Degrade gracefully — show hint without total. Never fail the command because of a count call. + +### Zero Results + +`search_issues` returns 0 issues with `has_more: false`. No truncation message shown. No approximate count call. + +### --all on Small Result Set + +`search_issues` returns all issues with `has_more: false`. No truncation message. No extra API call. + +### Race Condition: Issues Deleted Between Search and Count + +`approximate_count` returns a lower number than `issues.len()`. 
Harmless — the tilde already signals approximation. If count is 0, skip the message. + +### JQL Matches Exactly the Limit + +e.g., 30 issues exist, limit is 30. The API may return `has_more: false` if all fit in one page, or `has_more: true` if pagination boundaries don't align perfectly. Either way the behavior is correct: no false truncation messages (if `has_more: false`), or a harmless hint (if `has_more: true` and count returns ~30). + +--- + +## 5. File Structure + +### Modified Files + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Add `all: bool` flag to `IssueCommand::List` with `conflicts_with = "limit"` | +| `src/cli/issue/list.rs` | Default limit logic (`DEFAULT_LIMIT`), pass `effective_limit` to search, truncation message to stderr after output | +| `src/api/jira/issues.rs` | New `SearchResult` struct, modify `search_issues()` return type to `Result`, add `approximate_count()` method + `ApproximateCountResponse` | +| `src/cli/board.rs` | Update caller (line 69) to destructure `SearchResult` | +| `tests/issue_commands.rs` | Update `test_search_issues` and `test_search_issues_with_story_points` to destructure `SearchResult` | + +### Potentially Modified (if they call `search_issues`) + +| File | Change | +|------|--------| +| `src/cli/issue/assets.rs` | Update caller to destructure `SearchResult` (if it calls `search_issues`) | + +### Not Changed + +- `src/api/pagination.rs` — no changes to pagination structs +- `src/api/client.rs` — no new HTTP methods needed +- `src/config.rs` — no new config entries +- `src/cache.rs` — no caching for this feature + +--- + +## 6. 
Testing Strategy + +### Unit Tests + +- `ApproximateCountResponse` deserializes `{"count": 36}` and `{"count": 0}` correctly +- Default limit logic: no flags → `Some(30)`, `--limit 50` → `Some(50)`, `--all` → `None` +- `SearchResult` propagates `has_more` correctly + +### Integration Tests (wiremock) + +- Search returns 30 results + `has_more: true` → approximate count endpoint called → stderr contains truncation message with `~` total +- Search returns 10 results + `has_more: false` → approximate count endpoint **not** called → stderr is empty +- `--all` flag → search called with no limit → no truncation message +- `--limit 50` → respects explicit limit → truncation message if more exist +- Approximate count endpoint returns 500 → graceful degradation → stderr contains hint without total +- Zero results → no truncation message + +### Not Tested + +- Clap `conflicts_with` behavior — this is clap's responsibility, well-tested upstream + +--- + +## Validation Sources + +| Decision | Validated by | +|----------|-------------| +| `gh issue list` defaults to 30, no `--all` flag | Perplexity (GitHub CLI docs) | +| kubectl defaults to all results, `--limit` to restrict | Perplexity | +| UX research: silent truncation causes users to mistake partial for complete | Perplexity (Baymard Institute research) | +| `POST /rest/api/3/search/approximate-count` exists, returns `{"count": N}` | Perplexity (Atlassian support KB + API docs) | +| Approximate count requires only `Browse projects` permission | Perplexity (Atlassian API docs) | +| Cursor-based JQL endpoint does NOT return `total` field | Perplexity (Atlassian developer docs) | +| Truncation messages should go to stderr for composability | Perplexity (CLI best practices) | +| Clap `conflicts_with` is bi-directional, auto-generates error | Perplexity (clap docs + Rust users forum) | +| `approximate-count` requires bounded JQL, ORDER BY may be unnecessary | Perplexity (Atlassian API docs) | +| Default 30 matches `gh` 
precedent for developer CLI tools | Perplexity (GitHub CLI docs + CLI conventions) | +| `nextPageToken` is null/absent when total equals maxResults (no false has_more) | Perplexity (Atlassian developer community) | diff --git a/docs/superpowers/specs/2026-03-24-issue-linked-assets-design.md b/docs/superpowers/specs/2026-03-24-issue-linked-assets-design.md new file mode 100644 index 0000000..d4ace4c --- /dev/null +++ b/docs/superpowers/specs/2026-03-24-issue-linked-assets-design.md @@ -0,0 +1,297 @@ +# Issue Linked Assets — Design Spec + +**Goal:** Expose CMDB/Assets objects linked to Jira issues, bridging the gap between the issue and asset domains. + +**Problem:** `jr issue view` does not show connected CMDB/Assets objects. The only way to discover which asset is linked to a ticket is to reverse-search by iterating through assets using `jr assets tickets ` — impractical at scale. There is no issue→asset lookup, only asset→issue. + +**Addresses:** [GitHub Issue #46](https://github.com/Zious11/jira-cli/issues/46) + +--- + +## Architecture + +Assets are linked to Jira issues via CMDB custom fields (`com.atlassian.jira.plugins.cmdb:cmdb-object-cftype`). These fields are returned inline in the issue response when explicitly requested. The feature has three layers: + +1. **Field Discovery** — auto-discover which custom fields are CMDB type +2. **Adaptive Parsing** — extract asset references from varying response shapes +3. **Commands** — three new/modified commands to surface linked assets + +No new API endpoints are introduced. The feature reuses the existing `GET /rest/api/3/field` endpoint for discovery and the existing `get_asset()` method for enrichment. + +--- + +## 1. 
Field Discovery & Caching + +### Discovery + +Use `GET /rest/api/3/field` (already called by `list_fields()` in `api/jira/fields.rs`) to find CMDB fields: + +``` +Filter: schema.custom == "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype" +Result: Vec of field IDs, e.g., ["customfield_10191", "customfield_10245"] +``` + +This endpoint requires no special permissions — basic Jira access (`read:jira-work` scope) is sufficient. It works on all Jira Cloud plans. Assets itself requires JSM Premium/Enterprise, but the field endpoint is platform-level. + +### Caching + +Cache discovered field IDs in `~/.cache/jr/cmdb_fields.json` with 7-day TTL. Same pattern as teams, project meta, and workspace caches. + +```json +{ + "field_ids": ["customfield_10191"], + "fetched_at": "2026-03-24T12:00:00Z" +} +``` + +### Lazy Initialization + +Discovery runs on first use of a CMDB-dependent command (`jr issue view`, `jr issue list --assets`, `jr issue assets`), not during `jr init`. Users who don't use Assets never pay the cost. The unpaginated `GET /rest/api/3/field` is used (the paginated `GET /rest/api/3/field/search` requires admin permissions). + +--- + +## 2. Adaptive CMDB Field Parsing + +### Problem + +The CMDB custom field response shape varies across Jira instances: + +| Instance type | Response shape | +|---------------|----------------| +| Modern Assets | `[{"label": "Acme Corp", "objectKey": "OBJ-1", ...}]` | +| Legacy Insight | `[{"workspaceId": "...", "objectId": "123", "id": "wid:123"}]` | +| Mixed | May include some or all fields | + +### Parsed Representation + +```rust +pub struct LinkedAsset { + pub key: Option<String>, // e.g., "OBJ-1" + pub name: Option<String>, // e.g., "Acme Corp" + pub asset_type: Option<String>, // e.g., "Client" (serialized as "type") + pub id: Option<String>, // numeric ID + pub workspace_id: Option<String>, // workspace ID +} +``` + +### Parsing Strategy + +Extract CMDB values from `IssueFields.extra` (the `#[serde(flatten)] HashMap<String, serde_json::Value>`): + +1. 
For each discovered CMDB field ID, look up the key in `extra` +2. If value is `null` or absent → no linked assets for this field, skip +3. If value is an array → parse each element: + - Extract `label` and `objectKey` if present (best case) + - Fall back to `objectId` and `workspaceId` if that's all we get + - If value is an unexpected shape (string, number), store as `label` +4. Collect all `LinkedAsset` values across all CMDB fields on the issue + +### Enrichment Fallback + +When `object_id` is present but `label`/`objectKey` are missing, enrichment requires two preconditions: + +1. **`assets_base_url` is configured** — cloud_id was set during `jr init` +2. **Workspace ID is available** — obtained via `get_or_fetch_workspace_id()` in `api/assets/workspace.rs` (already cached with 7-day TTL, handles 403/404 gracefully) + +If both are satisfied: +- Call `get_asset(workspace_id, object_id, false)` — reuses existing method in `api/assets/objects.rs` +- Extract `label` and `object_key` from the returned `AssetObject` +- Enrichment calls are parallelized via `futures::future::join_all` (both for `view` and `list`) + +If either precondition fails: +- `assets_base_url` not configured → display raw ID with hint: `#12345 (run "jr init" to resolve asset names)` +- Workspace discovery fails (403/404) → same degradation, display raw ID +- Individual object fetch fails (deleted, permissions) → skip that asset, display raw ID — don't fail the command + +### Display Format + +| Scenario | Display | +|----------|---------| +| Key + label available | `OBJ-1 (Acme Corp)` | +| Key only, no label | `OBJ-1` | +| ID only, after enrichment | `OBJ-1 (Acme Corp)` | +| ID only, no Assets API | `#12345` | +| Multiple assets | `OBJ-1 (Acme Corp), OBJ-2 (Other Inc)` | + +--- + +## 3. Command Changes + +### 3a. 
`jr issue view` — Assets Row + +Add an "Assets" row to the key-value table, after "Links" and before "Points": + +``` +Key: PROJ-123 +Type: Event Alert +Status: In Progress +Assignee: Jane Doe +Links: blocks PROJ-456 (Config review) +Assets: OBJ-1 (Acme Corp), OBJ-2 (Other Inc) +Points: 5 +``` + +**Behavior:** +- The Assets row is shown **automatically** whenever CMDB fields are discovered (no `--assets` flag needed for `view`). This mirrors the Points pattern: always shown in `view`, opt-in via flag in `list`. +- CMDB field IDs are added to `extra_fields` when fetching the issue +- Values extracted from `extra`, parsed adaptively, enriched if needed (parallelized via `join_all`) +- If no CMDB fields discovered on this instance → row omitted silently +- If CMDB fields exist but issue has no linked assets → Assets row shows "(none)" +- JSON output: no change needed — raw issue JSON already includes CMDB fields in `extra` + +### 3b. `jr issue list --assets` — Assets Column + +Add `--assets` flag (same pattern as existing `--points`): + +``` +Key Type Status Assets Summary +PROJ-123 Event Alert In Progress OBJ-1 (Acme Corp) Config change detected +PROJ-124 Task To Do OBJ-2 (Other Inc) Review alert tuning +PROJ-125 Bug Done - Fix login page +``` + +**Behavior:** +- When `--assets` is passed, add discovered CMDB field IDs to `extra_fields` in the search request +- Search API returns CMDB field values identically to single-issue GET (confirmed via Perplexity) +- Add "Assets" column to table after "Assignee" +- Display first asset + count if multiple: `OBJ-1 (+2 more)` +- If `--assets` passed but no CMDB fields discovered → warn to stderr (same pattern as `--points`) + +**Performance:** +- Enrichment deduplicates across the page: collect unique `(workspace_id, object_id)` pairs from all issues, resolve in parallel via `futures::future::join_all`, then map results back to each issue +- Deduplication avoids redundant API calls when the same asset appears on multiple issues 
+- 429 retry already handled by `JiraClient.send()` + +### 3c. `jr issue assets KEY` — New Subcommand + +Show assets linked to a specific issue: + +``` +$ jr issue assets PROJ-123 +Key Type Name +OBJ-1 Client Acme Corp +OBJ-2 Server Web-Prod-01 +``` + +**Rationale:** A separate subcommand rather than overloading `jr assets tickets` because: +- Asset keys and issue keys have the same `PREFIX-123` format — ambiguous if shared +- CLI best practice (gh, kubectl) is separate subcommands per resource type +- Each direction has its own command: `jr issue assets PROJ-123` (issue→assets) and `jr assets tickets OBJ-1` (asset→issues) + +**Behavior:** +- Fetch the issue with CMDB field IDs in `extra_fields` +- Extract and parse linked assets, enrich if needed (parallelized) +- Table output: Key, Type (objectType.name), Name (label) +- JSON output: array of asset objects with all available fields: + ```json + [ + {"key": "OBJ-1", "type": "Client", "name": "Acme Corp", "id": "88"}, + {"key": "OBJ-2", "type": "Server", "name": "Web-Prod-01", "id": "92"} + ] + ``` + When enrichment provides additional data, it is included. When only IDs are available, `key`/`type`/`name` may be `null`. +- If no assets linked → `"No assets linked to PROJ-123."` + +--- + +## 4. Error Handling & Edge Cases + +### No CMDB Fields on Instance + +- `find_cmdb_field_ids()` returns empty list +- `jr issue view`: Assets row omitted silently +- `jr issue list --assets`: warn to stderr: `"warning: --assets ignored. No Assets custom fields found on this Jira instance."` +- `jr issue assets KEY`: error: `"No Assets custom fields found on this Jira instance. 
Assets requires Jira Service Management Premium or Enterprise."` + +### CMDB Fields Exist but Issue Has No Linked Assets + +- Custom field value is `null`, absent, or empty array `[]` +- `jr issue view`: Assets row shows "(none)" +- `jr issue list --assets`: Assets column shows "-" +- `jr issue assets KEY`: `"No assets linked to PROJ-123."` + +### Enrichment Failures + +- **Assets API not configured** (`assets_base_url` is `None`): display raw IDs with hint +- **Single object fetch fails** (deleted, permissions): skip that asset's enrichment, display raw ID — don't fail the entire command +- **Rate limiting**: handled by existing 429 retry in `JiraClient.send()` + +### Cache Staleness + +- Admin adds/removes CMDB fields — cache has wrong IDs +- 7-day TTL handles this naturally +- If a cached field ID returns `null` on an issue, treated as "no linked assets" — harmless + +--- + +## 5. File Structure + +### Modified Files + +| File | Change | +|------|--------| +| `src/api/jira/fields.rs` | Add `find_cmdb_field_ids()` — filter by `schema.custom`. Lives here because it queries `GET /rest/api/3/field` (Jira platform endpoint), alongside existing `find_story_points_field_id()` and `find_team_field_id()`. | +| `src/cache.rs` | Add `CmdbFieldsCache` with read/write + 7-day TTL | +| `src/cli/mod.rs` | Add `Assets` variant to `IssueCommand`, add `--assets` flag to `List` | +| `src/cli/issue/mod.rs` | Wire up `IssueCommand::Assets` dispatch | +| `src/cli/issue/list.rs` | Modify `handle_view` for Assets row, `handle_list` for `--assets` column | + +### New Files + +| File | Purpose | +|------|---------| +| `src/api/assets/linked.rs` | `get_or_fetch_cmdb_field_ids()` (cache orchestration wrapping `find_cmdb_field_ids()`), `extract_linked_assets()` (adaptive parsing), `enrich_assets()` (parallel enrichment via `get_asset()`). Lives in `api/assets/` because it orchestrates Assets-domain caching and enrichment, even though discovery calls the Jira field endpoint. 
| +| `src/cli/issue/assets.rs` | `handle_issue_assets()` — the `jr issue assets KEY` command handler. Lives in `cli/issue/` because it operates on issues, consistent with the issue subcommand pattern. | +| `src/types/assets/linked.rs` | `LinkedAsset` struct — represents an asset reference extracted from an issue. Lives in `types/assets/` because it represents asset data, even though it is parsed from issue responses. | +| `tests/cmdb_fields.rs` | Integration tests for field discovery + linked asset extraction | + +### Not Changed + +- `src/api/client.rs` — no new HTTP methods needed +- `src/api/assets/objects.rs` — `get_asset()` already exists for enrichment +- `src/cli/assets.rs` — `jr assets tickets` stays as-is +- `src/config.rs` — no new config entries +- `main.rs` — routing already handles `IssueCommand` variants + +--- + +## 6. Testing Strategy + +### Unit Tests + +- `find_cmdb_field_ids()` — filters correctly by schema.custom, ignores non-CMDB fields +- `extract_linked_assets()` — parses `{label, objectKey}` shape, `{workspaceId, objectId}` shape, null, empty array, unexpected shapes +- `LinkedAsset` display formatting — all display scenarios +- Cache read/write/expiry for `CmdbFieldsCache` + +### Integration Tests (wiremock) + +- Field discovery returns CMDB field IDs +- Issue fetch with CMDB fields in `extra_fields` returns asset values +- Enrichment fallback — mock Assets API to return `AssetObject` +- Search with `--assets` includes CMDB fields in request +- No CMDB fields on instance — graceful degradation + +### Manual Testing + +- `jr issue view` on an issue with linked assets +- `jr issue view` on an issue without linked assets +- `jr issue list --assets` with mixed assets/no-assets issues +- `jr issue assets KEY` on issue with and without assets + +--- + +## Validation Sources + +| Decision | Validated by | +|----------|-------------| +| CMDB field type: `com.atlassian.jira.plugins.cmdb:cmdb-object-cftype` | Perplexity (Atlassian community + 
developer docs) | +| `GET /rest/api/3/field` returns `schema.custom` | Perplexity + Context7 (official API docs) | +| No special permissions for field endpoint | Perplexity + Context7 | +| Search API returns CMDB fields same as single-issue GET | Perplexity | +| CMDB field response varies across instances | Perplexity (legacy Insight vs modern Assets) | +| No reverse lookup (issue→assets via AQL) | Perplexity | +| Empty custom fields return `null` in response | Context7 (JSM API examples) | +| Separate subcommands > try-and-fallback | Perplexity (CLI design best practices, gh/kubectl patterns) | +| Lazy discovery best practice for optional features | Perplexity | +| Assets API object has `label`, `objectKey`, `objectType` | Existing `AssetObject` type tested against real API | diff --git a/docs/superpowers/specs/2026-03-24-jsm-queues-design.md b/docs/superpowers/specs/2026-03-24-jsm-queues-design.md new file mode 100644 index 0000000..1bad309 --- /dev/null +++ b/docs/superpowers/specs/2026-03-24-jsm-queues-design.md @@ -0,0 +1,384 @@ +# JSM Queue Support — Design Spec + +## Goal + +Add Jira Service Management (JSM) queue support to `jr`, enabling service desk agents to list queues and view queue contents from the CLI. This is the first JSM feature, establishing the foundational infrastructure (project type detection, JSM API module, servicedeskapi pagination) that future JSM features (SLAs, request types, customers) will build on. + +## Background + +The `jr` CLI currently wraps Jira REST API v3 and Agile REST API. JSM projects use JSM-specific issue types and workflows, but the JSM-specific features — queues, SLAs, request types, customers — are only accessible through the separate `/rest/servicedeskapi/` API surface. + +The codebase was designed for multi-product expansion: `src/api/jira/` and `src/types/jira/` are product-namespaced so future products add sibling directories. 
+
+## Architecture
+
+### Project Type Detection + Cache
+
+JSM commands need to know whether a project is a service desk and what its `serviceDeskId` is. This is resolved transparently at runtime:
+
+1. `GET /rest/api/3/project/{key}` → extract `projectTypeKey` (`software` | `service_desk` | `business`) and `simplified` (team-managed vs company-managed) and `id` (numeric project ID)
+2. If `service_desk`: `GET /rest/servicedeskapi/servicedesk` → paginate through results, match by `projectId` to find `serviceDeskId`
+3. Cache result in `~/.cache/jr/project_meta.json` with 7-day TTL
+
+Cache structure:
+
+```json
+{
+  "HELPDESK": {
+    "project_type": "service_desk",
+    "simplified": false,
+    "project_id": "10042",
+    "service_desk_id": "15",
+    "fetched_at": "2026-03-24T12:00:00Z"
+  }
+}
+```
+
+Cache invalidation: on 404 from JSM endpoints (project may have been recreated), clear entry and re-fetch.
+
+Cache functions in `src/cache.rs` (filesystem only — no API calls):
+
+```rust
+pub fn read_project_meta(project_key: &str) -> Result<Option<ProjectMeta>>
+pub fn write_project_meta(project_key: &str, meta: &ProjectMeta) -> Result<()>
+```
+
+Orchestration function in `src/api/jsm/servicedesks.rs` (calls API on cache miss):
+
+```rust
+pub async fn get_or_fetch_project_meta(client: &JiraClient, project_key: &str) -> Result<ProjectMeta>
+```
+
+### API Layer
+
+JSM API calls live in a new `src/api/jsm/` module, sibling to `src/api/jira/`. Methods are implemented on the existing `JiraClient` — same auth, same HTTP infrastructure, different base path (`/rest/servicedeskapi/` on the instance URL instead of `/rest/api/3`).
+
+**Important:** JSM methods MUST use the existing `get_from_instance()` / `post_to_instance()` methods on `JiraClient` (not `get()` / `post()` which use `base_url`). The `get_from_instance` methods construct URLs from `instance_url`, which is correct for both API-token and OAuth auth flows.
Using `get()` would break OAuth users because `base_url` points to the API proxy (`https://api.atlassian.com/ex/jira/{cloudId}`) which does not serve `/rest/servicedeskapi/`. + +### Pagination + +The servicedeskapi uses a `PagedDTO` pagination format distinct from the platform API: + +```json +{ + "size": 5, + "start": 0, + "limit": 50, + "isLastPage": false, + "values": [...], + "_links": { "next": "...", "self": "..." } +} +``` + +| Field | Meaning | +|-------|---------| +| `size` | Count of items in current page | +| `start` | Zero-based starting index | +| `limit` | Max items per page | +| `isLastPage` | Whether this is the last page | +| `values` | Array of result objects | +| `_links` | Navigation links (self, next, prev) | + +This is a new `ServiceDeskPage` struct in `src/api/pagination.rs`, separate from the existing `OffsetPage`. Includes `has_more()` and `next_start()` helper methods matching the `OffsetPage` pattern. + +## File Structure + +### New Files + +| File | Responsibility | +|------|---------------| +| `src/api/jsm/mod.rs` | Re-exports servicedesks, queues | +| `src/api/jsm/servicedesks.rs` | `list_service_desks()` — resolve serviceDeskId from project | +| `src/api/jsm/queues.rs` | `list_queues()`, `get_queue_issues()` | +| `src/types/jsm/mod.rs` | Re-exports servicedesk, queue | +| `src/types/jsm/servicedesk.rs` | `ServiceDesk` struct | +| `src/types/jsm/queue.rs` | `Queue`, `QueueIssue`, `QueueIssueFields` structs | +| `src/cli/queue.rs` | `jr queue list`, `jr queue view` handlers | +| `tests/queue.rs` | Integration tests with wiremock | +| `tests/project_meta.rs` | Integration tests for project type detection + cache | + +### Modified Files + +| File | Change | +|------|--------| +| `src/api/mod.rs` | Add `pub mod jsm;` | +| `src/types/mod.rs` | Add `pub mod jsm;` | +| `src/api/pagination.rs` | Add `ServiceDeskPage` struct | +| `src/api/client.rs` | Add `pub fn instance_url(&self) -> &str` accessor | +| `src/cache.rs` | Add `ProjectMeta` 
struct, `read_project_meta()`, `write_project_meta()` with TTL logic |
+| `src/cli/mod.rs` | Add `Queue` command variant and `QueueCommand` enum |
+| `src/main.rs` | Add dispatch for `Command::Queue` |
+
+## Types
+
+### ServiceDeskPage
+
+```rust
+// src/api/pagination.rs
+#[derive(Debug, Deserialize)]
+pub struct ServiceDeskPage<T> {
+    pub size: u32,
+    pub start: u32,
+    pub limit: u32,
+    #[serde(rename = "isLastPage")]
+    pub is_last_page: bool,
+    #[serde(default)]
+    pub values: Vec<T>,
+}
+
+impl<T> ServiceDeskPage<T> {
+    pub fn has_more(&self) -> bool {
+        !self.is_last_page
+    }
+
+    pub fn next_start(&self) -> u32 {
+        self.start + self.size
+    }
+}
+```
+
+### ProjectMeta
+
+```rust
+// src/cache.rs
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ProjectMeta {
+    pub project_type: String,
+    pub simplified: bool,
+    pub project_id: String,
+    pub service_desk_id: Option<String>,
+    pub fetched_at: DateTime<Utc>,
+}
+```
+
+Note: `project_id` and `service_desk_id` are `String` (not numeric) to match Jira's convention — all Jira IDs are strings. See `Transition.id` in `types/jira/issue.rs` for precedent.
+
+All JSM-specific errors use `JrError::UserError(String)` (exit code 64), consistent with other user-facing validation errors in the codebase.
+
+### ServiceDesk
+
+```rust
+// src/types/jsm/servicedesk.rs
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ServiceDesk {
+    pub id: String,
+    #[serde(rename = "projectId")]
+    pub project_id: String,
+    #[serde(rename = "projectName")]
+    pub project_name: String,
+}
+```
+
+### Queue
+
+```rust
+// src/types/jsm/queue.rs
+#[derive(Debug, Deserialize, Serialize)]
+pub struct Queue {
+    pub id: String,
+    pub name: String,
+    pub jql: Option<String>,
+    pub fields: Option<Vec<String>>,
+    #[serde(rename = "issueCount")]
+    pub issue_count: Option<u64>,
+}
+```
+
+### QueueIssue
+
+Queue issues return a limited field set — only the fields configured for that queue, not full Jira issue objects.
+
+```rust
+// src/types/jsm/queue.rs
+#[derive(Debug, Deserialize, Serialize)]
+pub struct QueueIssue {
+    pub key: String,
+    pub fields: QueueIssueFields,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct QueueIssueFields {
+    pub summary: Option<String>,
+    pub status: Option<Status>,
+    pub issuetype: Option<IssueType>,
+    pub priority: Option<Priority>,
+    pub assignee: Option<User>,
+    pub reporter: Option<User>,
+    pub created: Option<String>,
+}
+```
+
+`Status`, `IssueType`, `Priority`, and `User` are reused from `src/types/jira/issue.rs` and `src/types/jira/user.rs`.
+
+Note: The existing `User` struct has `account_id: String` as a required field. If the servicedeskapi returns user objects without `accountId`, the `User` struct may need `account_id` changed to `Option<String>` or a separate `QueueUser` type. Verify during implementation by checking actual API responses.
+
+## API Methods
+
+All methods are implemented on `JiraClient` in the `src/api/jsm/` module.
+
+### list_service_desks
+
+```
+GET {instance_url}/rest/servicedeskapi/servicedesk?start={start}&limit=50
+```
+
+Auto-paginates using `ServiceDeskPage`. Returns `Vec<ServiceDesk>`.
+
+### list_queues
+
+```
+GET {instance_url}/rest/servicedeskapi/servicedesk/{serviceDeskId}/queue?includeCount=true&start={start}&limit=50
+```
+
+Auto-paginates. The `includeCount=true` parameter includes `issueCount` per queue. Returns `Vec<Queue>`.
+
+### get_queue_issues
+
+```
+GET {instance_url}/rest/servicedeskapi/servicedesk/{serviceDeskId}/queue/{queueId}/issue?start={start}&limit={limit}
+```
+
+Paginates with optional user-specified limit (same pattern as `list_comments`). Returns `Vec<QueueIssue>`.
+
+## CLI Commands
+
+### jr queue list
+
+Lists all queues for the current project's service desk.
+
+```
+$ jr queue list --project HELPDESK
+
+  Queue                  Issues
+  ─────────────────────────────────────
+  Triage                 12
+  In Progress            7
+  Escalated              3
+  Waiting for Customer   2
+  Resolution Request     1
+  All open               25
+```
+
+JSON mode returns array of queue objects with `id`, `name`, `issue_count`, `jql`.
+
+### jr queue view
+
+Shows issues in a specific queue. Queue name supports partial matching via existing `partial_match.rs`.
+
+```
+$ jr queue view triage --project HELPDESK
+
+  Key          Type             Summary                                Status  Assignee
+  ────────────────────────────────────────────────────────────────────────────────────────────
+  HELPDESK-42  Service Request  VPN access not working after update    New     —
+  HELPDESK-41  Service Request  Need software license renewal          New     —
+  HELPDESK-40  Change Issue     Update firewall rules for new subnet   New     Jane D.
+```
+
+Supports `--limit N` for pagination and `--id N` as escape hatch for duplicate queue names.
+
+### CLI Enum
+
+```rust
+// src/cli/mod.rs
+#[derive(Subcommand)]
+pub enum Command {
+    // ... existing commands ...
+
+    /// Manage JSM queues
+    Queue {
+        #[command(subcommand)]
+        command: QueueCommand,
+    },
+}
+
+#[derive(Subcommand)]
+pub enum QueueCommand {
+    /// List queues for the service desk
+    List,
+    /// View issues in a queue
+    View {
+        /// Queue name (partial match supported)
+        name: Option<String>,
+        /// Queue ID (use if name is ambiguous)
+        #[arg(long)]
+        id: Option<String>,
+        /// Maximum number of issues to return
+        #[arg(long)]
+        limit: Option<u32>,
+    },
+}
+```
+
+If neither `name` nor `--id` is provided, error with: `Error: Specify a queue name or use --id. Run "jr queue list" to see available queues.`
+
+**JSON output for `jr queue view`:** Returns a JSON array of `QueueIssue` objects, consistent with `jr issue list` returning an array of issues.
+
+```json
+[
+  {"key": "PROJ-100", "fields": {"summary": "...", "status": {"name": "New", ...}, ...}},
+  ...
+]
+```
+
+## Error Handling
+
+All errors follow the project convention: suggest what to do next.
+
+| Scenario | Error Message |
+|----------|--------------|
+| Non-JSM project | `Error: "{key}" is a Jira Software project. Queue commands require a Jira Service Management project.
Run "jr project fields {key}" to see available commands.` | +| Not an agent | `Error: You don't have agent access to the "{name}" service desk. Contact your JSM administrator.` | +| No matching queue | `Error: No queue matching "{name}" found. Run "jr queue list" to see available queues.` | +| Ambiguous queue name | `Error: "{name}" matches multiple queues: "{q1}", "{q2}". Be more specific or use --id.` | +| Duplicate queue names | `Error: Multiple queues named "{name}" found (IDs: {id1}, {id2}). Use --id {id1} to specify.` | +| Service desk not found | `Error: No service desk found for project "{key}". The project may not be configured as a service desk.` | +| Cache miss + API failure | Falls through to standard network error handling in `JiraClient` | + +## Testing Strategy + +### Unit Tests + +| Test | Location | +|------|----------| +| `ServiceDeskPage` deserialization (empty, single page, multiple items) | `src/api/pagination.rs` | +| `ProjectMeta` cache: write, read, TTL expiry, cache miss | `src/cache.rs` | +| Queue partial matching (exact, prefix, ambiguous, no match, duplicate names) | `src/cli/queue.rs` | +| `QueueIssue` deserialization with missing optional fields | `src/types/jsm/queue.rs` | + +### Integration Tests (wiremock) + +| Test | File | +|------|------| +| `jr queue list` — returns queues with issue counts | `tests/queue.rs` | +| `jr queue list` — empty queues | `tests/queue.rs` | +| `jr queue view` — returns queue issues | `tests/queue.rs` | +| `jr queue view --limit` — respects limit | `tests/queue.rs` | +| `jr queue view` — pagination across pages | `tests/queue.rs` | +| `jr queue view` — partial name match | `tests/queue.rs` | +| `jr queue` on non-JSM project — error message | `tests/queue.rs` | +| Project type detection — cache miss fetches from API | `tests/project_meta.rs` | +| Project type detection — cache hit skips API call | `tests/project_meta.rs` | +| Project type detection — expired TTL re-fetches | `tests/project_meta.rs` | + 
+### Snapshot Tests (insta) + +Table output for `jr queue list` and `jr queue view` to catch formatting regressions. + +## Out of Scope + +- SLA tracking (`/rest/servicedeskapi/request/{key}/sla`) — future spec +- Request types (`/rest/servicedeskapi/servicedesk/{id}/requesttype`) — future spec +- Customer/organization management — future spec +- Assets/CMDB — future spec +- Changes to `jr init` — project type is auto-detected at runtime +- Queue write operations (create/delete/reorder queues) — read-only for now +- `--full` flag on `jr queue view` to re-query via platform API — future enhancement + +## Dependencies + +No new crate dependencies. Uses existing `reqwest`, `serde`, `chrono`, `comfy-table`, `colored`. + +## Migration + +No migration needed. This is purely additive — new commands, new API module, new types. Existing commands and behavior are unchanged. diff --git a/docs/superpowers/specs/2026-03-25-jql-project-scope-design.md b/docs/superpowers/specs/2026-03-25-jql-project-scope-design.md new file mode 100644 index 0000000..2cc8a85 --- /dev/null +++ b/docs/superpowers/specs/2026-03-25-jql-project-scope-design.md @@ -0,0 +1,109 @@ +# Design: Fix `--jql` + `--project` scope composition + +**Issue:** #54 — Bug: --jql filter overrides --project scope, returning cross-project results +**Date:** 2026-03-25 +**Status:** Draft + +## Problem + +When combining `--jql` with `--project`, the JQL filter overrides the project scope entirely instead of composing with it. Users get cross-project results despite specifying `--project`. + +**Root cause:** In `src/cli/issue/list.rs`, the `if let Some(raw_jql) = jql` branch (line 90) builds base parts from only the raw JQL, skipping the project-scope logic that lives in the `else` branch. The project key is never added when `--jql` is present. 
+ +**Current behavior:** +``` +jr issue list --project PROJ --jql "priority = Highest" +→ JQL: priority = Highest ORDER BY updated DESC (project missing) +``` + +**Expected behavior:** +``` +jr issue list --project PROJ --jql "priority = Highest" +→ JQL: project = "PROJ" AND priority = Highest ORDER BY updated DESC +``` + +## Design Principle + +Per the principle of least surprise, structured flags (`--project`, `--status`, etc.) compose additively with raw queries (`--jql`) using AND logic. This matches the behavior of `gh`, `kubectl`, and `aws cli`. The `--jql` flag provides additional filtering, not a complete override. + +## Fix + +Refactor the `handle_list` function in `src/cli/issue/list.rs` to resolve the project key before the `if/else` branch, so it's available regardless of whether `--jql` is provided. + +### Current flow (buggy) + +``` +if --jql provided: + base_parts = [user_jql] # project key never considered + order_by = "updated DESC" +else: + base_parts = [project clause] # project key only here + order_by = board-aware logic +``` + +### Fixed flow + +``` +project_key = resolve project from --project flag or config + +if --jql provided: + stripped = strip_order_by(user_jql) + base_parts = [] + if project_key exists: + base_parts.push(project clause) + if stripped is non-empty: + base_parts.push(stripped) + order_by = "updated DESC" +else: + # uses same hoisted project_key, otherwise unchanged board-aware logic +``` + +The `else` branch is refactored to reference the hoisted `project_key` variable instead of calling `config.project_key()` inline. The logic is functionally identical. 
+ +### Generated JQL after fix + +| Flags | Generated JQL | +|-------|---------------| +| `--project PROJ --jql "priority = Highest"` | `project = "PROJ" AND priority = Highest ORDER BY updated DESC` | +| `--project PROJ --jql "priority = Highest" --status "In Progress"` | `project = "PROJ" AND priority = Highest AND status = "In Progress" ORDER BY updated DESC` | +| `--jql "priority = Highest"` (no project anywhere) | `priority = Highest ORDER BY updated DESC` | +| `--project PROJ` (no jql) | Unchanged — board-aware logic applies | +| `--project PROJ --jql "project = OTHER AND type = Bug"` | `project = "PROJ" AND project = OTHER AND type = Bug ORDER BY ...` — contradictory project clauses are the user's responsibility, consistent with how `--status` + JQL `status =` behaves | +| `--project PROJ --jql "ORDER BY created DESC"` | `project = "PROJ" ORDER BY updated DESC` — empty JQL after stripping ORDER BY is skipped (requires `strip_order_by` fix, see below) | +| No flags | Unchanged — unbounded query guard triggers | + +## Scope + +### `strip_order_by` fix + +The current `strip_order_by` in `src/jql.rs` searches for `" ORDER BY"` (with a leading space). This misses JQL that starts with `ORDER BY` at position 0 (e.g., `"ORDER BY created DESC"`). Fix: also match `ORDER BY` at the start of the string. Add a unit test for this case. 
+ +### What changes + +| File | Change | +|------|--------| +| `src/cli/issue/list.rs` | Refactor `handle_list` to resolve project key before the `if/else`, include it in base parts when `--jql` is present; skip empty stripped JQL | +| `src/cli/issue/list.rs` | Add unit tests for the new composition behavior | +| `src/jql.rs` | Fix `strip_order_by` to handle ORDER BY at position 0; add unit test | +| `tests/issue_commands.rs` | Add integration test verifying `--project` + `--jql` sends correct composed JQL | + +### What doesn't change + +- Board-aware logic (scrum sprint detection, kanban `statusCategory != Done`) — only applies when `--jql` is absent +- All existing filter flags (`--assignee`, `--reporter`, `--status`, `--team`, `--recent`, `--open`) — already compose correctly via `build_filter_clauses` +- The `build_filter_clauses` function — untouched + +## Testing + +### Unit tests (in `src/cli/issue/list.rs`) + +To make the JQL composition logic directly testable, extract the base-parts-building logic into a pure function that can be unit tested without async or wiremock. 
Tests: + +- `jql_with_project_composes` — both `--jql` and project key present: project clause prepended +- `jql_without_project_unchanged` — `--jql` with no project key: JQL passes through unchanged, does not trigger unbounded guard (non-empty parts) +- `jql_order_by_only_with_project` — `--jql "ORDER BY created"` with project: stripped JQL is empty, only project clause remains +- `jql_order_by_only_no_project` — `--jql "ORDER BY created"` with no project: stripped JQL is empty, no base parts (may trigger unbounded guard depending on other filters) + +### Integration test (in `tests/issue_commands.rs`) + +- `test_search_issues_jql_with_project` — call `client.search_issues()` with composed JQL containing both project clause and user JQL via wiremock, verify the POST body sent to `/rest/api/3/search/jql` contains the expected composed query diff --git a/docs/superpowers/specs/2026-03-25-open-flag-design.md b/docs/superpowers/specs/2026-03-25-open-flag-design.md new file mode 100644 index 0000000..9523019 --- /dev/null +++ b/docs/superpowers/specs/2026-03-25-open-flag-design.md @@ -0,0 +1,100 @@ +# Design: `--open` Flag for `jr issue list` + +**Issue:** #45 — Add 'jr my issues' shortcut for daily workflow +**Date:** 2026-03-25 +**Status:** Draft + +## Problem + +Issue #45 requested `jr my issues` as a shortcut for the daily "what's on my plate?" query. With PRs #50 (default result limit) and #51 (common filter flags), the gap is smaller than originally scoped: + +```bash +jr issue list --assignee me # works cross-project today +jr issue list --reporter me --recent 7d # composable +``` + +The remaining gap: there's no way to exclude Done/Closed issues without raw JQL. Users must write `--jql "statusCategory != Done"` to filter out completed work. + +## Decision: `--open` Flag, Not `jr my` Subcommand + +**Rejected: `jr my issues` subcommand.** Adding a second path to list issues doubles the surface area for AI agents (primary consumer) without adding composability. 
Agents don't benefit from shorter syntax — they generate commands programmatically. + +**Chosen: `--open` boolean flag on `jr issue list`.** This follows the existing composable-flags pattern, adds zero new concepts, and slots into the established JQL composition pipeline. + +Precedent: GitHub CLI uses `--state {open|closed|all}` on `gh issue list` (open is the default). Our `--open` is simpler because Jira's status model differs — we already have `--status` for exact status matching, so `--open` covers the broad "not done" filter. + +## Design + +### Flag Definition + +``` +--open Show only issues in open status categories (excludes Done) +``` + +- Type: `bool` +- Conflicts with: `--status` (mutual exclusion — use `--status` for specific status, `--open` for broad "not done") +- Composes with: `--assignee`, `--reporter`, `--recent`, `--team`, `--jql`, `--project` + +### JQL Clause + +`--open` appends a single clause to the filter: + +``` +statusCategory != "Done" +``` + +This uses Jira's built-in `statusCategory` field (not a custom field — same on every instance). The three valid status categories are "To Do", "In Progress", and "Done". All statuses mapped to the Done category (Done, Closed, Resolved, etc.) are excluded. + +### Composition + +`--open` slots into the existing `build_filter_clauses()` function as one more optional clause: + +``` +assignee = currentUser() <-- --assignee me +AND reporter = currentUser() <-- --reporter me +AND statusCategory != "Done" <-- --open +AND {team_field} = "{uuid}" <-- --team +AND created >= -7d <-- --recent +``` + +All AND together, same pipeline as today. No changes to JQL builder, ORDER BY handling, or board detection logic. + +Edge case with `--jql`: `--open` composes freely. If user's `--jql` already has a statusCategory clause, the AND produces a redundant-but-valid query. No special handling needed (Jira evaluates clauses independently). + +### Conflicts + +`--open` conflicts with `--status`. 
Rationale: +- `--status "In Progress"` is already in the open category — `--open` would be redundant +- `--status "Done" --open` is contradictory (zero results) +- Clean separation: `--status` for specific, `--open` for broad + +## Files Changed + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Add `--open` bool arg with `conflicts_with = "status"` to `IssueCommand::List` | +| `src/cli/issue/list.rs` | Destructure `open`, pass to `build_filter_clauses()`, add clause | +| `README.md` | Add `--open` to command table | + +No new runtime/code modules or API calls; only updates to existing CLI code and documentation. + +Additionally, the unbounded query guard error message (`list.rs:151`) must be updated to include `--open` in the list of available flags. + +## Testing + +Unit tests in `src/cli/issue/list.rs`: +- `build_jql_parts_open` — `--open` alone produces `statusCategory != "Done"` +- `build_jql_parts_open_with_assignee` — composes correctly +- `build_jql_parts_all_filters_with_open` — all flags together + +Existing tests unchanged. + +## Outcome + +After this change, the daily workflow from issue #45 becomes: + +```bash +jr issue list --assignee me --open # "what's on my plate?" — cross-project, excludes done +``` + +This closes issue #45 without a new subcommand, new config, or new concepts for AI agents to learn. 
diff --git a/docs/superpowers/specs/2026-03-25-project-fields-global-flag-design.md b/docs/superpowers/specs/2026-03-25-project-fields-global-flag-design.md new file mode 100644 index 0000000..3ccdb42 --- /dev/null +++ b/docs/superpowers/specs/2026-03-25-project-fields-global-flag-design.md @@ -0,0 +1,131 @@ +# Fix `project fields` to Use Global `--project` Flag — Design Spec + +**Issue:** #56 — `project fields` rejects global `--project` flag, only accepts positional arg + +**Goal:** Remove the positional `[PROJECT]` argument from `project fields` so it uses the global `--project` flag and `.jr.toml` default, consistent with every other subcommand. + +## Problem + +`jr project fields --project PROJ` fails with: +``` +error: unexpected argument '--project' found +``` + +`ProjectCommand::Fields` defines a positional `project: Option` argument that creates a naming collision with the global `--project` flag on `Cli`. Clap cannot resolve the global flag at the nested subcommand level when a positional argument with the same name exists. This is the only subcommand with this inconsistency — all others (`issue list`, `sprint list`, `board view`, etc.) use the global `--project` flag. + +**Root cause:** The positional `project` field in the `Fields` variant shadows the global `--project` flag, preventing clap from recognizing it at the `project fields` subcommand level. Confirmed empirically: `jr issue list --project FOO` works (no naming conflict), `jr project fields --project FOO` fails (naming conflict). Also confirmed via clap issues [#2053](https://github.com/clap-rs/clap/issues/2053) and [#3428](https://github.com/clap-rs/clap/issues/3428). 
+
+## Design
+
+### Clap definition (`src/cli/mod.rs`)
+
+Remove the `project` field from the `Fields` variant, converting it from a struct variant to a unit variant:
+
+```rust
+// Before
+Fields {
+    /// Project key (uses configured project if omitted)
+    project: Option<String>,
+}
+
+// After
+/// Show valid issue types, priorities, and statuses
+Fields,
+```
+
+Unit variants are already used throughout the codebase (`BoardCommand::List`, `BoardCommand::View`, `SprintCommand::List`, `SprintCommand::Current`, `AuthCommand::Status`, `IssueCommand::LinkTypes`).
+
+### Handler (`src/cli/project.rs`)
+
+Update the dispatch and handler to remove the `project` parameter:
+
+**Dispatch** — `ProjectCommand::Fields { project }` becomes `ProjectCommand::Fields`:
+
+```rust
+ProjectCommand::Fields => {
+    handle_fields(config, client, output_format, project_override).await
+}
+```
+
+**Handler** — `handle_fields` drops the `project` parameter and resolves the project key solely from `config.project_key(project_override)`:
+
+```rust
+async fn handle_fields(
+    config: &Config,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    project_override: Option<&str>,
+) -> Result<()> {
+    let project_key = config.project_key(project_override).ok_or_else(|| {
+        anyhow::anyhow!(
+            "No project specified. Run \"jr project list\" to see available projects."
+        )
+    })?;
+    // ... rest unchanged
+}
+```
+
+`config.project_key(project_override)` resolves in this order:
+1. `--project` CLI flag (passed as `project_override` from `cli.project` in `main.rs`)
+2. `.jr.toml` project config (per-project default)
+
+This is the same resolution used by `issue list`, `sprint list`, `board view`, and every other project-scoped command.
+
+### Help text
+
+After the fix, `jr project fields --help` will show the global `--project` flag in its options list (currently hidden by the naming conflict):
+
+```
+Show valid issue types, priorities, and statuses
+
+Usage: jr project fields [OPTIONS]
+
+Options:
+      --project <PROJECT>  Override project key
+      --output <OUTPUT>    Output format [default: table] [possible values: table, json]
+      ...
+```
+
+This matches the help output of all other subcommands.
+
+### Shell completions
+
+Shell completions are generated on demand via `jr completion <shell>`. They will automatically reflect the removal of the positional argument — no manual update needed.
+
+## Error handling
+
+No change. When no project is specified via `--project`, `.jr.toml`, or global config:
+```
+Error: No project specified. Run "jr project list" to see available projects.
+```
+
+## Backward compatibility
+
+`jr project fields PROJ` (the positional form) will no longer work. This is acceptable per project owner decision — the positional was the source of the bug and was inconsistent with the rest of the CLI.
+ +The following files reference the old positional form and should be updated to use `--project`: +- `README.md` (line ~123: `jr project fields FOO`) +- `docs/superpowers/specs/2026-03-21-jr-jira-cli-design.md` (v1 spec examples) +- `docs/superpowers/plans/2026-03-21-jr-implementation.md` (error message text) + +## Testing + +- Existing integration tests for `get_project_issue_types` and `get_priorities` continue to verify API layer correctness +- The clap parsing fix is verified by building and running: + - `jr project fields --project PROJ` (must succeed) + - `jr project fields` with `.jr.toml` default (must succeed) + - `jr project fields` with no project configured (must show error) + +## Files changed + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Remove `project` field from `Fields` variant (struct → unit) | +| `src/cli/project.rs` | Remove `project` param from dispatch and handler | +| `README.md` | Update `project fields` example to use `--project` | + +## Non-goals + +- Adding integration tests for clap parsing behavior (not done for any other subcommand) +- Changing how other subcommands resolve the project key +- Adding a local `--project` flag to `Fields` (unnecessary — the global flag covers this use case. While `issue create` has a local `--project` for interactive prompt fallback, `Fields` has no such need.) diff --git a/docs/superpowers/specs/2026-03-25-project-fields-statuses-design.md b/docs/superpowers/specs/2026-03-25-project-fields-statuses-design.md new file mode 100644 index 0000000..107d84c --- /dev/null +++ b/docs/superpowers/specs/2026-03-25-project-fields-statuses-design.md @@ -0,0 +1,164 @@ +# Add Statuses to `project fields` — Design Spec + +**Issue:** #55 — `project fields` omits statuses despite help text promising them + +**Goal:** Add project statuses grouped by issue type to `jr project fields` output (table and JSON). 
+
+## Problem
+
+`jr project fields` help text says "Show valid issue types, priorities, and statuses" but the implementation only fetches and displays issue types and priorities. Statuses are missing from both table and JSON output. Users must use `jr issue transitions <ISSUE_KEY>` on an existing issue to discover statuses, which requires already knowing an issue key.
+
+## API
+
+**Endpoint:** `GET /rest/api/3/project/{projectIdOrKey}/statuses`
+
+- Accepts both project key (e.g., `PROJ`) and numeric project ID
+- Permission: Browse Projects (same as existing project calls)
+- Authentication: required (handled by `JiraClient`)
+
+**Response:** Top-level array of issue type objects, each with a nested `statuses` array:
+
+```json
+[
+  {
+    "id": "3",
+    "name": "Task",
+    "self": "https://your-domain.atlassian.net/rest/api/3/issueType/3",
+    "subtask": false,
+    "statuses": [
+      {
+        "id": "10000",
+        "name": "In Progress",
+        "description": "The issue is currently being worked on.",
+        "iconUrl": "https://your-domain.atlassian.net/images/icons/progress.gif",
+        "self": "https://your-domain.atlassian.net/rest/api/3/status/10000"
+      }
+    ]
+  }
+]
+```
+
+## Design
+
+### Serde types (`src/api/jira/projects.rs`)
+
+Two new structs following the existing `IssueTypeMetadata` / `PriorityMetadata` pattern:
+
+```rust
+#[derive(Debug, Deserialize, Serialize)]
+pub struct StatusMetadata {
+    pub id: String,
+    pub name: String,
+    pub description: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct IssueTypeWithStatuses {
+    pub id: String,
+    pub name: String,
+    pub subtask: Option<bool>,
+    pub statuses: Vec<StatusMetadata>,
+}
+```
+
+`IssueTypeWithStatuses` is a purpose-specific struct for the `/statuses` endpoint response — it is distinct from the existing `IssueTypeMetadata` (which comes from the `/project/{key}` endpoint and has no `statuses` field). No consolidation needed.
+
+`subtask` is `Option<bool>` for defensive deserialization, matching the existing `IssueTypeMetadata` pattern — the API always returns it as a boolean, but `Option` prevents hard failures on unexpected responses.
+
+Only fields needed for display are included (`id`, `name`, `description`). Unknown fields (`iconUrl`, `self`, potential `statusCategory`) are silently ignored by serde's default behavior, matching the existing codebase pattern.
+
+### API method (`src/api/jira/projects.rs`)
+
+```rust
+pub async fn get_project_statuses(&self, project_key: &str) -> Result<Vec<IssueTypeWithStatuses>> {
+    self.get(&format!("/rest/api/3/project/{project_key}/statuses")).await
+}
+```
+
+### CLI handler (`src/cli/project.rs`)
+
+Add a third fetch in `handle_fields` after priorities:
+
+```rust
+let statuses = client.get_project_statuses(&project_key).await?;
+```
+
+### Table output
+
+Append a "Statuses by Issue Type" section after Priorities. Skip issue types that have an empty statuses list (newly created projects or unconfigured workflows):
+
+```
+Project: PROJ
+
+Issue Types:
+  - Task
+  - Bug
+  - Story (subtask)
+
+Priorities:
+  - Highest
+  - High
+  - Medium
+  - Low
+  - Lowest
+
+Statuses by Issue Type:
+  Task:
+    - To Do
+    - In Progress
+    - Done
+  Bug:
+    - Open
+    - In Progress
+    - Closed
+```
+
+If the API returns an empty array (no issue types with statuses), omit the "Statuses by Issue Type" section entirely.
+
+### JSON output
+
+Add a `statuses_by_issue_type` field to the JSON object.
The key is named `statuses_by_issue_type` (not `statuses`) because the value is an array of issue type objects with nested statuses — the name prevents confusion with a flat list of status objects: + +```json +{ + "project": "PROJ", + "issue_types": [...], + "priorities": [...], + "statuses_by_issue_type": [ + { + "id": "3", + "name": "Task", + "subtask": false, + "statuses": [ + {"id": "10000", "name": "In Progress", "description": "..."} + ] + } + ] +} +``` + +The JSON output preserves the full API response structure (including `id`, `description`) for scripting use cases. + +## Error handling + +The new fetch uses `?` propagation (hard fail), unlike the existing `get_project_issue_types` which uses `.unwrap_or_default()` (soft fail on parse error). This is intentional: if the statuses endpoint fails due to permission issues or network errors, the user should see a clear error rather than silently missing data — which is the exact bug we're fixing. + +## Testing + +- **Integration test** in `tests/project_commands.rs`: mock `/rest/api/3/project/FOO/statuses`, call `get_project_statuses`, verify deserialization of issue types and nested statuses. Tests go in `project_commands.rs` (not `project_meta.rs`, which tests JSM service desk metadata caching). +- **Fixture helper** `project_statuses_response` in `tests/common/fixtures.rs`: returns a realistic response with 2 issue types, each having 2-3 statuses. 
+ +## Files Changed + +| File | Change | +|------|--------| +| `src/api/jira/projects.rs` | Add `StatusMetadata`, `IssueTypeWithStatuses`, `get_project_statuses` | +| `src/cli/project.rs` | Fetch statuses, render in table + JSON | +| `tests/common/fixtures.rs` | Add `project_statuses_response` fixture | +| `tests/project_commands.rs` | Add integration test for `get_project_statuses` | + +## Non-goals + +- Parallel fetching with `tokio::join!` (unnecessary for infrequent command) +- Merging statuses into the issue types section (changes existing output format) +- Caching statuses (no caching exists for issue types or priorities today) diff --git a/docs/superpowers/specs/2026-03-25-project-list-design.md b/docs/superpowers/specs/2026-03-25-project-list-design.md new file mode 100644 index 0000000..f323fc5 --- /dev/null +++ b/docs/superpowers/specs/2026-03-25-project-list-design.md @@ -0,0 +1,169 @@ +# Design: `jr project list` for Project Discovery + +**Issue:** #47 — Add 'jr project list' to discover available projects +**Date:** 2026-03-25 +**Status:** Draft + +## Problem + +There is no way to discover which Jira projects are available from the CLI. `jr project` only has a `fields` subcommand that requires already knowing the project key. New users and AI agents have no programmatic way to look up valid project keys. + +For AI agents specifically, project discovery is a critical primitive. Agents need to verify valid parameter values before constructing commands. Without `jr project list`, agents must guess project keys or rely on out-of-band documentation, leading to avoidable API errors and recovery loops. + +## Scope + +Three deliverables: + +1. **`jr project list` command** — list accessible projects with key, name, lead, and type +2. **`--type` filter** — filter by project type (software, service_desk, business) +3. 
**Error message enhancement** — suggest valid projects when an invalid key is used
+
+## API Endpoint
+
+**Chosen: `GET /rest/api/3/project/search`** over `/rest/api/3/project`.
+
+The `/project/search` endpoint is purpose-built for listing: it supports server-side pagination (`startAt` + `maxResults`), server-side filtering (`typeKey`), ordering (`orderBy`), and returns richer per-project data (`projectTypeKey`, `lead` with `displayName`). The simpler `/project` endpoint returns all projects in a flat array with no pagination — unsuitable for large instances.
+
+### Response Structure
+
+The response uses the standard offset-based pagination envelope (`startAt`, `maxResults`, `total`) with projects in a `values` array. This matches the existing `OffsetPage` generic in `src/api/pagination.rs`.
+
+```json
+{
+  "values": [
+    {
+      "key": "FOO",
+      "name": "Project Alpha",
+      "projectTypeKey": "software",
+      "lead": {
+        "accountId": "abc-123",
+        "displayName": "Jane Doe"
+      }
+    }
+  ],
+  "startAt": 0,
+  "maxResults": 50,
+  "total": 12
+}
+```
+
+## Command Design
+
+### Flag Definition
+
+```
+jr project list [--type <TYPE>] [--limit <N>] [--all]
+```
+
+| Flag | Type | Description |
+|------|------|-------------|
+| `--type` | `Option<String>` | Filter by project type: `software`, `service_desk`, `business` |
+| `--limit` | `Option<u32>` | Maximum results (default: 50, API max: 50) |
+| `--all` | `bool` | Fetch all projects (paginate through all pages) |
+
+- **`--all` flag:** The API caps at 50 results per page. Instances with >50 projects need pagination. `--all` loops through pages using `startAt` offsets until all projects are returned. Conflicts with `--limit`.
+- **`--type` validation:** No client-side validation. Invalid values produce an HTTP 400 from the API with an error message. This is consistent with how `--status` works on `jr issue list`.
+- **`--limit` clamping:** Values above 50 are clamped to 50 before sending to the API (the API's maximum per page).
+
+### Table Output
+
+```
+Key   Name                  Lead        Type
+ABC   Project Alpha         Jane Doe    software
+DEF   Operations Desk       John Smith  service_desk
+GHI   Platform Engineering  Alex Jones  software
+```
+
+### JSON Output
+
+`--output json` returns a JSON array of project objects with camelCase keys (matching Jira's native field names, consistent with how other types in this codebase use per-field `#[serde(rename)]`):
+
+```json
+[
+  {
+    "key": "ABC",
+    "name": "Project Alpha",
+    "projectTypeKey": "software",
+    "lead": { "displayName": "Jane Doe", "accountId": "abc-123" }
+  }
+]
+```
+
+## Types
+
+New types in `src/types/jira/project.rs` alongside the existing `Project` struct:
+
+```rust
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ProjectSummary {
+    pub key: String,
+    pub name: String,
+    #[serde(rename = "projectTypeKey")]
+    pub project_type_key: String,
+    pub lead: Option<ProjectLead>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ProjectLead {
+    #[serde(rename = "displayName")]
+    pub display_name: String,
+    #[serde(rename = "accountId")]
+    pub account_id: String,
+}
+```
+
+The existing `Project` struct (just `key` + `name`) remains unchanged — it's used in other contexts.
+
+No custom response wrapper needed — the `list_projects` API method uses `OffsetPage` from `src/api/pagination.rs` to deserialize the paginated response, then extracts `.items()`.
+
+## API Method
+
+New method on `JiraClient` in `src/api/jira/projects.rs`:
+
+```rust
+pub async fn list_projects(
+    &self,
+    type_key: Option<&str>,
+    max_results: Option<u32>,
+) -> Result<Vec<ProjectSummary>>
+```
+
+- Calls `GET /rest/api/3/project/search` with query params: `orderBy=key`, optional `typeKey`, optional `maxResults` (clamped to 50)
+- Deserializes as `OffsetPage<ProjectSummary>`, returns `.items().to_vec()`
+- When `max_results` is `None` (the `--all` case), paginates using `startAt` offsets until `OffsetPage::has_more()` returns false, collecting all pages into a single `Vec`
+
+## Error Message Enhancement
+
+Static `Run "jr project list" to see available projects.` hints are appended to error messages where project keys are missing or invalid. This avoids the complexity of dynamic project matching (which would require parsing Jira's error responses to distinguish project-specific 404s from other failures) while still guiding users to the discovery command.
+
+### Touchpoints
+
+1. **`src/cli/project.rs`** — `"No project specified"` error includes `jr project list` hint
+2. **`src/cli/issue/create.rs`** — `"Project key is required"` error includes `jr project list` hint
+3. **`src/cli/queue.rs`** — `"No project configured"` error includes `jr project list` hint
+
+The issue list command (`src/cli/issue/list.rs`) doesn't hard-fail on invalid projects — it passes the key into JQL and Jira returns empty results. No enhancement needed there.
+ +## Files Changed + +| File | Change | +|------|--------| +| `src/types/jira/project.rs` | Add `ProjectSummary`, `ProjectLead` types | +| `src/api/jira/projects.rs` | Add `list_projects` method using `OffsetPage` | +| `src/cli/mod.rs` | Add `List` variant to `ProjectCommand` with `--type`, `--limit`, `--all` | +| `src/cli/project.rs` | Add `handle_list` handler, enhance "No project specified" error | +| `src/cli/issue/create.rs` | Enhance "Project key is required" error with `jr project list` hint | +| `src/cli/queue.rs` | Enhance "No project configured" error with `jr project list` hint | +| `README.md` | Add `jr project list` to command table and quick start | + +No new runtime modules or API endpoints beyond the single `/project/search` call. + +## Testing + +### Integration Tests + +In `tests/project_commands.rs`: +- `test_list_projects` — mock `/rest/api/3/project/search`, verify response parsing and field mapping +- `test_list_projects_with_type_filter` — verify `typeKey` param passed to API +- `test_list_projects_empty` — verify empty result handling (prints "No results found.") +- `test_list_projects_lead_missing` — verify graceful handling when `lead` is null diff --git a/docs/superpowers/specs/2026-03-26-asset-attribute-names-design.md b/docs/superpowers/specs/2026-03-26-asset-attribute-names-design.md new file mode 100644 index 0000000..3c97f6e --- /dev/null +++ b/docs/superpowers/specs/2026-03-26-asset-attribute-names-design.md @@ -0,0 +1,273 @@ +# Asset Attribute Names Instead of Numeric IDs — Design Spec + +**Issue:** #58 + +**Goal:** Replace raw numeric `Attribute ID` column in `jr assets view --attributes` with human-readable attribute names by using the `/object/{id}/attributes` endpoint which returns attribute definitions inline. 
+ +## Problem + +`jr assets view --attributes` displays an `Attribute ID` column with raw numeric IDs (e.g., `61`, `80`) because the current implementation uses `GET /object/{id}?includeAttributes=true`, which returns attributes with only `objectTypeAttributeId` (a numeric string) and no attribute name. + +**Current output:** +``` +┌──────────────┬──────────────┐ +│ Attribute ID │ Value │ +╞══════════════╪══════════════╡ +│ 61 │ Acme Corp │ +│ 80 │ New York, NY │ +│ 81 │ 0 │ +│ 82 │ 4 │ +└──────────────┴──────────────┘ +``` + +Without cross-referencing the object type schema, users have no idea what these IDs represent. + +## Expected + +``` +┌───────────┬──────────────┐ +│ Attribute │ Value │ +╞═══════════╪══════════════╡ +│ Location │ New York, NY │ +│ Seats │ 0 │ +│ Endpoints │ 4 │ +└───────────┴──────────────┘ +``` + +System attributes (Key, Created, Updated), the label attribute (Name), and hidden attributes are filtered out because they already appear in the main view table above or are not meant to be displayed. + +## API Approach + +The Jira Assets API provides two ways to get attribute data for an object: + +1. **`GET /object/{id}?includeAttributes=true`** — Returns attributes with only `objectTypeAttributeId` (numeric ID). No attribute name. This is what the code currently uses. + +2. **`GET /object/{id}/attributes`** — Returns attributes with a full `objectTypeAttribute` nested object that includes `name`, `system`, `hidden`, and `position` fields. This gives us everything we need in a single API call. + +We use option 2. No second API call or caching is needed. 
+
+### `/object/{id}/attributes` Response Structure
+
+Each entry in the response array contains:
+
+```json
+{
+  "objectTypeAttribute": {
+    "id": "134",
+    "name": "Location",
+    "system": false,
+    "hidden": false,
+    "position": 4
+  },
+  "objectTypeAttributeId": "134",
+  "objectAttributeValues": [
+    {
+      "value": "New York, NY",
+      "displayValue": "New York, NY"
+    }
+  ]
+}
+```
+
+Key fields used:
+- `objectTypeAttribute.name` — human-readable attribute name (replaces numeric ID)
+- `objectTypeAttribute.system` — `true` for Key, Created, Updated (filter these out)
+- `objectTypeAttribute.label` — `true` for the Name attribute (the object's display name, already shown in main view table; filter out)
+- `objectTypeAttribute.hidden` — `true` for attributes hidden in the Jira UI (filter these out)
+- `objectTypeAttribute.position` — display order
+- `objectAttributeValues[].displayValue` — preferred display value (falls back to `value`)
+
+## Fix
+
+### 1. New Serde Types (`src/types/assets/object.rs`)
+
+Add types for the richer `/object/{id}/attributes` response. These are separate from the existing `AssetAttribute` type which remains unchanged for use by `get_asset()`, search, and linked asset enrichment.
+
+```rust
+/// A single attribute entry from `GET /object/{id}/attributes`.
+/// Includes the full attribute definition with name, unlike `AssetAttribute`
+/// which only has the numeric `objectTypeAttributeId`.
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectAttribute {
+    pub id: String,
+    #[serde(rename = "objectTypeAttributeId")]
+    pub object_type_attribute_id: String,
+    #[serde(rename = "objectTypeAttribute")]
+    pub object_type_attribute: ObjectTypeAttributeDef,
+    #[serde(rename = "objectAttributeValues", default)]
+    pub values: Vec<ObjectAttributeValue>,
+}
+
+/// Attribute definition from the object type schema.
+#[derive(Debug, Deserialize, Serialize)]
+pub struct ObjectTypeAttributeDef {
+    pub id: String,
+    pub name: String,
+    #[serde(default)]
+    pub system: bool,
+    #[serde(default)]
+    pub hidden: bool,
+    #[serde(default)]
+    pub label: bool,
+    #[serde(default)]
+    pub position: i32,
+}
+```
+
+The existing `ObjectAttributeValue` type is reused — it already has `value` and `display_value`.
+
+### 2. New API Method (`src/api/assets/objects.rs`)
+
+```rust
+/// Get all attributes for a single object, with full attribute definitions.
+pub async fn get_object_attributes(
+    &self,
+    workspace_id: &str,
+    object_id: &str,
+) -> Result<Vec<ObjectAttribute>> {
+    let path = format!("object/{}/attributes", urlencoding::encode(object_id));
+    self.get_assets(workspace_id, &path).await
+}
+```
+
+### 3. Update CLI (`src/cli/assets.rs`)
+
+In `handle_view`, when `attributes` is true:
+
+1. Call `get_object_attributes()` instead of relying on `get_asset()`'s `includeAttributes` parameter
+2. Filter out system and hidden attributes
+3. Sort by position
+4. Render using `attr.object_type_attribute.name` as the attribute column
+5. Use `displayValue` when available, fall back to `value`
+
+**Also change line 95** from `client.get_asset(workspace_id, &object_id, attributes)` to `client.get_asset(workspace_id, &object_id, false)` — the object itself no longer needs to carry attribute data.
+
+**Before (lines 118-139):**
+```rust
+if attributes && !object.attributes.is_empty() {
+    println!();
+    let attr_rows: Vec<Vec<String>> = object
+        .attributes
+        .iter()
+        .flat_map(|attr| {
+            attr.values.iter().map(move |v| {
+                vec![
+                    attr.object_type_attribute_id.clone(),
+                    v.display_value
+                        .clone()
+                        .or_else(|| v.value.clone())
+                        .unwrap_or_default(),
+                ]
+            })
+        })
+        .collect();
+    println!(
+        "{}",
+        output::render_table(&["Attribute ID", "Value"], &attr_rows)
+    );
+}
+```
+
+**After:**
+```rust
+if attributes {
+    let mut attrs = client
+        .get_object_attributes(workspace_id, &object_id)
+        .await?;
+    // Filter out system (Key, Created, Updated), label (Name), and hidden attributes
+    attrs.retain(|a| {
+        !a.object_type_attribute.system
+            && !a.object_type_attribute.hidden
+            && !a.object_type_attribute.label
+    });
+    attrs.sort_by_key(|a| a.object_type_attribute.position);
+
+    if !attrs.is_empty() {
+        println!();
+        let attr_rows: Vec<Vec<String>> = attrs
+            .iter()
+            .flat_map(|attr| {
+                attr.values.iter().map(move |v| {
+                    vec![
+                        attr.object_type_attribute.name.clone(),
+                        v.display_value
+                            .clone()
+                            .or_else(|| v.value.clone())
+                            .unwrap_or_default(),
+                    ]
+                })
+            })
+            .collect();
+        println!(
+            "{}",
+            output::render_table(&["Attribute", "Value"], &attr_rows)
+        );
+    }
+}
+```
+
+### 4.
JSON Output (`src/cli/assets.rs`) + +When `--output json` is used with `--attributes`, emit a combined JSON object containing both the asset and its named attributes: + +```rust +OutputFormat::Json => { + if attributes { + let mut attrs = client + .get_object_attributes(workspace_id, &object_id) + .await?; + attrs.retain(|a| !a.object_type_attribute.system && !a.object_type_attribute.hidden); + attrs.sort_by_key(|a| a.object_type_attribute.position); + let combined = serde_json::json!({ + "object": object, + "attributes": attrs, + }); + println!("{}", serde_json::to_string_pretty(&combined)?); + } else { + println!("{}", output::render_json(&object)?); + } +} +``` + +This wraps the object and attributes in a single JSON envelope, giving consumers attribute names directly. Without `--attributes`, the JSON output is unchanged (just the object). + +## Files Changed + +| File | Change | +|------|--------| +| `src/types/assets/object.rs` | Add `ObjectAttribute` and `ObjectTypeAttributeDef` types | +| `src/api/assets/objects.rs` | Add `get_object_attributes()` API method | +| `src/cli/assets.rs` | Update `handle_view` to use new endpoint, filter, sort, and render names | + +## What Doesn't Change + +- `get_asset()` API method and `AssetObject`/`AssetAttribute` types — still used by search and linked asset enrichment +- `search --attributes` — unchanged (doesn't render individual attributes in CLI output) +- `handle_search()` — unchanged +- `handle_tickets()` — unchanged + +## Testing + +### Unit Tests (`src/types/assets/object.rs`) + +- Deserialize `ObjectAttribute` with full nested `objectTypeAttribute` including name +- Deserialize with missing optional fields (`system`, `hidden` default to `false`) +- Verify `ObjectAttributeValue` reuse (existing type) +- Attribute with empty `objectAttributeValues` array produces zero rows (omitted from table, consistent with existing behavior) + +### Integration Test (`tests/`) + +- Wiremock `GET /object/{id}/attributes` returning a 
mix of system, hidden, and user-defined attributes
+- Verify only non-system, non-hidden attributes appear in table output
+- Verify attributes are sorted by position
+- Verify `displayValue` is preferred over `value`
+
+### Live Verification
+
+`jr assets view <OBJECT_ID> --attributes` shows attribute names instead of numeric IDs.
+
+## Backward Compatibility
+
+Table output: No breaking changes. The fix produces human-readable output where previously it produced opaque numeric IDs. The `--attributes` flag was effectively unusable without this fix.
+
+JSON output: When using `--output json --attributes`, the output is now the combined envelope `{"object": ..., "attributes": [...]}` described above, instead of the bare object — consumers that previously parsed the root object must read it from the `object` key. The `attributes` array contains richer entries that include `objectTypeAttribute.name` and other metadata; the previously existing fields (`objectTypeAttributeId`, `objectAttributeValues`) remain present within each entry, so the addition of `objectTypeAttribute` is additive there. System and hidden attributes are filtered out. Without `--attributes`, the JSON output is unchanged.
diff --git a/docs/superpowers/specs/2026-03-26-board-flag-design.md b/docs/superpowers/specs/2026-03-26-board-flag-design.md
new file mode 100644
index 0000000..7f23dfd
--- /dev/null
+++ b/docs/superpowers/specs/2026-03-26-board-flag-design.md
@@ -0,0 +1,150 @@
+# Add `--board` Flag to Sprint and Board Commands — Design Spec
+
+**Issue:** #57
+
+**Goal:** Allow users to specify a board ID from the CLI via `--board <ID>`, eliminating the requirement to configure `board_id` in `.jr.toml` before using sprint and board commands.
+
+## Problem
+
+`sprint list`, `sprint current`, and `board view` resolve `board_id` exclusively from `config.project.board_id` (`.jr.toml`). There is no CLI override. This forces a two-step config-editing workflow:
+
+```
+jr board list --project PROJ   # Shows boards with IDs
+# manually edit .jr.toml to set board_id = 119
+jr sprint list --project PROJ  # Now works
+```
+
+Meanwhile, `--project` already has a global CLI flag that overrides the config value.
Board commands lack an equivalent.
+
+## Solution
+
+Add a `--board <ID>` flag scoped to the three subcommands that need a board ID. The flag overrides `board_id` from `.jr.toml`; config remains the fallback when `--board` is not specified.
+
+**Target syntax:**
+
+```
+jr sprint list --board 119
+jr sprint current --board 119
+jr board view --board 382
+```
+
+`jr board list` is unaffected — it lists all boards and does not need a board ID.
+
+## Approach: Per-Subcommand Struct Variants
+
+Convert the three unit variants that need a board ID to struct variants with an inline `board: Option<u64>` field. This matches the existing codebase pattern (e.g., `IssueCommand::List`, `IssueCommand::Create`) and is unambiguously supported by clap 4's derive API.
+
+### Enum changes
+
+**`SprintCommand`** — convert both unit variants to struct variants:
+
+```rust
+#[derive(Subcommand)]
+pub enum SprintCommand {
+    /// List sprints
+    List {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+    /// Show current sprint issues
+    Current {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+}
+```
+
+**`BoardCommand`** — convert `View` to struct variant; `List` stays unit:
+
+```rust
+#[derive(Subcommand)]
+pub enum BoardCommand {
+    /// List boards
+    List,
+    /// View current board issues
+    View {
+        /// Board ID (overrides board_id in .jr.toml)
+        #[arg(long)]
+        board: Option<u64>,
+    },
+}
+```
+
+### Config resolver
+
+Add `Config::board_id()` mirroring the existing `project_key()` pattern:
+
+```rust
+pub fn board_id(&self, cli_override: Option<u64>) -> Option<u64> {
+    cli_override.or(self.project.board_id)
+}
+```
+
+### Handler changes
+
+**`sprint.rs`:** Extract `board` from each struct variant, pass to `config.board_id()`:
+
+```rust
+match command {
+    SprintCommand::List { board } => {
+        let board_id = config.board_id(board).ok_or_else(|| /* error */)?;
+        // ...
+    }
+    SprintCommand::Current { board } => {
+        let board_id = config.board_id(board).ok_or_else(|| /* error */)?;
+        // ...
+    }
+}
+```
+
+The scrum-board guard (`get_board_config` + type check) remains unchanged — it still runs after board_id resolution.
+
+**`board.rs`:** Extract `board` from `View` struct variant:
+
+```rust
+match command {
+    BoardCommand::List => handle_list(client, output_format).await,
+    BoardCommand::View { board } => {
+        handle_view(config, client, output_format, board).await
+    }
+}
+```
+
+### Error messages
+
+Update all "No board_id configured" errors from:
+
+```
+No board_id configured. Set board_id in .jr.toml or run "jr init".
+```
+
+To:
+
+```
+No board configured. Use --board or set board_id in .jr.toml.
+Run "jr board list" to see available boards.
+```
+
+## Files Changed
+
+| File | Change |
+|------|--------|
+| `src/cli/mod.rs` | Convert 3 unit variants to struct variants with `board: Option<u64>` |
+| `src/cli/sprint.rs` | Extract `board` from variants, use `config.board_id()` |
+| `src/cli/board.rs` | Extract `board` from `View`, use `config.board_id()` |
+| `src/config.rs` | Add `board_id()` method |
+| `src/main.rs` | No change — dispatch already passes `config` |
+
+## Testing
+
+- Existing `compute_sprint_summary` unit tests: unaffected (no board_id involvement)
+- New `Config::board_id()` unit test: CLI override wins over config, config is fallback, both `None` returns `None` (mirrors `test_project_key_cli_override`)
+- No integration test changes: existing tests don't exercise board_id resolution
+
+## Backward Compatibility
+
+- `.jr.toml` `board_id` continues to work as before (fallback when `--board` not specified)
+- `jr board list` unchanged
+- No breaking changes to existing commands or config format
diff --git a/docs/superpowers/specs/2026-03-26-issue-view-fields-design.md b/docs/superpowers/specs/2026-03-26-issue-view-fields-design.md
new file mode 100644
index 0000000..971ace1
--- /dev/null
+++
b/docs/superpowers/specs/2026-03-26-issue-view-fields-design.md @@ -0,0 +1,292 @@ +# Include Standard Fields in Issue View — Design Spec + +**Issue:** #59 + +**Goal:** Add `created`, `updated`, `reporter`, `resolution`, `components`, and `fixVersions` to `jr issue view` JSON output, and show `created`, `updated`, `reporter` in the table view. + +## Problem + +`jr issue view --output json` omits several standard Jira fields commonly needed for scripting and automation: + +- `created` — issue creation timestamp +- `updated` — last update timestamp +- `reporter` — who created the issue +- `resolution` — resolution status (e.g., "Fixed", "Won't Do") +- `components` — project components +- `fixVersions` — target fix versions + +These fields are available from the Jira API but are not requested by `get_issue()` or `search_issues()`, so they never appear in the output. + +**Current `get_issue` field list:** +``` +summary,status,issuetype,priority,assignee,project,description,labels,parent,issuelinks +``` + +Missing: `created`, `updated`, `reporter`, `resolution`, `components`, `fixVersions`. + +## Scope + +| Output | Fields added | +|--------|-------------| +| `issue view --output json` | All 6: `created`, `updated`, `reporter`, `resolution`, `components`, `fixVersions` | +| `issue view` (table) | 3: `created`, `updated`, `reporter` | +| `issue list --output json` | All 6 (via `search_issues` field list) | +| `issue list` (table) | None — already crowded | + +## API Field Structures + +From the Jira REST API v3: + +- **`created`** / **`updated`** — ISO 8601 strings (e.g., `"2026-03-20T14:32:00.000+0000"`) +- **`reporter`** — Simplified User object with `accountId`, `displayName`, `active`. Same shape as `assignee`. The existing `User` type handles this — `emailAddress: Option` naturally handles its absence. Note: `User.account_id` and `display_name` are non-optional `String` fields. 
When a user account is deleted, the Jira API returns `null` for the entire `reporter` field — handled by the `Option<User>` wrapper (`None`). Deactivated users retain `accountId` and `displayName` with `active: false`. Right-to-be-forgotten users have `displayName` set to a placeholder (e.g., "Former user") and `accountId` still present. All cases are safe with our `Option<User>` type.
+- **`resolution`** — Object with `name` when resolved (e.g., `{"name": "Fixed"}`), `null` when unresolved
+- **`components`** — Array of objects with `name` (e.g., `[{"name": "Backend"}]`). Typically an array (even when empty `[]`), but may be `null` or absent depending on project configuration and issue screens. Typed as `Option<Vec<Component>>` with `#[serde(default)]` to handle all three cases: absent → `None` (via `#[serde(default)]`, needed because of `#[serde(flatten)]` on `extra`), `null` → `None` (via `Option` natively), `[]` → `Some(vec![])` (via `Option<Vec<Component>>` natively).
+- **`fixVersions`** — Array of version objects with `name`, `released`, `releaseDate`. Same `Option<Vec<Version>>` + `#[serde(default)]` pattern as `components` — `#[serde(default)]` handles the absent-key case, `Option` handles `null` natively.
+
+## Fix
+
+### 1. New Types (`src/types/jira/issue.rs`)
+
+Three small structs following existing patterns (`Status`, `Priority`, `IssueType` all capture just `name`):
+
+```rust
+#[derive(Debug, Deserialize, Serialize)]
+pub struct Resolution {
+    pub name: String,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct Component {
+    pub name: String,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct Version {
+    pub name: String,
+    pub released: Option<bool>,
+    #[serde(rename = "releaseDate")]
+    pub release_date: Option<String>,
+}
+```
+
+Only fields needed for display are captured. Additional API fields (`id`, `description`, `self`) are ignored during deserialization — serde skips unknown fields by default.
+
+### 2. `IssueFields` Changes (`src/types/jira/issue.rs`)
+
+Add 6 fields to the existing struct.
These were previously captured (if requested) in the `#[serde(flatten)] extra: HashMap<String, serde_json::Value>` catch-all. With typed fields, serde routes them to the typed field first — confirmed as the correct serde behavior.
+
+```rust
+#[derive(Debug, Default, Deserialize, Serialize)]
+pub struct IssueFields {
+    pub summary: String,
+    pub description: Option<serde_json::Value>,
+    pub status: Option<Status>,
+    #[serde(rename = "issuetype")]
+    pub issue_type: Option<IssueType>,
+    pub priority: Option<Priority>,
+    pub assignee: Option<User>,
+    pub reporter: Option<User>,
+    pub project: Option<Project>,
+    pub created: Option<String>,
+    pub updated: Option<String>,
+    pub resolution: Option<Resolution>,
+    #[serde(default)]
+    pub components: Option<Vec<Component>>,
+    #[serde(rename = "fixVersions", default)]
+    pub fix_versions: Option<Vec<Version>>,
+    #[serde(default)]
+    pub labels: Option<Vec<String>>,
+    pub parent: Option<Parent>,
+    pub issuelinks: Option<Vec<IssueLink>>,
+    #[serde(flatten)]
+    pub extra: HashMap<String, serde_json::Value>,
+}
+```
+
+The `Default` derive requires all new types to either implement `Default` or be wrapped in `Option`. All 6 new fields are `Option`, so no `Default` impl needed on the new types.
+
+### 3. API Field Lists (`src/api/jira/issues.rs`)
+
+**`get_issue()`** — add to the hardcoded field string:
+
+```rust
+let mut fields =
+    "summary,status,issuetype,priority,assignee,reporter,project,description,labels,parent,issuelinks,created,updated,resolution,components,fixVersions".to_string();
+```
+
+**`search_issues()`** — add the 6 new fields to the existing vec. Note: `labels`, `parent`, and `issuelinks` are intentionally absent from `search_issues` — they are only fetched by `get_issue()` for the single-issue view. The search endpoint returns a lighter payload for list rendering.
+
+```rust
+let mut fields = vec![
+    "summary",
+    "status",
+    "issuetype",
+    "priority",
+    "assignee",
+    "reporter",
+    "project",
+    "description",
+    "created",
+    "updated",
+    "resolution",
+    "components",
+    "fixVersions",
+];
+```
+
+### 4. Table View (`src/cli/issue/list.rs` — `handle_view`)
+
+Insert Reporter, Created, Updated rows between Assignee and Project.
The existing `handle_view` builds the initial `rows` vec with Key, Summary, Type, Status, Priority, Assignee, Project, Labels in one block. Restructure this block so the 3 new rows appear after Assignee and before Project: + +```rust +let mut rows = vec![ + vec!["Key".into(), issue.key.clone()], + vec!["Summary".into(), issue.fields.summary.clone()], + vec!["Type".into(), /* ... existing ... */], + vec!["Status".into(), /* ... existing ... */], + vec!["Priority".into(), /* ... existing ... */], + vec![ + "Assignee".into(), + /* ... existing ... */ + ], + // ── NEW: Reporter, Created, Updated ── + vec![ + "Reporter".into(), + issue + .fields + .reporter + .as_ref() + .map(|r| r.display_name.clone()) + .unwrap_or_else(|| "Unassigned".into()), + ], + vec![ + "Created".into(), + issue + .fields + .created + .as_deref() + .map(format_comment_date) + .unwrap_or_default(), + ], + vec![ + "Updated".into(), + issue + .fields + .updated + .as_deref() + .map(format_comment_date) + .unwrap_or_default(), + ], + vec!["Project".into(), /* ... existing ... */], + vec!["Labels".into(), /* ... existing ... */], +]; +``` + +The `/* ... existing ... */` placeholders represent the current formatting logic — no changes to the existing field rendering, just repositioning within the vec initialization. + +The existing `format_comment_date()` function (private, defined in the same file `list.rs`) already handles Jira's ISO 8601 format and produces `YYYY-MM-DD HH:MM` output — reused here. No visibility change needed since `handle_view` is in the same module. + +### 5. JSON Output + +No changes needed. `IssueFields` derives `Serialize`, so the 6 new typed fields automatically appear in `--output json`. The existing `render_json(&issue)` call handles it. 
+ +## Expected Output + +### Table view (`jr issue view PROJ-123`): + +``` +┌─────────────┬─────────────────────────────────┐ +│ Field │ Value │ +╞═════════════╪═════════════════════════════════╡ +│ Key │ PROJ-123 │ +│ Summary │ Fix login redirect │ +│ Type │ Bug │ +│ Status │ In Progress │ +│ Priority │ High │ +│ Assignee │ John Doe │ +│ Reporter │ Jane Smith │ ← NEW +│ Created │ 2026-03-20 14:32 │ ← NEW +│ Updated │ 2026-03-25 09:15 │ ← NEW +│ Project │ My Project (PROJ) │ +│ Labels │ bug, frontend │ +│ Parent │ PROJ-100 (Login Epic) │ +│ Links │ blocks PROJ-456 (Deploy) │ +│ Points │ 5 │ +│ Description │ Users are redirected to... │ +└─────────────┴─────────────────────────────────┘ +``` + +### JSON view (`jr issue view PROJ-123 --output json`): + +```json +{ + "key": "PROJ-123", + "fields": { + "summary": "Fix login redirect", + "status": { "name": "In Progress", "statusCategory": { "name": "In Progress", "key": "indeterminate" } }, + "issuetype": { "name": "Bug" }, + "priority": { "name": "High" }, + "assignee": { "accountId": "5b10a284...", "displayName": "John Doe", "emailAddress": null, "active": true }, + "reporter": { "accountId": "5b10a2844c20...", "displayName": "Jane Smith", "emailAddress": null, "active": true }, + "created": "2026-03-20T14:32:00.000+0000", + "updated": "2026-03-25T09:15:22.000+0000", + "resolution": { "name": "Fixed" }, + "components": [{ "name": "Backend" }], + "fixVersions": [{ "name": "v2.0", "released": false, "releaseDate": "2026-04-01" }], + "project": { "key": "PROJ", "name": "My Project" }, + "labels": ["bug", "frontend"], + "description": { "type": "doc", "version": 1, "content": [...] 
} + } +} +``` + +## Files Changed + +| File | Change | +|------|--------| +| `src/types/jira/issue.rs` | Add `Resolution`, `Component`, `Version` types; add 6 fields to `IssueFields` | +| `src/api/jira/issues.rs` | Add field names to `get_issue()` and `search_issues()` field lists | +| `src/cli/issue/list.rs` | Add Reporter, Created, Updated rows to `handle_view` table | + +## What Doesn't Change + +- `issue list` table output — unchanged (columns already crowded) +- `issue create` / `issue edit` — no changes +- `Comment` type — already has `created: Option`, no conflict +- `handle_search()`, `handle_comments()`, `handle_list()` table rendering — unchanged +- `extra` HashMap — still captures custom fields; the 6 new fields are now typed and won't land in `extra` + +## Testing + +### Unit Tests (`src/types/jira/issue.rs`) + +- Deserialize `IssueFields` with all 6 new fields present — verify typed access +- Deserialize with all 6 fields absent/null — verify all default to `None` +- Deserialize with `components: []` (empty array) — verify `Some(vec![])`, distinct from `None` +- Deserialize with `fixVersions: []` (empty array) — verify `Some(vec![])`, distinct from `None` +- Deserialize `Resolution` with `name` field +- Deserialize `Component` with `name` field +- Deserialize `Version` with `name`, `released`, `releaseDate` (and with optional fields absent) +- Verify `fixVersions` JSON key maps to `fix_versions` Rust field via serde rename +- Verify new fields don't appear in `extra` HashMap when typed field is present + +### Integration Tests (`tests/`) + +- Wiremock `GET /rest/api/3/issue/{key}` returning all 6 fields — verify JSON output contains them +- Wiremock returning null/empty for all 6 fields — verify graceful handling +- Verify table output contains Reporter, Created, Updated rows with formatted values + +### Live Verification + +```bash +jr issue view --output json | jq '.fields | {created, updated, reporter, resolution, components, fixVersions}' +jr issue 
view # table should show Reporter, Created, Updated
+```
+
+## Backward Compatibility
+
+**JSON output:** Additive only — 6 new fields appear that weren't previously present. No existing fields change. Consumers using `jq .fields.summary` are unaffected.
+
+**Table output:** 3 new rows added (Reporter, Created, Updated) after Assignee. No existing rows change position or content.
+
+**`extra` HashMap:** Fields that previously landed in `extra` (if someone was requesting them via a custom integration) now have typed fields. The JSON serialization output is identical — the field appears at the same path with the same structure. The only difference is serde routing during deserialization. diff --git a/docs/superpowers/specs/2026-03-26-jrerror-exit-codes-design.md b/docs/superpowers/specs/2026-03-26-jrerror-exit-codes-design.md new file mode 100644 index 0000000..c6781f6 --- /dev/null +++ b/docs/superpowers/specs/2026-03-26-jrerror-exit-codes-design.md @@ -0,0 +1,107 @@ +# JrError Exit Codes — Design Spec +
+## Problem
+
+Several commands guard against missing config or missing user input with `anyhow::anyhow!()` instead of `JrError` variants. Since `main.rs` uses `downcast_ref::<JrError>()` to map exit codes, these fall through to exit code 1 (generic error) instead of the semantically correct exit code.
+
+This makes it impossible for scripts and AI agents to distinguish "missing config" (fixable by running `jr init`) from "runtime error" (transient failure).
+
+**Issue:** #30
+
+## Solution
+
+Replace `anyhow::anyhow!(...)` with the appropriate `JrError` variant at 13 locations. Also change the `ConfigError` display format from `"Configuration error: {0}"` to `"{0}"` so error messages remain identical. No new variants, no new types.
+
+### ConfigError Display Format Change
+
+The current `ConfigError` variant has `#[error("Configuration error: {0}")]`, which prepends "Configuration error: " to the message.
To preserve existing error message text exactly, change the format to `#[error("{0}")]` — matching `UserError`'s format. The error is already semantically identified by its variant and exit code; the prefix is redundant. + +### Exit Code Mapping (existing, unchanged) + +| Variant | Exit Code | sysexits.h | Meaning | +|---------|-----------|------------|---------| +| `JrError::ConfigError` | 78 | EX_CONFIG | Missing config file values | +| `JrError::UserError` | 64 | EX_USAGE | Missing CLI input / bad usage | +| `JrError::NotAuthenticated` | 2 | — | Auth required | +| `JrError::Interrupted` | 130 | — | Ctrl+C | +| All others | 1 | — | Generic / runtime | + +### Group 1 — ConfigError (exit 78) + +Missing values in config files that the user needs to set up via `jr init` or manual config editing. + +| File | Line | Current | Message | +|------|------|---------|---------| +| `src/cli/board.rs` | 50 | `anyhow::anyhow!(...)` | "No board configured. Use --board or set board_id in .jr.toml..." | +| `src/cli/sprint.rs` | 20 | `anyhow::anyhow!(...)` | "No board configured. Use --board or set board_id in .jr.toml..." | +| `src/api/client.rs` | 36 | `anyhow::anyhow!(...)` | "No Jira instance configured. Run \"jr init\" first." | +| `src/cli/team.rs` | 86 | `anyhow::anyhow!(...)` | "No Jira instance configured. Run \"jr init\" first." | +| `src/config.rs` | 98 | `anyhow::anyhow!(...)` | "No Jira instance configured. Run \"jr init\" first." | +| `src/cli/issue/helpers.rs` | 20 | `anyhow::anyhow!(...)` | "No \"Team\" field found on this Jira instance..." | +| `src/cli/issue/helpers.rs` | 78 | `anyhow::anyhow!(...)` | "Story points field not configured..." | + +Each becomes `JrError::ConfigError("...".into())`. + +### Group 2 — UserError (exit 64) + +Missing required CLI input that the user should provide via flags. + +| File | Line | Current | Message | +|------|------|---------|---------| +| `src/cli/issue/create.rs` | 48 | `anyhow::anyhow!(...)` | "Project key is required. 
Use --project or configure .jr.toml..." | +| `src/cli/issue/create.rs` | 63 | `anyhow::anyhow!(...)` | "Issue type is required. Use --type" | +| `src/cli/issue/create.rs` | 74 | `anyhow::anyhow!(...)` | "Summary is required. Use --summary" | +| `src/cli/project.rs` | 70 | `anyhow::anyhow!(...)` | "No project specified. Run \"jr project list\"..." | +| `src/cli/issue/workflow.rs` | 121 | `anyhow::anyhow!(...)` | "Invalid selection" | +| `src/cli/issue/workflow.rs` | 123 | `bail!(...)` | "Selection out of range" | + +Each becomes `JrError::UserError("...".into())`. Note: line 123 uses `bail!()` (which is `anyhow::bail!`), same issue — the error is an untyped anyhow string that fails the downcast. + +### Not Touched + +These `anyhow::anyhow!` calls stay as-is because exit code 1 is appropriate: + +- `src/api/auth.rs:145,147` — OAuth callback errors (no auth code, no state parameter). These are transient runtime errors during the OAuth flow. +- `src/api/auth.rs:205` — "No accessible Jira sites found." Arguably a config issue (wrong account), but fires during the OAuth flow itself, before config exists. Leaving as exit 1 is consistent with the other OAuth errors in the same function. +- `src/duration.rs:24` — Duration parse error (invalid number in a worklog duration string). This is a value-level parse failure, not a missing-config or missing-input error. +- `src/api/jira/teams.rs:26` — "Could not resolve organization ID." This can indicate a permissions problem or a network/API issue; the ambiguity makes exit 1 (generic) the safest choice. + +### Conversion Mechanics + +`JrError` derives `thiserror::Error`, which implements `std::error::Error`. `anyhow` has a blanket `From` impl, so returning `JrError::ConfigError("...".into())` from an `ok_or_else` closure in a function returning `anyhow::Result` compiles without explicit `.into()` on the variant. + +The `?` operator handles the `JrError` → `anyhow::Error` conversion. 
`downcast_ref::<JrError>()` in `main.rs` then successfully finds the variant and maps the exit code.
+
+### What Changes
+
+- `ConfigError` display format: `"Configuration error: {0}"` → `"{0}"` (removes redundant prefix, preserves message text)
+- Exit codes for 13 error paths (from 1 to 78 or 64)
+
+### What Doesn't Change
+
+- Error message text (identical strings after the format change above)
+- The `JrError` enum variants (no new variants added)
+- Auth, API, or runtime error paths
+- Any command's success path
+
+## Testing
+
+### Unit Tests
+
+Verify `JrError::exit_code()` mapping for `ConfigError` and `UserError` (may already exist — add if missing).
+
+### Integration Tests
+
+For each affected command, trigger the error condition and assert:
+1. The error message text is preserved
+2. The process exits with the correct code (78 or 64)
+
+Priority integration tests (representative of all 13 sites — both groups use the same mechanical pattern):
+- `board view` without `board_id` configured → exit 78 (representative of all 7 ConfigError sites)
+- `issue create` without `--project` in non-interactive mode → exit 64 (representative of all 6 UserError sites)
+
+### Existing Tests
+
+No existing tests should break — error messages are identical (after the ConfigError format change), and no test currently asserts exit codes for these paths.
+
+Any test that previously asserted the `"Configuration error: "` prefix in `ConfigError` display output would need updating — but grep confirms no such tests exist.
diff --git a/docs/superpowers/specs/2026-03-26-kanban-jql-fix-design.md b/docs/superpowers/specs/2026-03-26-kanban-jql-fix-design.md new file mode 100644 index 0000000..784afbf --- /dev/null +++ b/docs/superpowers/specs/2026-03-26-kanban-jql-fix-design.md @@ -0,0 +1,94 @@ +# Fix Kanban JQL `AND ORDER BY` Bug — Design Spec + +**Issue:** #31 + +**Goal:** Fix invalid JQL syntax in `board view` kanban path where `ORDER BY rank ASC` is joined with predicates using `AND`, producing `AND ORDER BY` which returns HTTP 400 from Jira Cloud. + +## Problem + +In `src/cli/board.rs`, the kanban branch (find by the `"ORDER BY rank ASC"` push into `jql_parts`) builds JQL by pushing `ORDER BY` as a predicate and joining with `" AND "`: + +```rust +jql_parts.push("statusCategory != Done".into()); +jql_parts.push("ORDER BY rank ASC".into()); +let jql = jql_parts.join(" AND "); +``` + +This produces: + +``` +project = "FOO" AND statusCategory != Done AND ORDER BY rank ASC +``` + +`AND ORDER BY` is invalid JQL. Jira Cloud's `/rest/api/3/search` returns 400 Bad Request. + +The scrum branch (`board_type == "scrum"`) is **not affected** — it calls `client.get_sprint_issues()` which uses the Agile API directly rather than building JQL. + +## Expected + +``` +project = "FOO" AND statusCategory != Done ORDER BY rank ASC +``` + +`ORDER BY` is a separate clause — not a predicate — and must be appended after all WHERE predicates without `AND`. + +## Fix + +Remove `ORDER BY rank ASC` from `jql_parts`. Join only filter predicates with `" AND "`. Append `ORDER BY rank ASC` separately via `format!()`. + +This matches the existing pattern in `src/cli/issue/list.rs`: + +```rust +let where_clause = all_parts.join(" AND "); +let effective_jql = format!("{where_clause} ORDER BY {order_by}"); +``` + +Note: `jql_parts` is never empty in this path — `"statusCategory != Done"` is pushed unconditionally, so no empty-string guard is needed. 
+
+### Code Change
+
+**`src/cli/board.rs` — kanban JQL construction block:**
+
+Before:
+```rust
+let mut jql_parts: Vec<String> = Vec::new();
+if let Some(ref pk) = project_key {
+    jql_parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk)));
+}
+jql_parts.push("statusCategory != Done".into());
+jql_parts.push("ORDER BY rank ASC".into());
+let jql = jql_parts.join(" AND ");
+```
+
+After:
+```rust
+let mut jql_parts: Vec<String> = Vec::new();
+if let Some(ref pk) = project_key {
+    jql_parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk)));
+}
+jql_parts.push("statusCategory != Done".into());
+let where_clause = jql_parts.join(" AND ");
+let jql = format!("{where_clause} ORDER BY rank ASC");
+```
+
+Note: In the final implementation, this logic was extracted into a helper function `fn build_kanban_jql(project_key: Option<&str>) -> String` for testability, and `handle_view` calls that helper. The inline example above illustrates the core JQL construction logic.
+
+## Files Changed
+
+| File | Change |
+|------|--------|
+| `src/cli/board.rs` | Fix JQL construction: remove ORDER BY from predicates, append separately |
+
+## Testing
+
+Extract the kanban JQL construction into a testable helper function `build_kanban_jql(project_key: Option<&str>) -> String`. Add unit tests asserting:
+
+- With project: `project = "FOO" AND statusCategory != Done ORDER BY rank ASC`
+- Without project: `statusCategory != Done ORDER BY rank ASC`
+- Project key with special characters is escaped correctly
+
+Live verification: `jr board view --board <board-id>` returns issues without 400 error.
+
+## Backward Compatibility
+
+No breaking changes. The fix produces correct JQL where previously it produced invalid JQL (400 error). Any workflow that was hitting this code path was already broken.
diff --git a/docs/superpowers/specs/2026-03-27-board-auto-resolve-design.md b/docs/superpowers/specs/2026-03-27-board-auto-resolve-design.md new file mode 100644 index 0000000..7afea07 --- /dev/null +++ b/docs/superpowers/specs/2026-03-27-board-auto-resolve-design.md @@ -0,0 +1,229 @@ +# Board Auto-Resolve — Design Spec + +## Problem + +`jr sprint list --project PROJ` fails with a generic error telling the user to run `jr board list` — which returns all boards across all projects with no filtering. An AI agent or human must: + +1. Call `jr board list` to get all boards (unfiltered) +2. Guess which board corresponds to the target project +3. Filter to scrum boards (sprint commands reject kanban) +4. Retry with `--board ` + +This multi-step guessing process is fragile and error-prone. Additionally, the global `--project` flag is not threaded through to sprint or board handlers, so `jr board view --project FOO` ignores the flag entirely. + +**Issue:** #70 + +## Solution + +Three changes that work together: + +1. **Auto-resolve**: When `--board` is not set and no `board_id` is configured, automatically discover the board via the Jira API using the project key. If exactly one board matches, use it. If zero or multiple, error with specific guidance. +2. **Board list filters**: Add `--type` flag to `board list` and thread the global `--project` through, so `jr board list --project PROJ --type scrum` filters server-side. +3. **Thread `--project` to sprint/board**: Pass the global `--project` override to sprint and board handlers (matching how issue, project, and queue already work). 
+
+### CLI Interface
+
+**Board list gains `--type` filter:**
+
+```
+jr board list [--project PROJ] [--type scrum|kanban]
+```
+
+| Flag | Type | Default | Description |
+|------|------|---------|-------------|
+| `--project` | global flag | from config | Filter boards by project key (existing global flag, now threaded through) |
+| `--type` | `Option<String>` | none | Filter boards by type (`scrum` or `kanban`), validated string via `PossibleValuesParser` (matches `--type` on `project list`) |
+
+`BoardCommand::List` changes from a unit variant to a struct variant:
+
+```rust
+/// List boards
+List {
+    /// Filter by board type
+    #[arg(long = "type", value_parser = clap::builder::PossibleValuesParser::new(["scrum", "kanban"]))]
+    board_type: Option<String>,
+},
+```
+
+This matches how `--type` works on `ProjectCommand::List` (validated `Option<String>`, not a dedicated enum).
+
+**No new flags on `sprint list/current` or `board view`.** They already accept `--board`. Auto-resolve uses the existing `--project` global flag + `.jr.toml` config fallback.
+
+### API Layer: `list_boards()` Changes
+
+Current signature:
+
+```rust
+pub async fn list_boards(&self) -> Result<Vec<Board>>
+```
+
+Changes to:
+
+```rust
+pub async fn list_boards(
+    &self,
+    project_key: Option<&str>,
+    board_type: Option<&str>,
+) -> Result<Vec<Board>>
+```
+
+When `project_key` is `Some`, appends `projectKeyOrId=PROJ` to the URL. When `board_type` is `Some`, appends `type=scrum` (or `kanban`). Same pagination loop, but filtered server-side — fewer pages fetched. Typically returns in a single API call when filtering by project (most projects have 1-3 boards).
+
+The Jira Agile API returns 200 OK with an empty `values` array when no boards match — even if the project key doesn't exist. No 404.
+
+### `Board` Struct Gains `location`
+
+The API response includes a `location` object on every board (mandatory since 2018 in Jira Cloud). We need it for the auto-resolve stderr hint.
+
+```rust
+#[derive(Debug, Default, Deserialize, Serialize)]
+pub struct Board {
+    pub id: u64,
+    pub name: String,
+    #[serde(rename = "type")]
+    pub board_type: String,
+    #[serde(default)]
+    pub location: Option<BoardLocation>,
+}
+
+#[derive(Debug, Default, Deserialize, Serialize)]
+pub struct BoardLocation {
+    #[serde(default, rename = "projectKey")]
+    pub project_key: Option<String>,
+    #[serde(default, rename = "projectName")]
+    pub project_name: Option<String>,
+}
+```
+
+`location` is `Option<BoardLocation>` with `#[serde(default)]` as a defensive measure, even though it's always present in Jira Cloud. The inner fields use `Option<String>` because cross-project boards may have varying field presence.
+
+### Auto-Resolve Helper
+
+New `pub` function in `src/cli/board.rs` (matching the existing `pub` convention for cross-module functions in `src/cli/`):
+
+```rust
+pub async fn resolve_board_id(
+    config: &Config,
+    client: &JiraClient,
+    board_override: Option<u64>,
+    project_override: Option<&str>,
+    require_scrum: bool,
+) -> Result<u64>
+```
+
+**Resolution order:**
+
+1. CLI `--board` flag → return immediately
+2. Config `board_id` from `.jr.toml` → return immediately
+3. Auto-discover via API:
+   - Get project key from `config.project_key(project_override)` — if none available, return a `ConfigError` suggesting `--board`, `board_id` in config, or `--project`
+   - Call `client.list_boards(Some(project_key), type_filter)` where `type_filter` is `Some("scrum")` when `require_scrum` is true, `None` otherwise
+   - If exactly 1 board → print hint to stderr (`Using board 42 - My Board (scrum)`), return its ID
+   - If 0 boards → return error with project key and suggestion to check board list
+   - If 2+ boards → return error listing candidate boards with IDs and names
+
+**Why in `board.rs`?** The function is board-resolution logic — it calls `list_boards`, formats board-related errors, and manages the auto-resolve flow. `sprint.rs` imports it as `crate::cli::board::resolve_board_id`.
Keeping the helper where boards are managed follows the "code lives where its concept lives" principle.
+
+### Error Messages
+
+| Scenario | Message | Exit |
+|----------|---------|------|
+| No `--board`, no config, no project key | `No board configured and no project specified. Use --board <id>, set board_id in .jr.toml, or specify --project to auto-discover.` | 78 |
+| 0 boards found (sprint) | `No scrum boards found for project PROJ. Verify the project key is correct, then try "jr board list --project PROJ".` | 1 |
+| 0 boards found (board view) | `No boards found for project PROJ. Verify the project key is correct, then try "jr board list --project PROJ".` | 1 |
+| 2+ boards found (`require_scrum=true`) | `Multiple scrum boards found for project PROJ:\n 42 My Board\n 99 Other Board\nUse --board <id> to select one, or set board_id in .jr.toml.` | 1 |
+| 2+ boards found (`require_scrum=false`) | `Multiple boards found for project PROJ:\n 42 scrum My Board\n 99 kanban Other Board\nUse --board <id> to select one, or set board_id in .jr.toml.` | 1 |
+| Explicit `--board` is kanban (sprint) | Unchanged: `Sprint commands are only available for scrum boards. Board 42 is a kanban board.` | 1 |
+
+### Stderr Hint on Auto-Select
+
+When auto-resolve picks a board, print to stderr:
+
+```
+Using board 42 - My Board (scrum)
+```
+
+This uses stderr (not stdout) so it doesn't pollute `--output json` or piped output. Matches the existing pattern of `eprintln!` for hints (truncation hints, kanban project warnings).
+ +### Call Site Updates + +**`main.rs`:** Both sprint and board dispatch arms gain `cli.project.as_deref()`: + +| File | Current | New | +|------|---------|-----| +| `main.rs` Board arm | `cli::board::handle(command, &config, &client, &cli.output)` | `cli::board::handle(command, &config, &client, &cli.output, cli.project.as_deref())` | +| `main.rs` Sprint arm | `cli::sprint::handle(command, &config, &client, &cli.output)` | `cli::sprint::handle(command, &config, &client, &cli.output, cli.project.as_deref())` | + +**Handler signatures:** Both `board::handle` and `sprint::handle` gain `project_override: Option<&str>`. + +**Board handler changes:** + +| Function | Change | +|----------|--------| +| `handle` | Gains `project_override`, destructures `BoardCommand::List { board_type }` and passes `project_override` + `board_type.as_deref()` to `handle_list` | +| `handle_list` | Gains `project_override: Option<&str>` and `board_type_filter: Option<&str>`, passes both to `client.list_boards()`. Output gains a "Project" column showing `location.project_key` (useful when listing boards across projects). | +| `handle_view` | Replaces `config.board_id()` block with `resolve_board_id(config, client, board, project_override, false)` | + +**Sprint handler changes:** + +| Function | Change | +|----------|--------| +| `handle` | Gains `project_override`, replaces `config.board_id()` block with `resolve_board_id(config, client, board_override, project_override, true)` | + +**Sprint scrum guard:** The existing scrum-type check (lines 31-39 in `sprint.rs`) remains. It fires when `--board` or config provides a board directly (resolution steps 1-2), where auto-resolve was skipped. When auto-resolve discovers the board (step 3 with `require_scrum=true`), the guard is redundant but harmless — it confirms the board is scrum via `get_board_config()`. + +**Existing callers of `list_boards()`:** The only existing caller is `handle_list` in `board.rs`. 
Update to pass `(project_key, board_type_filter)` instead of `()`. + +## What Changes + +- `list_boards()` gains `project_key` and `board_type` parameters +- `Board` struct gains `location: Option` field +- New `BoardLocation` struct in `types/jira/board.rs` +- New `resolve_board_id()` helper in `cli/board.rs` (`pub` visibility) +- `board list` output gains a "Project" column +- `BoardCommand::List` gains `board_type` field with clap `value_parser` +- `main.rs` threads `cli.project` to board and sprint handlers +- Both `board::handle` and `sprint::handle` gain `project_override` parameter +- Error messages updated for missing board scenarios + +**Note on pagination:** The existing `OffsetPage` struct computes `has_more()` from `start_at + max_results < total`, while the Agile API also returns an `isLast` field that is currently ignored. This is a pre-existing concern, not introduced by this feature. With project filtering, results are typically 1-3 boards (single page), so pagination edge cases are unlikely in practice. + +## What Doesn't Change + +- Board view output formatting (table/JSON) +- Sprint list/current behavior (when `--board` is explicit) +- Kanban JQL generation (`build_kanban_jql`) +- The scrum-type guard in `sprint.rs` (still needed for explicit `--board`) +- Exit codes for existing error scenarios +- Any command other than board and sprint + +## Testing + +### Test Fixtures + +Add to `tests/common/fixtures.rs`: +- `board_response(id, name, board_type, project_key)` — single board with location +- `board_list_response(boards)` — paginated board list wrapper + +### Unit Tests + +- `BoardLocation` deserialization: verify serde parses the location object correctly +- `list_boards` URL construction: verify query params are appended when present, omitted when `None` + +### Integration Tests + +Using wiremock to mock Jira API responses: + +1. **Auto-resolve success (sprint)**: Mock `list_boards?projectKeyOrId=PROJ&type=scrum` returning 1 board. 
Mock board config + sprint list. Run sprint handler. Assert sprint list endpoint called with correct board ID. +2. **Auto-resolve ambiguous**: Mock `list_boards` returning 2 scrum boards. Assert error contains both board IDs and names. +3. **Auto-resolve no boards**: Mock `list_boards` returning empty array. Assert error mentions project key and suggests checking board list. +4. **Board list with filters**: Mock `list_boards`. Call `list_boards(Some("PROJ"), Some("scrum"))`. Assert wiremock received `?projectKeyOrId=PROJ&type=scrum`. +5. **Explicit --board skips auto-resolve**: Call with `board_override=Some(42)`. Assert `list_boards` endpoint is NOT called. +6. **Global --project threads through**: Call board view handler with `project_override=Some("PROJ")`, no config board_id. Assert auto-resolve uses PROJ. + +### Existing Tests + +- `build_kanban_jql` tests: unchanged +- `compute_sprint_summary` tests: unchanged +- `board_commands.rs` integration tests: if PR #73 has been merged, the existing tests need `list_boards()` calls updated to pass `None, None` params. If PR #73 has not been merged, create the test file as part of this feature's test suite. +- `missing_board_id_returns_config_error` unit test in `board.rs`: replaced by `resolve_board_id` integration tests diff --git a/docs/superpowers/specs/2026-03-27-board-view-limit-design.md b/docs/superpowers/specs/2026-03-27-board-view-limit-design.md new file mode 100644 index 0000000..5474e50 --- /dev/null +++ b/docs/superpowers/specs/2026-03-27-board-view-limit-design.md @@ -0,0 +1,192 @@ +# Board View --limit Flag — Design Spec + +## Problem + +`jr board view` returns unbounded output with no way to control output size. A kanban board returned **1.8MB** of table data during testing. Every other list-style command (`issue list`, `assets search`, `assets tickets`, `queue view`) supports `--limit` — `board view` is the only outlier. 
+
+This is critical for AI agent consumption: an AI calling `board view` will blow its context window with no way to cap results.
+
+**Issue:** #69
+
+## Solution
+
+Add `--limit <n>` and `--all` flags to `board view`, following the established `issue list` pattern. Default limit: 30 (same as `issue list`). Both the scrum and kanban paths respect the limit with efficient early-stop pagination.
+
+Split out `sprint current` (same unbounded problem) to #72 to keep scope tight.
+
+### CLI Interface
+
+```
+jr board view [--board <id>] [--limit <n>] [--all]
+```
+
+| Flag | Type | Default | Description |
+|------|------|---------|-------------|
+| `--board` | `Option<u64>` | from config | Board ID override |
+| `--limit` | `Option<u32>` | 30 | Maximum issues to return |
+| `--all` | `bool` | false | Fetch all results (no limit) |
+
+`--limit` and `--all` conflict (clap `conflicts_with`). This is bidirectional — only needs to be declared on one arg, matching `issue list`'s existing pattern.
+
+### Shared Helper Extraction
+
+`resolve_effective_limit()` and `DEFAULT_LIMIT` currently live in `src/cli/issue/list.rs` as private items. Since `board view` needs the same logic, extract both to `src/cli/mod.rs` as `pub(crate)`:
+
+```rust
+pub(crate) const DEFAULT_LIMIT: u32 = 30;
+
+pub(crate) fn resolve_effective_limit(limit: Option<u32>, all: bool) -> Option<u32> {
+    if all {
+        None
+    } else {
+        Some(limit.unwrap_or(DEFAULT_LIMIT))
+    }
+}
+```
+
+`issue/list.rs` imports from `cli/mod.rs` instead of defining its own copy.
+
+### API Layer: `get_sprint_issues()` Changes
+
+The current signature:
+
+```rust
+pub async fn get_sprint_issues(
+    &self,
+    sprint_id: u64,
+    jql: Option<&str>,
+    extra_fields: &[&str],
+) -> Result<Vec<Issue>>
+```
+
+Changes to:
+
+```rust
+pub async fn get_sprint_issues(
+    &self,
+    sprint_id: u64,
+    jql: Option<&str>,
+    limit: Option<u32>,
+    extra_fields: &[&str],
+) -> Result<SprintIssuesResult>
+```
+
+New return type, defined in `src/api/jira/sprints.rs` (alongside `get_sprint_issues()`):
+
+```rust
+pub struct SprintIssuesResult {
+    pub issues: Vec<Issue>,
+    pub has_more: bool,
+}
+```
+
+**Why not reuse `SearchResult`?** Different pagination models — `SearchResult` comes from cursor-based search, `SprintIssuesResult` from offset-based Agile API. Separate types keep semantics clear.
+
+**Why `has_more: bool` instead of `total: u32`?** The Jira Agile API's `total` field is unreliable — it can change mid-pagination due to concurrent modifications. The `has_more` signal (derived from `OffsetPage.has_more()`) is the reliable pagination indicator.
+
+#### Early-Stop Pagination Logic
+
+The pagination loop adds limit-aware early-stop. A mutable `result_has_more` tracks whether more results exist beyond what was collected:
+
+```rust
+let mut result_has_more = false;
+
+// Inside the loop, after extending all_issues from page:
+if let Some(max) = limit {
+    if all_issues.len() >= max as usize {
+        result_has_more = all_issues.len() > max as usize || page_has_more;
+        all_issues.truncate(max as usize);
+        break;
+    }
+}
+```
+
+This stops fetching pages as soon as enough issues are collected. For the 1.8MB case, `--limit 30` fetches a single page of 50 instead of all pages.
+
+### Kanban Path
+
+Already uses `search_issues()` which accepts `limit: Option<u32>`.
Pass the effective limit through and capture `has_more` for the truncation hint: + +```rust +// Before (unbounded): +client.search_issues(&jql, None, &[]).await?.issues + +// After (limit-aware): +let result = client.search_issues(&jql, effective_limit, &[]).await?; +let has_more = result.has_more; +let issues = result.issues; +``` + +### Handler Changes in `board.rs` + +`handle_view` gains `limit` and `all` parameters from the destructured `BoardCommand::View` enum. The flow: + +1. Resolve effective limit via `resolve_effective_limit(limit, all)` +2. Fetch issues (scrum or kanban path, both limit-aware) +3. Format and print (unchanged) +4. If truncated, show hint on stderr + +### Truncation Hint + +When output is truncated, show a hint on stderr matching `issue list`'s pattern: + +**Kanban path** (has `approximate_count()`): +``` +Showing 30 of ~487 results. Use --limit or --all to see more. +``` + +**Scrum path** (no reliable total count): +``` +Showing 30 results. Use --limit or --all to see more. +``` + +Both match `issue list`'s existing behavior — the kanban variant uses `approximate_count()` for the total, and the scrum variant uses the simpler message (same as `issue list`'s fallback when `approximate_count()` fails). 
+
+### Call Site Updates
+
+All existing callers of `get_sprint_issues()` must pass the new `limit` parameter:
+
+| File | Current Call | New Call | Notes |
+|------|-------------|---------|-------|
+| `src/cli/board.rs` (scrum path) | `client.get_sprint_issues(sprint.id, None, &[]).await?` returns `Vec<Issue>` | `client.get_sprint_issues(sprint.id, None, effective_limit, &[]).await?` returns `SprintIssuesResult` | Destructure: `let result = ...; let has_more = result.has_more; let issues = result.issues;` |
+| `src/cli/sprint.rs` (`handle_current`) | `client.get_sprint_issues(sprint.id, None, &extra).await?` returns `Vec<Issue>` | `client.get_sprint_issues(sprint.id, None, None, &extra).await?` returns `SprintIssuesResult` | Extract `.issues`: `let result = ...; let issues = result.issues;` — no behavior change, `has_more` ignored |
+| `src/cli/issue/list.rs` (scrum branch) | Not a caller of `get_sprint_issues()` — uses JQL with `sprint = {id}` | No change | — |
+
+## What Changes
+
+- `BoardCommand::View` gains `--limit` and `--all` flags
+- `resolve_effective_limit()` and `DEFAULT_LIMIT` move to `cli/mod.rs`
+- `get_sprint_issues()` gains `limit` param, returns `SprintIssuesResult`
+- `handle_view` in `board.rs` passes limit to both paths, shows truncation hint
+- `sprint.rs` passes `None` for the new limit param (no behavior change)
+
+## What Doesn't Change
+
+- Error messages or exit codes
+- Board list, sprint list, or any other command
+- The kanban JQL generation (`build_kanban_jql`)
+- Output formatting (table/JSON)
+- `search_issues()` API (already supports limit)
+
+## Testing
+
+### Unit Tests
+
+- `resolve_effective_limit` tests move from `issue/list.rs` to `cli/mod.rs` (same tests, new location)
+- `SprintIssuesResult` construction tests in `sprints.rs`
+
+### Integration Tests
+
+Using wiremock to mock Jira API responses:
+
+1. **Scrum board with limit**: Mock sprint list + sprint issues (page of 50). Run `board view --limit 3`. 
Assert output has 3 rows AND wiremock received exactly 1 sprint issues request (early-stop verified). +2. **Kanban board with limit**: Mock board config + search endpoint. Run `board view --limit 5`. Assert output has 5 rows. +3. **Flag conflict**: Run `board view --limit 3 --all`. Assert exit code 2 (clap `ArgumentConflict`). +4. **Default limit**: Run `board view` (no flags). Assert output has at most 30 rows. + +### Existing Tests + +No existing tests should break: +- `build_kanban_jql` tests are unchanged +- `sprint.rs` tests don't call `get_sprint_issues()` (they test `compute_sprint_summary`) +- `issue/list.rs` tests for `resolve_effective_limit` move to `cli/mod.rs` (imports adjusted from `use super::*` to direct function references since they'll be in the same module) diff --git a/docs/superpowers/specs/2026-03-28-input-validation-design.md b/docs/superpowers/specs/2026-03-28-input-validation-design.md new file mode 100644 index 0000000..7f51b28 --- /dev/null +++ b/docs/superpowers/specs/2026-03-28-input-validation-design.md @@ -0,0 +1,259 @@ +# Input Validation for issue list — Design Spec + +## Problem + +`jr issue list --project NONEXISTENT` returns "No results found" with exit code 0. The same happens with `--status "Nonexistant"`. Invalid filter values silently produce empty results instead of errors, making them indistinguishable from legitimate empty results. + +An AI agent or script cannot tell the difference between "this project exists but has no matching issues" and "this project does not exist." Compare with `issue move` which correctly validates statuses and suggests alternatives. + +**Issue:** #71 + +## Solution + +Add pre-flight validation to `issue list` for two inputs: + +1. **`--project` validation**: Before building JQL, check that the project exists via `GET /rest/api/3/project/{key}`. If 404, error with a suggestion to run `jr project list`. +2. 
**`--status` validation**: Before building JQL, fetch valid statuses and partial-match the `--status` value. If no match, error listing valid statuses. If ambiguous, error listing matches.
+
+Both validations happen in `handle_list` (CLI layer), after resolving the project key but before building JQL. They are independent — neither requires the other.
+
+### Project Validation
+
+**New API method in `src/api/jira/projects.rs`:**
+
+```rust
+pub async fn project_exists(&self, key: &str) -> Result<bool>
+```
+
+Calls `GET /rest/api/3/project/{key}`. Returns `Ok(true)` on 200, `Ok(false)` on 404 (by catching `JrError::ApiError { status: 404, .. }`), propagates other errors (401, 500, network).
+
+**Validation logic in `handle_list`:**
+
+After resolving `project_key` (line 108 in `list.rs`) and before building JQL:
+
+```rust
+if let Some(ref pk) = project_key {
+    if !client.project_exists(pk).await? {
+        return Err(JrError::UserError(
+            format!(
+                "Project \"{}\" not found. Run \"jr project list\" to see available projects.",
+                pk
+            )
+        ).into());
+    }
+}
+```
+
+**Error message:**
+```
+Project "NONEXISTENT" not found. Run "jr project list" to see available projects.
+```
+
+Exit code 64 via `JrError::UserError` — consistent with how `handle_list` already treats bad user input (e.g., `validate_duration` on line 67 uses `JrError::UserError`).
+
+### Status Validation
+
+**Strategy depends on whether `--project` is set:**
+
+| `--project` set? | Endpoint | Why |
+|-------------------|----------|-----|
+| Yes | `GET /rest/api/3/project/{key}/statuses` | More precise — only statuses valid for that project's workflows. Also validates the project (404 if invalid), combining both checks into one API call. |
+| No | `GET /rest/api/3/status` | Global list of all statuses in the instance. Still catches typos. |
+
+**When `--project` IS set:** The project-scoped endpoint returns statuses grouped by issue type with duplicates across types. 
Extract unique status names into a `Vec<String>` (deduplicate by name). This replaces the separate `project_exists()` call — if this endpoint returns 404, the project doesn't exist.
+
+**When `--project` is NOT set:** The global endpoint returns a flat array of `StatusDetails` with `name` fields. No deduplication needed.
+
+**New API method in `src/api/jira/statuses.rs`** (new file — `GET /rest/api/3/status` is a global endpoint, not project-scoped, so it gets its own file following the one-file-per-resource convention):
+
+```rust
+pub async fn get_all_statuses(&self) -> Result<Vec<String>>
+```
+
+Calls `GET /rest/api/3/status` (not paginated — returns all statuses for active workflows), extracts unique status names, returns as a flat `Vec<String>`.
+
+**New helper in `src/cli/issue/list.rs`:**
+
+```rust
+fn extract_unique_status_names(issue_types: &[IssueTypeWithStatuses]) -> Vec<String>
+```
+
+Extracts and deduplicates status names from the project-scoped response. Uses a `HashSet` for deduplication, returns sorted `Vec<String>`.
+
+**Partial matching:**
+
+Reuse the existing `crate::partial_match::partial_match()` function (already used by `issue move` for transitions and `queue view` for queue names). It takes `(input: &str, candidates: &[String])` and returns a `MatchResult` enum:
+- `MatchResult::Exact(String)` — exact or single substring match
+- `MatchResult::Ambiguous(Vec<String>)` — multiple matches
+- `MatchResult::None(Vec<String>)` — no match (contains all candidates for error messages)
+
+The caller constructs error messages from the `MatchResult` variants, following the same pattern used in `workflow.rs` and `helpers.rs`.
+
+**Validation placement in `handle_list`:**
+
+The validation must run BEFORE `build_filter_clauses()` is called (currently line 98), because the resolved status name needs to reach `build_filter_clauses`. The validation also requires `project_key`, which is currently resolved at line 108. 
Reorder: move `let project_key = config.project_key(project_override);` up from line 108 to after the team clause (line 95), then insert validation, then call `build_filter_clauses`. Use a separate `resolved_status` variable rather than mutating the destructured `status`:
+
+```rust
+// After team_clause (line 95), before build_filter_clauses (line 98):
+
+// Validate --project exists
+if let Some(ref pk) = project_key {
+    // Skip if --status is set (project will be validated via statuses endpoint below)
+    if status.is_none() && !client.project_exists(pk).await? {
+        return Err(JrError::UserError(format!(
+            "Project \"{}\" not found. Run \"jr project list\" to see available projects.",
+            pk
+        )).into());
+    }
+}
+
+// Validate --status and resolve to exact name
+let resolved_status: Option<String> = if let Some(ref status_input) = status {
+    let valid_statuses = if let Some(ref pk) = project_key {
+        // Project-scoped: also validates project existence (404 = not found)
+        match client.get_project_statuses(pk).await {
+            Ok(issue_types) => extract_unique_status_names(&issue_types),
+            Err(e) => {
+                if let Some(JrError::ApiError { status: 404, .. }) = e.downcast_ref::<JrError>() {
+                    return Err(JrError::UserError(format!(
+                        "Project \"{}\" not found. Run \"jr project list\" to see available projects.",
+                        pk
+                    )).into());
+                }
+                return Err(e);
+            }
+        }
+    } else {
+        client.get_all_statuses().await?
+    };
+
+    match crate::partial_match::partial_match(status_input, &valid_statuses) {
+        crate::partial_match::MatchResult::Exact(name) => Some(name),
+        crate::partial_match::MatchResult::Ambiguous(matches) => {
+            return Err(JrError::UserError(format!(
+                "Ambiguous status \"{}\". 
Matches: {}", + status_input, + matches.join(", ") + )).into()); + } + crate::partial_match::MatchResult::None(all) => { + let available = all.join(", "); + let scope = if project_key.is_some() { + format!(" for project {}", project_key.as_ref().unwrap()) + } else { + String::new() + }; + return Err(JrError::UserError(format!( + "No status matching \"{}\"{scope}. Available: {available}", + status_input, + )).into()); + } + } +} else { + None +}; + +// Use resolved_status in build_filter_clauses instead of status +let filter_parts = build_filter_clauses( + assignee_jql.as_deref(), + reporter_jql.as_deref(), + resolved_status.as_deref(), // <-- resolved name, not raw input + team_clause.as_deref(), + recent.as_deref(), + open, +); +``` + +This approach: +- Uses a new `resolved_status` variable instead of mutating the destructured `status` +- Moves `build_filter_clauses` after validation (passes `resolved_status` instead of `status`) +- Uses `JrError::UserError` for exit code 64 +- Constructs error messages from `MatchResult` variants (matching the pattern in `workflow.rs`) +- Passes `&[String]` to `partial_match()` (correct type) + +**Error messages (constructed by the caller from `MatchResult` variants):** + +| Scenario | Message | +|----------|---------| +| No match (with project) | `No status matching "Nonexistant" for project PROJ. Available: Done, In Progress, To Do` | +| No match (no project) | `No status matching "Nonexistant". Available: Done, In Progress, To Do, ...` | +| Ambiguous | `Ambiguous status "in". Matches: In Progress, In Review` | + +### Combined Flow + +When both `--project` and `--status` are set, the validation is efficient: + +1. Call `GET /rest/api/3/project/{key}/statuses` — one API call +2. If 404 → project not found error +3. If 200 → extract unique statuses, partial-match `--status` +4. If match fails → error with available statuses +5. 
If match succeeds → proceed with resolved status name in JQL + +When only `--project` is set (no `--status`): +1. Call `GET /rest/api/3/project/{key}` — one API call +2. If 404 → project not found error +3. If 200 → proceed + +When only `--status` is set (no `--project`): +1. Call `GET /rest/api/3/status` — one API call +2. Partial-match against global list +3. If match fails → error +4. If match succeeds → proceed + +**API call overhead: 0 extra calls when neither flag is set, 1 call when either or both are set.** + +### Scope + +**Only `issue list` gets validation.** Other commands that accept `--project`: +- `board list/view` and `sprint list/current` — already validated via `resolve_board_id()` which calls `list_boards()` with the project key (returns 400 for invalid projects) +- `issue create` — uses `get_project_issue_types()` which returns 404 for invalid projects +- `project fields` — uses `get_project_statuses()` which returns 404 for invalid projects +- `queue list/view` — uses service desk lookup which fails for invalid projects + +`issue list` is the only command that passes `--project` directly into JQL without any API validation. 
+ +## What Changes + +- New `project_exists()` method in `src/api/jira/projects.rs` +- New `get_all_statuses()` method in `src/api/jira/statuses.rs` (new file) +- New `extract_unique_status_names()` helper in `src/cli/issue/list.rs` +- `handle_list` in `list.rs` gains validation blocks before `build_filter_clauses` +- New `resolved_status` variable replaces raw `status` in `build_filter_clauses` call +- `project_key` resolution moves up from line 108 to before validation block +- `build_filter_clauses` call moves after validation (receives resolved status name) + +## What Doesn't Change + +- JQL construction logic (`build_jql_base_parts`, `build_filter_clauses` — function unchanged, only call site moves after validation) +- Output formatting +- Exit codes for other error scenarios +- Any command other than `issue list` +- The `--status` flag's position in JQL (still uses `status = "name"`) +- Behavior when neither `--project` nor `--status` is set + +## Testing + +### Unit Tests + +- `extract_unique_status_names` — deduplicates statuses across issue types, returns sorted +- `project_exists` — returns true on 200, false on 404, propagates other errors + +### Integration Tests + +Using wiremock: + +1. **Invalid project**: Mock project endpoint returning 404. Assert error contains "not found" and "jr project list". +2. **Valid project proceeds**: Mock project endpoint returning 200. Mock search endpoint. Assert search runs normally. +3. **Invalid status with project**: Mock project statuses returning valid statuses. Assert error from partial_match with available statuses listed. +4. **Valid status partial match**: Mock project statuses with "In Progress", "To Do", "Done". Call `--status "in prog"`. Assert the resolved JQL uses the full "In Progress" name. +5. **Ambiguous status**: Mock statuses with "In Progress" and "In Review". Call `--status "in"`. Assert error lists both matches. +6. **Status without project**: Mock global status endpoint. 
Call `--status "Nonexistant"` with `--assignee me`. Assert error lists available statuses.
+7. **Both project and status invalid**: Mock project statuses returning 404. Assert project error (project validated first).
+8. **No validation when flags absent**: Call `--assignee me` (no --project, no --status). Assert no project/status API calls made.
+
+### Existing Tests
+
+- `build_kanban_jql` tests — unchanged
+- `build_filter_clauses` tests — unchanged (status string already resolved before reaching these)
+- `resolve_effective_limit` tests — unchanged
+- All other integration tests — unchanged (they don't use invalid project keys or status names)
diff --git a/docs/superpowers/specs/2026-03-28-sprint-current-limit-design.md b/docs/superpowers/specs/2026-03-28-sprint-current-limit-design.md
new file mode 100644
index 0000000..25039b7
--- /dev/null
+++ b/docs/superpowers/specs/2026-03-28-sprint-current-limit-design.md
@@ -0,0 +1,134 @@
+# Sprint Current --limit — Design Spec
+
+## Problem
+
+`jr sprint current` fetches all issues from the active sprint with no way to control output size. The Jira Agile API sprint issues endpoint (`/rest/agile/1.0/sprint/{sprintId}/issue`) pages at max 50 items — `get_sprint_issues()` paginates through ALL pages with no early-stop when no limit is provided.
+
+Every other list-style command in jr has `--limit` — `sprint current` is the remaining outlier. AI agents cannot safely call it without risking context overflow.
+
+**Issue:** #72
+
+## Solution
+
+Add `--limit <N>` and `--all` flags to `sprint current`, matching the pattern already used by `issue list` and `board view`. 
+
+The infrastructure already exists:
+- `resolve_effective_limit(limit, all)` in `cli/mod.rs` (defaults to 30 when neither flag is set)
+- `get_sprint_issues()` already accepts `limit: Option<u32>` and returns `SprintIssuesResult { issues, has_more }`
+- `handle_current` currently passes `None` as the limit (unbounded)
+
+This is a wiring change — no new API calls, no new utility functions.
+
+### CLI Changes (`src/cli/mod.rs`)
+
+Add `--limit` and `--all` to `SprintCommand::Current`:
+
+```rust
+Current {
+    /// Board ID (overrides board_id in .jr.toml)
+    #[arg(long)]
+    board: Option<u64>,
+    /// Maximum number of issues to return
+    #[arg(long)]
+    limit: Option<u32>,
+    /// Fetch all results (no default limit)
+    #[arg(long, conflicts_with = "limit")]
+    all: bool,
+},
+```
+
+Matches `BoardCommand::View` exactly.
+
+### Handler Changes (`src/cli/sprint.rs`)
+
+**`handle` function:**
+
+Extract `limit` and `all` from `SprintCommand::Current` in the match arms:
+
+```rust
+let board_override = match &command {
+    SprintCommand::List { board } => *board,
+    SprintCommand::Current { board, .. } => *board,
+};
+```
+
+Pass `limit` and `all` to `handle_current`:
+
+```rust
+SprintCommand::Current { limit, all, .. } => {
+    handle_current(board_id, client, output_format, config, limit, all).await
+}
+```
+
+**`handle_current` function:**
+
+Add `limit: Option<u32>` and `all: bool` parameters. Compute effective limit and pass to API:
+
+```rust
+async fn handle_current(
+    board_id: u64,
+    client: &JiraClient,
+    output_format: &OutputFormat,
+    config: &Config,
+    limit: Option<u32>,
+    all: bool,
+) -> Result<()> {
+    let effective_limit = crate::cli::resolve_effective_limit(limit, all);
+    // ...
+    let result = client
+        .get_sprint_issues(sprint.id, None, effective_limit, &extra)
+        .await?;
+    let issues = result.issues;
+    let has_more = result.has_more;
+    // ... 
+``` + +After printing the table (both Table and Json paths), print the "more results" hint: + +```rust +if has_more && !all { + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issues.len() + ); +} +``` + +The hint uses the same wording as `board view`'s scrum path. While the Agile API sprint issues endpoint returns a `total` field (used by `OffsetPage.has_more()` for pagination), the hint only reports how many issues were fetched — not the full sprint total — since the purpose of `--limit` is to avoid fetching everything. + +### Sprint Points Summary on Limited Results + +When `--limit` caps results, `compute_sprint_summary` runs on the limited set. This means points shown may not reflect the full sprint. This is the same tradeoff `board view` makes — fetching all issues just for a summary would defeat the purpose of `--limit`. The sprint header still shows the sprint name and end date (from the sprint object, not issues), so the user retains context. + +## What Changes + +- `SprintCommand::Current` gains `limit: Option` and `all: bool` fields in `src/cli/mod.rs` +- `handle` in `sprint.rs` updates both match sites: the `board_override` extraction match (add `..` to ignore new fields) and the dispatch match (extract `limit` and `all`, pass to `handle_current`) +- `handle_current` in `sprint.rs` computes `effective_limit` and passes it to `get_sprint_issues`, prints "more results" hint + +## What Doesn't Change + +- `get_sprint_issues()` API method — already supports limits +- `resolve_effective_limit()` — reused as-is +- `SprintCommand::List` — no limit needed (sprints are few) +- `compute_sprint_summary` — unchanged (operates on whatever issues it receives) +- `handle_list` — unchanged +- Sprint header output (name, end date) — unchanged +- JSON output structure — unchanged (just fewer issues when limited) + +## Testing + +### Integration Tests + +Using wiremock: + +1. **Default limit caps at 30**: Mock sprint issues with 35 results. 
Assert only 30 returned and "Showing 30 results" hint printed to stderr. +2. **--limit flag**: Mock sprint issues with 20 results. Call `--limit 5`. Assert 5 returned and hint printed. +3. **--all flag**: Mock sprint issues with 35 results. Call `--all`. Assert all 35 returned and no hint. +4. **Under default limit**: Mock sprint issues with 10 results. Assert all 10 returned and no hint. + +### Existing Tests + +- `compute_sprint_summary` unit tests — unchanged +- `build_kanban_jql` unit tests — unchanged +- Board integration tests — unchanged diff --git a/docs/superpowers/specs/2026-04-01-issue-edit-description-design.md b/docs/superpowers/specs/2026-04-01-issue-edit-description-design.md new file mode 100644 index 0000000..302ce40 --- /dev/null +++ b/docs/superpowers/specs/2026-04-01-issue-edit-description-design.md @@ -0,0 +1,137 @@ +# Issue Edit Description Support + +**Date:** 2026-04-01 +**Issue:** #82 +**Status:** Design + +## Summary + +Add `--description`, `--description-stdin`, and `--markdown` flags to `jr issue edit`, mirroring the existing flags on `jr issue create`. Description updates fully replace the existing description (no append mode). + +## Motivation + +`jr issue edit` supports editing summary, type, priority, labels, team, points, and parent — but not the description. Users must fall back to raw `curl` calls to update descriptions, losing `jr`'s auth handling, ADF conversion, and output formatting. 
+
+Use cases from #82:
+- Enriching tickets after creation (e.g., appending implementation notes via scripted workflows)
+- Correcting or updating descriptions without opening the browser
+- Batch description updates via JQL-driven scripts
+- Two-step creation workflows (create with minimal fields, add description later)
+
+## CLI Surface
+
+```
+jr issue edit PROJ-123 --description "New description text"
+jr issue edit PROJ-123 -d "Short description"
+jr issue edit PROJ-123 --description-stdin < file.txt
+echo "piped text" | jr issue edit PROJ-123 --description-stdin
+jr issue edit PROJ-123 --description-stdin --markdown < notes.md
+jr issue edit PROJ-123 --description "**bold text**" --markdown
+```
+
+### Flag Definitions
+
+| Flag | Short | Type | Description |
+|------|-------|------|-------------|
+| `--description` | `-d` | `Option<String>` | Description text (conflicts with `--description-stdin`) |
+| `--description-stdin` | — | `bool` | Read description from stdin (conflicts with `--description`) |
+| `--markdown` | — | `bool` | Interpret description as Markdown instead of plain text |
+
+### Conflict Rules
+
+- `--description` and `--description-stdin` are mutually exclusive (enforced by clap `conflicts_with`).
+- `--markdown` without a description source is silently ignored (matches `create` behavior).
+- `--description ""` sets a description with an empty paragraph (valid — Jira stores this as a non-null ADF document with no visible text). To fully clear a description, a future `--no-description` flag would send `"description": null`.
+
+## API Behavior
+
+Uses `PUT /rest/api/3/issue/{key}` with the description in the `fields` object as ADF. This is the same pattern used by all other `edit` fields today. 
+
+```json
+{
+  "fields": {
+    "description": {
+      "type": "doc",
+      "version": 1,
+      "content": [
+        {
+          "type": "paragraph",
+          "content": [
+            { "type": "text", "text": "Updated description" }
+          ]
+        }
+      ]
+    }
+  }
+}
+```
+
+Key behaviors confirmed via Jira REST API v3 documentation:
+- **Replace semantics:** The description field is a "set" operation — the entire description is replaced.
+- **Atomic updates:** When combined with other fields (summary, priority, etc.), all updates succeed or all fail.
+- **ADF required:** The description field in v3 must be Atlassian Document Format, not plain text or HTML.
+
+## Implementation
+
+### CLI Definition (`src/cli/mod.rs`)
+
+Add three fields to `IssueCommand::Edit`:
+
+```rust
+Edit {
+    // ... existing fields ...
+
+    /// Description
+    #[arg(short, long, conflicts_with = "description_stdin")]
+    description: Option<String>,
+    /// Read description from stdin (for piping)
+    #[arg(long, conflicts_with = "description")]
+    description_stdin: bool,
+    /// Interpret description as Markdown
+    #[arg(long)]
+    markdown: bool,
+}
+```
+
+### Handler Logic (`src/cli/issue/create.rs`)
+
+In `handle_edit`, after destructuring the new fields:
+
+1. Resolve description text:
+   - If `description_stdin` → read stdin into `String`
+   - Else → use `description` as-is (`Option<String>`)
+2. If description text is `Some`:
+   - Convert to ADF via `adf::markdown_to_adf` (if `--markdown`) or `adf::text_to_adf`
+   - Set `fields["description"] = adf_body`
+   - Set `has_updates = true`
+
+This slots in alongside the existing field updates, before the label-handling block. No changes to `edit_issue` API method or `JiraClient`.
+
+### Error Message Update
+
+Update the "no fields specified" bail message to include `--description` and `--description-stdin` in the list of available flags. 
+ +## Testing + +### CLI Tests (`tests/cli_smoke.rs`) + +- `--description` and `--description-stdin` conflict at parse time (clap validation) + +### Integration Tests (`tests/issue_commands.rs`) + +| Test | Input | Assertion | Status | +|------|-------|-----------|--------| +| Plain text description | `--description "hello"` | PUT body contains ADF paragraph with "hello" | Implemented | +| Markdown description | `--description "**bold**" --markdown` | PUT body contains ADF with bold markup | Implemented | +| Combined fields | `--description "text" --summary "new"` | PUT body contains both description ADF and summary | Implemented | +| Stdin description | `--description-stdin` with piped input | PUT body contains ADF from stdin text | Deferred (stdin piping in test harness) | +| Markdown flag alone | `--markdown --summary "new"` | PUT body has summary but no description field | Covered implicitly | +| JSON output | `--description "text" --output json` | Outputs `{"key": "...", "updated": true}` | Covered by existing edit JSON test path | + +Integration tests use wiremock to mock the Jira API and verify PUT request bodies via `body_partial_json` matcher. + +## Out of Scope + +- **Append mode** — Would require GET (fetch current description) + ADF content array merge + PUT. Meaningful feature but adds complexity. Can be a follow-up. +- **Interactive editor** — Opening `$EDITOR` for description editing. Separate feature request. +- **Description clearing** (`--no-description`) — Not requested. Can be added later following the `--no-points` pattern. 
diff --git a/docs/superpowers/specs/2026-04-02-cache-dedup-design.md b/docs/superpowers/specs/2026-04-02-cache-dedup-design.md
new file mode 100644
index 0000000..22f4409
--- /dev/null
+++ b/docs/superpowers/specs/2026-04-02-cache-dedup-design.md
@@ -0,0 +1,161 @@
+# Cache Deduplication Refactor
+
+**Issue:** #104
+**Date:** 2026-04-02
+
+## Problem
+
+`src/cache.rs` has 5 nearly identical read/write function pairs that repeat the
+same pattern: build path, check exists, read + deserialize JSON, check TTL,
+return `Ok(None)` if expired/missing, handle corrupt files. ~260 lines of
+production code is mostly this boilerplate.
+
+Additionally, 3 of the 5 read functions propagate deserialization errors instead
+of treating them as cache misses, which can surface user-facing errors for a
+non-critical cache.
+
+## Design
+
+### Approach: Generic free functions + minimal trait
+
+Extract two internal generic functions and a one-method trait. Keep the existing
+public API unchanged — all callers continue using the same function signatures.
+
+### New internal abstractions
+
+All `pub(crate)` or private to `cache.rs`:
+
+```rust
+/// Implemented by cache structs that carry a timestamp for TTL checks.
+pub(crate) trait Expiring {
+    fn fetched_at(&self) -> DateTime<Utc>;
+}
+
+/// Read a whole-file cache. Returns Ok(None) on missing, expired, or corrupt files.
+fn read_cache<T: DeserializeOwned + Expiring>(filename: &str) -> Result<Option<T>> {
+    let path = cache_dir().join(filename);
+    if !path.exists() {
+        return Ok(None);
+    }
+    let content = std::fs::read_to_string(&path)?;
+    let cache: T = match serde_json::from_str(&content) {
+        Ok(c) => c,
+        Err(_) => return Ok(None), // corrupt = cache miss
+    };
+    if (Utc::now() - cache.fetched_at()).num_days() >= CACHE_TTL_DAYS {
+        return Ok(None);
+    }
+    Ok(Some(cache))
+}
+
+/// Write a whole-file cache. Creates the cache directory if needed. 
+fn write_cache<T: Serialize>(filename: &str, data: &T) -> Result<()> {
+    let dir = cache_dir();
+    std::fs::create_dir_all(&dir)?;
+    let content = serde_json::to_string_pretty(data)?;
+    std::fs::write(dir.join(filename), content)?;
+    Ok(())
+}
+```
+
+### Whole-file caches (fully deduplicated)
+
+Three caches share an identical structure and collapse to thin wrappers:
+
+| Cache | File | Struct |
+|-------|------|--------|
+| Teams | `teams.json` | `TeamCache` |
+| Workspace | `workspace.json` | `WorkspaceCache` |
+| CMDB fields | `cmdb_fields.json` | `CmdbFieldsCache` |
+
+Each struct gains a one-liner `Expiring` impl:
+
+```rust
+impl Expiring for TeamCache {
+    fn fetched_at(&self) -> DateTime<Utc> { self.fetched_at }
+}
+```
+
+Public functions collapse to:
+
+```rust
+pub fn read_team_cache() -> Result<Option<TeamCache>> {
+    read_cache("teams.json")
+}
+
+pub fn write_team_cache(teams: &[CachedTeam]) -> Result<()> {
+    write_cache("teams.json", &TeamCache {
+        fetched_at: Utc::now(),
+        teams: teams.to_vec(),
+    })
+}
+```
+
+### Keyed caches (kept explicit)
+
+Two caches use `HashMap` with different TTL semantics and are not
+worth genericizing:
+
+| Cache | File | TTL model |
+|-------|------|-----------|
+| Project meta | `project_meta.json` | Per-entry (`ProjectMeta.fetched_at`) |
+| Object type attrs | `object_type_attrs.json` | Per-file (`ObjectTypeAttrCache.fetched_at`) |
+
+These stay as explicit functions with a doc comment explaining why they are not
+genericized (different TTL semantics). The only change is normalizing corrupt-file
+handling: `read_project_meta` currently propagates deserialization errors via `?`.
+After this refactor it will return `Ok(None)` on corrupt data, matching
+`read_cmdb_fields_cache` and `read_object_type_attr_cache` which already do this.
+
+### Behavior changes
+
+1. **Corrupt-file handling normalized:** All 5 read functions treat
+   deserialization failures as cache misses (`Ok(None)`). Previously, 3 of 5
+   (teams, workspace, project_meta) propagated the error. 
This aligns with how + Cargo handles corrupt caches (silently skip/ignore). + +2. **No on-disk format changes.** All existing structs keep their field names and + shapes. The `Expiring` trait is a Rust-side abstraction only with no serde + impact. + +### What does NOT change + +- Public function signatures +- Caller code in `api/`, `cli/` +- On-disk JSON format +- Cache file names or paths +- TTL duration (7 days) +- Existing test assertions + +### New tests + +Add 3 corrupt-file tests for caches that previously lacked them: + +- `corrupt_team_cache_returns_none` +- `corrupt_workspace_cache_returns_none` +- `corrupt_project_meta_returns_none` + +Each corrupt test should cover both garbage data (`"not json"`) and valid JSON +with a wrong shape (e.g., `{"unexpected": true}`) to exercise the `Expiring` +deserialization path. + +### Estimated impact + +- ~60-70 lines of production code eliminated (from ~260 to ~190) +- 3 new tests added +- 5 one-liner trait impls added +- Net reduction: ~40-50 lines + +## Validation + +Design decisions validated against Rust community best practices via research: + +- **Corrupt = cache miss:** Cargo silently ignores cache errors. Consensus for + non-critical caches is to treat deserialization failures as misses. +- **Trait over closure:** A one-method trait (`Expiring`) is more idiomatic than + passing a `FnOnce` closure for simple field access on generic functions. +- **Rule of Three:** Only the 3 identical whole-file caches are genericized. The + 2 keyed caches have different TTL semantics and don't justify a shared + abstraction. +- **Trait has no serde impact:** Adding a non-serde trait impl to a struct with + `#[derive(Serialize, Deserialize)]` has zero effect on serialization. 
diff --git a/docs/superpowers/specs/2026-04-02-issue-move-status-name-design.md b/docs/superpowers/specs/2026-04-02-issue-move-status-name-design.md new file mode 100644 index 0000000..267e3be --- /dev/null +++ b/docs/superpowers/specs/2026-04-02-issue-move-status-name-design.md @@ -0,0 +1,112 @@ +# Issue Move: Accept Target Status Name + +**GitHub Issue:** #108 + +**Goal:** Allow `jr issue move KEY "Completed"` to work when "Completed" is the target status name, not just the transition name. + +## Problem + +`jr issue move` currently matches user input only against transition names. Many users think in terms of target statuses ("move to Done") rather than transitions ("execute the Done transition"). When transition and status names differ (e.g., transition "Complete" → status "Completed"), users get a confusing error: + +``` +Error: No transition matching "Completed". Available: Review, Cancel, Complete +``` + +## Design + +### Unified candidate pool + +Replace the current single-pass transition-name matching with a unified candidate pool that includes both transition names and target status names. + +1. For each transition, collect two candidate strings: the transition `name` and the target status `to.name`. +2. Each candidate maps back to its source transition. +3. Deduplicate by candidate string (case-insensitive). If a transition name equals its status name (e.g., `"Done"` → `"Done"`), keep one entry. This is the common case in default Jira workflows — behavior is unchanged. +4. Run `partial_match` once against the deduplicated candidate list. +5. If match resolves to a single transition → use it. +6. If match resolves to multiple transitions → treat as ambiguous (existing interactive prompt / `--no-input` error). + +### Matching priority within partial_match + +The existing `partial_match` function already handles priority correctly: + +- Exact match (case-insensitive) takes precedence over substring match. 
+- If user types `"Complete"` and both `"Complete"` (transition) and `"Completed"` (status) are in the pool, `"Complete"` is an exact match and wins. +- If user types `"Completed"` and only `"Completed"` (status) is in the pool as an exact match, it wins. + +No changes to `partial_match` are needed. + +### Ambiguity handling + +Ambiguity arises when user input partially matches multiple candidates in the unified pool. For example, typing `"Re"` when transitions include `"Reopen"` and `"Review"`. + +Note: If two transitions lead to the same status (e.g., "Reopen" → "Open" and "Restart" → "Open"), deduplication means `"Open"` only appears once in the pool (mapped to the first transition). Typing `"Open"` is an exact match, not ambiguous. This is acceptable — both transitions reach the same status. If the user needs a specific transition's post-functions, they can type the transition name directly. + +When ambiguity does occur: + +- **Interactive mode:** Show disambiguation prompt listing the matching candidates. +- **`--no-input` mode:** Error with the list of matches. + +This is consistent with existing ambiguous-transition-name handling. + +### Idempotency check + +The previous idempotency check compared user input only against the issue's current status name (case-insensitive). That correctly handled status-name input — e.g., if the issue is already in `"Completed"` and the user types `"Completed"`, the early-return fires before matching is attempted. + +However, once `jr issue move` accepts both transition names and target status names, idempotency should also apply when the user types a transition name whose target status is the issue's current status. For example, if transition `"Complete"` leads to `"Completed"` and the issue is already in `"Completed"`, typing `"Complete"` should be treated as a no-op rather than attempting the transition. 
+ +Implementation change: treat user input as idempotent if either (a) the raw input matches the current status name, or (b) the input matches a transition whose `to.name` matches the current status name (case-insensitive). This preserves the existing early-return for status names and extends the same behavior to equivalent transition-name input. + +### Error message improvement + +When no match is found, show both transition and status names: + +``` +No transition matching "Foo". Available: Complete (→ Completed), Review (→ In Review), Cancel (→ Cancelled) +``` + +The `to` field is always present in the Jira Cloud transitions API response (confirmed via API docs and Perplexity), so the `→ Status` annotation is always available. Our Rust type keeps `to: Option` for defensive parsing; if `to` is `None`, fall back to just the transition name. + +### Interactive prompt + +The existing interactive prompt already shows `"Name -> Status"` format. No change needed. + +## Scope + +### Files changed + +- `src/cli/issue/workflow.rs` — `handle_move` function: replace transition-name-only matching with unified pool, update error message format. + +### Files NOT changed + +- `src/partial_match.rs` — no changes needed. +- `src/types/jira/issue.rs` — `Transition` struct unchanged. +- `src/api/jira/issues.rs` — API calls unchanged. +- `src/cli/mod.rs` — CLI args unchanged. + +### Out of scope + +- Changing the `transitions` subcommand output format. +- Changing how numeric selection (typing `"1"`, `"2"`) works. +- Adding `--by-status` or `--by-transition` flags to force one matching mode. + +## Testing + +### Unit tests + +None needed — `partial_match` is already well-tested. The logic change is in candidate list construction, which is covered by integration tests. + +### Integration tests + +1. **Match by transition name** (existing behavior preserved): `"In Progress"` matches transition name directly. +2. 
**Match by status name** (new behavior): `"Completed"` matches target status name when transition name is `"Complete"`. +3. **Deduplication**: When transition name equals status name (e.g., `"Done"` → `"Done"`), no duplicate candidates — single match. +4. **Shared target status name**: Two transitions leading to the same status produce one deduplicated status-name candidate, so an exact status-name match is not ambiguous solely for that reason. +5. **Error message format**: No match → error shows `"Name (→ Status)"` format. +6. **Idempotent with status name input**: Issue already in target status → exit 0 with "already in status" message. +7. **Idempotent with transition name input**: Issue already in target status, user types transition name → exit 0 with "already in status" message. + +## API Validation + +- **`to` field always present:** Confirmed by Jira Cloud REST API docs and Perplexity. Every transition has a target status object with `name`, `id`, `self`, and `description` fields. +- **Multiple transitions to same status:** Confirmed possible. Different transitions may have different post-functions. Under this design, the shared status-name candidate is deduplicated, so an exact status-name match is not ambiguous solely because multiple transitions reach that status. +- **Default workflows:** Transition names match status names in 3 of 4 transitions (`To Do`, `In Progress`, `Done`). The 4th (`Create` → `To Do`) is an INITIAL transition. Deduplication makes this a no-op for default workflow users. 
diff --git a/docs/superpowers/specs/2026-04-03-handle-list-error-propagation-design.md b/docs/superpowers/specs/2026-04-03-handle-list-error-propagation-design.md new file mode 100644 index 0000000..5b7e8f5 --- /dev/null +++ b/docs/superpowers/specs/2026-04-03-handle-list-error-propagation-design.md @@ -0,0 +1,87 @@ +# Design: Propagate board/sprint API errors in handle_list + +**Issue:** [#32](https://github.com/Zious11/jira-cli/issues/32) +**Date:** 2026-04-03 +**Status:** Draft + +## Problem + +In `src/cli/issue/list.rs`, `handle_list` silently swallows API errors from two board-related calls when no `--jql` is provided and a `board_id` is configured in `.jr.toml`: + +1. **`get_board_config` error (line 255):** `Err(_) =>` discards the error and falls back to generic project-scoped JQL. Auth failures, network timeouts, and 404s are all hidden. +2. **`list_sprints` catch-all (line 234):** `_ =>` treats API errors identically to "no active sprint found", falling back to project-scoped JQL. + +Both paths may ultimately hit the "No project or filters specified" guard if no other filters are set, producing a misleading `UserError` that masks the real cause. + +## Design + +Propagate all errors from `get_board_config` and `list_sprints` with contextual messages and actionable recovery suggestions. No silent fallbacks. + +### Change 1: `get_board_config` error handling + +Replace the `Err(_) =>` catch-all with error propagation. Distinguish 404 (ambiguous in Jira's API — covers both "deleted" and "no permission" per [JRACLOUD-97947](https://jira.atlassian.com/browse/JRACLOUD-97947)) from other errors: + +- **404:** `JrError::UserError` — "Board {id} not found or not accessible. Verify the board exists and you have permission, or remove board_id from .jr.toml. Use --jql to query directly." +- **Other errors:** Propagate with context via `anyhow::Context` — "Failed to fetch config for board {id}. Remove board_id from .jr.toml or use --jql to query directly." 
+ +404 uses `UserError` (exit code 64) because it indicates a configuration problem. Other errors propagate as-is (exit code 1) since they may be transient. + +### Change 2: `list_sprints` error handling + +Replace the `_ =>` catch-all with explicit match arms: + +- **`Ok(sprints) if !sprints.is_empty()`** → Use sprint JQL (unchanged). +- **`Ok(_)`** → No active sprint. Fall back to project-scoped JQL (unchanged — this is the correct behavior for a scrum board between sprints). +- **`Err(e)`** → Propagate with context: "Failed to list sprints for board {id}: {error}. Use --jql to query directly." + +### What stays the same + +- No active sprint on a scrum board → falls back to project-scoped JQL with `updated DESC` ordering. This is legitimate, not an error. +- Kanban board path → unchanged (filters `statusCategory != Done`, orders by `rank ASC`). +- No `board_id` configured → unchanged (goes straight to project-scoped JQL). +- All JQL composition logic downstream → untouched. +- `--jql` provided → board_id path is skipped entirely, unaffected. + +### API behavior note + +Jira's board agile API (`/rest/agile/1.0/board/{id}/configuration` and `/rest/agile/1.0/board/{id}/sprint`) returns 404 for both "board does not exist" and "user lacks permission." There is no 403 response. This is a known Atlassian limitation. Error messages must account for this ambiguity. + +## Error message examples + +``` +# Board deleted or no access +Error: Board 42 not found or not accessible. Verify the board exists and you +have permission, or remove board_id from .jr.toml. Use --jql to query directly. + +# Network/server error on board config +Error: Failed to fetch config for board 42. Remove board_id from .jr.toml or +use --jql to query directly. + +Caused by: connection timed out + +# Sprint list failure +Error: Failed to list sprints for board 42. Use --jql to query directly. 
+ +Caused by: 500 Internal Server Error +``` + +## Edge cases + +- **Board type changed after config written:** If a board was scrum when `board_id` was configured but later changed to kanban, `get_board_config` succeeds and returns the new type. The code already reads `board_type` from the response and branches on it, so it will correctly take the kanban path. No sprint call is made. This is handled without changes. +- **Board in trash:** Returns 404, handled by the board config 404 path. +- **Permissions revoked after config written:** Returns 404 (per JRACLOUD-97947), handled by the board config 404 path. + +## Testing + +1. **Unit test: board config 404** — Mock `get_board_config` returning `JrError::ApiError { status: 404, .. }`. Verify error message contains board ID and suggests removing `board_id` from config. +2. **Unit test: board config other error** — Mock `get_board_config` returning a generic error. Verify error propagates with "Failed to fetch config for board" context. +3. **Unit test: list_sprints error** — Mock `list_sprints` returning an error on a scrum board. Verify error propagates with context and suggests `--jql`. +4. **Unit test: list_sprints empty (no active sprint)** — Mock scrum board with empty sprint list. Verify fallback to project-scoped JQL (existing behavior preserved). +5. **Existing tests** — All current tests continue to pass since no happy-path behavior changes. 
+ +## Scope + +- **Files modified:** `src/cli/issue/list.rs` (the two match arms, ~20 lines changed) +- **No new dependencies.** +- **No new CLI flags or config options.** +- **No changes to API client layer.** diff --git a/docs/superpowers/specs/2026-04-03-issue-create-url-design.md b/docs/superpowers/specs/2026-04-03-issue-create-url-design.md new file mode 100644 index 0000000..355ae76 --- /dev/null +++ b/docs/superpowers/specs/2026-04-03-issue-create-url-design.md @@ -0,0 +1,112 @@ +# Design: Add browse URL to `issue create` output + +**Issue:** [#112](https://github.com/Zious11/jira-cli/issues/112) +**Date:** 2026-04-03 +**Status:** Draft + +## Problem + +`jr issue create` table output prints `Created issue PROJ-123` but does not include the Jira browse URL. The `gh` CLI prints the full URL after creating an issue, which is useful for both humans (clickable link) and AI agents (direct URL without constructing it from config). + +The original issue reported the key wasn't visible, but investigation confirmed the key is already displayed. The real gap is the missing browse URL. + +JSON output currently returns only `{"key": "PROJ-123"}` — also missing the URL. + +## Approach + +Construct the browse URL from `client.instance_url()` + `/browse/` + key, and add it to both table and JSON output. This is a minimal change to `handle_create` in `src/cli/issue/create.rs`. + +`instance_url()` is used instead of `base_url()` because the browse URL must point to the real Jira instance, not the OAuth proxy (which `base_url()` may point to for OAuth users). + +## Design + +### Change 1: Table output — add URL on second line + +Current: +``` +Created issue PROJ-123 +``` + +After: +``` +Created issue PROJ-123 +https://mycompany.atlassian.net/browse/PROJ-123 +``` + +The URL is printed as plain text (not green) so terminal link detection works and it's clickable. 
+ +In `src/cli/issue/create.rs`, the `OutputFormat::Table` arm changes from: + +```rust +output::print_success(&format!("Created issue {}", response.key)); +``` + +To: + +```rust +let url = format!( + "{}/browse/{}", + client.instance_url().trim_end_matches('/'), + response.key +); +output::print_success(&format!("Created issue {}", response.key)); +println!("{}", url); +``` + +### Change 2: JSON output — add `url` field + +Current: +```json +{ + "key": "PROJ-123" +} +``` + +After: +```json +{ + "key": "PROJ-123", + "url": "https://mycompany.atlassian.net/browse/PROJ-123" +} +``` + +In `src/cli/issue/create.rs`, the `OutputFormat::Json` arm changes from: + +```rust +println!("{}", serde_json::to_string_pretty(&response)?); +``` + +To: + +```rust +let url = format!( + "{}/browse/{}", + client.instance_url().trim_end_matches('/'), + response.key +); +let json_response = serde_json::json!({ + "key": response.key, + "url": url, +}); +println!("{}", serde_json::to_string_pretty(&json_response)?); +``` + +## What stays the same + +- `handle_edit` output — unchanged (user already knows the issue key when editing) +- `CreateIssueResponse` struct — unchanged (URL is constructed, not deserialized) +- `output::print_success` — unchanged +- All other commands — unchanged + +## Testing + +One integration test added to `tests/issue_commands.rs` that validates the building blocks at the API layer: +1. Mounts a wiremock mock for `POST /rest/api/3/issue` returning `{"key": "URL-1"}` +2. Calls `JiraClient::create_issue` directly and verifies the key is returned +3. Verifies the browse URL can be constructed from `client.instance_url()` and the key + +This follows the codebase convention of testing the API layer via wiremock and verifying CLI output formatting via live testing. 
+ +## Files modified + +- `src/cli/issue/create.rs` — table and JSON output in `handle_create` (~6 lines changed) diff --git a/docs/superpowers/specs/2026-04-03-partial-match-duplicate-names-design.md b/docs/superpowers/specs/2026-04-03-partial-match-duplicate-names-design.md new file mode 100644 index 0000000..29c7d1d --- /dev/null +++ b/docs/superpowers/specs/2026-04-03-partial-match-duplicate-names-design.md @@ -0,0 +1,191 @@ +# Design: Fix partial_match duplicate name disambiguation + +**Issues:** [#117](https://github.com/Zious11/jira-cli/issues/117), [#122](https://github.com/Zious11/jira-cli/issues/122) +**Date:** 2026-04-03 +**Status:** Draft + +## Problem + +`partial_match` returns `Exact(String)` on the first case-insensitive exact match, stopping iteration. When multiple candidates share the same name (e.g., two Jira users named "John Smith"), the function silently returns the first one. All callers then use `.find(|u| u.display_name == matched_name)` to map the name back to an object, which again picks the first match. + +Jira display names are not unique. Atlassian confirmed this is by design (JSDCLOUD-10963, Won't Fix). Two users in the same project can have identical display names with different accountIds. The current code can assign work to the wrong person with no warning. + +The `Ambiguous` branch has the same problem: after interactive `dialoguer::Select`, the selected name is mapped back via `.find()`, which picks the first user with that name — potentially the wrong one if two users share both the matched substring and the display name. + +## Scope + +This fix covers two layers: + +1. **`partial_match` module** — detect and report duplicate exact matches via a new `ExactMultiple` variant. +2. **User-resolution callers** (`resolve_user`, `resolve_assignee`, `resolve_assignee_by_project`, `resolve_team_field`) — handle `ExactMultiple` with disambiguation, and fix the index-based mapping bug in `Exact` and `Ambiguous` branches. 
+ +Non-user callers (statuses, transitions, link types, queue names, asset types) add a trivial match arm. Duplicates are not realistic for these domains (names are unique within their scope), but the compiler enforces exhaustive matching. + +Issue #123 (extract shared disambiguation logic) is a follow-up refactor, not part of this fix. + +## Design + +### Change 1: New `ExactMultiple` variant in `MatchResult` + +Add `ExactMultiple(Vec<String>)` to the enum: + +```rust +pub enum MatchResult { + Exact(String), + ExactMultiple(Vec<String>), + Ambiguous(Vec<String>), + None(Vec<String>), +} +``` + +`ExactMultiple` contains all candidates that matched exactly (case-insensitive). The vec is never empty and always has length >= 2. Names in the vec are the original (not lowercased) candidate strings, so entries may differ in casing even though they all match the same input exactly under case-insensitive comparison (e.g., `["John Smith", "john smith"]`). Callers must not assume the strings in this vec are byte-for-byte identical. + +### Change 2: Update `partial_match()` logic + +Replace the early-return exact match loop with a collecting loop: + +```rust +let exact_matches: Vec<String> = candidates + .iter() + .filter(|c| c.to_lowercase() == lower_input) + .cloned() + .collect(); + +match exact_matches.len() { + 0 => { /* fall through to substring matching (unchanged) */ } + 1 => return MatchResult::Exact(exact_matches.into_iter().next().unwrap()), + _ => return MatchResult::ExactMultiple(exact_matches), +} +``` + +Substring matching logic is unchanged. + +### Change 3: Non-user callers — trivial `ExactMultiple` arm + +Every caller that matches on `MatchResult` adds: + +```rust +MatchResult::ExactMultiple(names) => { + names.into_iter().next().unwrap() +} +``` + +This preserves existing behavior (take first) for domains where duplicates don't occur.
Affected callers: + +- `src/cli/issue/workflow.rs` — transition name matching +- `src/cli/issue/list.rs` — status filter matching +- `src/cli/issue/links.rs` — link type matching (2 call sites) +- `src/cli/assets.rs` — asset status, schema, type matching (3 call sites) +- `src/cli/queue.rs` — queue name matching (2 call sites) + +### Change 4: User-resolution callers — duplicate disambiguation + +For `resolve_user`, `resolve_assignee`, `resolve_assignee_by_project`, and `resolve_team_field`: + +**`ExactMultiple` handling:** + +Collect all objects whose display name matches (there will be >= 2). Then: + +- **`--no-input` mode:** Bail with an error listing each duplicate with its accountId (or teamId for teams): + ``` + Multiple users named "John Smith" found: + John Smith (account: abc123) + John Smith (account: def456) + Specify the accountId directly or use a more specific name. + ``` + +- **Interactive mode:** Present a `dialoguer::Select` with disambiguating labels. Use email if available, accountId as fallback: + ``` + Multiple users named "John Smith": + > John Smith (john.smith@acme.com) + John Smith (jsmith@other.org) + ``` + Map the selection back by **index into the filtered duplicates list**, not by name. + +**Fix existing `Exact` and `Ambiguous` branches:** + +Replace `.find(|u| u.display_name == matched_name)` with index-based lookup. For `Exact`: find the index of the first user whose display name matches, then index into the users vec. For `Ambiguous` after `dialoguer::Select`: the selection index maps to the `matches` vec, which maps back to the original users vec by searching for users whose display name matches `matches[selection]`. Use position-aware iteration to get the correct user even when names collide. 
+ +Concretely, the `Ambiguous` interactive branch changes from: + +```rust +// BEFORE — broken on duplicate names +let selected_name = &matches[selection]; +let user = users.iter().find(|u| &u.display_name == selected_name).unwrap(); +``` + +To index-based mapping: + +```rust +// AFTER — find the Nth user whose display name is in the ambiguous set +let selected_name = &matches[selection]; +let matching_users: Vec<&User> = users + .iter() + .filter(|u| u.display_name == *selected_name) + .collect(); +// If only one user has this name, take it. If multiple, we need secondary disambiguation. +// But this scenario (ambiguous substring match + duplicate names among the filtered set) +// is extremely unlikely. For now, take first — the ExactMultiple path handles the +// realistic duplicate-name case. +let user = matching_users[0]; +``` + +The `Exact` branch is the more important fix since it's the realistic path for duplicate names. + +### Change 5: Team disambiguation + +`resolve_team_field` follows the same pattern as user resolution but uses `team.id` and `team.name` for disambiguation labels. The Team struct does not have an email field, so labels use team ID only: `"Alpha Team (team-uuid-alpha)"`. + +## What stays the same + +- Substring matching logic in `partial_match` — unchanged. +- `Ambiguous` and `None` variant semantics — unchanged. +- Non-user callers — behavior unchanged (take first on `ExactMultiple`). +- `--no-input` behavior for `Ambiguous` matches — already bails with error, unchanged. +- The `is_me_keyword` shortcut — unchanged. +- API layer — no changes to search endpoints. + +## Error message examples + +``` +# --no-input mode, two users with same display name +Error: Multiple users named "John Smith" found: + John Smith (account: abc123) + John Smith (account: def456) +Specify the accountId directly or use a more specific name. 
+ +# --no-input mode, two teams with same name +Error: Multiple teams named "Platform" found: + Platform (id: team-uuid-1) + Platform (id: team-uuid-2) +Use a more specific name. +``` + +## Edge cases + +- **Email field missing:** Jira does not guarantee `emailAddress` is present (privacy settings, managed accounts). Fall back to accountId for disambiguation labels. +- **Three or more duplicates:** The design handles any count >= 2. The `dialoguer::Select` list and error messages scale naturally. +- **Exact match on name that also appears as substring of another name:** `partial_match` checks exact matches first, so `ExactMultiple` is returned before substring matching runs. No interaction. +- **Single user returned by API:** The `users.len() == 1` early return fires before `partial_match` is called. No change needed. +- **Mixed active/inactive users with same name:** `resolve_user` filters to active users before calling `partial_match`. `resolve_assignee` and `resolve_assignee_by_project` use Jira's assignable-user endpoints which already filter. No change needed. + +## Testing + +1. **Unit test: `partial_match` with duplicate exact candidates** — Two identical strings in candidates, verify `ExactMultiple` returned with both. +2. **Unit test: `partial_match` with unique exact match** — Verify `Exact` still returned (regression guard). +3. **Proptest: duplicate candidates always yield `ExactMultiple`** — Generate candidate lists with intentional duplicates. +4. **Integration test: `resolve_assignee` with duplicate display names in `--no-input` mode** — wiremock returns two users with same `displayName` but different `accountId`. Verify CLI exits with error containing both accountIds. +5. **Integration test: `resolve_user` with duplicate display names in `--no-input` mode** — Same pattern for JQL user resolution. +6. **Existing tests:** All current `partial_match` tests continue to pass since no existing test has duplicate candidates. 
+ +## Files modified + +- `src/partial_match.rs` — new variant + logic change (~15 lines) +- `src/cli/issue/helpers.rs` — `ExactMultiple` handling + index-based mapping fix in all 4 resolve functions (~60 lines) +- `src/cli/issue/workflow.rs` — trivial `ExactMultiple` arm (~3 lines) +- `src/cli/issue/list.rs` — trivial `ExactMultiple` arm (~3 lines) +- `src/cli/issue/links.rs` — trivial `ExactMultiple` arm x2 (~6 lines) +- `src/cli/assets.rs` — trivial `ExactMultiple` arm x3 (~9 lines) +- `src/cli/queue.rs` — trivial `ExactMultiple` arm x2 (~6 lines) + +No new dependencies. No new CLI flags or config options. diff --git a/docs/superpowers/specs/2026-04-03-queue-case-insensitive-test-design.md b/docs/superpowers/specs/2026-04-03-queue-case-insensitive-test-design.md new file mode 100644 index 0000000..30c8631 --- /dev/null +++ b/docs/superpowers/specs/2026-04-03-queue-case-insensitive-test-design.md @@ -0,0 +1,50 @@ +# Design: Add case-insensitive duplicate queue name integration test + +**Issue:** [#131](https://github.com/Zious11/jira-cli/issues/131) +**Date:** 2026-04-03 +**Status:** Draft + +## Problem + +The integration test `resolve_queue_duplicate_names_error_message` in `tests/queue.rs` (added in #129) mocks two queues both named `"Triage"` — identical casing. It exercises the `ExactMultiple` path in `partial_match` but does not exercise the case-insensitive `to_lowercase()` filter in `resolve_queue_by_name` at `src/cli/queue.rs:155-158`. + +A developer could remove the `to_lowercase()` call and the existing test would still pass, since both mock queue names are already identical. + +## Approach + +Add one integration test with mixed-case queue names and a lowercase user input to exercise both sides of the `to_lowercase()` comparison. + +Perplexity and Context7 searches found no authoritative documentation on whether JSM enforces case-insensitive queue name uniqueness. 
The `to_lowercase()` logic already exists in production as defensive code — this test ensures it stays working. + +## Design + +### Test: `resolve_queue_mixed_case_duplicate_names_error_message` + +Added to `tests/queue.rs` alongside the existing same-casing test. + +**Mock setup:** Two queues with different casing — `"Triage"` (ID `"30"`) and `"TRIAGE"` (ID `"40"`) — in service desk `"15"`. + +**Call:** `resolve_queue_by_name("15", "triage", &client)` — lowercase input that matches neither stored name exactly, forcing both sides of `to_lowercase()` to do work. + +**Assertions:** +1. The call returns an error (not a successful queue ID) +2. Error message contains `Multiple queues named` with the matched name +3. Error message contains both queue IDs (`30, 40`) +4. Error message contains the `--id` suggestion + +This mirrors the structure of the existing `resolve_queue_duplicate_names_error_message` test but with mixed casing. + +## What stays the same + +- No production code changes — `resolve_queue_by_name` already handles this correctly +- Existing `resolve_queue_duplicate_names_error_message` test unchanged (covers same-casing path) +- `partial_match` module unchanged (has its own unit-level case-insensitive tests) +- `resolve_queue_by_name` visibility unchanged (`pub(crate)` since #129) + +## Testing + +One integration test added to `tests/queue.rs`. No other test changes. 
+ +## Files modified + +- `tests/queue.rs` — append one test function (~30 lines) diff --git a/docs/superpowers/specs/2026-04-03-simplify-exact-multiple-design.md b/docs/superpowers/specs/2026-04-03-simplify-exact-multiple-design.md new file mode 100644 index 0000000..d3ef7a3 --- /dev/null +++ b/docs/superpowers/specs/2026-04-03-simplify-exact-multiple-design.md @@ -0,0 +1,194 @@ +# Design: Simplify ExactMultiple variant and replace unreachable arms + +**Issues:** [#126](https://github.com/Zious11/jira-cli/issues/126), [#127](https://github.com/Zious11/jira-cli/issues/127) +**Date:** 2026-04-03 +**Status:** Draft + +## Problem + +`MatchResult::ExactMultiple(Vec<String>)` carries a Vec that no caller actually needs: + +- 4 user-resolution callers (`helpers.rs`) ignore the Vec with `_` and re-query the original data source by lowercased name. +- 6 non-user callers have provably unreachable `ExactMultiple` arms (candidates are pre-deduplicated upstream) but silently take first with `names.into_iter().next().unwrap()`. +- 2 callers (`queue.rs`, `resolve_schema` in `assets.rs`) use `names.contains()` for filtering, but since the Vec only holds duplicate copies of the same case-insensitive string, this is equivalent to a lowercased comparison. + +The Vec contains near-zero unique information — just duplicate copies of the matched name, possibly differing in casing. A single `String` is sufficient. The silent take-first fallback in 6 unreachable arms hides invariant violations. + +Validated with Perplexity: simplifying to `ExactMultiple(String)` aligns with Rust's principle of minimal payloads matching actual usage. `unreachable!()` is idiomatic for provably impossible match arms. + +## Scope + +Two changes, one PR: + +1. **Simplify variant** — `ExactMultiple(Vec<String>)` to `ExactMultiple(String)` (#127) +2.
**Replace silent take-first arms** — all 6 arms use graceful fallback (treat `ExactMultiple` like `Exact`) for consistency (#126) + +Plus cleanup of one dead code path in `queue.rs`. + +## Design + +### Change 1: Simplify `ExactMultiple` variant + +```rust +// Before +ExactMultiple(Vec<String>), + +// After +ExactMultiple(String), +``` + +The `String` is one representative matched name (the first exact match found, preserving original casing). + +Construction in `partial_match()` changes from: + +```rust +n if n > 1 => return MatchResult::ExactMultiple(exact_matches), +``` + +To: + +```rust +n if n > 1 => return MatchResult::ExactMultiple(exact_matches.into_iter().next().unwrap()), +``` + +### Change 2: Replace 6 silent take-first arms + +All 6 sites use graceful fallback — treat `ExactMultiple` like `Exact` (take the representative name). Although `workflow.rs` has code-enforced case-insensitive dedup making `ExactMultiple` provably unreachable there, we use the same pattern everywhere for consistency and readability. + +```rust +// Treat like Exact — take the representative name +MatchResult::ExactMultiple(name) => name, +``` + +| File | Line | Dedup mechanism | +|------|------|-----------------| +| `src/cli/issue/workflow.rs` | ~143 | `HashSet` with `to_lowercase()` keys (case-insensitive) | +| `src/cli/issue/list.rs` | ~181 | `HashSet` / `sort()+dedup()` (case-sensitive) | +| `src/cli/issue/links.rs` | ~64 | Link types unique per Jira API (no code dedup) | +| `src/cli/issue/links.rs` | ~136 | Link types unique per Jira API (no code dedup) | +| `src/cli/assets.rs` | ~334 | `HashSet` (case-sensitive) | +| `src/cli/assets.rs` | ~668 | `.sort(); .dedup()` (case-sensitive) | + +### Change 3: Update 2 filtering callers + +**`resolve_schema` (`assets.rs:~459`):** Change from `names.contains(&s.name)` to `s.name.to_lowercase() == input.to_lowercase()`. The `input` variable is already in scope (function parameter).
+ +Before: +```rust +MatchResult::ExactMultiple(names) => { + let duplicates: Vec = schemas + .iter() + .filter(|s| names.contains(&s.name)) + .map(|s| format!("{} (id: {})", s.name, s.id)) + .collect(); + // ...error with duplicates +} +``` + +After: +```rust +MatchResult::ExactMultiple(name) => { + let input_lower = input.to_lowercase(); + let duplicates: Vec = schemas + .iter() + .filter(|s| s.name.to_lowercase() == input_lower) + .map(|s| format!("{} (id: {})", s.name, s.id)) + .collect(); + // ...error with duplicates (unchanged message format) +} +``` + +**`resolve_queue_id` (`queue.rs:~165`):** Same pattern. + +Before: +```rust +MatchResult::ExactMultiple(names) => { + let matching: Vec<&Queue> = + queues.iter().filter(|q| names.contains(&q.name)).collect(); + let ids: Vec = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). Use --id {} to specify.", + names[0], ids.join(", "), ids[0] + )).into()) +} +``` + +After: +```rust +MatchResult::ExactMultiple(matched_name) => { + let name_lower = name.to_lowercase(); + let matching: Vec<&Queue> = + queues.iter().filter(|q| q.name.to_lowercase() == name_lower).collect(); + let ids: Vec = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). Use --id {} to specify.", + matched_name, ids.join(", "), ids[0] + )).into()) +} +``` + +Note: `name` (the function parameter / original user input) is used for lowercased comparison. `matched_name` (from the variant) is used for display. + +### Change 4: Remove dead code in queue.rs Exact branch + +The `matching.len() > 1` check in the `Exact` branch (`queue.rs:~152`) is now dead — `ExactMultiple` catches duplicate candidate strings before `Exact` fires. Remove the branch, leaving just `Ok(matching[0].id.clone())`. + +This also applies to the `find_queue_id` test helper which mirrors the production code. 
+ +### Change 5: Simplify helpers.rs arms + +The 4 user-resolution callers already use `ExactMultiple(_)` (ignoring the Vec). The destructuring pattern stays the same — no functional change. They continue filtering from the original data source using `name.to_lowercase()`. + +### Change 6: Update tests + +**`partial_match.rs` unit tests:** Assert on `String` instead of `Vec`: + +```rust +// Before +MatchResult::ExactMultiple(names) => { + assert_eq!(names.len(), 2); + assert!(names.iter().all(|n| n == "John Smith")); +} + +// After +MatchResult::ExactMultiple(name) => { + assert_eq!(name, "John Smith"); +} +``` + +The `test_exact_match_duplicate_case_insensitive` test changes to verify the representative name (first match), not all casing variants. + +The `test_exact_match_three_duplicates` test simplifies — it can no longer assert on count (the count is not in the variant). It verifies that `ExactMultiple` fires and contains a representative name. + +**`partial_match.rs` proptest:** `duplicate_candidates_yield_exact_multiple` simplifies — assert the name matches case-insensitively instead of checking Vec length. + +**`queue.rs` test helper:** Update `ExactMultiple` arm from `names.len()` to a fixed error string. + +**Integration tests (`tests/duplicate_user_disambiguation.rs`):** No changes — they test CLI output (error messages), not variant internals. + +## What stays the same + +- All user-facing behavior: error messages, disambiguation prompts, exit codes +- `Exact`, `Ambiguous`, `None` variant semantics +- Substring matching logic in `partial_match` +- The 4 user-resolution callers in `helpers.rs` (functional behavior unchanged) +- Integration tests +- Unicode handling: `to_lowercase()` is used consistently with `partial_match` itself (Unicode case folding is a pre-existing concern, not introduced by this change) + +## Edge cases + +- **`unreachable!()` fires in production:** Means an upstream dedup was removed or broken. 
With the graceful-fallback design chosen in Change 2, this does not panic: the arm treats `ExactMultiple` like `Exact` and proceeds with the representative name, so behavior degrades safely instead of crashing.
+- **`resolve_schema` with case-differing duplicates:** Two schemas named "Assets" and "assets" would both match. The error message lists both with IDs — same behavior as before.
+- **Queue with case-differing duplicate names:** Same as schema — error lists all matching queues with IDs.
+
+## Files modified
+
+- `src/partial_match.rs` — variant change + construction + test updates (~20 lines)
+- `src/cli/issue/workflow.rs` — graceful fallback arm (~8 lines removed, 3 added)
+- `src/cli/issue/list.rs` — graceful fallback arm (~3 lines changed)
+- `src/cli/issue/links.rs` — graceful fallback arm x2 (~6 lines changed)
+- `src/cli/assets.rs` — graceful fallback arm x2 + `resolve_schema` filter change (~10 lines changed)
+- `src/cli/queue.rs` — filter change + dead code removal (~15 lines changed)
+- `src/cli/issue/helpers.rs` — no functional change (destructuring stays `_`)
+
+No new dependencies. No new CLI flags or config options.
diff --git a/docs/superpowers/specs/2026-04-04-issue-assign-account-id-design.md b/docs/superpowers/specs/2026-04-04-issue-assign-account-id-design.md
new file mode 100644
index 0000000..d403315
--- /dev/null
+++ b/docs/superpowers/specs/2026-04-04-issue-assign-account-id-design.md
@@ -0,0 +1,160 @@
+# Design: Accept accountId as fallback for issue assign and create
+
+**Issue:** [#115](https://github.com/Zious11/jira-cli/issues/115)
+**Date:** 2026-04-04
+**Status:** Draft
+
+## Problem
+
+When `--to` name search fails (ambiguous names, deactivated users, API quirks), there is no way to pass a known Jira accountId directly. The error messages already say "Specify the accountId directly" but no flag exists to do so.
+
+This is especially painful for AI agents and scripts that already have an accountId from another API response (e.g., extracted from an issue's assignee field) and want to bypass name search entirely.
+ +## Approach + +Add `--account-id` as a mutually exclusive alternative to `--to` on both `issue assign` and `issue create`. When provided, skip all user search/disambiguation logic and pass the accountId straight to the Jira API. + +**Why mutually exclusive flags instead of auto-detection:** Jira accountIds are opaque strings with no documented stable format. Auto-detecting whether a `--to` value is a name or an accountId would be fragile and could misinterpret display names as IDs. Popular CLIs (kubectl, docker) auto-detect because their ID formats are well-defined; Jira's are not. A separate flag is explicit and unambiguous. + +**Why not client-side validation:** The Jira API returns clear errors for invalid accountIds — 404 on the assign endpoint ("The user with account ID '...' does not exist") and 400 on the create endpoint ("User '...' cannot be assigned issues"). Client-side format validation would be redundant and could reject valid IDs if Atlassian changes the format. + +## Design + +### Change 1: Add `--account-id` flag to `Assign` variant + +In `src/cli/mod.rs`, add to the `Assign` variant: + +```rust +Assign { + /// Issue key + key: String, + /// Assign to this user (omit to assign to self) + #[arg(long, conflicts_with = "account_id")] + to: Option, + /// Assign to this Jira accountId directly (bypasses name search) + #[arg(long, conflicts_with_all = ["to", "unassign"])] + account_id: Option, + /// Remove assignee + #[arg(long)] + unassign: bool, +}, +``` + +### Change 2: Add `--account-id` flag to `Create` variant + +In `src/cli/mod.rs`, add to the `Create` variant: + +```rust +/// Assign to user (name/email, or "me" for self) +#[arg(long, conflicts_with = "account_id")] +to: Option, +/// Assign to this Jira accountId directly (bypasses name search) +#[arg(long, conflicts_with = "to")] +account_id: Option, +``` + +### Change 3: Branch on `account_id` in `handle_assign` + +In `src/cli/issue/workflow.rs`, the assignee resolution block changes from: + 
+```rust +let (account_id, display_name) = if let Some(ref user_query) = to { + helpers::resolve_assignee(client, user_query, &key, no_input).await? +} else { + let me = client.get_myself().await?; + (me.account_id, me.display_name) +}; +``` + +To: + +```rust +let (account_id, display_name) = if let Some(ref id) = account_id { + (id.clone(), id.clone()) +} else if let Some(ref user_query) = to { + helpers::resolve_assignee(client, user_query, &key, no_input).await? +} else { + let me = client.get_myself().await?; + (me.account_id, me.display_name) +}; +``` + +When `--account-id` is used, the display name is set to the accountId string itself since no user search is performed and we don't have a display name. + +### Change 4: Branch on `account_id` in `handle_create` + +In `src/cli/issue/create.rs`, the assignee block changes from: + +```rust +if let Some(ref user_query) = to { + let (account_id, _display_name) = + helpers::resolve_assignee_by_project(client, user_query, &project_key, no_input) + .await?; + fields["assignee"] = json!({"id": account_id}); +} +``` + +To: + +```rust +if let Some(ref id) = account_id { + fields["assignee"] = json!({"accountId": id}); +} else if let Some(ref user_query) = to { + let (acct_id, _display_name) = + helpers::resolve_assignee_by_project(client, user_query, &project_key, no_input) + .await?; + fields["assignee"] = json!({"accountId": acct_id}); +} +``` + +Note: This also fixes the existing `--to` path from `{"id": account_id}` to `{"accountId": acct_id}`, which is the documented Jira Cloud REST API v3 format (confirmed via Perplexity from Atlassian community sources). 
+ +### Output format + +**Table (assign with `--account-id`):** +``` +Assigned FOO-123 to 6279395793111000689f87d2 +``` + +**JSON (assign with `--account-id`):** +```json +{ + "key": "FOO-123", + "assignee": "6279395793111000689f87d2", + "assignee_account_id": "6279395793111000689f87d2", + "changed": true +} +``` + +Both `assignee` and `assignee_account_id` contain the accountId since no display name is available. This keeps the JSON schema consistent with the existing `--to` output shape. + +**Create output** is unchanged — it shows the issue key and browse URL, not the assignee. + +### Idempotent behavior + +The existing idempotent check in `handle_assign` compares `assignee.account_id == account_id`. This works identically whether the accountId came from `--to` resolution or `--account-id` — no changes needed. + +## What stays the same + +- `--to` behavior on both commands — unchanged +- `--unassign` behavior — unchanged +- Self-assign (no flags on `assign`) — unchanged +- `resolve_assignee`, `resolve_assignee_by_project` functions — unchanged +- `resolve_user` (used by `issue list --user`) — unchanged, separate concern +- `issue list --user` — unchanged (no `--account-id` for JQL filtering) +- Error messages — unchanged (Jira API provides clear errors for invalid accountIds: 404 on assign, 400 on create) + +## Testing + +Two integration tests added to `tests/issue_commands.rs`: + +1. **`test_assign_issue_with_account_id`**: Mount wiremock mocks for GET issue (to check current assignee) and PUT assignee. Call `handle_assign` with `account_id = Some("abc123")`. Verify the PUT request body contains `{"accountId": "abc123"}` and the output contains the accountId. + +2. **`test_create_issue_with_account_id`**: Mount wiremock mock for POST create issue. Call `handle_create` with `account_id = Some("abc123")`. Verify the request body's `fields.assignee` is `{"accountId": "abc123"}`. 
+ +## Files modified + +- `src/cli/mod.rs` — Add `account_id` field to `Assign` and `Create` variants (~4 lines each) +- `src/cli/issue/workflow.rs` — Branch on `account_id` in `handle_assign` (~3 lines added) +- `src/cli/issue/create.rs` — Branch on `account_id` in `handle_create`, fix `id` to `accountId` (~5 lines changed) +- `tests/issue_commands.rs` — Two integration tests (~60 lines total) diff --git a/docs/superpowers/specs/2026-04-05-date-filters-design.md b/docs/superpowers/specs/2026-04-05-date-filters-design.md new file mode 100644 index 0000000..57bb8f6 --- /dev/null +++ b/docs/superpowers/specs/2026-04-05-date-filters-design.md @@ -0,0 +1,121 @@ +# Date Filter Flags for `jr issue list` — Design Spec + +> **Issue:** #113 — `issue list: add --created-after and --created-before date filters` + +## Problem + +`jr issue list` supports `--recent 7d` for relative date filtering but not absolute date ranges. Users who need issues created or updated within a specific date range must use raw JQL: + +```bash +jr issue list --jql 'project = PROJ AND created >= "2026-03-18"' +``` + +This requires knowing JQL syntax, which is a knowledge barrier for common queries. + +## Solution + +Add four convenience flags that generate JQL date clauses: + +| Flag | JQL generated | Meaning | +|------|--------------|---------| +| `--created-after YYYY-MM-DD` | `created >= "YYYY-MM-DD"` | Issues created on or after this date | +| `--created-before YYYY-MM-DD` | `created < "YYYY-MM-DD+1"` | Issues created on or before this date | +| `--updated-after YYYY-MM-DD` | `updated >= "YYYY-MM-DD"` | Issues updated on or after this date | +| `--updated-before YYYY-MM-DD` | `updated < "YYYY-MM-DD+1"` | Issues updated on or before this date | + +### Operator semantics — the midnight problem + +JQL interprets date-only values as midnight (00:00:00). 
This creates a subtle trap: + +- `created >= "2026-03-18"` means "from midnight March 18 onwards" — **includes** all of March 18 ✅ +- `created <= "2026-03-18"` means "up to midnight March 18" — **excludes** issues created during March 18 ❌ + +To give users intuitive "on or before this date" behavior, the `--before` flags add one day and use `<`: + +- `--created-before 2026-03-18` generates `created < "2026-03-19"` — includes all of March 18 ✅ + +The `--after` flags use `>=` directly since midnight-of-date is the correct lower bound. + +### Date format + +Accept `YYYY-MM-DD` only (ISO 8601 calendar date). Jira JQL also accepts `YYYY/MM/DD` and optional `HH:MM` time, but we accept only the canonical format for simplicity and consistency. Users who need time precision or alternate formats can use `--jql`. + +### Validation + +Parse dates with `chrono::NaiveDate::parse_from_str(input, "%Y-%m-%d")` before sending to JQL. This catches: + +- Invalid format (e.g., `03-18-2026`, `2026/03/18`) +- Impossible dates (e.g., `2026-02-30`, `2026-13-01`) + +Add `validate_date(s: &str) -> Result` to `jql.rs`. Returns the parsed `NaiveDate` (needed by `--before` flags to compute +1 day). Validation happens early in `handle_list`, same pattern as `--recent`. + +### Flag conflicts + +| Flag | Conflicts with | +|------|---------------| +| `--created-after` | `--recent` (both set a lower bound on `created`) | +| `--created-before` | (none) | +| `--updated-after` | (none) | +| `--updated-before` | (none) | + +`--created-after` and `--created-before` do NOT conflict with each other — using both creates a date range. Same for the `--updated-*` pair. + +None of the date flags conflict with `--jql`. When combined, date clauses are AND'd with the user's JQL, same as all other filter flags. + +### JQL generation + +Each flag adds a clause via `build_filter_clauses` in `list.rs`. For `--after` flags, the clause is a simple string interpolation. 
For `--before` flags, the date is incremented by one day using `chrono::Days::new(1)` and formatted back to `YYYY-MM-DD`. + +### Composability + +All four flags combine freely with each other and with existing flags: + +```bash +# Date range +jr issue list --created-after 2026-03-01 --created-before 2026-03-31 + +# With other filters +jr issue list --created-after 2026-03-18 --assignee me --open + +# Updated date range +jr issue list --updated-after 2026-03-01 --updated-before 2026-04-01 --status "In Progress" +``` + +### Error messages + +**Invalid date format:** +``` +Invalid date "03-18-2026". Expected format: YYYY-MM-DD (e.g., 2026-03-18). +``` + +**Impossible date:** +``` +Invalid date "2026-02-30". Expected format: YYYY-MM-DD (e.g., 2026-03-18). +``` + +**Conflict with `--recent`:** +``` +error: the argument '--created-after ' cannot be used with '--recent ' +``` +(Clap's automatic conflict error message.) + +### Non-interactive / JSON output + +No interactive behavior. The flags are fully non-interactive — they take a value and generate JQL. No special JSON output handling needed; the flags only affect which issues are returned. 
+ +## Files changed + +| File | Change | +|------|--------| +| `src/cli/mod.rs` | Add 4 new args to `IssueCommand::List` with `conflicts_with` on `created_after` | +| `src/jql.rs` | Add `validate_date(s: &str) -> Result` | +| `src/cli/issue/list.rs` | Validate dates early, pass to `build_filter_clauses`, add 4 JQL clauses | +| `tests/cli_smoke.rs` | Smoke test for `--created-after`/`--recent` conflict | +| `tests/cli_handler.rs` | Handler test for date flags generating correct JQL | + +## Out of scope + +- Time-of-day precision (`--created-after "2026-03-18 14:30"`) — use `--jql` +- `YYYY/MM/DD` format — use `--jql` +- Relative date expressions in these flags (e.g., `--created-after "2 weeks ago"`) — use `--recent` +- `startOfDay()` / `endOfDay()` JQL functions — the +1 day approach is simpler and equivalent diff --git a/docs/superpowers/specs/2026-04-05-handler-tests-me-keyword-design.md b/docs/superpowers/specs/2026-04-05-handler-tests-me-keyword-design.md new file mode 100644 index 0000000..0dbf356 --- /dev/null +++ b/docs/superpowers/specs/2026-04-05-handler-tests-me-keyword-design.md @@ -0,0 +1,85 @@ +# Handler-Level Tests for --to me Keyword and Idempotent Name Resolution + +**Issue:** #148 +**Date:** 2026-04-05 + +## Goal + +Add handler-level integration tests to `tests/cli_handler.rs` covering three untested sub-paths in the assign and create command handlers: + +1. `--to me` in assign (resolves via `get_myself()` through `resolve_assignee`) +2. `--to me` in create (resolves via `get_myself()` through `resolve_assignee_by_project`) +3. Idempotent assign when account ID comes from name search (not `--account-id`) + +## Background + +The existing handler-level tests (added in the PR for #139) cover the main branching logic but miss these sub-paths. Each exercises a distinct code path that could regress independently: + +- `test_handler_assign_self` tests the **no-flag default** path: handler calls `get_myself()` directly. 
But `--to me` takes a **different path**: handler passes "me" to `resolve_assignee`, which checks `is_me_keyword`, then calls `get_myself()`. Same result, different code path. +- `test_handler_assign_idempotent` tests idempotency with `--account-id` (no user resolution). But idempotency with `--to Jane` exercises the search→resolve→idempotent-check sequence, which could fail if the resolved account ID doesn't match correctly. + +## Tests + +### Test 1: `test_handler_assign_to_me` + +**Code path:** `handle_assign` → `--to` present → `resolve_assignee("me", "HDL-6")` → `is_me_keyword` returns true → `client.get_myself()` → idempotent check → `assign_issue` → `assign_changed_response` + +**Mocks:** +- `GET /rest/api/3/myself` → `user_response()` (accountId "abc123", displayName "Test User") +- `GET /rest/api/3/issue/HDL-6` → unassigned issue +- `PUT /rest/api/3/issue/HDL-6/assignee` with `{"accountId": "abc123"}` → 204 + +**Command:** `issue assign HDL-6 --to me` + +**Assertions:** +- Exit success +- `"changed": true` +- `"assignee": "Test User"` +- `"assignee_account_id": "abc123"` + +### Test 2: `test_handler_create_to_me` + +**Code path:** `handle_create` → `--to` present → `resolve_assignee_by_project("me", "HDL")` → `is_me_keyword` returns true → `client.get_myself()` → sets `fields["assignee"]` → POST create + +**Mocks:** +- `GET /rest/api/3/myself` → `user_response()` (accountId "abc123") +- `POST /rest/api/3/issue` with `body_partial_json` verifying `fields.assignee.accountId == "abc123"` → 201 with `create_issue_response("HDL-200")` + +**Command:** `issue create -p HDL -t Task -s "Created with --to me" --to me` + +**Assertions:** +- Exit success +- `"key": "HDL-200"` in output + +### Test 3: `test_handler_assign_idempotent_with_name_search` + +**Code path:** `handle_assign` → `--to` present → `resolve_assignee("Jane", "HDL-7")` → search → single result (acc-jane-456) → idempotent check → issue already assigned to acc-jane-456 → early return with `changed: 
false` + +**Mocks:** +- `GET /rest/api/3/user/assignable/search` with `query=Jane`, `issueKey=HDL-7` → single result `[{accountId: "acc-jane-456", displayName: "Jane Doe"}]` +- `GET /rest/api/3/issue/HDL-7` → assigned to `("acc-jane-456", "Jane Doe")` +- `PUT /rest/api/3/issue/HDL-7/assignee` with `.expect(0)` — must NOT be called + +**Command:** `issue assign HDL-7 --to Jane` + +**Assertions:** +- Exit success +- `"changed": false` + +## Fixtures + +No new fixtures needed. All tests reuse existing helpers: +- `user_response()` — for `/myself` endpoint +- `issue_response_with_assignee(key, summary, assignee_opt)` — for issue GET +- `user_search_response(vec![...])` — for assignable search +- `create_issue_response(key)` — for create POST + +## Issue Keys + +Continue the existing HDL sequence: HDL-6, HDL-7, HDL-200 (200 for create to avoid confusion with assign keys). + +## Out of Scope + +- Negative assertions on bypassed search endpoints for `--to me` tests (Perplexity validated: over-testing implementation details) +- Testing `is_me_keyword` unit behavior (already covered by unit tests in `helpers.rs`) +- Create command idempotency (create always creates a new issue — no idempotent check exists) diff --git a/docs/superpowers/specs/2026-04-05-snapshot-tests-json-output-design.md b/docs/superpowers/specs/2026-04-05-snapshot-tests-json-output-design.md new file mode 100644 index 0000000..fc351fa --- /dev/null +++ b/docs/superpowers/specs/2026-04-05-snapshot-tests-json-output-design.md @@ -0,0 +1,195 @@ +# Snapshot Tests for Write Command JSON Output Schemas + +**Issue:** #135 +**Date:** 2026-04-05 + +## Goal + +Protect `--output json` schemas on write commands from accidental drift by extracting inline `json!({...})` construction into named builder functions and pinning them with `insta::assert_json_snapshot!` tests. 
+ +## Background + +Write command handlers (move, assign, edit, link, unlink, sprint add/remove) construct JSON output inline using `serde_json::json!({...})`. These schemas have no compile-time enforcement — a typo or field rename silently changes the contract for downstream consumers. Snapshot tests catch this: any schema change requires an explicit `cargo insta review` approval. + +Commands that serialize full API response structs (create, comment, worklog) are excluded — their schemas are already enforced by the struct definition. + +## Approach + +**Extract + snapshot.** Move inline `json!()` calls into pure builder functions, call them from handlers, snapshot-test the builders directly. + +This was validated with Perplexity as idiomatic Rust for CLI JSON schema protection. It complements the existing handler-level integration tests in `cli_handler.rs` which test the full request/response path. + +### Why not handler-level snapshot tests? + +Handler tests require async runtime + wiremock mocks for every test. The JSON builders are pure functions — testing them directly is faster, simpler, and more focused. The handler tests already cover the wiring. 
+ +## File Structure + +### New files + +- `src/cli/issue/json_output.rs` — Builder functions for issue command JSON responses + snapshot tests +- `src/cli/issue/snapshots/` — insta snapshot files for issue command tests + +### Modified files + +- `src/cli/issue/mod.rs` — Add `mod json_output;` +- `src/cli/issue/workflow.rs` — Replace inline `json!()` in `handle_move` and `handle_assign` with calls to `json_output::*` +- `src/cli/issue/create.rs` — Replace inline `json!()` in `handle_edit` with call to `json_output::edit_response` +- `src/cli/issue/links.rs` — Replace inline `json!()` in `handle_link` and `handle_unlink` with calls to `json_output::*` +- `src/cli/sprint.rs` — Replace inline `json!()` in `handle_add` and `handle_remove` with calls to sprint response builders (defined in same file or a small helper) + +## Builder Functions + +All functions return `serde_json::Value`. + +### Issue commands (`src/cli/issue/json_output.rs`) + +```rust +pub(crate) fn move_response(key: &str, status: &str, changed: bool) -> Value + +pub(crate) fn assign_changed_response(key: &str, display_name: &str, account_id: &str) -> Value + +pub(crate) fn assign_unchanged_response(key: &str, display_name: &str, account_id: &str) -> Value + +pub(crate) fn unassign_response(key: &str) -> Value + +pub(crate) fn edit_response(key: &str) -> Value + +pub(crate) fn link_response(key1: &str, key2: &str, link_type: &str) -> Value + +pub(crate) fn unlink_response(unlinked: bool, count: usize) -> Value +``` + +### Sprint commands (inline in `src/cli/sprint.rs`) + +```rust +fn sprint_add_response(sprint_id: u64, issues: &[String]) -> Value + +fn sprint_remove_response(issues: &[String]) -> Value +``` + +Sprint builders stay in `sprint.rs` as private functions with `#[cfg(test)]`-gated snapshot tests at the bottom of the file, following the pattern used by `src/adf.rs`. 
+ +## Snapshot Test Pattern + +Each builder gets one snapshot test with representative values: + +```rust +#[cfg(test)] +mod tests { + use super::*; + use insta::assert_json_snapshot; + + #[test] + fn test_move_response_changed() { + assert_json_snapshot!(move_response("TEST-1", "In Progress", true)); + } + + #[test] + fn test_move_response_unchanged() { + assert_json_snapshot!(move_response("TEST-1", "Done", false)); + } +} +``` + +No redactions needed — all inputs are deterministic test values. + +Snapshot files land in `src/cli/issue/snapshots/` (insta auto-creates this directory based on the source file location). + +## Schemas + +### `move_response` +```json +{ + "key": "TEST-1", + "status": "In Progress", + "changed": true +} +``` + +### `assign_changed_response` +```json +{ + "key": "TEST-1", + "assignee": "Jane Doe", + "assignee_account_id": "abc123", + "changed": true +} +``` + +### `assign_unchanged_response` +```json +{ + "key": "TEST-1", + "assignee": "Jane Doe", + "assignee_account_id": "abc123", + "changed": false +} +``` + +### `unassign_response` +```json +{ + "key": "TEST-1", + "assignee": null, + "changed": true +} +``` + +### `edit_response` +```json +{ + "key": "TEST-1", + "updated": true +} +``` + +### `link_response` +```json +{ + "key1": "TEST-1", + "key2": "TEST-2", + "type": "Blocks", + "linked": true +} +``` + +### `unlink_response` (success) +```json +{ + "unlinked": true, + "count": 2 +} +``` + +### `unlink_response` (no match) +```json +{ + "unlinked": false, + "count": 0 +} +``` + +### `sprint_add_response` +```json +{ + "sprint_id": 100, + "issues": ["TEST-1", "TEST-2"], + "added": true +} +``` + +### `sprint_remove_response` +```json +{ + "issues": ["TEST-1", "TEST-2"], + "removed": true +} +``` + +## Out of Scope + +- `issue create` JSON output — serializes the API response struct (`CreateIssueResponse`) with a `url` field appended. Schema is struct-enforced. +- `issue comment` JSON output — serializes the full `Comment` struct. 
Struct-enforced. +- `worklog add` JSON output — serializes the full `Worklog` struct. Struct-enforced. +- Read commands (list, view, etc.) — different concern, not requested. diff --git a/proptest-regressions/jql.txt b/proptest-regressions/jql.txt new file mode 100644 index 0000000..41d0c26 --- /dev/null +++ b/proptest-regressions/jql.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc c696552d795390c45278b7f3fe08317d68e81494e898dd32ba3a2c97f5dc7df5 # shrinks to s = "" diff --git a/src/api/assets/linked.rs b/src/api/assets/linked.rs new file mode 100644 index 0000000..8b6356d --- /dev/null +++ b/src/api/assets/linked.rs @@ -0,0 +1,556 @@ +use std::collections::HashMap; + +use anyhow::Result; +use serde_json::Value; + +use crate::api::assets::workspace::get_or_fetch_workspace_id; +use crate::api::client::JiraClient; +use crate::cache; +use crate::types::assets::LinkedAsset; + +/// Get CMDB fields (id, name pairs), using cache when available. +pub async fn get_or_fetch_cmdb_fields(client: &JiraClient) -> Result> { + if let Some(cached) = cache::read_cmdb_fields_cache()? { + return Ok(cached.fields); + } + + let fields = client.find_cmdb_fields().await?; + let _ = cache::write_cmdb_fields_cache(&fields); + Ok(fields) +} + +/// Convenience: extract just the field IDs from CMDB fields. +pub fn cmdb_field_ids(fields: &[(String, String)]) -> Vec { + fields.iter().map(|(id, _)| id.clone()).collect() +} + +/// Extract linked assets from issue extra fields using discovered CMDB field IDs. 
+pub fn extract_linked_assets( + extra: &HashMap, + cmdb_field_ids: &[String], +) -> Vec { + let mut assets = Vec::new(); + + for field_id in cmdb_field_ids { + let Some(value) = extra.get(field_id) else { + continue; + }; + if value.is_null() { + continue; + } + + match value { + Value::Array(arr) => { + for item in arr { + if let Some(asset) = parse_cmdb_value(item) { + assets.push(asset); + } + } + } + Value::Object(_) => { + if let Some(asset) = parse_cmdb_value(value) { + assets.push(asset); + } + } + Value::String(s) => { + assets.push(LinkedAsset { + name: Some(s.clone()), + ..Default::default() + }); + } + _ => {} + } + } + + assets +} + +fn parse_cmdb_value(value: &Value) -> Option { + let obj = value.as_object()?; + + let label = obj.get("label").and_then(|v| v.as_str()).map(String::from); + let object_key = obj + .get("objectKey") + .and_then(|v| v.as_str()) + .map(String::from); + let object_id = obj.get("objectId").and_then(|v| { + v.as_str() + .map(String::from) + .or_else(|| v.as_u64().map(|n| n.to_string())) + }); + let workspace_id = obj + .get("workspaceId") + .and_then(|v| v.as_str()) + .map(String::from); + + // Only create an asset if we got at least something useful. + if label.is_none() && object_key.is_none() && object_id.is_none() { + return None; + } + + Some(LinkedAsset { + key: object_key, + name: label, + asset_type: None, + id: object_id, + workspace_id, + }) +} + +/// Extract linked assets grouped by CMDB field, returning (field_name, assets) pairs. +/// Skips fields that have no linked assets on the issue. 
+pub fn extract_linked_assets_per_field( + extra: &HashMap, + cmdb_fields: &[(String, String)], +) -> Vec<(String, Vec)> { + let mut result = Vec::new(); + for (field_id, field_name) in cmdb_fields { + let assets = extract_linked_assets(extra, std::slice::from_ref(field_id)); + if !assets.is_empty() { + result.push((field_name.clone(), assets)); + } + } + result +} + +/// Inject enriched fields into a single JSON object from a `LinkedAsset`. +fn inject_asset_fields(obj: &mut serde_json::Map, asset: &LinkedAsset) { + if let Some(ref key) = asset.key { + obj.insert("objectKey".to_string(), Value::String(key.clone())); + } + if let Some(ref name) = asset.name { + obj.insert("label".to_string(), Value::String(name.clone())); + } + if let Some(ref asset_type) = asset.asset_type { + obj.insert("objectType".to_string(), Value::String(asset_type.clone())); + } +} + +/// Inject enriched asset data back into the issue's `fields.extra` HashMap. +/// +/// For each CMDB field, matches enriched `LinkedAsset` entries by position to the +/// original JSON elements and injects `objectKey`, `label`, and `objectType` +/// as additional fields (additive, does not remove existing fields). +/// +/// Handles both array-shaped fields (`[{...}, {...}]`) and single-object fields (`{...}`). 
+pub fn enrich_json_assets( + extra: &mut HashMap, + per_field: &[(String, Vec)], +) { + for (field_id, assets) in per_field { + if assets.is_empty() { + continue; + } + let Some(value) = extra.get_mut(field_id) else { + continue; + }; + + // Array shape: match assets by position + if let Some(arr) = value.as_array_mut() { + for (i, asset) in assets.iter().enumerate() { + if i >= arr.len() { + break; + } + if let Some(obj) = arr[i].as_object_mut() { + inject_asset_fields(obj, asset); + } + } + continue; + } + + // Single-object shape: enrich with first asset + if let Some(obj) = value.as_object_mut() { + inject_asset_fields(obj, &assets[0]); + } + } +} + +/// Enrich assets that only have IDs by fetching from the Assets API. +pub async fn enrich_assets(client: &JiraClient, assets: &mut [LinkedAsset]) { + // Only enrich assets that have an ID but are missing key/name. + let needs_enrichment: Vec = assets + .iter() + .enumerate() + .filter(|(_, a)| a.id.is_some() && a.key.is_none() && a.name.is_none()) + .map(|(i, _)| i) + .collect(); + + if needs_enrichment.is_empty() { + return; + } + + // Check whether all assets that need enrichment carry their own workspace_id. + // If any are missing it, we fall back to fetching the global workspace ID. + let all_have_workspace = needs_enrichment + .iter() + .all(|&idx| assets[idx].workspace_id.is_some()); + + let fallback_workspace_id: Option = if all_have_workspace { + None + } else { + // Get workspace ID — required for Assets API calls. + match get_or_fetch_workspace_id(client).await { + Ok(wid) => Some(wid), + Err(_) => return, // Degrade gracefully + } + }; + + let futures: Vec<_> = needs_enrichment + .iter() + .map(|&idx| { + // Prefer the per-asset workspace_id; fall back to the global one. 
+ let wid = assets[idx] + .workspace_id + .clone() + .or_else(|| fallback_workspace_id.clone()) + .expect("workspace_id must be available (checked above)"); + let oid = assets[idx].id.clone().unwrap(); + async move { + let result = client.get_asset(&wid, &oid, false).await; + (idx, result) + } + }) + .collect(); + + let results = futures::future::join_all(futures).await; + + for (idx, result) in results { + if let Ok(obj) = result { + assets[idx].key = Some(obj.object_key); + assets[idx].name = Some(obj.label); + assets[idx].asset_type = Some(obj.object_type.name); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + fn make_extra(field_id: &str, value: Value) -> HashMap { + let mut map = HashMap::new(); + map.insert(field_id.to_string(), value); + map + } + + #[test] + fn parse_modern_label_and_key() { + let extra = make_extra( + "customfield_10191", + json!([{"label": "Acme Corp", "objectKey": "OBJ-1"}]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); + assert!(assets[0].id.is_none()); + } + + #[test] + fn parse_legacy_ids_only() { + let extra = make_extra( + "customfield_10191", + json!([{"workspaceId": "ws-1", "objectId": "88", "id": "ws-1:88"}]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].id.as_deref(), Some("88")); + assert_eq!(assets[0].workspace_id.as_deref(), Some("ws-1")); + assert!(assets[0].key.is_none()); + assert!(assets[0].name.is_none()); + } + + #[test] + fn parse_mixed_fields() { + let extra = make_extra( + "customfield_10191", + json!([{ + "label": "Acme Corp", + "objectKey": "OBJ-1", + "workspaceId": "ws-1", + "objectId": "88" + }]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + 
assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); + assert_eq!(assets[0].id.as_deref(), Some("88")); + } + + #[test] + fn parse_null_field_skipped() { + let extra = make_extra("customfield_10191", Value::Null); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_empty_array() { + let extra = make_extra("customfield_10191", json!([])); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_missing_field_skipped() { + let extra = HashMap::new(); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_string_value_as_name() { + let extra = make_extra("customfield_10191", json!("Some Asset")); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].name.as_deref(), Some("Some Asset")); + } + + #[test] + fn parse_multiple_cmdb_fields() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".into(), + json!([{"label": "Acme", "objectKey": "OBJ-1"}]), + ); + extra.insert( + "customfield_10245".into(), + json!([{"label": "Server-1", "objectKey": "SRV-1"}]), + ); + let field_ids = vec!["customfield_10191".into(), "customfield_10245".into()]; + let assets = extract_linked_assets(&extra, &field_ids); + assert_eq!(assets.len(), 2); + } + + #[test] + fn parse_multiple_objects_in_array() { + let extra = make_extra( + "customfield_10191", + json!([ + {"label": "Acme", "objectKey": "OBJ-1"}, + {"label": "Globex", "objectKey": "OBJ-2"} + ]), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 2); + assert_eq!(assets[0].name.as_deref(), Some("Acme")); + assert_eq!(assets[1].name.as_deref(), Some("Globex")); + } + + #[test] + fn 
parse_single_object_not_array() { + let extra = make_extra( + "customfield_10191", + json!({"label": "Acme", "objectKey": "OBJ-1"}), + ); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + } + + #[test] + fn parse_empty_object_skipped() { + let extra = make_extra("customfield_10191", json!([{}])); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert!(assets.is_empty()); + } + + #[test] + fn parse_numeric_object_id() { + let extra = make_extra("customfield_10191", json!([{"objectId": 88}])); + let assets = extract_linked_assets(&extra, &["customfield_10191".into()]); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].id.as_deref(), Some("88")); + } + + #[test] + fn extract_per_field_single_field() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".into(), + json!([{"label": "Acme Corp", "objectKey": "OBJ-1"}]), + ); + let cmdb_fields = vec![("customfield_10191".to_string(), "Client".to_string())]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert_eq!(result.len(), 1); + assert_eq!(result[0].0, "Client"); + assert_eq!(result[0].1.len(), 1); + assert_eq!(result[0].1[0].key.as_deref(), Some("OBJ-1")); + } + + #[test] + fn extract_per_field_multiple_fields() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".into(), + json!([{"label": "Acme Corp", "objectKey": "OBJ-1"}]), + ); + extra.insert( + "customfield_10245".into(), + json!([{"label": "Email Server", "objectKey": "SRV-42"}]), + ); + let cmdb_fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ( + "customfield_10245".to_string(), + "Affected Service".to_string(), + ), + ]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, "Client"); + assert_eq!(result[1].0, "Affected Service"); + } + + #[test] 
+ fn extract_per_field_skips_empty() { + let mut extra = HashMap::new(); + extra.insert("customfield_10191".into(), json!(null)); + extra.insert( + "customfield_10245".into(), + json!([{"label": "Email Server", "objectKey": "SRV-42"}]), + ); + let cmdb_fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ( + "customfield_10245".to_string(), + "Affected Service".to_string(), + ), + ]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert_eq!(result.len(), 1); + assert_eq!(result[0].0, "Affected Service"); + } + + #[test] + fn extract_per_field_missing_field() { + let extra = HashMap::new(); + let cmdb_fields = vec![("customfield_10191".to_string(), "Client".to_string())]; + let result = extract_linked_assets_per_field(&extra, &cmdb_fields); + assert!(result.is_empty()); + } + + #[test] + fn enrich_json_injects_resolved_fields() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".to_string(), + json!([{"objectId": "88", "workspaceId": "ws-1"}]), + ); + + let per_field = vec![( + "customfield_10191".to_string(), + vec![LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-1".into()), + key: Some("OBJ-88".into()), + name: Some("Acme Corp".into()), + asset_type: Some("Client".into()), + }], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let enriched = &extra["customfield_10191"]; + let arr = enriched.as_array().unwrap(); + assert_eq!(arr.len(), 1); + assert_eq!(arr[0]["objectId"], "88"); + assert_eq!(arr[0]["workspaceId"], "ws-1"); + assert_eq!(arr[0]["objectKey"], "OBJ-88"); + assert_eq!(arr[0]["label"], "Acme Corp"); + assert_eq!(arr[0]["objectType"], "Client"); + } + + #[test] + fn enrich_json_preserves_already_enriched() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".to_string(), + json!([{"objectKey": "OBJ-1", "label": "Already There"}]), + ); + + let per_field = vec![( + "customfield_10191".to_string(), + vec![LinkedAsset { + key: 
Some("OBJ-1".into()), + name: Some("Already There".into()), + ..Default::default() + }], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let arr = extra["customfield_10191"].as_array().unwrap(); + assert_eq!(arr[0]["objectKey"], "OBJ-1"); + assert_eq!(arr[0]["label"], "Already There"); + } + + #[test] + fn enrich_json_partial_enrichment() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".to_string(), + json!([ + {"objectId": "88", "workspaceId": "ws-1"}, + {"objectId": "99", "workspaceId": "ws-1"} + ]), + ); + + let per_field = vec![( + "customfield_10191".to_string(), + vec![ + LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-1".into()), + key: Some("OBJ-88".into()), + name: Some("Acme".into()), + asset_type: Some("Client".into()), + }, + LinkedAsset { + id: Some("99".into()), + workspace_id: Some("ws-1".into()), + key: None, + name: None, + asset_type: None, + }, + ], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let arr = extra["customfield_10191"].as_array().unwrap(); + assert_eq!(arr[0]["objectKey"], "OBJ-88"); + assert!(arr[1].get("objectKey").is_none()); + } + + #[test] + fn enrich_json_single_object_shape() { + let mut extra = HashMap::new(); + extra.insert( + "customfield_10191".to_string(), + json!({"objectId": "88", "workspaceId": "ws-1"}), + ); + + let per_field = vec![( + "customfield_10191".to_string(), + vec![LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-1".into()), + key: Some("OBJ-88".into()), + name: Some("Acme Corp".into()), + asset_type: Some("Client".into()), + }], + )]; + + enrich_json_assets(&mut extra, &per_field); + + let obj = extra["customfield_10191"].as_object().unwrap(); + assert_eq!(obj["objectId"], "88"); + assert_eq!(obj["workspaceId"], "ws-1"); + assert_eq!(obj["objectKey"], "OBJ-88"); + assert_eq!(obj["label"], "Acme Corp"); + assert_eq!(obj["objectType"], "Client"); + } +} diff --git a/src/api/assets/mod.rs b/src/api/assets/mod.rs new file mode 100644 
index 0000000..8a00402 --- /dev/null +++ b/src/api/assets/mod.rs @@ -0,0 +1,5 @@ +pub mod linked; +pub mod objects; +pub mod schemas; +pub mod tickets; +pub mod workspace; diff --git a/src/api/assets/objects.rs b/src/api/assets/objects.rs new file mode 100644 index 0000000..f6d2e84 --- /dev/null +++ b/src/api/assets/objects.rs @@ -0,0 +1,236 @@ +use std::collections::HashMap; + +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::AssetsPage; +use crate::cache::{self, CachedObjectTypeAttr}; +use crate::error::JrError; +use crate::types::assets::{AssetObject, ObjectAttribute, ObjectTypeAttributeDef}; + +impl JiraClient { + /// Search assets via AQL with auto-pagination. + /// + /// The `aql` parameter is passed to the API verbatim — callers must ensure + /// the query is trusted input. For user-supplied object keys interpolated + /// into AQL, use `resolve_object_key()` which escapes special characters. + pub async fn search_assets( + &self, + workspace_id: &str, + aql: &str, + limit: Option, + include_attributes: bool, + ) -> Result> { + let mut all = Vec::new(); + let mut start_at = 0u32; + let max_page_size = 25u32; + + loop { + let page_size = match limit { + Some(cap) => { + let remaining = cap.saturating_sub(all.len() as u32); + if remaining == 0 { + break; + } + remaining.min(max_page_size) + } + None => max_page_size, + }; + + let path = format!( + "object/aql?startAt={}&maxResults={}&includeAttributes={}", + start_at, page_size, include_attributes + ); + let body = serde_json::json!({ "qlQuery": aql }); + let page: AssetsPage = + self.post_assets(workspace_id, &path, &body).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + + if let Some(cap) = limit { + if all.len() >= cap as usize { + all.truncate(cap as usize); + break; + } + } + if !has_more { + break; + } + start_at = next; + } + Ok(all) + } + + /// Get a single asset by its numeric ID. 
+ pub async fn get_asset( + &self, + workspace_id: &str, + object_id: &str, + include_attributes: bool, + ) -> Result { + let path = format!( + "object/{}?includeAttributes={}", + urlencoding::encode(object_id), + include_attributes + ); + self.get_assets(workspace_id, &path).await + } + + /// Get all attributes for a single object, with full attribute definitions + /// including human-readable names. + pub async fn get_object_attributes( + &self, + workspace_id: &str, + object_id: &str, + ) -> Result> { + let path = format!("object/{}/attributes", urlencoding::encode(object_id)); + self.get_assets(workspace_id, &path).await + } + + /// Get all attribute definitions for an object type. + /// + /// Returns schema-level metadata (name, system, hidden, label, position) + /// for every attribute defined on the type. Used to enrich search results + /// where only `objectTypeAttributeId` is present. + pub async fn get_object_type_attributes( + &self, + workspace_id: &str, + object_type_id: &str, + ) -> Result> { + let path = format!( + "objecttype/{}/attributes", + urlencoding::encode(object_type_id) + ); + self.get_assets(workspace_id, &path).await + } +} + +/// Resolve an object key (e.g., "OBJ-1") to its numeric ID. +/// If the input is purely numeric, returns it as-is. +pub async fn resolve_object_key( + client: &JiraClient, + workspace_id: &str, + key_or_id: &str, +) -> Result { + if key_or_id.is_empty() { + return Err(JrError::UserError("Object key or ID cannot be empty.".into()).into()); + } + + if key_or_id.chars().all(|c| c.is_ascii_digit()) { + return Ok(key_or_id.to_string()); + } + + // Escape quotes and backslashes to prevent AQL injection. + // AQL uses "Key" (not "objectKey") to match the object key field. 
+ let escaped = key_or_id.replace('\\', "\\\\").replace('"', "\\\""); + + let results = client + .search_assets( + workspace_id, + &format!("Key = \"{}\"", escaped), + Some(1), + false, + ) + .await?; + + results.into_iter().next().map(|obj| obj.id).ok_or_else(|| { + JrError::UserError(format!( + "No asset matching \"{}\" found. Check the object key and try again.", + key_or_id + )) + .into() + }) +} + +/// Enrich search results by resolving attribute definitions for each unique object type. +/// +/// Returns a HashMap mapping `objectTypeAttributeId` → `CachedObjectTypeAttr` for use +/// in output formatting (filtering system/hidden, sorting by position, displaying names). +/// +/// Fetches definitions from cache first, falling back to the API. Results are cached +/// for 7 days per object type. +pub async fn enrich_search_attributes( + client: &JiraClient, + workspace_id: &str, + objects: &[AssetObject], +) -> Result> { + // Collect unique object type IDs + let mut type_ids: Vec = objects.iter().map(|o| o.object_type.id.clone()).collect(); + type_ids.sort(); + type_ids.dedup(); + + let mut attr_map: HashMap = HashMap::new(); + + for type_id in &type_ids { + // Try cache first + let attrs = match cache::read_object_type_attr_cache(type_id) { + Ok(Some(cached)) => cached, + _ => { + // Cache miss — fetch from API + match client + .get_object_type_attributes(workspace_id, type_id) + .await + { + Ok(defs) => { + let cached: Vec = defs + .iter() + .map(|d| CachedObjectTypeAttr { + id: d.id.clone(), + name: d.name.clone(), + system: d.system, + hidden: d.hidden, + label: d.label, + position: d.position, + }) + .collect(); + // Best-effort cache write + let _ = cache::write_object_type_attr_cache(type_id, &cached); + cached + } + Err(_) => { + // Graceful degradation: skip this type, let caller decide on warnings + continue; + } + } + } + }; + + for attr in attrs { + attr_map.insert(attr.id.clone(), attr); + } + } + + Ok(attr_map) +} + +#[cfg(test)] +mod tests { + 
#[test] + fn numeric_id_detected() { + assert!("123".chars().all(|c| c.is_ascii_digit())); + assert!("0".chars().all(|c| c.is_ascii_digit())); + } + + #[test] + fn object_key_not_numeric() { + assert!(!"OBJ-1".chars().all(|c| c.is_ascii_digit())); + assert!(!"SCHEMA-88".chars().all(|c| c.is_ascii_digit())); + assert!(!"abc".chars().all(|c| c.is_ascii_digit())); + } + + #[test] + fn empty_string_is_numeric_but_rejected_by_resolve() { + // Empty string passes chars().all() vacuously, but resolve_object_key + // has an explicit empty check that rejects it before the numeric check. + assert!("".chars().all(|c| c.is_ascii_digit())); + } + + #[test] + fn aql_escaping() { + let input = r#"OBJ-1" OR objectType = "Server"#; + let escaped = input.replace('\\', "\\\\").replace('"', "\\\""); + let query = format!("objectKey = \"{}\"", escaped); + assert_eq!(query, r#"objectKey = "OBJ-1\" OR objectType = \"Server""#); + } +} diff --git a/src/api/assets/schemas.rs b/src/api/assets/schemas.rs new file mode 100644 index 0000000..4dc4696 --- /dev/null +++ b/src/api/assets/schemas.rs @@ -0,0 +1,44 @@ +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::AssetsPage; +use crate::types::assets::{ObjectSchema, ObjectTypeEntry}; + +impl JiraClient { + /// List all object schemas in the workspace with auto-pagination. + pub async fn list_object_schemas(&self, workspace_id: &str) -> Result> { + let mut all = Vec::new(); + let mut start_at = 0u32; + let page_size = 25u32; + + loop { + let path = format!( + "objectschema/list?startAt={}&maxResults={}&includeCounts=true", + start_at, page_size + ); + let page: AssetsPage = self.get_assets(workspace_id, &path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + + if !has_more { + break; + } + start_at = next; + } + Ok(all) + } + + /// List all object types for a given schema (flat, no pagination). 
+ pub async fn list_object_types( + &self, + workspace_id: &str, + schema_id: &str, + ) -> Result> { + let path = format!( + "objectschema/{}/objecttypes/flat?includeObjectCounts=true", + urlencoding::encode(schema_id) + ); + self.get_assets(workspace_id, &path).await + } +} diff --git a/src/api/assets/tickets.rs b/src/api/assets/tickets.rs new file mode 100644 index 0000000..05761ef --- /dev/null +++ b/src/api/assets/tickets.rs @@ -0,0 +1,19 @@ +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::types::assets::ConnectedTicketsResponse; + +impl JiraClient { + /// Get Jira issues connected to an asset object. + pub async fn get_connected_tickets( + &self, + workspace_id: &str, + object_id: &str, + ) -> Result { + let path = format!( + "objectconnectedtickets/{}/tickets", + urlencoding::encode(object_id) + ); + self.get_assets(workspace_id, &path).await + } +} diff --git a/src/api/assets/workspace.rs b/src/api/assets/workspace.rs new file mode 100644 index 0000000..f05ce75 --- /dev/null +++ b/src/api/assets/workspace.rs @@ -0,0 +1,57 @@ +use anyhow::Result; +use serde::Deserialize; + +use crate::api::client::JiraClient; +use crate::api::pagination::ServiceDeskPage; +use crate::cache; +use crate::error::JrError; + +#[derive(Debug, Default, Deserialize)] +struct WorkspaceEntry { + #[serde(rename = "workspaceId")] + workspace_id: String, +} + +/// Get the Assets workspace ID, using cache when available. +/// +/// The discovery endpoint returns a paginated response with workspace entries. +/// In practice there's only one workspace per site. +pub async fn get_or_fetch_workspace_id(client: &JiraClient) -> Result { + if let Some(cached) = cache::read_workspace_cache()? { + return Ok(cached.workspace_id); + } + + let page: ServiceDeskPage = client + .get_from_instance("/rest/servicedeskapi/assets/workspace") + .await + .map_err(|e| { + if let Some(JrError::ApiError { status, .. 
}) = e.downcast_ref::() { + if *status == 404 || *status == 403 { + return JrError::UserError( + "Assets is not available on this Jira site. \ + Assets requires Jira Service Management Premium or Enterprise." + .into(), + ) + .into(); + } + } + e + })?; + + let workspace_id = page + .values + .into_iter() + .next() + .map(|w| w.workspace_id) + .ok_or_else(|| { + JrError::UserError( + "No Assets workspace found on this Jira site. \ + Assets requires Jira Service Management Premium or Enterprise." + .into(), + ) + })?; + + let _ = cache::write_workspace_cache(&workspace_id); + + Ok(workspace_id) +} diff --git a/src/api/client.rs b/src/api/client.rs index d1ac24b..19d70c9 100644 --- a/src/api/client.rs +++ b/src/api/client.rs @@ -20,6 +20,7 @@ pub struct JiraClient { instance_url: String, auth_header: String, verbose: bool, + assets_base_url: Option, } impl JiraClient { @@ -27,14 +28,21 @@ impl JiraClient { /// from the system keychain. pub fn from_config(config: &Config, verbose: bool) -> anyhow::Result { let base_url = config.base_url()?; - let instance_url = config - .global - .instance - .url - .as_ref() - .ok_or_else(|| anyhow::anyhow!("No Jira instance configured. Run \"jr init\" first."))? - .trim_end_matches('/') - .to_string(); + + // JR_BASE_URL overrides all URL targets (used by integration tests to inject wiremock). + let test_override = std::env::var("JR_BASE_URL").ok(); + + let instance_url = if let Some(ref override_url) = test_override { + // Test mode: route all traffic (including instance and assets) to the mock server. + override_url.trim_end_matches('/').to_string() + } else if let Some(url) = config.global.instance.url.as_ref() { + url.trim_end_matches('/').to_string() + } else { + return Err(JrError::ConfigError( + "No Jira instance configured. 
Run \"jr init\" first.".into(), + ) + .into()); + }; let auth_method = config .global .instance @@ -42,40 +50,60 @@ impl JiraClient { .as_deref() .unwrap_or("api_token"); - let auth_header = match auth_method { - "oauth" => { - let (access, _refresh) = crate::api::auth::load_oauth_tokens()?; - format!("Bearer {access}") - } - _ => { - // api_token (default) - let (email, token) = crate::api::auth::load_api_token()?; - let encoded = - base64::engine::general_purpose::STANDARD.encode(format!("{email}:{token}")); - format!("Basic {encoded}") + // JR_AUTH_HEADER env var overrides keychain auth (used by tests to inject mock auth) + let auth_header = if let Ok(header) = std::env::var("JR_AUTH_HEADER") { + header + } else { + match auth_method { + "oauth" => { + let (access, _refresh) = crate::api::auth::load_oauth_tokens()?; + format!("Bearer {access}") + } + _ => { + // api_token (default) + let (email, token) = crate::api::auth::load_api_token()?; + let encoded = base64::engine::general_purpose::STANDARD + .encode(format!("{email}:{token}")); + format!("Basic {encoded}") + } } }; let client = Client::builder().timeout(Duration::from_secs(30)).build()?; + let assets_base_url = if let Some(ref override_url) = test_override { + // Test mode: assets API goes to the mock server under /jsm/assets. + Some(format!("{}/jsm/assets", override_url.trim_end_matches('/'))) + } else { + config.global.instance.cloud_id.as_ref().map(|cloud_id| { + format!( + "https://api.atlassian.com/ex/jira/{}/jsm/assets", + urlencoding::encode(cloud_id) + ) + }) + }; + Ok(Self { client, base_url, instance_url, auth_header, verbose, + assets_base_url, }) } /// Create a client for integration testing. This is **not** gated behind /// `#[cfg(test)]` so that integration tests in `tests/` can use it. 
pub fn new_for_test(base_url: String, auth_header: String) -> Self { + let assets_base_url = Some(format!("{}/jsm/assets", &base_url)); Self { client: Client::new(), instance_url: base_url.clone(), base_url, auth_header, verbose: false, + assets_base_url, } } @@ -216,7 +244,7 @@ impl JiraClient { body } } - Err(_) => "Unknown error".to_string(), + Err(e) => format!("Could not read error response: {e}"), }; JrError::ApiError { status, message }.into() @@ -254,6 +282,54 @@ impl JiraClient { Ok(parsed) } + /// Perform a GET request against the Assets/CMDB API gateway. + /// + /// Constructs URL: `{assets_base_url}/workspace/{workspace_id}/v1/{path}`. + /// Requires `cloud_id` in config (set during `jr init`). + pub async fn get_assets( + &self, + workspace_id: &str, + path: &str, + ) -> anyhow::Result { + let base = self.assets_base_url.as_ref().ok_or_else(|| { + JrError::ConfigError( + "Cloud ID not configured. Run \"jr init\" to set up your instance.".into(), + ) + })?; + let url = format!( + "{}/workspace/{}/v1/{}", + base, + urlencoding::encode(workspace_id), + path + ); + let request = self.client.get(&url); + let response = self.send(request).await?; + Ok(response.json::().await?) + } + + /// Perform a POST request against the Assets/CMDB API gateway. + pub async fn post_assets( + &self, + workspace_id: &str, + path: &str, + body: &B, + ) -> anyhow::Result { + let base = self.assets_base_url.as_ref().ok_or_else(|| { + JrError::ConfigError( + "Cloud ID not configured. Run \"jr init\" to set up your instance.".into(), + ) + })?; + let url = format!( + "{}/workspace/{}/v1/{}", + base, + urlencoding::encode(workspace_id), + path + ); + let request = self.client.post(&url).json(body); + let response = self.send(request).await?; + Ok(response.json::().await?) + } + /// Returns the HTTP method for building requests externally (if needed). 
pub fn request(&self, method: Method, path: &str) -> RequestBuilder { let url = format!("{}{}", self.base_url, path); diff --git a/src/api/jira/boards.rs b/src/api/jira/boards.rs index 663ffb5..87950d0 100644 --- a/src/api/jira/boards.rs +++ b/src/api/jira/boards.rs @@ -5,16 +5,29 @@ use anyhow::Result; impl JiraClient { /// List all boards accessible to the authenticated user. - pub async fn list_boards(&self) -> Result> { + /// + /// Optionally filter by `project_key` (`projectKeyOrId` query param) and/or + /// `board_type` (`type` query param, e.g. `"scrum"` or `"kanban"`). + pub async fn list_boards( + &self, + project_key: Option<&str>, + board_type: Option<&str>, + ) -> Result> { let mut all_boards: Vec = Vec::new(); let mut start_at: u32 = 0; let max_results: u32 = 50; loop { - let path = format!( + let mut path = format!( "/rest/agile/1.0/board?startAt={}&maxResults={}", start_at, max_results ); + if let Some(pk) = project_key { + path.push_str(&format!("&projectKeyOrId={}", urlencoding::encode(pk))); + } + if let Some(bt) = board_type { + path.push_str(&format!("&type={}", urlencoding::encode(bt))); + } let page: OffsetPage = self.get(&path).await?; let has_more = page.has_more(); let next = page.next_start(); diff --git a/src/api/jira/fields.rs b/src/api/jira/fields.rs index fd3a1f4..51f3ee8 100644 --- a/src/api/jira/fields.rs +++ b/src/api/jira/fields.rs @@ -35,6 +35,11 @@ impl JiraClient { let fields = self.list_fields().await?; Ok(filter_story_points_fields(&fields)) } + + pub async fn find_cmdb_fields(&self) -> Result> { + let fields = self.list_fields().await?; + Ok(filter_cmdb_fields(&fields)) + } } const KNOWN_SP_SCHEMA_TYPES: &[&str] = &[ @@ -75,6 +80,23 @@ pub fn filter_story_points_fields(fields: &[Field]) -> Vec<(String, String)> { .collect() } +const CMDB_SCHEMA_TYPE: &str = "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype"; + +pub fn filter_cmdb_fields(fields: &[Field]) -> Vec<(String, String)> { + fields + .iter() + .filter(|f| { + 
f.custom == Some(true) + && f.schema + .as_ref() + .and_then(|s| s.custom.as_deref()) + .map(|c| c == CMDB_SCHEMA_TYPE) + .unwrap_or(false) + }) + .map(|f| (f.id.clone(), f.name.clone())) + .collect() +} + #[cfg(test)] mod tests { use super::*; @@ -196,4 +218,86 @@ mod tests { let result = filter_story_points_fields(&fields); assert_eq!(result.len(), 1); } + + #[test] + fn filter_cmdb_fields_finds_assets_type() { + let fields = vec![make_field( + "customfield_10191", + "Client", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + )]; + let result = filter_cmdb_fields(&fields); + assert_eq!( + result, + vec![("customfield_10191".to_string(), "Client".to_string())] + ); + } + + #[test] + fn filter_cmdb_fields_ignores_non_cmdb() { + let fields = vec![ + make_field( + "customfield_10031", + "Story Points", + true, + "number", + "com.atlassian.jira.plugin.system.customfieldtypes:float", + ), + make_field( + "customfield_10191", + "Client", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + ), + ]; + let result = filter_cmdb_fields(&fields); + assert_eq!( + result, + vec![("customfield_10191".to_string(), "Client".to_string())] + ); + } + + #[test] + fn filter_cmdb_fields_empty_when_no_cmdb() { + let fields = vec![make_field( + "customfield_10031", + "Story Points", + true, + "number", + "com.atlassian.jira.plugin.system.customfieldtypes:float", + )]; + let result: Vec<(String, String)> = filter_cmdb_fields(&fields); + assert!(result.is_empty()); + } + + #[test] + fn filter_cmdb_fields_multiple() { + let fields = vec![ + make_field( + "customfield_10191", + "Client", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + ), + make_field( + "customfield_10245", + "Server", + true, + "any", + "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + ), + ]; + let result = filter_cmdb_fields(&fields); + assert_eq!( + result, + vec![ + ("customfield_10191".to_string(), "Client".to_string()), + 
("customfield_10245".to_string(), "Server".to_string()), + ] + ); + } } diff --git a/src/api/jira/issues.rs b/src/api/jira/issues.rs index 6dd6f55..29e0fb2 100644 --- a/src/api/jira/issues.rs +++ b/src/api/jira/issues.rs @@ -2,8 +2,43 @@ use crate::api::client::JiraClient; use crate::api::pagination::{CursorPage, OffsetPage}; use crate::types::jira::{Comment, CreateIssueResponse, Issue, TransitionsResponse}; use anyhow::Result; +use serde::Deserialize; use serde_json::Value; +/// Default fields requested when fetching issues (search and get). +/// +/// Both `search_issues` and `get_issue` use this list so they stay in sync. +/// Callers can request additional fields via `extra_fields` parameters. +const BASE_ISSUE_FIELDS: &[&str] = &[ + "summary", + "status", + "issuetype", + "priority", + "assignee", + "reporter", + "project", + "description", + "created", + "updated", + "resolution", + "components", + "fixVersions", + "labels", + "parent", + "issuelinks", +]; + +/// Result of a paginated issue search, including whether more results exist. +pub struct SearchResult { + pub issues: Vec, + pub has_more: bool, +} + +#[derive(Deserialize)] +struct ApproximateCountResponse { + count: u64, +} + impl JiraClient { /// Search issues using JQL with cursor-based pagination. 
pub async fn search_issues( @@ -11,22 +46,16 @@ impl JiraClient { jql: &str, limit: Option, extra_fields: &[&str], - ) -> Result> { + ) -> Result { let max_per_page = limit.unwrap_or(50).min(100); let mut all_issues: Vec = Vec::new(); let mut next_page_token: Option = None; - let mut fields = vec![ - "summary", - "status", - "issuetype", - "priority", - "assignee", - "project", - "description", - ]; + let mut fields = BASE_ISSUE_FIELDS.to_vec(); fields.extend_from_slice(extra_fields); + let mut more_available = false; + loop { let mut body = serde_json::json!({ "jql": jql, @@ -40,39 +69,51 @@ impl JiraClient { let page: CursorPage = self.post("/rest/api/3/search/jql", &body).await?; - let has_more = page.has_more(); + let page_has_more = page.has_more(); let token = page.next_page_token.clone(); all_issues.extend(page.issues); if let Some(max) = limit { if all_issues.len() >= max as usize { + more_available = all_issues.len() > max as usize || page_has_more; all_issues.truncate(max as usize); break; } } - if !has_more { + if !page_has_more { break; } next_page_token = token; } - Ok(all_issues) + Ok(SearchResult { + issues: all_issues, + has_more: more_available, + }) + } + + /// Get an approximate count of issues matching a JQL query. + /// + /// Uses the dedicated count endpoint which is lightweight (no issue data fetched). + /// The JQL should not include ORDER BY — use `jql::strip_order_by()` before calling. + pub async fn approximate_count(&self, jql: &str) -> Result { + let body = serde_json::json!({ "jql": jql }); + let resp: ApproximateCountResponse = self + .post("/rest/api/3/search/approximate-count", &body) + .await?; + Ok(resp.count) } /// Get a single issue by key. 
pub async fn get_issue(&self, key: &str, extra_fields: &[&str]) -> Result { - let mut fields = - "summary,status,issuetype,priority,assignee,project,description,labels,parent,issuelinks".to_string(); - for f in extra_fields { - fields.push(','); - fields.push_str(f); - } + let mut fields: Vec<&str> = BASE_ISSUE_FIELDS.to_vec(); + fields.extend_from_slice(extra_fields); let path = format!( "/rest/api/3/issue/{}?fields={}", urlencoding::encode(key), - fields + fields.join(",") ); self.get(&path).await } @@ -159,3 +200,40 @@ impl JiraClient { Ok(all) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn search_result_has_more_false_when_no_truncation() { + let result = SearchResult { + issues: vec![], + has_more: false, + }; + assert!(!result.has_more); + } + + #[test] + fn search_result_has_more_true_when_truncated() { + let result = SearchResult { + issues: vec![], + has_more: true, + }; + assert!(result.has_more); + } + + #[test] + fn approximate_count_response_deserializes() { + let json = r#"{"count": 1234}"#; + let resp: ApproximateCountResponse = serde_json::from_str(json).unwrap(); + assert_eq!(resp.count, 1234); + } + + #[test] + fn approximate_count_response_zero() { + let json = r#"{"count": 0}"#; + let resp: ApproximateCountResponse = serde_json::from_str(json).unwrap(); + assert_eq!(resp.count, 0); + } +} diff --git a/src/api/jira/mod.rs b/src/api/jira/mod.rs index c71d22d..42d163b 100644 --- a/src/api/jira/mod.rs +++ b/src/api/jira/mod.rs @@ -4,6 +4,7 @@ pub mod issues; pub mod links; pub mod projects; pub mod sprints; +pub mod statuses; pub mod teams; pub mod users; pub mod worklogs; diff --git a/src/api/jira/projects.rs b/src/api/jira/projects.rs index 71f1d39..89a2cf5 100644 --- a/src/api/jira/projects.rs +++ b/src/api/jira/projects.rs @@ -2,6 +2,8 @@ use anyhow::Result; use serde::{Deserialize, Serialize}; use crate::api::client::JiraClient; +use crate::api::pagination::OffsetPage; +use crate::types::jira::ProjectSummary; #[derive(Debug, 
Deserialize, Serialize)] pub struct IssueTypeMetadata { @@ -16,13 +18,31 @@ pub struct PriorityMetadata { pub id: String, } +#[derive(Debug, Deserialize, Serialize)] +pub struct StatusMetadata { + pub id: String, + pub name: String, + pub description: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct IssueTypeWithStatuses { + pub id: String, + pub name: String, + pub subtask: Option, + pub statuses: Vec, +} + impl JiraClient { pub async fn get_project_issue_types( &self, project_key: &str, ) -> Result> { let project: serde_json::Value = self - .get(&format!("/rest/api/3/project/{project_key}")) + .get(&format!( + "/rest/api/3/project/{}", + urlencoding::encode(project_key) + )) .await?; let types = project .get("issueTypes") @@ -34,4 +54,68 @@ impl JiraClient { pub async fn get_priorities(&self) -> Result> { self.get("/rest/api/3/priority").await } + + pub async fn get_project_statuses( + &self, + project_key: &str, + ) -> Result> { + self.get(&format!( + "/rest/api/3/project/{}/statuses", + urlencoding::encode(project_key) + )) + .await + } + + /// Check whether a project with the given key exists. + /// + /// Returns `Ok(true)` if the project is accessible, `Ok(false)` if the API + /// returns 404, and propagates any other error (auth, network, etc.). + pub async fn project_exists(&self, key: &str) -> Result { + let path = format!("/rest/api/3/project/{}", urlencoding::encode(key)); + match self.get::(&path).await { + Ok(_) => Ok(true), + Err(e) => { + if let Some(crate::error::JrError::ApiError { status: 404, .. 
}) = + e.downcast_ref::() + { + Ok(false) + } else { + Err(e) + } + } + } + } + + pub async fn list_projects( + &self, + type_key: Option<&str>, + max_results: Option, + ) -> Result> { + let page_size = max_results.map(|m| m.min(50)).unwrap_or(50); + let mut all_projects: Vec = Vec::new(); + let mut start_at: u32 = 0; + + loop { + let mut path = format!( + "/rest/api/3/project/search?orderBy=key&startAt={}&maxResults={}", + start_at, page_size + ); + if let Some(tk) = type_key { + path.push_str(&format!("&typeKey={}", urlencoding::encode(tk))); + } + + let page: OffsetPage = self.get(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all_projects.extend(page.values.unwrap_or_default()); + + // If caller specified a limit, stop after one page + if max_results.is_some() || !has_more { + break; + } + start_at = next; + } + + Ok(all_projects) + } } diff --git a/src/api/jira/sprints.rs b/src/api/jira/sprints.rs index 5b9f5e3..d0c086b 100644 --- a/src/api/jira/sprints.rs +++ b/src/api/jira/sprints.rs @@ -32,16 +32,18 @@ impl JiraClient { Ok(all_sprints) } - /// Get issues in a specific sprint, with optional JQL filter. + /// Get issues in a specific sprint, with optional JQL filter and optional limit. 
pub async fn get_sprint_issues( &self, sprint_id: u64, jql: Option<&str>, + limit: Option, extra_fields: &[&str], - ) -> Result> { + ) -> Result { let mut all_issues: Vec = Vec::new(); let mut start_at: u32 = 0; let max_results: u32 = 50; + let mut result_has_more = false; loop { let mut path = format!( @@ -58,16 +60,50 @@ impl JiraClient { path.push_str(&format!("&jql={}", urlencoding::encode(q))); } let page: OffsetPage = self.get(&path).await?; - let has_more = page.has_more(); + let page_has_more = page.has_more(); let next = page.next_start(); all_issues.extend(page.issues.unwrap_or_default()); - if !has_more { + // Early-stop: if we have enough issues, truncate and break + if let Some(max) = limit { + if all_issues.len() >= max as usize { + result_has_more = all_issues.len() > max as usize || page_has_more; + all_issues.truncate(max as usize); + break; + } + } + + if !page_has_more { break; } start_at = next; } - Ok(all_issues) + Ok(SprintIssuesResult { + issues: all_issues, + has_more: result_has_more, + }) + } + + /// Add issues to a sprint. Max 50 issues per call. + /// POST /rest/agile/1.0/sprint/{sprintId}/issue → 204 No Content + pub async fn add_issues_to_sprint(&self, sprint_id: u64, issues: &[String]) -> Result<()> { + let path = format!("/rest/agile/1.0/sprint/{}/issue", sprint_id); + let body = serde_json::json!({ "issues": issues }); + self.post_no_content(&path, &body).await + } + + /// Move issues to the backlog (removes from all sprints). Max 50 issues per call. + /// POST /rest/agile/1.0/backlog/issue → 204 No Content + pub async fn move_issues_to_backlog(&self, issues: &[String]) -> Result<()> { + let path = "/rest/agile/1.0/backlog/issue"; + let body = serde_json::json!({ "issues": issues }); + self.post_no_content(path, &body).await } } + +/// Result of fetching sprint issues with optional limit. 
+pub struct SprintIssuesResult { + pub issues: Vec, + pub has_more: bool, +} diff --git a/src/api/jira/statuses.rs b/src/api/jira/statuses.rs new file mode 100644 index 0000000..8e304c4 --- /dev/null +++ b/src/api/jira/statuses.rs @@ -0,0 +1,21 @@ +use crate::api::client::JiraClient; +use anyhow::Result; +use serde::Deserialize; + +#[derive(Deserialize)] +struct StatusEntry { + name: String, +} + +impl JiraClient { + /// Fetch all statuses from active workflows (global, not project-scoped). + /// + /// Returns a flat list of unique status names. The endpoint is not paginated. + pub async fn get_all_statuses(&self) -> Result> { + let entries: Vec = self.get("/rest/api/3/status").await?; + let mut names: Vec = entries.into_iter().map(|e| e.name).collect(); + names.sort(); + names.dedup(); + Ok(names) + } +} diff --git a/src/api/jira/users.rs b/src/api/jira/users.rs index 161b487..9af188e 100644 --- a/src/api/jira/users.rs +++ b/src/api/jira/users.rs @@ -6,4 +6,97 @@ impl JiraClient { pub async fn get_myself(&self) -> Result { self.get("/rest/api/3/myself").await } + + /// Search for users by name or email prefix. + /// + /// Returns active and inactive users — caller should filter by `active` field. + /// The response format may vary (flat array or paginated object), so both are handled. + pub async fn search_users(&self, query: &str) -> Result> { + let path = format!( + "/rest/api/3/user/search?query={}", + urlencoding::encode(query) + ); + let raw: serde_json::Value = self.get(&path).await?; + let users: Vec = if raw.is_array() { + serde_json::from_value(raw)? + } else if let Some(values) = raw.get("values") { + serde_json::from_value(values.clone())? + } else { + anyhow::bail!( + "Unexpected response from user search API. Expected a JSON array or object with \"values\" key." + ); + }; + Ok(users) + } + + /// Search for users assignable to a specific issue. 
+ /// + /// Uses the `/user/assignable/search` endpoint which returns users + /// eligible for assignment on the issue's project. + pub async fn search_assignable_users(&self, query: &str, issue_key: &str) -> Result> { + let path = format!( + "/rest/api/3/user/assignable/search?query={}&issueKey={}", + urlencoding::encode(query), + urlencoding::encode(issue_key), + ); + let raw: serde_json::Value = self.get(&path).await?; + let users: Vec = if raw.is_array() { + serde_json::from_value(raw)? + } else if let Some(values) = raw.get("values") { + serde_json::from_value(values.clone())? + } else { + anyhow::bail!( + "Unexpected response from assignable user search API. Expected a JSON array or object with \"values\" key." + ); + }; + Ok(users) + } + + /// Search for users assignable to issues in a project. + /// + /// Uses the `/user/assignable/multiProjectSearch` endpoint with a single project key. + /// Useful when no specific issue key is available (e.g., during issue creation). + pub async fn search_assignable_users_by_project( + &self, + query: &str, + project_key: &str, + ) -> Result> { + let path = format!( + "/rest/api/3/user/assignable/multiProjectSearch?query={}&projectKeys={}", + urlencoding::encode(query), + urlencoding::encode(project_key), + ); + let raw: serde_json::Value = self.get(&path).await?; + let users: Vec = if raw.is_array() { + serde_json::from_value(raw)? + } else if let Some(values) = raw.get("values") { + serde_json::from_value(values.clone())? + } else { + anyhow::bail!( + "Unexpected response from assignable user search API. Expected a JSON array or object with \"values\" key." 
+ ); + }; + Ok(users) + } +} + +#[cfg(test)] +mod tests { + use crate::types::jira::User; + + #[test] + fn multi_project_search_response_deserializes() { + let json = r#"[ + {"accountId": "abc123", "displayName": "Alice", "active": true}, + {"accountId": "def456", "displayName": "Bob", "emailAddress": "bob@test.com"} + ]"#; + let users: Vec = serde_json::from_str(json).unwrap(); + assert_eq!(users.len(), 2); + assert_eq!(users[0].account_id, "abc123"); + assert_eq!(users[0].display_name, "Alice"); + assert_eq!(users[0].active, Some(true)); + assert_eq!(users[1].account_id, "def456"); + assert_eq!(users[1].email_address.as_deref(), Some("bob@test.com")); + assert_eq!(users[1].active, None); + } } diff --git a/src/api/jsm/mod.rs b/src/api/jsm/mod.rs new file mode 100644 index 0000000..dbefcaa --- /dev/null +++ b/src/api/jsm/mod.rs @@ -0,0 +1,2 @@ +pub mod queues; +pub mod servicedesks; diff --git a/src/api/jsm/queues.rs b/src/api/jsm/queues.rs new file mode 100644 index 0000000..a18d4ab --- /dev/null +++ b/src/api/jsm/queues.rs @@ -0,0 +1,85 @@ +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::ServiceDeskPage; +use crate::types::jsm::Queue; +use crate::types::jsm::QueueIssueKey; + +impl JiraClient { + /// List all queues for a service desk, auto-paginating. + pub async fn list_queues(&self, service_desk_id: &str) -> Result> { + let base = format!( + "/rest/servicedeskapi/servicedesk/{}/queue", + urlencoding::encode(service_desk_id) + ); + let mut all = Vec::new(); + let mut start = 0u32; + let page_size = 50u32; + + loop { + let path = format!( + "{}?includeCount=true&start={}&limit={}", + base, start, page_size + ); + let page: ServiceDeskPage = self.get_from_instance(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + if !has_more { + break; + } + start = next; + } + Ok(all) + } + + /// Get issue keys from a queue, with optional limit and auto-pagination. 
+ /// + /// Returns keys in queue order. Only extracts the `key` field from each + /// issue — the caller batch-fetches full issue data via `search_issues`. + pub async fn get_queue_issue_keys( + &self, + service_desk_id: &str, + queue_id: &str, + limit: Option, + ) -> Result> { + let base = format!( + "/rest/servicedeskapi/servicedesk/{}/queue/{}/issue", + urlencoding::encode(service_desk_id), + urlencoding::encode(queue_id) + ); + let mut all = Vec::new(); + let mut start = 0u32; + let max_page_size = 50u32; + + loop { + let page_size = match limit { + Some(cap) => { + let remaining = cap.saturating_sub(all.len() as u32); + if remaining == 0 { + break; + } + remaining.min(max_page_size) + } + None => max_page_size, + }; + let path = format!("{}?start={}&limit={}", base, start, page_size); + let page: ServiceDeskPage = self.get_from_instance(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values.into_iter().map(|ik| ik.key)); + + if let Some(cap) = limit { + if all.len() >= cap as usize { + all.truncate(cap as usize); + break; + } + } + if !has_more { + break; + } + start = next; + } + Ok(all) + } +} diff --git a/src/api/jsm/servicedesks.rs b/src/api/jsm/servicedesks.rs new file mode 100644 index 0000000..3ed668f --- /dev/null +++ b/src/api/jsm/servicedesks.rs @@ -0,0 +1,126 @@ +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::pagination::ServiceDeskPage; +use crate::cache::{self, ProjectMeta}; +use crate::error::JrError; +use crate::types::jsm::ServiceDesk; +use chrono::Utc; + +impl JiraClient { + /// List all service desks, auto-paginating. 
+ pub async fn list_service_desks(&self) -> Result> { + let mut all = Vec::new(); + let mut start = 0u32; + let page_size = 50u32; + + loop { + let path = format!( + "/rest/servicedeskapi/servicedesk?start={}&limit={}", + start, page_size + ); + let page: ServiceDeskPage = self.get_from_instance(&path).await?; + let has_more = page.has_more(); + let next = page.next_start(); + all.extend(page.values); + if !has_more { + break; + } + start = next; + } + Ok(all) + } +} + +/// Fetch project metadata, using cache when available. +/// +/// 1. Check cache for project_key — return if fresh. +/// 2. GET /rest/api/3/project/{key} — extract projectTypeKey, simplified, id. +/// 3. If service_desk: list service desks, match by projectId to find serviceDeskId. +/// 4. Write to cache and return. +pub async fn get_or_fetch_project_meta( + client: &JiraClient, + project_key: &str, +) -> Result { + // Check cache first + if let Some(cached) = cache::read_project_meta(project_key)? { + return Ok(cached); + } + + // Fetch project details from platform API + let project: serde_json::Value = client + .get(&format!( + "/rest/api/3/project/{}", + urlencoding::encode(project_key) + )) + .await?; + + let project_type = project + .get("projectTypeKey") + .and_then(|v| v.as_str()) + .unwrap_or("software") + .to_string(); + + let simplified = project + .get("simplified") + .and_then(|v| v.as_bool()) + .unwrap_or(false); + + let project_id = project + .get("id") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + + // If it's a service desk, resolve the serviceDeskId + let service_desk_id = if project_type == "service_desk" { + let desks = client.list_service_desks().await?; + desks + .iter() + .find(|d| d.project_id == project_id) + .map(|d| d.id.clone()) + } else { + None + }; + + let meta = ProjectMeta { + project_type, + simplified, + project_id, + service_desk_id, + fetched_at: Utc::now(), + }; + + // Write to cache (best-effort — don't fail the command if cache write fails) + 
let _ = cache::write_project_meta(project_key, &meta); + + Ok(meta) +} + +/// Require the project to be a JSM service desk. Returns the serviceDeskId or errors. +pub async fn require_service_desk(client: &JiraClient, project_key: &str) -> Result { + let meta = get_or_fetch_project_meta(client, project_key).await?; + + if meta.project_type != "service_desk" { + let type_label = match meta.project_type.as_str() { + "software" => "Jira Software", + "business" => "Jira Work Management", + _ => "Jira", + }; + return Err(JrError::UserError(format!( + "\"{}\" is a {} project. Queue commands require a Jira Service Management project. \ + Run \"jr project fields --project {}\" to see available commands.", + project_key, type_label, project_key + )) + .into()); + } + + meta.service_desk_id.ok_or_else(|| { + JrError::UserError(format!( + "No service desk found for project \"{}\". \ + The project may not be configured as a service desk.", + project_key + )) + .into() + }) +} diff --git a/src/api/mod.rs b/src/api/mod.rs index 45b0afe..e0dd176 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -1,5 +1,7 @@ +pub mod assets; pub mod auth; pub mod client; pub mod jira; +pub mod jsm; pub mod pagination; pub mod rate_limit; diff --git a/src/api/pagination.rs b/src/api/pagination.rs index 1b41a04..d99be14 100644 --- a/src/api/pagination.rs +++ b/src/api/pagination.rs @@ -1,4 +1,5 @@ use serde::Deserialize; +use serde::de::{self, Deserializer}; /// Offset-based pagination used by most Jira REST API endpoints. /// @@ -78,6 +79,91 @@ impl CursorPage { } } +/// Offset-based pagination used by Jira Service Management `/rest/servicedeskapi/` endpoints. +/// +/// Uses different field names than `OffsetPage`: `size` (items in page) instead of `total`, +/// `isLastPage` boolean instead of computed from startAt+maxResults, and `start`/`limit` +/// instead of `startAt`/`maxResults`. +#[derive(Debug, Deserialize)] +pub struct ServiceDeskPage { + /// Count of items in the current page. 
+ pub size: u32, + /// Zero-based starting index. + pub start: u32, + /// Maximum items per page. + pub limit: u32, + /// Whether this is the last page of results. + #[serde(rename = "isLastPage")] + pub is_last_page: bool, + /// The items in this page. + #[serde(default)] + pub values: Vec, +} + +impl ServiceDeskPage { + /// Returns true if there are more pages after this one. + pub fn has_more(&self) -> bool { + !self.is_last_page + } + + /// Returns the `start` value for the next page. + pub fn next_start(&self) -> u32 { + self.start + self.size + } +} + +/// Deserialize a value that may be a boolean or a string representation of a boolean. +/// The Assets API returns `isLast` as `"true"`/`"false"` (string) in some contexts +/// and `true`/`false` (boolean) in others. +fn deserialize_bool_or_string<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let value: serde_json::Value = Deserialize::deserialize(deserializer)?; + match value { + serde_json::Value::Bool(b) => Ok(b), + serde_json::Value::String(s) => s.parse::().map_err(de::Error::custom), + _ => Err(de::Error::custom("expected boolean or string")), + } +} + +/// Pagination used by the Assets/CMDB API (`POST /object/aql`). +/// +/// Similar to `OffsetPage` (`startAt`/`maxResults`/`total`) but uses an `isLast` +/// boolean (which may be returned as a string) instead of computing from offsets. +/// `total` is capped at 1000 by the API. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AssetsPage { + /// Zero-based starting index. + #[serde(default)] + pub start_at: u32, + /// Maximum items per page. + #[serde(default)] + pub max_results: u32, + /// Total matching items (capped at 1000). + #[serde(default)] + pub total: u32, + /// Whether this is the last page. May be a bool or string in API responses. + #[serde(deserialize_with = "deserialize_bool_or_string")] + pub is_last: bool, + /// The items in this page. 
+ #[serde(default)] + pub values: Vec, +} + +impl AssetsPage { + /// Returns true if there are more pages after this one. + pub fn has_more(&self) -> bool { + !self.is_last + } + + /// Returns the `startAt` value for the next page. + pub fn next_start(&self) -> u32 { + self.start_at + self.max_results + } +} + #[cfg(test)] mod tests { use super::*; @@ -164,4 +250,125 @@ mod tests { }; assert!(!last_page.has_more()); } + + #[test] + fn test_service_desk_page_has_more() { + let page: ServiceDeskPage = ServiceDeskPage { + size: 5, + start: 0, + limit: 50, + is_last_page: false, + values: vec!["a".into(), "b".into(), "c".into(), "d".into(), "e".into()], + }; + assert!(page.has_more()); + assert_eq!(page.next_start(), 5); + } + + #[test] + fn test_service_desk_page_last_page() { + let page: ServiceDeskPage = ServiceDeskPage { + size: 3, + start: 10, + limit: 50, + is_last_page: true, + values: vec!["a".into(), "b".into(), "c".into()], + }; + assert!(!page.has_more()); + assert_eq!(page.next_start(), 13); + } + + #[test] + fn test_service_desk_page_empty() { + let page: ServiceDeskPage = ServiceDeskPage { + size: 0, + start: 0, + limit: 50, + is_last_page: true, + values: vec![], + }; + assert!(!page.has_more()); + assert_eq!(page.next_start(), 0); + assert!(page.values.is_empty()); + } + + #[test] + fn test_service_desk_page_deserialize() { + let json = r#"{ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": false, + "values": ["item1", "item2"] + }"#; + let page: ServiceDeskPage = serde_json::from_str(json).unwrap(); + assert_eq!(page.size, 2); + assert_eq!(page.values.len(), 2); + assert!(!page.is_last_page); + } + + #[test] + fn test_assets_page_has_more() { + let page: AssetsPage = AssetsPage { + start_at: 0, + max_results: 25, + total: 50, + is_last: false, + values: vec!["a".into()], + }; + assert!(page.has_more()); + assert_eq!(page.next_start(), 25); + } + + #[test] + fn test_assets_page_last_page() { + let page: AssetsPage = AssetsPage { + start_at: 25, 
+ max_results: 25, + total: 30, + is_last: true, + values: vec!["a".into()], + }; + assert!(!page.has_more()); + } + + #[test] + fn test_assets_page_deserialize_is_last_bool() { + let json = r#"{ + "startAt": 0, + "maxResults": 25, + "total": 5, + "isLast": true, + "values": ["a", "b"] + }"#; + let page: AssetsPage = serde_json::from_str(json).unwrap(); + assert!(page.is_last); + assert_eq!(page.values.len(), 2); + } + + #[test] + fn test_assets_page_deserialize_is_last_string() { + let json = r#"{ + "startAt": 0, + "maxResults": 25, + "total": 5, + "isLast": "false", + "values": ["a"] + }"#; + let page: AssetsPage = serde_json::from_str(json).unwrap(); + assert!(!page.is_last); + } + + #[test] + fn test_assets_page_deserialize_is_last_string_true() { + let json = r#"{ + "startAt": 0, + "maxResults": 25, + "total": 5, + "isLast": "true", + "values": [] + }"#; + let page: AssetsPage = serde_json::from_str(json).unwrap(); + assert!(page.is_last); + assert!(page.values.is_empty()); + } } diff --git a/src/cache.rs b/src/cache.rs index 27a8b6a..84cbdfb 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -1,10 +1,44 @@ use anyhow::Result; use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use std::collections::HashMap; use std::path::PathBuf; const CACHE_TTL_DAYS: i64 = 7; +/// Implemented by cache structs that carry a timestamp for TTL checks. +pub(crate) trait Expiring { + fn fetched_at(&self) -> DateTime; +} + +/// Read a whole-file cache. Returns `Ok(None)` on missing, expired, or corrupt +/// (unparseable) files. Propagates I/O errors. 
+fn read_cache(filename: &str) -> Result> { + let path = cache_dir().join(filename); + let content = match std::fs::read_to_string(&path) { + Ok(c) => c, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None), + Err(e) => return Err(e.into()), + }; + let cache: T = match serde_json::from_str(&content) { + Ok(c) => c, + Err(_) => return Ok(None), + }; + if (Utc::now() - cache.fetched_at()).num_days() >= CACHE_TTL_DAYS { + return Ok(None); + } + Ok(Some(cache)) +} + +/// Write a whole-file cache. Creates the cache directory if needed. +fn write_cache(filename: &str, data: &T) -> Result<()> { + let dir = cache_dir(); + std::fs::create_dir_all(&dir)?; + let content = serde_json::to_string_pretty(data)?; + std::fs::write(dir.join(filename), content)?; + Ok(()) +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CachedTeam { pub id: String, @@ -17,6 +51,12 @@ pub struct TeamCache { pub teams: Vec, } +impl Expiring for TeamCache { + fn fetched_at(&self) -> DateTime { + self.fetched_at + } +} + pub fn cache_dir() -> PathBuf { if let Ok(xdg) = std::env::var("XDG_CACHE_HOME") { PathBuf::from(xdg).join("jr") @@ -29,33 +69,212 @@ pub fn cache_dir() -> PathBuf { } pub fn read_team_cache() -> Result> { - let path = cache_dir().join("teams.json"); + read_cache("teams.json") +} + +pub fn write_team_cache(teams: &[CachedTeam]) -> Result<()> { + write_cache( + "teams.json", + &TeamCache { + fetched_at: Utc::now(), + teams: teams.to_vec(), + }, + ) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProjectMeta { + pub project_type: String, + pub simplified: bool, + pub project_id: String, + pub service_desk_id: Option, + pub fetched_at: DateTime, +} + +/// Read cached project metadata for a specific project key. +/// +/// Keyed cache — not genericized because TTL is checked per-entry +/// (`ProjectMeta.fetched_at`), unlike whole-file caches. 
+pub fn read_project_meta(project_key: &str) -> Result> { + let path = cache_dir().join("project_meta.json"); + if !path.exists() { + return Ok(None); + } + + let content = std::fs::read_to_string(&path)?; + let map: HashMap = match serde_json::from_str(&content) { + Ok(m) => m, + Err(_) => return Ok(None), + }; + + match map.get(project_key) { + Some(meta) => { + let age = Utc::now() - meta.fetched_at; + if age.num_days() >= CACHE_TTL_DAYS { + Ok(None) + } else { + Ok(Some(meta.clone())) + } + } + None => Ok(None), + } +} + +/// Write cached project metadata for a specific project key. +/// +/// Merges into the existing map file, preserving entries for other projects. +pub fn write_project_meta(project_key: &str, meta: &ProjectMeta) -> Result<()> { + let dir = cache_dir(); + std::fs::create_dir_all(&dir)?; + + let path = dir.join("project_meta.json"); + + // Read existing map or start fresh + let mut map: HashMap = if path.exists() { + let content = std::fs::read_to_string(&path)?; + serde_json::from_str(&content).unwrap_or_default() + } else { + HashMap::new() + }; + + map.insert(project_key.to_string(), meta.clone()); + + let content = serde_json::to_string_pretty(&map)?; + std::fs::write(&path, content)?; + Ok(()) +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct WorkspaceCache { + pub workspace_id: String, + pub fetched_at: DateTime, +} + +impl Expiring for WorkspaceCache { + fn fetched_at(&self) -> DateTime { + self.fetched_at + } +} + +pub fn read_workspace_cache() -> Result> { + read_cache("workspace.json") +} + +pub fn write_workspace_cache(workspace_id: &str) -> Result<()> { + write_cache( + "workspace.json", + &WorkspaceCache { + workspace_id: workspace_id.to_string(), + fetched_at: Utc::now(), + }, + ) +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CmdbFieldsCache { + pub fields: Vec<(String, String)>, + pub fetched_at: DateTime, +} + +impl Expiring for CmdbFieldsCache { + fn fetched_at(&self) -> DateTime { + self.fetched_at + } +} 
+ +pub fn read_cmdb_fields_cache() -> Result> { + read_cache("cmdb_fields.json") +} + +pub fn write_cmdb_fields_cache(fields: &[(String, String)]) -> Result<()> { + write_cache( + "cmdb_fields.json", + &CmdbFieldsCache { + fields: fields.to_vec(), + fetched_at: Utc::now(), + }, + ) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CachedObjectTypeAttr { + pub id: String, + pub name: String, + #[serde(default)] + pub system: bool, + #[serde(default)] + pub hidden: bool, + #[serde(default)] + pub label: bool, + #[serde(default)] + pub position: i32, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ObjectTypeAttrCache { + pub fetched_at: DateTime, + pub types: HashMap>, +} + +/// Read cached attributes for a specific object type. +/// +/// Keyed cache — not genericized because TTL is checked per-file +/// (`ObjectTypeAttrCache.fetched_at`) but lookup is per-key, with a different +/// return type (`Vec`) than the stored wrapper struct. +pub fn read_object_type_attr_cache( + object_type_id: &str, +) -> Result>> { + let path = cache_dir().join("object_type_attrs.json"); if !path.exists() { return Ok(None); } let content = std::fs::read_to_string(&path)?; - let cache: TeamCache = serde_json::from_str(&content)?; + let cache: ObjectTypeAttrCache = match serde_json::from_str(&content) { + Ok(c) => c, + Err(_) => return Ok(None), + }; let age = Utc::now() - cache.fetched_at; if age.num_days() >= CACHE_TTL_DAYS { return Ok(None); } - Ok(Some(cache)) + Ok(cache.types.get(object_type_id).cloned()) } -pub fn write_team_cache(teams: &[CachedTeam]) -> Result<()> { +/// Write cached attributes for a specific object type. +/// +/// Merges into the existing map file, preserving entries for other object types. 
+pub fn write_object_type_attr_cache( + object_type_id: &str, + attrs: &[CachedObjectTypeAttr], +) -> Result<()> { let dir = cache_dir(); std::fs::create_dir_all(&dir)?; - let cache = TeamCache { - fetched_at: Utc::now(), - teams: teams.to_vec(), + let path = dir.join("object_type_attrs.json"); + + let mut cache: ObjectTypeAttrCache = if path.exists() { + let content = std::fs::read_to_string(&path)?; + serde_json::from_str(&content).unwrap_or(ObjectTypeAttrCache { + fetched_at: Utc::now(), + types: HashMap::new(), + }) + } else { + ObjectTypeAttrCache { + fetched_at: Utc::now(), + types: HashMap::new(), + } }; + cache + .types + .insert(object_type_id.to_string(), attrs.to_vec()); + cache.fetched_at = Utc::now(); + let content = serde_json::to_string_pretty(&cache)?; - std::fs::write(dir.join("teams.json"), content)?; + std::fs::write(&path, content)?; Ok(()) } @@ -68,12 +287,20 @@ mod tests { static ENV_MUTEX: Mutex<()> = Mutex::new(()); fn with_temp_cache(f: F) { - let _guard = ENV_MUTEX.lock().unwrap(); + // Recover from poison: catch_unwind below ensures env cleanup completed + // even if a prior test panicked, so the guarded state is consistent. + let guard = ENV_MUTEX.lock().unwrap_or_else(|e| e.into_inner()); let dir = TempDir::new().unwrap(); - // SAFETY: test holds ENV_MUTEX, so no concurrent env access. + // SAFETY: ENV_MUTEX serialises all tests that touch XDG_CACHE_HOME; + // the variable is only read inside cache functions called within this + // lock, so no concurrent env access occurs. 
unsafe { std::env::set_var("XDG_CACHE_HOME", dir.path()) }; - f(); + let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(f)); unsafe { std::env::remove_var("XDG_CACHE_HOME") }; + drop(guard); + if let Err(e) = result { + std::panic::resume_unwind(e); + } } #[test] @@ -146,4 +373,341 @@ mod tests { assert_eq!(cache.teams[0].name, "Recent"); }); } + + #[test] + fn read_missing_project_meta_returns_none() { + with_temp_cache(|| { + let result = read_project_meta("NOEXIST").unwrap(); + assert!(result.is_none()); + }); + } + + #[test] + fn write_then_read_project_meta() { + with_temp_cache(|| { + let meta = ProjectMeta { + project_type: "service_desk".into(), + simplified: false, + project_id: "10042".into(), + service_desk_id: Some("15".into()), + fetched_at: Utc::now(), + }; + write_project_meta("HELPDESK", &meta).unwrap(); + + let loaded = read_project_meta("HELPDESK") + .unwrap() + .expect("should exist"); + assert_eq!(loaded.project_type, "service_desk"); + assert_eq!(loaded.service_desk_id.as_deref(), Some("15")); + assert_eq!(loaded.project_id, "10042"); + assert!(!loaded.simplified); + }); + } + + #[test] + fn expired_project_meta_returns_none() { + with_temp_cache(|| { + let meta = ProjectMeta { + project_type: "service_desk".into(), + simplified: false, + project_id: "10042".into(), + service_desk_id: Some("15".into()), + fetched_at: Utc::now() - chrono::Duration::days(8), + }; + write_project_meta("HELPDESK", &meta).unwrap(); + + let result = read_project_meta("HELPDESK").unwrap(); + assert!(result.is_none(), "expired project meta should return None"); + }); + } + + #[test] + fn project_meta_multiple_projects() { + with_temp_cache(|| { + let jsm = ProjectMeta { + project_type: "service_desk".into(), + simplified: false, + project_id: "10042".into(), + service_desk_id: Some("15".into()), + fetched_at: Utc::now(), + }; + let software = ProjectMeta { + project_type: "software".into(), + simplified: true, + project_id: "10001".into(), + 
service_desk_id: None, + fetched_at: Utc::now(), + }; + write_project_meta("HELPDESK", &jsm).unwrap(); + write_project_meta("DEV", &software).unwrap(); + + let jsm_loaded = read_project_meta("HELPDESK") + .unwrap() + .expect("should exist"); + assert_eq!(jsm_loaded.project_type, "service_desk"); + + let sw_loaded = read_project_meta("DEV").unwrap().expect("should exist"); + assert_eq!(sw_loaded.project_type, "software"); + assert!(sw_loaded.service_desk_id.is_none()); + }); + } + + #[test] + fn read_missing_workspace_cache_returns_none() { + with_temp_cache(|| { + let result = read_workspace_cache().unwrap(); + assert!(result.is_none()); + }); + } + + #[test] + fn write_then_read_workspace_cache() { + with_temp_cache(|| { + write_workspace_cache("abc-123-def").unwrap(); + + let cache = read_workspace_cache().unwrap().expect("should exist"); + assert_eq!(cache.workspace_id, "abc-123-def"); + }); + } + + #[test] + fn expired_workspace_cache_returns_none() { + with_temp_cache(|| { + let expired = WorkspaceCache { + workspace_id: "old-id".into(), + fetched_at: Utc::now() - chrono::Duration::days(8), + }; + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + let content = serde_json::to_string_pretty(&expired).unwrap(); + std::fs::write(dir.join("workspace.json"), content).unwrap(); + + let result = read_workspace_cache().unwrap(); + assert!( + result.is_none(), + "expired workspace cache should return None" + ); + }); + } + + #[test] + fn read_missing_cmdb_fields_cache_returns_none() { + with_temp_cache(|| { + let result = read_cmdb_fields_cache().unwrap(); + assert!(result.is_none()); + }); + } + + #[test] + fn write_then_read_cmdb_fields_cache() { + with_temp_cache(|| { + write_cmdb_fields_cache(&[ + ("customfield_10191".into(), "Client".into()), + ("customfield_10245".into(), "Hardware".into()), + ]) + .unwrap(); + + let cache = read_cmdb_fields_cache().unwrap().expect("should exist"); + assert_eq!( + cache.fields, + vec![ + 
("customfield_10191".to_string(), "Client".to_string()), + ("customfield_10245".to_string(), "Hardware".to_string()), + ] + ); + }); + } + + #[test] + fn expired_cmdb_fields_cache_returns_none() { + with_temp_cache(|| { + let expired = CmdbFieldsCache { + fields: vec![("customfield_10191".into(), "Client".into())], + fetched_at: Utc::now() - chrono::Duration::days(8), + }; + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + let content = serde_json::to_string_pretty(&expired).unwrap(); + std::fs::write(dir.join("cmdb_fields.json"), content).unwrap(); + + let result = read_cmdb_fields_cache().unwrap(); + assert!( + result.is_none(), + "expired cmdb fields cache should return None" + ); + }); + } + + #[test] + fn read_missing_object_type_attr_cache_returns_none() { + with_temp_cache(|| { + let result = read_object_type_attr_cache("23").unwrap(); + assert!(result.is_none()); + }); + } + + #[test] + fn write_then_read_object_type_attr_cache() { + with_temp_cache(|| { + let attrs = vec![ + CachedObjectTypeAttr { + id: "134".into(), + name: "Key".into(), + system: true, + hidden: false, + label: false, + position: 0, + }, + CachedObjectTypeAttr { + id: "135".into(), + name: "Name".into(), + system: false, + hidden: false, + label: true, + position: 1, + }, + ]; + write_object_type_attr_cache("23", &attrs).unwrap(); + + let loaded = read_object_type_attr_cache("23") + .unwrap() + .expect("should exist"); + assert_eq!(loaded.len(), 2); + assert_eq!(loaded[0].name, "Key"); + assert!(loaded[0].system); + assert_eq!(loaded[1].name, "Name"); + assert!(loaded[1].label); + }); + } + + #[test] + fn expired_object_type_attr_cache_returns_none() { + with_temp_cache(|| { + let expired = ObjectTypeAttrCache { + fetched_at: Utc::now() - chrono::Duration::days(8), + types: { + let mut m = HashMap::new(); + m.insert( + "23".to_string(), + vec![CachedObjectTypeAttr { + id: "134".into(), + name: "Key".into(), + system: true, + hidden: false, + label: false, + position: 0, 
+ }], + ); + m + }, + }; + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + let content = serde_json::to_string_pretty(&expired).unwrap(); + std::fs::write(dir.join("object_type_attrs.json"), content).unwrap(); + + let result = read_object_type_attr_cache("23").unwrap(); + assert!(result.is_none(), "expired cache should return None"); + }); + } + + #[test] + fn object_type_attr_cache_multiple_types() { + with_temp_cache(|| { + let attrs_a = vec![CachedObjectTypeAttr { + id: "134".into(), + name: "Key".into(), + system: true, + hidden: false, + label: false, + position: 0, + }]; + let attrs_b = vec![CachedObjectTypeAttr { + id: "200".into(), + name: "Hostname".into(), + system: false, + hidden: false, + label: false, + position: 3, + }]; + write_object_type_attr_cache("23", &attrs_a).unwrap(); + write_object_type_attr_cache("45", &attrs_b).unwrap(); + + let loaded_a = read_object_type_attr_cache("23") + .unwrap() + .expect("type 23 should exist"); + assert_eq!(loaded_a[0].name, "Key"); + + let loaded_b = read_object_type_attr_cache("45") + .unwrap() + .expect("type 45 should exist"); + assert_eq!(loaded_b[0].name, "Hostname"); + }); + } + + #[test] + fn object_type_attr_cache_corrupt_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + std::fs::write(dir.join("object_type_attrs.json"), "not json").unwrap(); + + let result = read_object_type_attr_cache("23").unwrap(); + assert!(result.is_none(), "corrupt cache should return None"); + }); + } + + #[test] + fn corrupt_team_cache_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + + // Garbage data + std::fs::write(dir.join("teams.json"), "not json").unwrap(); + let result = read_team_cache().unwrap(); + assert!(result.is_none(), "garbage data should return None"); + + // Valid JSON, wrong shape + std::fs::write(dir.join("teams.json"), r#"{"unexpected": true}"#).unwrap(); + let result = 
read_team_cache().unwrap(); + assert!(result.is_none(), "wrong-shape JSON should return None"); + }); + } + + #[test] + fn corrupt_workspace_cache_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + + // Garbage data + std::fs::write(dir.join("workspace.json"), "not json").unwrap(); + let result = read_workspace_cache().unwrap(); + assert!(result.is_none(), "garbage data should return None"); + + // Valid JSON, wrong shape + std::fs::write(dir.join("workspace.json"), r#"{"unexpected": true}"#).unwrap(); + let result = read_workspace_cache().unwrap(); + assert!(result.is_none(), "wrong-shape JSON should return None"); + }); + } + + #[test] + fn corrupt_project_meta_returns_none() { + with_temp_cache(|| { + let dir = cache_dir(); + std::fs::create_dir_all(&dir).unwrap(); + + // Garbage data + std::fs::write(dir.join("project_meta.json"), "not json").unwrap(); + let result = read_project_meta("ANY").unwrap(); + assert!(result.is_none(), "garbage data should return None"); + + // Valid JSON, wrong shape + std::fs::write(dir.join("project_meta.json"), r#"{"unexpected": true}"#).unwrap(); + let result = read_project_meta("ANY").unwrap(); + assert!(result.is_none(), "wrong-shape JSON should return None"); + }); + } } diff --git a/src/cli/assets.rs b/src/cli/assets.rs new file mode 100644 index 0000000..fc1debf --- /dev/null +++ b/src/cli/assets.rs @@ -0,0 +1,1049 @@ +use std::collections::HashMap; + +use anyhow::Result; + +use crate::api::assets::{objects, workspace}; +use crate::api::client::JiraClient; +use crate::cache::CachedObjectTypeAttr; +use crate::cli::{AssetsCommand, OutputFormat}; +use crate::error::JrError; +use crate::output; +use crate::partial_match::{self, MatchResult}; +use crate::types::assets::{AssetAttribute, ConnectedTicket}; + +pub async fn handle( + command: AssetsCommand, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let workspace_id = 
workspace::get_or_fetch_workspace_id(client).await?; + + match command { + AssetsCommand::Search { + query, + limit, + attributes, + } => { + handle_search( + &workspace_id, + &query, + limit, + attributes, + output_format, + client, + ) + .await + } + AssetsCommand::View { key, no_attributes } => { + handle_view(&workspace_id, &key, no_attributes, output_format, client).await + } + AssetsCommand::Tickets { + key, + limit, + open, + status, + } => { + handle_tickets( + &workspace_id, + &key, + limit, + open, + status, + output_format, + client, + ) + .await + } + AssetsCommand::Schemas => handle_schemas(&workspace_id, output_format, client).await, + AssetsCommand::Types { schema } => { + handle_types(&workspace_id, schema, output_format, client).await + } + AssetsCommand::Schema { name, schema } => { + handle_schema(&workspace_id, &name, schema, output_format, client).await + } + } +} + +async fn handle_search( + workspace_id: &str, + query: &str, + limit: Option, + attributes: bool, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let objects = client + .search_assets(workspace_id, query, limit, attributes) + .await?; + + if attributes { + let attr_map = + crate::api::assets::objects::enrich_search_attributes(client, workspace_id, &objects) + .await?; + + match output_format { + OutputFormat::Json => { + // Serialize to Value, inject objectTypeAttribute, filter system/hidden + let mut json_objects: Vec = Vec::new(); + for obj in &objects { + let mut obj_value = serde_json::to_value(obj)?; + if let Some(attrs_array) = obj_value + .get_mut("attributes") + .and_then(|a| a.as_array_mut()) + { + // Inject objectTypeAttribute into each attribute + for attr_value in attrs_array.iter_mut() { + if let Some(attr_id) = attr_value + .get("objectTypeAttributeId") + .and_then(|v| v.as_str()) + { + if let Some(def) = attr_map.get(attr_id) { + if let Some(map) = attr_value.as_object_mut() { + map.insert( + "objectTypeAttribute".to_string(), + 
serde_json::json!({ + "name": def.name, + "position": def.position, + }), + ); + } + } + } + } + // Filter out system and hidden attributes + attrs_array.retain(|attr| { + let attr_id = attr + .get("objectTypeAttributeId") + .and_then(|v| v.as_str()) + .unwrap_or(""); + match attr_map.get(attr_id) { + Some(def) => !def.system && !def.hidden, + None => true, // keep unknown attributes + } + }); + // Sort by position + attrs_array.sort_by_key(|attr| { + let attr_id = attr + .get("objectTypeAttributeId") + .and_then(|v| v.as_str()) + .unwrap_or(""); + attr_map + .get(attr_id) + .map(|d| d.position) + .unwrap_or(i32::MAX) + }); + } + json_objects.push(obj_value); + } + println!("{}", output::render_json(&json_objects)?); + } + OutputFormat::Table => { + let rows: Vec> = objects + .iter() + .map(|o| { + let attr_str = format_inline_attributes(&o.attributes, &attr_map); + vec![ + o.object_key.clone(), + o.object_type.name.clone(), + o.label.clone(), + attr_str, + ] + }) + .collect(); + output::print_output( + output_format, + &["Key", "Type", "Name", "Attributes"], + &rows, + &objects, + )?; + } + } + Ok(()) + } else { + let rows: Vec> = objects + .iter() + .map(|o| { + vec![ + o.object_key.clone(), + o.object_type.name.clone(), + o.label.clone(), + ] + }) + .collect(); + output::print_output(output_format, &["Key", "Type", "Name"], &rows, &objects) + } +} + +/// Format attributes as inline `Name: Value` pairs for table display. +/// +/// Filters out system, hidden, and label attributes. Sorts by position. +/// Attributes without a matching definition fall back to showing the raw ID. +/// Multi-value attributes use the first displayValue (or value as fallback). 
+fn format_inline_attributes( + attributes: &[AssetAttribute], + attr_map: &HashMap, +) -> String { + // Pair each attribute with its definition (or None for unknown) + let mut pairs: Vec<(&AssetAttribute, Option<&CachedObjectTypeAttr>)> = attributes + .iter() + .filter(|a| { + match attr_map.get(&a.object_type_attribute_id) { + Some(def) => !def.system && !def.hidden && !def.label, + None => true, // keep unknown attributes (graceful degradation) + } + }) + .map(|a| (a, attr_map.get(&a.object_type_attribute_id))) + .collect(); + // Known attributes sorted by position; unknown appended at end + pairs.sort_by_key(|(_, def)| def.map(|d| d.position).unwrap_or(i32::MAX)); + + pairs + .iter() + .filter_map(|(attr, def)| { + let value = attr + .values + .first() + .and_then(|v| v.display_value.as_deref().or(v.value.as_deref())); + let name = def + .map(|d| d.name.as_str()) + .unwrap_or(&attr.object_type_attribute_id); + value.map(|v| format!("{}: {}", name, v)) + }) + .collect::>() + .join(" | ") +} + +async fn handle_view( + workspace_id: &str, + key: &str, + no_attributes: bool, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let object_id = objects::resolve_object_key(client, workspace_id, key).await?; + let object = client.get_asset(workspace_id, &object_id, false).await?; + + match output_format { + OutputFormat::Json => { + if !no_attributes { + let mut attrs = client + .get_object_attributes(workspace_id, &object_id) + .await?; + // JSON: filter system and hidden only (keep label for programmatic consumers) + attrs + .retain(|a| !a.object_type_attribute.system && !a.object_type_attribute.hidden); + attrs.sort_by_key(|a| a.object_type_attribute.position); + // Inject richer attributes into the existing object JSON to preserve + // the root-level schema (additive change, not a wrapper envelope). 
+ let mut object_value = serde_json::to_value(&object)?; + if let serde_json::Value::Object(ref mut map) = object_value { + map.insert("attributes".to_string(), serde_json::to_value(&attrs)?); + } + println!("{}", output::render_json(&object_value)?); + } else { + println!("{}", output::render_json(&object)?); + } + } + OutputFormat::Table => { + let mut rows = vec![ + vec!["Key".into(), object.object_key.clone()], + vec!["Type".into(), object.object_type.name.clone()], + vec!["Name".into(), object.label.clone()], + ]; + + if let Some(ref created) = object.created { + rows.push(vec!["Created".into(), created.clone()]); + } + if let Some(ref updated) = object.updated { + rows.push(vec!["Updated".into(), updated.clone()]); + } + + println!("{}", output::render_table(&["Field", "Value"], &rows)); + + if !no_attributes { + let mut attrs = client + .get_object_attributes(workspace_id, &object_id) + .await?; + attrs.retain(|a| { + !a.object_type_attribute.system + && !a.object_type_attribute.hidden + && !a.object_type_attribute.label + }); + attrs.sort_by_key(|a| a.object_type_attribute.position); + + if !attrs.is_empty() { + println!(); + let attr_rows: Vec> = attrs + .iter() + .flat_map(|attr| { + attr.values.iter().map(move |v| { + vec![ + attr.object_type_attribute.name.clone(), + v.display_value + .clone() + .or_else(|| v.value.clone()) + .unwrap_or_default(), + ] + }) + }) + .collect(); + println!( + "{}", + output::render_table(&["Attribute", "Value"], &attr_rows) + ); + } + } + } + } + Ok(()) +} + +/// Filter connected tickets by status. Returns the filtered list. +/// +/// `--open`: exclude tickets where status.colorName == "green" (Done category). +/// `--status`: partial match on status.name. +/// Tickets with no status are included by --open, excluded by --status. 
+fn filter_tickets( + tickets: Vec, + open: bool, + status: Option<&str>, +) -> Result> { + if open { + return Ok(tickets + .into_iter() + .filter(|t| { + t.status + .as_ref() + .and_then(|s| s.color_name.as_deref()) + .map(|c| c != "green") + .unwrap_or(true) + }) + .collect()); + } + + if let Some(status_input) = status { + let mut seen = std::collections::HashSet::new(); + let status_names: Vec = tickets + .iter() + .filter_map(|t| t.status.as_ref().map(|s| s.name.clone())) + .filter(|name| seen.insert(name.clone())) + .collect(); + + let matched = match partial_match::partial_match(status_input, &status_names) { + MatchResult::Exact(name) => name, + // Case-sensitive dedup upstream; treat like Exact if case-variant duplicates slip through + MatchResult::ExactMultiple(name) => name, + MatchResult::Ambiguous(matches) => { + return Err(JrError::UserError(format!( + "Ambiguous status \"{}\". Matches: {}", + status_input, + matches.join(", ") + )) + .into()); + } + MatchResult::None(all) => { + let available = if all.is_empty() { + "none".to_string() + } else { + all.join(", ") + }; + return Err(JrError::UserError(format!( + "No status matching \"{}\". 
Available: {}", + status_input, available + )) + .into()); + } + }; + + return Ok(tickets + .into_iter() + .filter(|t| { + t.status + .as_ref() + .map(|s| s.name == matched) + .unwrap_or(false) + }) + .collect()); + } + + Ok(tickets) +} + +async fn handle_tickets( + workspace_id: &str, + key: &str, + limit: Option, + open: bool, + status: Option, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let object_id = objects::resolve_object_key(client, workspace_id, key).await?; + let resp = client + .get_connected_tickets(workspace_id, &object_id) + .await?; + + let has_filter = open || status.is_some(); + let filtered = filter_tickets(resp.tickets, open, status.as_deref())?; + + let tickets: Vec<_> = match limit { + Some(n) => filtered.into_iter().take(n as usize).collect(), + None => filtered, + }; + + match output_format { + OutputFormat::Json => { + if has_filter { + // Filtered: return bare array (allTicketsQuery no longer represents what's shown) + println!("{}", output::render_json(&tickets)?); + } else { + // Unfiltered: preserve full response envelope for backward compatibility + println!( + "{}", + output::render_json(&crate::types::assets::ConnectedTicketsResponse { + tickets, + all_tickets_query: resp.all_tickets_query, + })? + ); + } + } + OutputFormat::Table => { + let rows: Vec> = tickets + .iter() + .map(|t| { + vec![ + t.key.clone(), + t.issue_type + .as_ref() + .map(|it| it.name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + t.title.clone(), + t.status + .as_ref() + .map(|s| s.name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + t.priority + .as_ref() + .map(|p| p.name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + ] + }) + .collect(); + + output::print_output( + output_format, + &["Key", "Type", "Title", "Status", "Priority"], + &rows, + &tickets, + )?; + } + } + Ok(()) +} + +/// Resolve a --schema flag to a single schema, matching by ID (exact) or name (partial). 
+fn resolve_schema<'a>( + input: &str, + schemas: &'a [crate::types::assets::ObjectSchema], +) -> Result<&'a crate::types::assets::ObjectSchema> { + // Try exact ID match first + if let Some(s) = schemas.iter().find(|s| s.id == input) { + return Ok(s); + } + // Partial match on name + let names: Vec = schemas.iter().map(|s| s.name.clone()).collect(); + match partial_match::partial_match(input, &names) { + MatchResult::Exact(name) => Ok(schemas.iter().find(|s| s.name == name).unwrap()), + MatchResult::ExactMultiple(_) => { + let input_lower = input.to_lowercase(); + let duplicates: Vec = schemas + .iter() + .filter(|s| s.name.to_lowercase() == input_lower) + .map(|s| format!("{} (id: {})", s.name, s.id)) + .collect(); + Err(JrError::UserError(format!( + "Multiple schemas named \"{}\": {}. Use the schema ID instead.", + input, + duplicates.join(", ") + )) + .into()) + } + MatchResult::Ambiguous(matches) => Err(JrError::UserError(format!( + "Ambiguous schema \"{}\". Matches: {}", + input, + matches.join(", ") + )) + .into()), + MatchResult::None(all) => { + let available = if all.is_empty() { + "none".to_string() + } else { + all.join(", ") + }; + Err(JrError::UserError(format!( + "No schema matching \"{}\". 
Available: {}", + input, available + )) + .into()) + } + } +} + +async fn handle_schemas( + workspace_id: &str, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let schemas = client.list_object_schemas(workspace_id).await?; + if schemas.is_empty() { + return Err(JrError::UserError("No asset schemas found in this workspace.".into()).into()); + } + + let rows: Vec> = schemas + .iter() + .map(|s| { + vec![ + s.id.clone(), + s.object_schema_key.clone(), + s.name.clone(), + s.description.clone().unwrap_or_else(|| "\u{2014}".into()), + s.object_type_count.to_string(), + s.object_count.to_string(), + ] + }) + .collect(); + + output::print_output( + output_format, + &["ID", "Key", "Name", "Description", "Types", "Objects"], + &rows, + &schemas, + ) +} + +async fn handle_types( + workspace_id: &str, + schema_filter: Option, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let schemas = client.list_object_schemas(workspace_id).await?; + if schemas.is_empty() { + return Err(JrError::UserError("No asset schemas found in this workspace.".into()).into()); + } + + let target_schemas: Vec<&crate::types::assets::ObjectSchema> = match &schema_filter { + Some(input) => vec![resolve_schema(input, &schemas)?], + None => schemas.iter().collect(), + }; + + // Build a map of schema_id → schema_name for injection + let schema_names: HashMap<&str, &str> = schemas + .iter() + .map(|s| (s.id.as_str(), s.name.as_str())) + .collect(); + + let mut all_types = Vec::new(); + for schema in &target_schemas { + let types = client.list_object_types(workspace_id, &schema.id).await?; + all_types.extend(types); + } + + match output_format { + OutputFormat::Json => { + // Inject schemaName into each entry + let mut json_types: Vec = Vec::new(); + for t in &all_types { + let mut val = serde_json::to_value(t)?; + if let Some(map) = val.as_object_mut() { + let schema_name = schema_names.get(t.object_schema_id.as_str()).unwrap_or(&""); + map.insert( + 
"schemaName".to_string(), + serde_json::Value::String(schema_name.to_string()), + ); + } + json_types.push(val); + } + println!("{}", output::render_json(&json_types)?); + } + OutputFormat::Table => { + let rows: Vec> = all_types + .iter() + .map(|t| { + let schema_name = schema_names + .get(t.object_schema_id.as_str()) + .unwrap_or(&"\u{2014}"); + vec![ + t.id.clone(), + t.name.clone(), + schema_name.to_string(), + t.description.clone().unwrap_or_else(|| "\u{2014}".into()), + t.object_count.to_string(), + ] + }) + .collect(); + + output::print_output( + output_format, + &["ID", "Name", "Schema", "Description", "Objects"], + &rows, + &all_types, + )?; + } + } + Ok(()) +} + +/// Build an ambiguous type error with schema-labeled matches. +fn ambiguous_type_error( + input: &str, + matches: &[String], + candidates: &[(crate::types::assets::ObjectTypeEntry, String)], +) -> JrError { + let labeled: Vec = candidates + .iter() + .filter(|(t, _)| matches.contains(&t.name)) + .map(|(t, s)| format!("{} ({})", t.name, s)) + .collect(); + JrError::UserError(format!( + "Ambiguous type \"{}\". Matches: {}. Use --schema to narrow results.", + input, + labeled.join(", ") + )) +} + +/// Format the Type column for an attribute definition. 
+fn format_attribute_type(attr: &crate::types::assets::ObjectTypeAttributeDef) -> String { + if let Some(ref dt) = attr.default_type { + return dt.name.clone(); + } + if let Some(ref rot) = attr.reference_object_type { + return format!("Reference \u{2192} {}", rot.name); + } + "Unknown".to_string() +} + +async fn handle_schema( + workspace_id: &str, + type_name: &str, + schema_filter: Option, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let schemas = client.list_object_schemas(workspace_id).await?; + if schemas.is_empty() { + return Err(JrError::UserError("No asset schemas found in this workspace.".into()).into()); + } + + let target_schemas: Vec<&crate::types::assets::ObjectSchema> = match &schema_filter { + Some(input) => vec![resolve_schema(input, &schemas)?], + None => schemas.iter().collect(), + }; + + // Collect all object types with their schema name + let mut candidates: Vec<(crate::types::assets::ObjectTypeEntry, String)> = Vec::new(); + for schema in &target_schemas { + let types = client.list_object_types(workspace_id, &schema.id).await?; + for t in types { + candidates.push((t, schema.name.clone())); + } + } + + if candidates.is_empty() { + return Err(JrError::UserError( + "No object types found. Run \"jr assets schemas\" to verify your workspace has schemas." + .into(), + ) + .into()); + } + + // Partial match on type name — deduplicated for partial_match, then + // check for cross-schema duplicates on the resolved name. 
+ let mut deduped_names: Vec = candidates.iter().map(|(t, _)| t.name.clone()).collect(); + deduped_names.sort(); + deduped_names.dedup(); + let matched_name = match partial_match::partial_match(type_name, &deduped_names) { + MatchResult::Exact(name) => name, + // Case-sensitive dedup upstream; treat like Exact if case-variant duplicates slip through + MatchResult::ExactMultiple(name) => name, + MatchResult::Ambiguous(matches) => { + return Err(ambiguous_type_error(type_name, &matches, &candidates).into()); + } + MatchResult::None(_) => { + return Err(JrError::UserError(format!( + "No object type matching \"{}\". Run \"jr assets types\" to see available types.", + type_name + )) + .into()); + } + }; + + // Check for cross-schema duplicates: same name in multiple schemas + let same_name: Vec<&(crate::types::assets::ObjectTypeEntry, String)> = candidates + .iter() + .filter(|(t, _)| t.name == matched_name) + .collect(); + if same_name.len() > 1 { + let labeled: Vec = same_name + .iter() + .map(|(t, s)| format!("{} ({})", t.name, s)) + .collect(); + return Err(JrError::UserError(format!( + "Ambiguous type \"{}\". Matches: {}. 
Use --schema to narrow results.", + type_name, + labeled.join(", ") + )) + .into()); + } + + let (matched_type, schema_name) = same_name.first().unwrap(); + + // Fetch attributes + let attrs = client + .get_object_type_attributes(workspace_id, &matched_type.id) + .await?; + + match output_format { + OutputFormat::Json => { + println!("{}", output::render_json(&attrs)?); + } + OutputFormat::Table => { + println!( + "Object Type: {} (Schema: {})\n", + matched_type.name, schema_name + ); + + let mut visible: Vec<&crate::types::assets::ObjectTypeAttributeDef> = + attrs.iter().filter(|a| !a.system && !a.hidden).collect(); + visible.sort_by_key(|a| a.position); + + let rows: Vec> = visible + .iter() + .map(|a| { + vec![ + a.position.to_string(), + a.name.clone(), + format_attribute_type(a), + if a.minimum_cardinality >= 1 { + "Yes".into() + } else { + "No".into() + }, + if a.editable { + "Yes".into() + } else { + "No".into() + }, + ] + }) + .collect(); + + if rows.is_empty() { + println!("No user-defined attributes."); + } else { + println!( + "{}", + output::render_table(&["Pos", "Name", "Type", "Required", "Editable"], &rows) + ); + } + } + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::types::assets::{ConnectedTicket, TicketPriority, TicketStatus, TicketType}; + + fn make_ticket(key: &str, status_name: &str, color: &str) -> ConnectedTicket { + ConnectedTicket { + key: key.to_string(), + id: "1".to_string(), + title: format!("Ticket {}", key), + reporter: None, + created: None, + updated: None, + status: Some(TicketStatus { + name: status_name.to_string(), + color_name: Some(color.to_string()), + }), + issue_type: Some(TicketType { + name: "Task".to_string(), + }), + priority: Some(TicketPriority { + name: "Medium".to_string(), + }), + } + } + + fn make_ticket_no_status(key: &str) -> ConnectedTicket { + ConnectedTicket { + key: key.to_string(), + id: "1".to_string(), + title: format!("Ticket {}", key), + reporter: None, + created: None, + 
updated: None, + status: None, + issue_type: None, + priority: None, + } + } + + #[test] + fn filter_open_excludes_done() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + make_ticket("A-3", "To Do", "blue-gray"), + ]; + let result = filter_tickets(tickets, true, None).unwrap(); + assert_eq!(result.len(), 2); + assert_eq!(result[0].key, "A-1"); + assert_eq!(result[1].key, "A-3"); + } + + #[test] + fn filter_open_includes_no_status() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket_no_status("A-2"), + ]; + let result = filter_tickets(tickets, true, None).unwrap(); + assert_eq!(result.len(), 2); + } + + #[test] + fn filter_status_exact_match() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + make_ticket("A-3", "To Do", "blue-gray"), + ]; + let result = filter_tickets(tickets, false, Some("Done")).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].key, "A-2"); + } + + #[test] + fn filter_status_partial_match() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + ]; + let result = filter_tickets(tickets, false, Some("prog")).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].key, "A-1"); + } + + #[test] + fn filter_status_no_match() { + let tickets = vec![make_ticket("A-1", "In Progress", "yellow")]; + let result = filter_tickets(tickets, false, Some("Blocked")); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("No status matching")); + assert!(err.contains("In Progress")); + } + + #[test] + fn filter_status_ambiguous() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "In Review", "yellow"), + ]; + let result = filter_tickets(tickets, false, Some("In")); + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + 
assert!(err.contains("Ambiguous")); + } + + #[test] + fn filter_status_excludes_no_status() { + let tickets = vec![ + make_ticket("A-1", "Done", "green"), + make_ticket_no_status("A-2"), + ]; + let result = filter_tickets(tickets, false, Some("Done")).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].key, "A-1"); + } + + #[test] + fn no_filter_returns_all() { + let tickets = vec![ + make_ticket("A-1", "In Progress", "yellow"), + make_ticket("A-2", "Done", "green"), + ]; + let result = filter_tickets(tickets, false, None).unwrap(); + assert_eq!(result.len(), 2); + } + + use crate::types::assets::{DefaultType, ObjectTypeAttributeDef, ReferenceObjectType}; + + fn make_attr_def( + default_type: Option, + reference_object_type: Option, + ) -> ObjectTypeAttributeDef { + ObjectTypeAttributeDef { + id: "1".into(), + name: "test".into(), + system: false, + hidden: false, + label: false, + position: 0, + default_type, + reference_type: None, + reference_object_type, + minimum_cardinality: 0, + maximum_cardinality: 1, + editable: true, + description: None, + options: None, + } + } + + #[test] + fn format_attr_type_default_type() { + let attr = make_attr_def( + Some(DefaultType { + id: 0, + name: "Text".into(), + }), + None, + ); + assert_eq!(super::format_attribute_type(&attr), "Text"); + } + + #[test] + fn format_attr_type_reference() { + let attr = make_attr_def( + None, + Some(ReferenceObjectType { + id: "122".into(), + name: "Service".into(), + }), + ); + assert_eq!( + super::format_attribute_type(&attr), + "Reference \u{2192} Service" + ); + } + + #[test] + fn format_attr_type_unknown() { + let attr = make_attr_def(None, None); + assert_eq!(super::format_attribute_type(&attr), "Unknown"); + } + + #[test] + fn format_attr_type_default_takes_precedence() { + let attr = make_attr_def( + Some(DefaultType { + id: 0, + name: "Text".into(), + }), + Some(ReferenceObjectType { + id: "1".into(), + name: "Svc".into(), + }), + ); + 
assert_eq!(super::format_attribute_type(&attr), "Text"); + } + + // ── resolve_schema tests ───────────────────────────────────── + + fn make_schema(id: &str, name: &str) -> crate::types::assets::ObjectSchema { + crate::types::assets::ObjectSchema { + id: id.into(), + name: name.into(), + object_schema_key: format!("KEY{}", id), + description: None, + object_count: 0, + object_type_count: 0, + } + } + + #[test] + fn resolve_schema_exact_id_match() { + let schemas = vec![make_schema("10", "ITSM"), make_schema("20", "HR")]; + let result = super::resolve_schema("10", &schemas).unwrap(); + assert_eq!(result.id, "10"); + assert_eq!(result.name, "ITSM"); + } + + #[test] + fn resolve_schema_exact_name_match() { + let schemas = vec![make_schema("10", "ITSM"), make_schema("20", "HR")]; + let result = super::resolve_schema("ITSM", &schemas).unwrap(); + assert_eq!(result.id, "10"); + } + + #[test] + fn resolve_schema_case_insensitive_name_match() { + let schemas = vec![make_schema("10", "ITSM"), make_schema("20", "HR")]; + let result = super::resolve_schema("itsm", &schemas).unwrap(); + assert_eq!(result.id, "10"); + } + + #[test] + fn resolve_schema_partial_name_match() { + let schemas = vec![make_schema("10", "ITSM Assets"), make_schema("20", "HR")]; + let result = super::resolve_schema("itsm", &schemas).unwrap(); + assert_eq!(result.id, "10"); + } + + #[test] + fn resolve_schema_no_match() { + let schemas = vec![make_schema("10", "ITSM"), make_schema("20", "HR")]; + let err = super::resolve_schema("Finance", &schemas).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("No schema matching"), "got: {msg}"); + assert!(msg.contains("Finance"), "got: {msg}"); + } + + #[test] + fn resolve_schema_ambiguous_match() { + let schemas = vec![ + make_schema("10", "IT Assets"), + make_schema("20", "IT Services"), + ]; + let err = super::resolve_schema("IT", &schemas).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Ambiguous"), "got: {msg}"); + } + + 
#[test] + fn resolve_schema_duplicate_names_returns_error_with_ids() { + let schemas = vec![make_schema("10", "Assets"), make_schema("20", "Assets")]; + let err = super::resolve_schema("Assets", &schemas).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Multiple schemas"), "got: {msg}"); + assert!(msg.contains("id: 10"), "should list first ID, got: {msg}"); + assert!(msg.contains("id: 20"), "should list second ID, got: {msg}"); + assert!( + msg.contains("Use the schema ID instead"), + "should suggest using ID, got: {msg}" + ); + } + + #[test] + fn resolve_schema_duplicate_names_case_insensitive() { + let schemas = vec![make_schema("10", "Assets"), make_schema("20", "assets")]; + let err = super::resolve_schema("assets", &schemas).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Multiple schemas"), "got: {msg}"); + assert!(msg.contains("id: 10"), "should list first ID, got: {msg}"); + assert!(msg.contains("id: 20"), "should list second ID, got: {msg}"); + } + + #[test] + fn resolve_schema_id_takes_priority_over_name() { + // Schema ID "HR" matches exactly, even though name "ITSM" doesn't + let schemas = vec![make_schema("HR", "ITSM"), make_schema("20", "HR")]; + let result = super::resolve_schema("HR", &schemas).unwrap(); + assert_eq!(result.id, "HR"); + assert_eq!(result.name, "ITSM"); + } +} diff --git a/src/cli/board.rs b/src/cli/board.rs index 8d00554..f03980c 100644 --- a/src/cli/board.rs +++ b/src/cli/board.rs @@ -3,70 +3,211 @@ use anyhow::{Result, bail}; use crate::api::client::JiraClient; use crate::cli::{BoardCommand, OutputFormat}; use crate::config::Config; +use crate::error::JrError; use crate::output; +/// Resolve a board ID from CLI override, config, or auto-discovery. +/// +/// Resolution order: +/// 1. CLI `--board` override +/// 2. Config `board_id` from `.jr.toml` +/// 3. 
Auto-discover via Jira API using project key +pub async fn resolve_board_id( + config: &Config, + client: &JiraClient, + board_override: Option, + project_override: Option<&str>, + require_scrum: bool, +) -> Result { + // Step 1: CLI override + if let Some(id) = board_override { + return Ok(id); + } + + // Step 2: Config + if let Some(id) = config.project.board_id { + return Ok(id); + } + + // Step 3: Auto-discover + let project_key = config.project_key(project_override).ok_or_else(|| { + JrError::ConfigError( + "No board configured and no project specified. \ + Use --board , set board_id in .jr.toml, or specify --project to auto-discover." + .into(), + ) + })?; + + let type_filter = if require_scrum { Some("scrum") } else { None }; + let boards = client.list_boards(Some(&project_key), type_filter).await?; + + match boards.len() { + 0 => { + let board_kind = if require_scrum { + "scrum boards" + } else { + "boards" + }; + bail!( + "No {} found for project {}. \ + The project key may be incorrect, or the project may not have any {}. \ + Run \"jr board list --project {}\" to inspect available boards.", + board_kind, + project_key, + board_kind, + project_key, + ); + } + 1 => { + let board = &boards[0]; + eprintln!( + "Using board {} - {} ({})", + board.id, board.name, board.board_type + ); + Ok(board.id) + } + _ => { + let board_kind = if require_scrum { + "scrum boards" + } else { + "boards" + }; + let mut msg = format!( + "Multiple {} found for project {}:\n", + board_kind, project_key + ); + for b in &boards { + if require_scrum { + msg.push_str(&format!(" {} {}\n", b.id, b.name)); + } else { + msg.push_str(&format!(" {} {} {}\n", b.id, b.board_type, b.name)); + } + } + msg.push_str("Use --board to select one, or set board_id in .jr.toml."); + bail!("{}", msg); + } + } +} + /// Handle all board subcommands. 
pub async fn handle( command: BoardCommand, config: &Config, client: &JiraClient, output_format: &OutputFormat, + project_override: Option<&str>, ) -> Result<()> { match command { - BoardCommand::List => handle_list(client, output_format).await, - BoardCommand::View => handle_view(config, client, output_format).await, + BoardCommand::List { board_type } => { + handle_list( + client, + output_format, + project_override, + board_type.as_deref(), + ) + .await + } + BoardCommand::View { board, limit, all } => { + handle_view( + config, + client, + output_format, + board, + limit, + all, + project_override, + ) + .await + } } } -async fn handle_list(client: &JiraClient, output_format: &OutputFormat) -> Result<()> { - let boards = client.list_boards().await?; +async fn handle_list( + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, + board_type_filter: Option<&str>, +) -> Result<()> { + let boards = client + .list_boards(project_override, board_type_filter) + .await?; let rows: Vec> = boards .iter() - .map(|b| vec![b.id.to_string(), b.board_type.clone(), b.name.clone()]) + .map(|b| { + let project = b + .location + .as_ref() + .and_then(|loc| loc.project_key.as_deref()) + .unwrap_or("-"); + vec![ + b.id.to_string(), + b.board_type.clone(), + project.to_string(), + b.name.clone(), + ] + }) .collect(); - output::print_output(output_format, &["ID", "Type", "Name"], &rows, &boards)?; + output::print_output( + output_format, + &["ID", "Type", "Project", "Name"], + &rows, + &boards, + )?; Ok(()) } +/// Build JQL for kanban board view: all non-Done issues, ordered by rank. 
+fn build_kanban_jql(project_key: Option<&str>) -> String { + let mut parts: Vec = Vec::new(); + if let Some(pk) = project_key { + parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + } + parts.push("statusCategory != Done".into()); + let where_clause = parts.join(" AND "); + format!("{where_clause} ORDER BY rank ASC") +} + async fn handle_view( config: &Config, client: &JiraClient, output_format: &OutputFormat, + board_override: Option, + limit: Option, + all: bool, + project_override: Option<&str>, ) -> Result<()> { - let board_id = config.project.board_id.ok_or_else(|| { - anyhow::anyhow!("No board_id configured. Set board_id in .jr.toml or run \"jr init\".") - })?; + let effective_limit = crate::cli::resolve_effective_limit(limit, all); + + let board_id = + resolve_board_id(config, client, board_override, project_override, false).await?; let board_config = client.get_board_config(board_id).await?; let board_type = board_config.board_type.to_lowercase(); - let issues = if board_type == "scrum" { + let (issues, has_more) = if board_type == "scrum" { // For scrum boards, fetch the active sprint's issues let sprints = client.list_sprints(board_id, Some("active")).await?; if sprints.is_empty() { bail!("No active sprint found for board {}.", board_id); } let sprint = &sprints[0]; - client.get_sprint_issues(sprint.id, None, &[]).await? + let result = client + .get_sprint_issues(sprint.id, None, effective_limit, &[]) + .await?; + (result.issues, result.has_more) } else { - // Kanban: search for issues not in Done status category - let project_key = config.project_key(None); + let project_key = config.project_key(project_override); if project_key.is_none() { eprintln!( "warning: no project configured for board. Showing issues across all projects. Set project in .jr.toml to scope results." 
); } - let mut jql_parts: Vec = Vec::new(); - if let Some(ref pk) = project_key { - jql_parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); - } - jql_parts.push("statusCategory != Done".into()); - jql_parts.push("ORDER BY rank ASC".into()); - let jql = jql_parts.join(" AND "); - client.search_issues(&jql, None, &[]).await? + let jql = build_kanban_jql(project_key.as_deref()); + let result = client.search_issues(&jql, effective_limit, &[]).await?; + (result.issues, result.has_more) }; let rows = super::issue::format_issue_rows_public(&issues); @@ -78,5 +219,64 @@ async fn handle_view( &issues, )?; + if has_more && !all { + if board_type != "scrum" { + // Kanban: try to get approximate total via JQL count + let project_key = config.project_key(project_override); + let jql = build_kanban_jql(project_key.as_deref()); + let count_jql = crate::jql::strip_order_by(&jql); + match client.approximate_count(count_jql).await { + Ok(total) if total > 0 => { + eprintln!( + "Showing {} of ~{} results. Use --limit or --all to see more.", + issues.len(), + total + ); + } + Ok(_) | Err(_) => { + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issues.len() + ); + } + } + } else { + // Scrum: no reliable total count from Agile API + eprintln!( + "Showing {} results. 
Use --limit or --all to see more.", + issues.len() + ); + } + } + Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn build_kanban_jql_with_project() { + let jql = build_kanban_jql(Some("FOO")); + assert_eq!( + jql, + "project = \"FOO\" AND statusCategory != Done ORDER BY rank ASC" + ); + } + + #[test] + fn build_kanban_jql_without_project() { + let jql = build_kanban_jql(None); + assert_eq!(jql, "statusCategory != Done ORDER BY rank ASC"); + } + + #[test] + fn build_kanban_jql_escapes_special_characters() { + let jql = build_kanban_jql(Some("FOO\"BAR")); + assert_eq!( + jql, + "project = \"FOO\\\"BAR\" AND statusCategory != Done ORDER BY rank ASC" + ); + } +} diff --git a/src/cli/init.rs b/src/cli/init.rs index d3c3c91..04560fc 100644 --- a/src/cli/init.rs +++ b/src/cli/init.rs @@ -61,7 +61,7 @@ pub async fn handle() -> Result<()> { .interact()?; if setup_project { - let boards = client.list_boards().await?; + let boards = client.list_boards(None, None).await?; if boards.is_empty() { println!("No boards found. You can configure .jr.toml manually."); } else { diff --git a/src/cli/issue/assets.rs b/src/cli/issue/assets.rs new file mode 100644 index 0000000..e7662fa --- /dev/null +++ b/src/cli/issue/assets.rs @@ -0,0 +1,65 @@ +use anyhow::Result; + +use crate::api::assets::linked::{ + cmdb_field_ids as extract_cmdb_ids, enrich_assets, extract_linked_assets, + get_or_fetch_cmdb_fields, +}; +use crate::api::client::JiraClient; +use crate::cli::OutputFormat; +use crate::error::JrError; +use crate::output; + +pub(super) async fn handle_issue_assets( + key: &str, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let cmdb_fields = get_or_fetch_cmdb_fields(client).await?; + let cmdb_field_id_list = extract_cmdb_ids(&cmdb_fields); + + if cmdb_field_id_list.is_empty() { + return Err(JrError::UserError( + "No Assets custom fields found on this Jira instance. \ + Assets requires Jira Service Management Premium or Enterprise." 
+ .into(), + ) + .into()); + } + + let extra_fields: Vec<&str> = cmdb_field_id_list.iter().map(|s| s.as_str()).collect(); + let issue = client.get_issue(key, &extra_fields).await?; + let mut assets = extract_linked_assets(&issue.fields.extra, &cmdb_field_id_list); + + if assets.is_empty() { + eprintln!("No assets linked to {}.", key); + return Ok(()); + } + + enrich_assets(client, &mut assets).await; + + match output_format { + OutputFormat::Json => { + println!("{}", output::render_json(&assets)?); + } + OutputFormat::Table => { + let rows: Vec> = assets + .iter() + .map(|a| { + vec![ + a.key.clone().unwrap_or_else(|| { + a.id.as_ref() + .map(|id| format!("#{}", id)) + .unwrap_or_else(|| "-".into()) + }), + a.asset_type.clone().unwrap_or_else(|| "-".into()), + a.name.clone().unwrap_or_else(|| "-".into()), + ] + }) + .collect(); + + output::print_output(output_format, &["Key", "Type", "Name"], &rows, &assets)?; + } + } + + Ok(()) +} diff --git a/src/cli/issue/create.rs b/src/cli/issue/create.rs index a044669..fc9557c 100644 --- a/src/cli/issue/create.rs +++ b/src/cli/issue/create.rs @@ -5,9 +5,11 @@ use crate::adf; use crate::api::client::JiraClient; use crate::cli::{IssueCommand, OutputFormat}; use crate::config::Config; +use crate::error::JrError; use crate::output; use super::helpers; +use super::json_output; pub(super) async fn handle_create( command: IssueCommand, @@ -29,6 +31,8 @@ pub(super) async fn handle_create( points, markdown, parent, + to, + account_id, } = command else { unreachable!() @@ -45,7 +49,11 @@ pub(super) async fn handle_create( } }) .ok_or_else(|| { - anyhow::anyhow!("Project key is required. Use --project or configure .jr.toml") + JrError::UserError( + "Project key is required. Use --project or configure .jr.toml. \ + Run \"jr project list\" to see available projects." 
+ .into(), + ) })?; // Resolve issue type @@ -57,7 +65,7 @@ pub(super) async fn handle_create( helpers::prompt_input("Issue type (e.g., Task, Bug, Story)").ok() } }) - .ok_or_else(|| anyhow::anyhow!("Issue type is required. Use --type"))?; + .ok_or_else(|| JrError::UserError("Issue type is required. Use --type".into()))?; // Resolve summary let summary_text = summary @@ -68,7 +76,7 @@ pub(super) async fn handle_create( helpers::prompt_input("Summary").ok() } }) - .ok_or_else(|| anyhow::anyhow!("Summary is required. Use --summary"))?; + .ok_or_else(|| JrError::UserError("Summary is required. Use --summary".into()))?; // Resolve description let desc_text = if description_stdin { @@ -118,14 +126,32 @@ pub(super) async fn handle_create( fields["parent"] = json!({"key": parent_key}); } + if let Some(ref id) = account_id { + fields["assignee"] = json!({"accountId": id}); + } else if let Some(ref user_query) = to { + let (acct_id, _display_name) = + helpers::resolve_assignee_by_project(client, user_query, &project_key, no_input) + .await?; + fields["assignee"] = json!({"accountId": acct_id}); + } + let response = client.create_issue(fields).await?; + let browse_url = format!( + "{}/browse/{}", + client.instance_url().trim_end_matches('/'), + response.key + ); + match output_format { OutputFormat::Json => { - println!("{}", serde_json::to_string_pretty(&response)?); + let mut json_response = serde_json::to_value(&response)?; + json_response["url"] = json!(browse_url); + println!("{}", serde_json::to_string_pretty(&json_response)?); } OutputFormat::Table => { output::print_success(&format!("Created issue {}", response.key)); + println!("{}", browse_url); } } @@ -149,6 +175,9 @@ pub(super) async fn handle_edit( points, no_points, parent, + description, + description_stdin, + markdown, } = command else { unreachable!() @@ -157,6 +186,25 @@ pub(super) async fn handle_edit( let mut fields = json!({}); let mut has_updates = false; + // Resolve description + let desc_text = if 
description_stdin { + let mut buf = String::new(); + std::io::Read::read_to_string(&mut std::io::stdin(), &mut buf)?; + Some(buf) + } else { + description + }; + + if let Some(ref text) = desc_text { + let adf_body = if markdown { + adf::markdown_to_adf(text) + } else { + adf::text_to_adf(text) + }; + fields["description"] = adf_body; + has_updates = true; + } + if let Some(ref s) = summary { fields["summary"] = json!(s); has_updates = true; @@ -225,7 +273,7 @@ pub(super) async fn handle_edit( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ "key": key, "updated": true }))? + serde_json::to_string_pretty(&json_output::edit_response(&key))? ); } OutputFormat::Table => { @@ -238,7 +286,7 @@ pub(super) async fn handle_edit( if !has_updates { bail!( - "No fields specified to update. Use --summary, --type, --priority, --label, --team, --points, --no-points, or --parent." + "No fields specified to update. Use --summary, --type, --priority, --label, --team, --points, --no-points, --parent, --description, or --description-stdin." ); } @@ -248,7 +296,7 @@ pub(super) async fn handle_edit( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ "key": key, "updated": true }))? + serde_json::to_string_pretty(&json_output::edit_response(&key))? ); } OutputFormat::Table => { @@ -258,3 +306,24 @@ pub(super) async fn handle_edit( Ok(()) } + +#[cfg(test)] +mod tests { + use crate::error::JrError; + + #[test] + fn missing_project_returns_user_error() { + let result: Option = None; + let err = result + .ok_or_else(|| { + JrError::UserError( + "Project key is required. Use --project or configure .jr.toml. \ + Run \"jr project list\" to see available projects." 
+ .into(), + ) + }) + .unwrap_err(); + assert_eq!(err.exit_code(), 64); + assert!(err.to_string().contains("Project key is required")); + } +} diff --git a/src/cli/issue/format.rs b/src/cli/issue/format.rs index 5b0e3ee..152199c 100644 --- a/src/cli/issue/format.rs +++ b/src/cli/issue/format.rs @@ -1,16 +1,23 @@ +use crate::types::assets::LinkedAsset; +use crate::types::assets::linked::format_linked_assets_short; use crate::types::jira::Issue; /// Format issue rows for table output. pub fn format_issue_rows_public(issues: &[Issue]) -> Vec> { issues .iter() - .map(|issue| format_issue_row(issue, None)) + .map(|issue| format_issue_row(issue, None, None)) .collect() } -/// Build a single table row for an issue, optionally including story points. -pub fn format_issue_row(issue: &Issue, sp_field_id: Option<&str>) -> Vec { - let col_count = if sp_field_id.is_some() { 7 } else { 6 }; +/// Build a single table row for an issue, optionally including story points and linked assets. +pub fn format_issue_row( + issue: &Issue, + sp_field_id: Option<&str>, + assets: Option<&[LinkedAsset]>, +) -> Vec { + let col_count = + 6 + if sp_field_id.is_some() { 1 } else { 0 } + if assets.is_some() { 1 } else { 0 }; let mut row = Vec::with_capacity(col_count); row.push(issue.key.clone()); row.push( @@ -54,19 +61,25 @@ pub fn format_issue_row(issue: &Issue, sp_field_id: Option<&str>) -> Vec .map(|a| a.display_name.clone()) .unwrap_or_else(|| "Unassigned".into()), ); + if let Some(linked) = assets { + row.push(format_linked_assets_short(linked)); + } row.push(issue.fields.summary.clone()); row } /// Headers matching `format_issue_row` output. 
-pub fn issue_table_headers(show_points: bool) -> Vec<&'static str> { +pub fn issue_table_headers(show_points: bool, show_assets: bool) -> Vec<&'static str> { + let mut headers = vec!["Key", "Type", "Status", "Priority"]; if show_points { - vec![ - "Key", "Type", "Status", "Priority", "Points", "Assignee", "Summary", - ] - } else { - vec!["Key", "Type", "Status", "Priority", "Assignee", "Summary"] + headers.push("Points"); + } + headers.push("Assignee"); + if show_assets { + headers.push("Assets"); } + headers.push("Summary"); + headers } pub fn format_points(value: f64) -> String { diff --git a/src/cli/issue/helpers.rs b/src/cli/issue/helpers.rs index 5b30c1e..82f12e0 100644 --- a/src/cli/issue/helpers.rs +++ b/src/cli/issue/helpers.rs @@ -2,6 +2,8 @@ use anyhow::Result; use crate::api::client::JiraClient; use crate::config::Config; +use crate::error::JrError; +use crate::types::jira::User; pub(super) async fn resolve_team_field( config: &Config, @@ -17,8 +19,8 @@ pub(super) async fn resolve_team_field( .find_team_field_id() .await? .ok_or_else(|| { - anyhow::anyhow!( - "No \"Team\" field found on this Jira instance. This instance may not have the Team field configured." + JrError::ConfigError( + "No \"Team\" field found on this Jira instance. This instance may not have the Team field configured.".into(), ) })? 
}; @@ -33,11 +35,40 @@ pub(super) async fn resolve_team_field( let team_names: Vec = teams.iter().map(|t| t.name.clone()).collect(); match crate::partial_match::partial_match(team_name, &team_names) { crate::partial_match::MatchResult::Exact(matched_name) => { - let team = teams + let idx = teams .iter() - .find(|t| t.name == matched_name) + .position(|t| t.name == matched_name) .expect("matched name must exist in teams"); - Ok((field_id, team.id.clone())) + Ok((field_id, teams[idx].id.clone())) + } + crate::partial_match::MatchResult::ExactMultiple(_) => { + let name_lower = team_name.to_lowercase(); + let duplicates: Vec<&crate::cache::CachedTeam> = teams + .iter() + .filter(|t| t.name.to_lowercase() == name_lower) + .collect(); + + if no_input { + let lines: Vec = duplicates + .iter() + .map(|t| format!(" {} (id: {})", t.name, t.id)) + .collect(); + anyhow::bail!( + "Multiple teams named \"{}\" found:\n{}\nUse a more specific name.", + team_name, + lines.join("\n") + ); + } + + let labels: Vec = duplicates + .iter() + .map(|t| format!("{} ({})", t.name, t.id)) + .collect(); + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple teams named \"{}\"", team_name)) + .items(&labels) + .interact()?; + Ok((field_id, duplicates[selection].id.clone())) } crate::partial_match::MatchResult::Ambiguous(matches) => { if no_input { @@ -53,11 +84,11 @@ pub(super) async fn resolve_team_field( .items(&matches) .interact()?; let selected_name = &matches[selection]; - let team = teams + let idx = teams .iter() - .find(|t| &t.name == selected_name) + .position(|t| t.name == *selected_name) .expect("selected name must exist in teams"); - Ok((field_id, team.id.clone())) + Ok((field_id, teams[idx].id.clone())) } crate::partial_match::MatchResult::None(_) => { anyhow::bail!( @@ -69,16 +100,16 @@ pub(super) async fn resolve_team_field( } pub(super) fn resolve_story_points_field_id(config: &Config) -> Result { - config + Ok(config .global .fields 
.story_points_field_id .clone() .ok_or_else(|| { - anyhow::anyhow!( - "Story points field not configured. Run \"jr init\" or set story_points_field_id under [fields] in ~/.config/jr/config.toml" + JrError::ConfigError( + "Story points field not configured. Run \"jr init\" or set story_points_field_id under [fields] in ~/.config/jr/config.toml".into(), ) - }) + })?) } pub(super) fn prompt_input(prompt: &str) -> Result { @@ -87,3 +118,355 @@ pub(super) fn prompt_input(prompt: &str) -> Result { .interact_text()?; Ok(input) } + +/// Check if a user input string is the "me" keyword (case-insensitive). +fn is_me_keyword(input: &str) -> bool { + input.eq_ignore_ascii_case("me") +} + +// ── Shared user disambiguation ────────────────────────────────────── + +/// Disambiguate a list of users by display name using partial matching. +/// +/// Handles: empty list, single result, exact match, duplicate display names, +/// ambiguous substring match, and no match. In interactive mode, prompts the +/// user to choose when ambiguous. +/// +/// Returns `(account_id, display_name)` of the selected user. 
+fn disambiguate_user( + users: &[User], + name: &str, + no_input: bool, + empty_msg: &str, + none_msg_fn: impl Fn(&[String]) -> String, +) -> Result<(String, String)> { + if users.is_empty() { + anyhow::bail!("{}", empty_msg); + } + + if users.len() == 1 { + return Ok((users[0].account_id.clone(), users[0].display_name.clone())); + } + + let display_names: Vec = users.iter().map(|u| u.display_name.clone()).collect(); + match crate::partial_match::partial_match(name, &display_names) { + crate::partial_match::MatchResult::Exact(matched_name) => { + let idx = users + .iter() + .position(|u| u.display_name == matched_name) + .expect("matched name must exist in users"); + Ok(( + users[idx].account_id.clone(), + users[idx].display_name.clone(), + )) + } + crate::partial_match::MatchResult::ExactMultiple(_) => { + let name_lower = name.to_lowercase(); + let duplicates: Vec<&User> = users + .iter() + .filter(|u| u.display_name.to_lowercase() == name_lower) + .collect(); + + if no_input { + let lines: Vec = duplicates + .iter() + .map(|u| match &u.email_address { + Some(email) => format!( + " {} ({}, account: {})", + u.display_name, email, u.account_id + ), + None => { + format!(" {} (account: {})", u.display_name, u.account_id) + } + }) + .collect(); + anyhow::bail!( + "Multiple users named \"{}\" found:\n{}\nSpecify the accountId directly or use a more specific name.", + name, + lines.join("\n") + ); + } + + let labels: Vec = duplicates + .iter() + .map(|u| match &u.email_address { + Some(email) => format!("{} ({})", u.display_name, email), + None => format!("{} ({})", u.display_name, u.account_id), + }) + .collect(); + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users named \"{}\"", name)) + .items(&labels) + .interact()?; + Ok(( + duplicates[selection].account_id.clone(), + duplicates[selection].display_name.clone(), + )) + } + crate::partial_match::MatchResult::Ambiguous(matches) => { + if no_input { + anyhow::bail!( + "Multiple users 
match \"{}\": {}. Use a more specific name.", + name, + matches.join(", ") + ); + } + let selection = dialoguer::Select::new() + .with_prompt(format!("Multiple users match \"{name}\"")) + .items(&matches) + .interact()?; + let selected_name = &matches[selection]; + let idx = users + .iter() + .position(|u| u.display_name == *selected_name) + .expect("selected name must exist in users"); + Ok(( + users[idx].account_id.clone(), + users[idx].display_name.clone(), + )) + } + crate::partial_match::MatchResult::None(all_names) => { + anyhow::bail!("{}", none_msg_fn(&all_names)); + } + } +} + +// ── Public resolve functions ───────────────────────────────────────── + +/// Resolve a user flag value to a JQL fragment. +/// +/// - `"me"` (case-insensitive) → `"currentUser()"` (no API call) +/// - Any other value → search users API, filter active, disambiguate via partial_match +/// +/// Returns the JQL value to use (either `"currentUser()"` or an unquoted accountId). +pub(super) async fn resolve_user( + client: &JiraClient, + name: &str, + no_input: bool, +) -> Result { + if is_me_keyword(name) { + return Ok("currentUser()".to_string()); + } + + let users = client.search_users(name).await?; + let active_users: Vec<_> = users + .into_iter() + .filter(|u| u.active == Some(true)) + .collect(); + + let (account_id, _) = disambiguate_user( + &active_users, + name, + no_input, + &format!( + "No active user found matching \"{}\". The user may be deactivated.", + name + ), + |_all_names| { + format!( + "No active user found matching \"{}\". The user may be deactivated.", + name + ) + }, + )?; + Ok(account_id) +} + +/// Resolve a user flag value to an (account_id, display_name) tuple for assignment. 
+/// +/// - `"me"` (case-insensitive) → `get_myself()` (no search API call) +/// - Any other value → assignable user search API scoped to issue, disambiguate via partial_match +/// +/// Unlike `resolve_user` (which returns JQL fragments), this returns concrete +/// account details for the `PUT /assignee` API. +pub(super) async fn resolve_assignee( + client: &JiraClient, + name: &str, + issue_key: &str, + no_input: bool, +) -> Result<(String, String)> { + if is_me_keyword(name) { + let me = client.get_myself().await?; + return Ok((me.account_id, me.display_name)); + } + + let users = client.search_assignable_users(name, issue_key).await?; + + disambiguate_user( + &users, + name, + no_input, + &format!( + "No assignable user matching \"{}\" on issue {}. The user may not exist or may lack permission for this project. Try a different name or check spelling.", + name, issue_key, + ), + |all_names| { + format!( + "No assignable user with a name matching \"{}\" on issue {}. Found: {}", + name, + issue_key, + all_names.join(", "), + ) + }, + ) +} + +/// Resolve a user flag value to an (account_id, display_name) tuple for assignment by project. +/// +/// - `"me"` (case-insensitive) → `get_myself()` (no search API call) +/// - Any other value → assignable user search API scoped to project, disambiguate via partial_match +/// +/// Unlike `resolve_assignee` (which takes an issue key), this takes a project key +/// and uses the `multiProjectSearch` endpoint. Used during issue creation when no +/// issue key exists yet. +pub(super) async fn resolve_assignee_by_project( + client: &JiraClient, + name: &str, + project_key: &str, + no_input: bool, +) -> Result<(String, String)> { + if is_me_keyword(name) { + let me = client.get_myself().await?; + return Ok((me.account_id, me.display_name)); + } + + // The multiProjectSearch endpoint returns only users eligible for assignment, + // which should exclude deactivated users. 
No client-side active filter needed + // (consistent with resolve_assignee for issue-scoped search). + let users = client + .search_assignable_users_by_project(name, project_key) + .await?; + + disambiguate_user( + &users, + name, + no_input, + &format!( + "No assignable user matching \"{}\" in project {}. The user may not exist or may lack permission for this project. Try a different name or check spelling.", + name, project_key, + ), + |all_names| { + format!( + "No assignable user with a name matching \"{}\" in project {}. Found: {}", + name, + project_key, + all_names.join(", "), + ) + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn is_me_keyword_lowercase() { + assert!(is_me_keyword("me")); + } + + #[test] + fn is_me_keyword_uppercase() { + assert!(is_me_keyword("ME")); + } + + #[test] + fn is_me_keyword_mixed_case() { + assert!(is_me_keyword("Me")); + } + + #[test] + fn is_me_keyword_not_me() { + assert!(!is_me_keyword("Jane")); + } + + // ── disambiguate_user tests ────────────────────────────────────── + + fn make_user(account_id: &str, display_name: &str) -> User { + User { + account_id: account_id.to_string(), + display_name: display_name.to_string(), + email_address: None, + active: Some(true), + } + } + + fn make_user_with_email(account_id: &str, display_name: &str, email: &str) -> User { + User { + account_id: account_id.to_string(), + display_name: display_name.to_string(), + email_address: Some(email.to_string()), + active: Some(true), + } + } + + fn dummy_none_msg(all_names: &[String]) -> String { + format!("No match. 
Found: {}", all_names.join(", ")) + } + + #[test] + fn disambiguate_empty_list_returns_error() { + let result = disambiguate_user(&[], "Jane", true, "No users found", dummy_none_msg); + let err = result.unwrap_err(); + assert!(err.to_string().contains("No users found")); + } + + #[test] + fn disambiguate_single_user_returns_directly() { + let users = vec![make_user("acc-1", "Jane Doe")]; + let (id, name) = disambiguate_user(&users, "Jane", true, "empty", dummy_none_msg).unwrap(); + assert_eq!(id, "acc-1"); + assert_eq!(name, "Jane Doe"); + } + + #[test] + fn disambiguate_exact_match() { + let users = vec![ + make_user("acc-1", "Jane Doe"), + make_user("acc-2", "Janet Smith"), + ]; + let (id, name) = + disambiguate_user(&users, "Jane Doe", true, "empty", dummy_none_msg).unwrap(); + assert_eq!(id, "acc-1"); + assert_eq!(name, "Jane Doe"); + } + + #[test] + fn disambiguate_exact_multiple_no_input_errors_with_details() { + let users = vec![ + make_user_with_email("acc-1", "Jane Doe", "jane1@example.com"), + make_user("acc-2", "Jane Doe"), + ]; + let result = disambiguate_user(&users, "Jane Doe", true, "empty", dummy_none_msg); + let err = result.unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Multiple users named \"Jane Doe\"")); + assert!(msg.contains("jane1@example.com")); + assert!(msg.contains("acc-2")); + } + + #[test] + fn disambiguate_ambiguous_no_input_errors_with_candidates() { + let users = vec![ + make_user("acc-1", "Jane Doe"), + make_user("acc-2", "Jane Smith"), + ]; + let result = disambiguate_user(&users, "Jane", true, "empty", dummy_none_msg); + let err = result.unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Multiple users match \"Jane\"")); + assert!(msg.contains("Jane Doe")); + assert!(msg.contains("Jane Smith")); + } + + #[test] + fn disambiguate_no_match_uses_none_msg_fn() { + let users = vec![make_user("acc-1", "Alice"), make_user("acc-2", "Bob")]; + let result = disambiguate_user(&users, "Zara", true, 
"empty", dummy_none_msg); + let err = result.unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("No match. Found:")); + assert!(msg.contains("Alice")); + assert!(msg.contains("Bob")); + } +} diff --git a/src/cli/issue/json_output.rs b/src/cli/issue/json_output.rs new file mode 100644 index 0000000..67c6e7b --- /dev/null +++ b/src/cli/issue/json_output.rs @@ -0,0 +1,121 @@ +use serde_json::{Value, json}; + +/// JSON response for `issue move` — both changed and idempotent cases. +pub(crate) fn move_response(key: &str, status: &str, changed: bool) -> Value { + json!({ + "key": key, + "status": status, + "changed": changed + }) +} + +/// JSON response for `issue assign` when the assignment changed. +pub(crate) fn assign_changed_response(key: &str, display_name: &str, account_id: &str) -> Value { + json!({ + "key": key, + "assignee": display_name, + "assignee_account_id": account_id, + "changed": true + }) +} + +/// JSON response for `issue assign` when already assigned to the target user. +pub(crate) fn assign_unchanged_response(key: &str, display_name: &str, account_id: &str) -> Value { + json!({ + "key": key, + "assignee": display_name, + "assignee_account_id": account_id, + "changed": false + }) +} + +/// JSON response for `issue assign --unassign`. +pub(crate) fn unassign_response(key: &str, changed: bool) -> Value { + json!({ + "key": key, + "assignee": null, + "changed": changed + }) +} + +/// JSON response for `issue edit`. +pub(crate) fn edit_response(key: &str) -> Value { + json!({ + "key": key, + "updated": true + }) +} + +/// JSON response for `issue link`. +pub(crate) fn link_response(key1: &str, key2: &str, link_type: &str) -> Value { + json!({ + "key1": key1, + "key2": key2, + "type": link_type, + "linked": true + }) +} + +/// JSON response for `issue unlink` — covers both success and no-match cases. 
+pub(crate) fn unlink_response(unlinked: bool, count: usize) -> Value { + json!({ + "unlinked": unlinked, + "count": count + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use insta::assert_json_snapshot; + + #[test] + fn test_move_response_changed() { + assert_json_snapshot!(move_response("TEST-1", "In Progress", true)); + } + + #[test] + fn test_move_response_unchanged() { + assert_json_snapshot!(move_response("TEST-1", "Done", false)); + } + + #[test] + fn test_assign_changed() { + assert_json_snapshot!(assign_changed_response("TEST-1", "Jane Doe", "abc123")); + } + + #[test] + fn test_assign_unchanged() { + assert_json_snapshot!(assign_unchanged_response("TEST-1", "Jane Doe", "abc123")); + } + + #[test] + fn test_unassign() { + assert_json_snapshot!(unassign_response("TEST-1", true)); + } + + #[test] + fn test_unassign_unchanged() { + assert_json_snapshot!(unassign_response("TEST-1", false)); + } + + #[test] + fn test_edit() { + assert_json_snapshot!(edit_response("TEST-1")); + } + + #[test] + fn test_link() { + assert_json_snapshot!(link_response("TEST-1", "TEST-2", "Blocks")); + } + + #[test] + fn test_unlink_success() { + assert_json_snapshot!(unlink_response(true, 2)); + } + + #[test] + fn test_unlink_no_match() { + assert_json_snapshot!(unlink_response(false, 0)); + } +} diff --git a/src/cli/issue/links.rs b/src/cli/issue/links.rs index 7717fda..02fd783 100644 --- a/src/cli/issue/links.rs +++ b/src/cli/issue/links.rs @@ -1,5 +1,5 @@ +use super::json_output; use anyhow::{Result, bail}; -use serde_json::json; use crate::api::client::JiraClient; use crate::cli::{IssueCommand, OutputFormat}; @@ -61,6 +61,8 @@ pub(super) async fn handle_link( let type_names: Vec = link_types.iter().map(|lt| lt.name.clone()).collect(); let resolved_name = match partial_match::partial_match(&link_type_name, &type_names) { MatchResult::Exact(name) => name, + // Link types are unique per Jira API; treat like Exact if duplicates ever occur + MatchResult::ExactMultiple(name) => 
name, MatchResult::Ambiguous(matches) => { if no_input { bail!( @@ -91,12 +93,11 @@ pub(super) async fn handle_link( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "key1": key1, - "key2": key2, - "type": resolved_name, - "linked": true - }))? + serde_json::to_string_pretty(&json_output::link_response( + &key1, + &key2, + &resolved_name, + ))? ); } OutputFormat::Table => { @@ -129,6 +130,8 @@ pub(super) async fn handle_unlink( let type_names: Vec = link_types.iter().map(|lt| lt.name.clone()).collect(); let resolved = match partial_match::partial_match(type_name, &type_names) { MatchResult::Exact(name) => name, + // Link types are unique per Jira API; treat like Exact if duplicates ever occur + MatchResult::ExactMultiple(name) => name, MatchResult::Ambiguous(matches) => { if no_input { bail!( @@ -186,10 +189,7 @@ pub(super) async fn handle_unlink( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "unlinked": false, - "count": 0 - }))? + serde_json::to_string_pretty(&json_output::unlink_response(false, 0))? ); } OutputFormat::Table => { @@ -208,10 +208,7 @@ pub(super) async fn handle_unlink( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "unlinked": true, - "count": count - }))? + serde_json::to_string_pretty(&json_output::unlink_response(true, count))? 
); } OutputFormat::Table => { diff --git a/src/cli/issue/list.rs b/src/cli/issue/list.rs index dcbbfd8..5933c02 100644 --- a/src/cli/issue/list.rs +++ b/src/cli/issue/list.rs @@ -1,17 +1,59 @@ use anyhow::Result; use crate::adf; +use crate::api::assets::linked::{ + cmdb_field_ids, enrich_assets, enrich_json_assets, extract_linked_assets, + get_or_fetch_cmdb_fields, +}; use crate::api::client::JiraClient; -use crate::cli::{IssueCommand, OutputFormat}; +use crate::cli::{IssueCommand, OutputFormat, resolve_effective_limit}; use crate::config::Config; use crate::error::JrError; use crate::output; +use crate::types::assets::LinkedAsset; +use crate::types::assets::linked::format_linked_assets; use super::format; use super::helpers; +use crate::api::jira::projects::IssueTypeWithStatuses; +use crate::partial_match::{self, MatchResult}; + +/// Extract unique status names from project-scoped statuses response (deduplicated, sorted). +fn extract_unique_status_names(issue_types: &[IssueTypeWithStatuses]) -> Vec { + let mut seen = std::collections::HashSet::new(); + let mut names = Vec::new(); + for it in issue_types { + for s in &it.statuses { + if seen.insert(s.name.clone()) { + names.push(s.name.clone()); + } + } + } + names.sort(); + names +} + // ── List ────────────────────────────────────────────────────────────── +/// Build base JQL parts when `--jql` is provided. +/// +/// Returns `(base_parts, order_by)`. Strips any trailing `ORDER BY` clause +/// from `jql` and prepends the project scope if `project_key` is set. 
+fn build_jql_base_parts(jql: &str, project_key: Option<&str>) -> (Vec, &'static str) { + let stripped = crate::jql::strip_order_by(jql); + let mut parts = Vec::new(); + + if let Some(pk) = project_key { + parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + } + if !stripped.is_empty() { + parts.push(format!("({})", stripped)); + } + + (parts, "updated DESC") +} + pub(super) async fn handle_list( command: IssueCommand, output_format: &OutputFormat, @@ -25,14 +67,87 @@ pub(super) async fn handle_list( status, team, limit, + all, + assignee, + reporter, + recent, + open, points: show_points, + assets: show_assets, + asset: asset_key, + created_after, + created_before, + updated_after, + updated_before, } = command else { unreachable!() }; + let effective_limit = resolve_effective_limit(limit, all); + + // Auto-enable assets display column when filtering by asset + let show_assets = show_assets || asset_key.is_some(); + + // Validate --recent duration format early + if let Some(ref d) = recent { + crate::jql::validate_duration(d).map_err(JrError::UserError)?; + } + + // Validate --asset key format early + if let Some(ref key) = asset_key { + crate::jql::validate_asset_key(key).map_err(JrError::UserError)?; + } + + // Validate date filter flags early + let created_after_date = if let Some(ref d) = created_after { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; + let created_before_date = if let Some(ref d) = created_before { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; + let updated_after_date = if let Some(ref d) = updated_after { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) + } else { + None + }; + let updated_before_date = if let Some(ref d) = updated_before { + Some(crate::jql::validate_date(d).map_err(JrError::UserError)?) 
+ } else { + None + }; + + // Build date filter JQL clauses + let created_after_clause = created_after_date.map(|d| format!("created >= \"{}\"", d)); + let created_before_clause = created_before_date.map(|d| { + let next_day = d + chrono::Days::new(1); + format!("created < \"{}\"", next_day) + }); + let updated_after_clause = updated_after_date.map(|d| format!("updated >= \"{}\"", d)); + let updated_before_clause = updated_before_date.map(|d| { + let next_day = d + chrono::Days::new(1); + format!("updated < \"{}\"", next_day) + }); + + // Resolve --assignee and --reporter to JQL values + let assignee_jql = if let Some(ref name) = assignee { + Some(helpers::resolve_user(client, name, no_input).await?) + } else { + None + }; + let reporter_jql = if let Some(ref name) = reporter { + Some(helpers::resolve_user(client, name, no_input).await?) + } else { + None + }; + let sp_field_id = config.global.fields.story_points_field_id.as_deref(); - let extra: Vec<&str> = sp_field_id.iter().copied().collect(); + let mut extra: Vec<&str> = sp_field_id.iter().copied().collect(); + // Resolve team name to (field_id, uuid) before building JQL let resolved_team = if let Some(ref team_name) = team { Some(helpers::resolve_team_field(config, client, team_name, no_input).await?) @@ -40,100 +155,356 @@ pub(super) async fn handle_list( None }; - let effective_jql = if let Some(raw_jql) = jql { - raw_jql + // Build pre-formatted team clause for build_filter_clauses + let team_clause = resolved_team.as_ref().map(|(field_id, team_uuid)| { + format!("{} = \"{}\"", field_id, crate::jql::escape_value(team_uuid)) + }); + + // Resolve CMDB fields for --asset filter (needs field names for aqlFunction) + let (asset_clause, asset_cmdb_fields) = if let Some(ref key) = asset_key { + let cmdb_fields = get_or_fetch_cmdb_fields(client).await?; + if cmdb_fields.is_empty() { + return Err(JrError::UserError( + "--asset requires Assets custom fields on this Jira instance. 
\ + Assets requires a paid Jira Service Management plan." + .into(), + ) + .into()); + } + let clause = crate::jql::build_asset_clause(key, &cmdb_fields); + (Some(clause), Some(cmdb_fields)) + } else { + (None, None) + }; + + // Resolve project key once, before validation and JQL building + let project_key = config.project_key(project_override); + + // Validate --project exists + if let Some(ref pk) = project_key { + // Skip if --status is set (project will be validated via statuses endpoint below) + if status.is_none() && !client.project_exists(pk).await? { + return Err(JrError::UserError(format!( + "Project \"{}\" not found. Run \"jr project list\" to see available projects.", + pk + )) + .into()); + } + } + + // Validate --status and resolve to exact name + let resolved_status: Option = if let Some(ref status_input) = status { + let valid_statuses = if let Some(ref pk) = project_key { + // Project-scoped: also validates project existence (404 = not found) + match client.get_project_statuses(pk).await { + Ok(issue_types) => extract_unique_status_names(&issue_types), + Err(e) => { + if let Some(JrError::ApiError { status: 404, .. }) = e.downcast_ref::() + { + return Err(JrError::UserError(format!( + "Project \"{}\" not found. Run \"jr project list\" to see available projects.", + pk + )) + .into()); + } + return Err(e); + } + } + } else { + client.get_all_statuses().await? + }; + + match partial_match::partial_match(status_input, &valid_statuses) { + MatchResult::Exact(name) => Some(name), + // Case-sensitive dedup upstream; treat like Exact if case-variant duplicates slip through + MatchResult::ExactMultiple(name) => Some(name), + MatchResult::Ambiguous(matches) => { + return Err(JrError::UserError(format!( + "Ambiguous status \"{}\". 
Matches: {}", + status_input, + matches.join(", ") + )) + .into()); + } + MatchResult::None(all) => { + let available = all.join(", "); + let scope = if let Some(ref pk) = project_key { + format!(" for project {}", pk) + } else { + String::new() + }; + return Err(JrError::UserError(format!( + "No status matching \"{}\"{scope}. Available: {available}", + status_input, + )) + .into()); + } + } + } else { + None + }; + + // Build filter clauses from all flag values + let filter_parts = build_filter_clauses(FilterOptions { + assignee_jql: assignee_jql.as_deref(), + reporter_jql: reporter_jql.as_deref(), + status: resolved_status.as_deref(), + team_clause: team_clause.as_deref(), + recent: recent.as_deref(), + open, + asset_clause: asset_clause.as_deref(), + created_after_clause: created_after_clause.as_deref(), + created_before_clause: created_before_clause.as_deref(), + updated_after_clause: updated_after_clause.as_deref(), + updated_before_clause: updated_before_clause.as_deref(), + }); + + // Build base JQL + order by + let (base_parts, order_by): (Vec, &str) = if let Some(ref raw_jql) = jql { + build_jql_base_parts(raw_jql, project_key.as_deref()) } else { - // Try smart defaults: detect board type and build JQL let board_id = config.project.board_id; - let project_key = config.project_key(project_override); if let Some(bid) = board_id { - // Detect board type match client.get_board_config(bid).await { Ok(board_config) => { let board_type = board_config.board_type.to_lowercase(); if board_type == "scrum" { - // For scrum boards, find the active sprint match client.list_sprints(bid, Some("active")).await { Ok(sprints) if !sprints.is_empty() => { let sprint = &sprints[0]; - let mut jql_parts = vec![ - format!("sprint = {}", sprint.id), - "assignee = currentUser()".to_string(), - ]; - if let Some(ref s) = status { - jql_parts.push(format!( - "status = \"{}\"", - crate::jql::escape_value(s) - )); - } - if let Some((field_id, team_uuid)) = &resolved_team { - 
jql_parts.push(format!( - "{} = \"{}\"", - field_id, - crate::jql::escape_value(team_uuid) + (vec![format!("sprint = {}", sprint.id)], "rank ASC") + } + Ok(_) => { + // No active sprint — fall back to project-scoped JQL + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!( + "project = \"{}\"", + crate::jql::escape_value(pk) )); } - let where_clause = jql_parts.join(" AND "); - format!("{} ORDER BY rank ASC", where_clause) + (parts, "updated DESC") + } + Err(e) => { + return Err(e.context(format!( + "Failed to list sprints for board {}. \ + Use --jql to query directly.", + bid + ))); } - _ => build_fallback_jql( - project_key.as_deref(), - status.as_deref(), - resolved_team.as_ref(), - )?, } } else { - // Kanban: show open issues - let mut jql_parts: Vec = - vec!["assignee = currentUser()".to_string()]; + // Kanban: statusCategory != Done, no implicit assignee + let mut parts = Vec::new(); if let Some(ref pk) = project_key { - jql_parts - .push(format!("project = \"{}\"", crate::jql::escape_value(pk))); - } - jql_parts.push("statusCategory != Done".into()); - if let Some(ref s) = status { - jql_parts.push(format!("status = \"{}\"", crate::jql::escape_value(s))); + parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); } - if let Some((field_id, team_uuid)) = &resolved_team { - jql_parts.push(format!( - "{} = \"{}\"", - field_id, - crate::jql::escape_value(team_uuid) - )); - } - let where_clause = jql_parts.join(" AND "); - format!("{} ORDER BY rank ASC", where_clause) + parts.push("statusCategory != Done".into()); + (parts, "rank ASC") + } + } + Err(e) => { + if let Some(JrError::ApiError { status: 404, .. }) = e.downcast_ref::() + { + return Err(JrError::UserError(format!( + "Board {} not found or not accessible. \ + Verify the board exists and you have permission, \ + or remove board_id from .jr.toml. 
\ + Use --jql to query directly.", + bid + )) + .into()); } + return Err(e.context(format!( + "Failed to fetch config for board {}. \ + Remove board_id from .jr.toml or use --jql to query directly.", + bid + ))); } - Err(_) => build_fallback_jql( - project_key.as_deref(), - status.as_deref(), - resolved_team.as_ref(), - )?, } } else { - build_fallback_jql( - project_key.as_deref(), - status.as_deref(), - resolved_team.as_ref(), - )? + let mut parts = Vec::new(); + if let Some(ref pk) = project_key { + parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + } + (parts, "updated DESC") + } + }; + + // Combine base + filters + let mut all_parts = base_parts; + all_parts.extend(filter_parts); + + // Guard against unbounded query + if all_parts.is_empty() { + return Err(JrError::UserError( + "No project or filters specified. Use --project, --assignee, --reporter, --status, --open, --team, --recent, --created-after, --created-before, --updated-after, --updated-before, --asset, or --jql. \ + You can also set a default project in .jr.toml or run \"jr init\"." + .into(), + ) + .into()); + } + + let where_clause = all_parts.join(" AND "); + let effective_jql = format!("{where_clause} ORDER BY {order_by}"); + + let cmdb_fields = if show_assets { + if let Some(fields) = asset_cmdb_fields { + fields + } else { + let fields = get_or_fetch_cmdb_fields(client).await.unwrap_or_default(); + if fields.is_empty() { + eprintln!( + "warning: --assets ignored. No Assets custom fields found on this Jira instance." 
+ ); + } + fields } + } else { + Vec::new() }; + let cmdb_field_id_list = cmdb_field_ids(&cmdb_fields); + for f in &cmdb_field_id_list { + extra.push(f.as_str()); + } - let issues = client.search_issues(&effective_jql, limit, &extra).await?; + let search_result = client + .search_issues(&effective_jql, effective_limit, &extra) + .await?; + let has_more = search_result.has_more; + let mut issues = search_result.issues; let effective_sp = resolve_show_points(show_points, sp_field_id); + let show_assets_col = show_assets && !cmdb_field_id_list.is_empty(); + let mut issue_assets: Vec> = Vec::new(); + if show_assets_col { + // Extract linked assets for all issues first. + for issue in &issues { + issue_assets.push(extract_linked_assets( + &issue.fields.extra, + &cmdb_field_id_list, + )); + } + + // Collect unique (workspace_id, object_id) pairs that need enrichment, + // then resolve them all in one batch to avoid redundant API calls. + use std::collections::HashMap as StdHashMap; + let mut to_enrich: StdHashMap<(String, String), ()> = StdHashMap::new(); + let mut enrich_indices: Vec<(usize, usize)> = Vec::new(); // (issue_idx, asset_idx) + + for (i, assets) in issue_assets.iter().enumerate() { + for (j, asset) in assets.iter().enumerate() { + if asset.id.is_some() && asset.key.is_none() && asset.name.is_none() { + let wid = asset.workspace_id.clone().unwrap_or_default(); + let oid = asset.id.clone().unwrap(); + let key = (wid, oid); + to_enrich.entry(key.clone()).or_insert(()); + enrich_indices.push((i, j)); + } + } + } + + if !to_enrich.is_empty() { + // Get workspace ID for assets that don't carry their own. 
+ let fallback_wid = crate::api::assets::workspace::get_or_fetch_workspace_id(client) + .await + .ok(); + + let futures: Vec<_> = to_enrich + .keys() + .map(|(wid, oid)| { + let wid = if wid.is_empty() { + fallback_wid.clone().unwrap_or_default() + } else { + wid.clone() + }; + let oid = oid.clone(); + async move { + let result = client.get_asset(&wid, &oid, false).await; + (oid, result) + } + }) + .collect(); + + let results = futures::future::join_all(futures).await; + let mut resolved: StdHashMap = StdHashMap::new(); + for (oid, result) in results { + if let Ok(obj) = result { + resolved.insert(oid, (obj.object_key, obj.label, obj.object_type.name)); + } + } + + // Apply enrichment back to assets. + for (i, j) in &enrich_indices { + if let Some(oid) = &issue_assets[*i][*j].id.clone() { + if let Some((key, name, asset_type)) = resolved.get(oid) { + issue_assets[*i][*j].key = Some(key.clone()); + issue_assets[*i][*j].name = Some(name.clone()); + issue_assets[*i][*j].asset_type = Some(asset_type.clone()); + } + } + } + } + } + + // For JSON output with --assets, inject enriched data back into issue JSON + if show_assets_col && matches!(output_format, OutputFormat::Json) { + for (i, issue) in issues.iter_mut().enumerate() { + if issue_assets[i].is_empty() { + continue; + } + // Build per-field-id enrichment: re-extract per field to get grouping, + // then match by position to enriched issue_assets[i] + let mut per_field_by_id: Vec<(String, Vec)> = Vec::new(); + let mut offset = 0; + for field_id in &cmdb_field_id_list { + let count = + extract_linked_assets(&issue.fields.extra, std::slice::from_ref(field_id)) + .len(); + if count > 0 && offset + count <= issue_assets[i].len() { + let enriched = issue_assets[i][offset..offset + count].to_vec(); + per_field_by_id.push((field_id.clone(), enriched)); + } + offset += count; + } + enrich_json_assets(&mut issue.fields.extra, &per_field_by_id); + } + } + let rows: Vec> = issues .iter() - .map(|issue| 
format::format_issue_row(issue, effective_sp)) + .enumerate() + .map(|(i, issue)| { + let assets = if show_assets_col { + Some(issue_assets[i].as_slice()) + } else { + None + }; + format::format_issue_row(issue, effective_sp, assets) + }) .collect(); - output::print_output( - output_format, - &format::issue_table_headers(effective_sp.is_some()), - &rows, - &issues, - )?; + let headers = format::issue_table_headers(effective_sp.is_some(), show_assets_col); + output::print_output(output_format, &headers, &rows, &issues)?; + + if has_more && !all { + let count_jql = crate::jql::strip_order_by(&effective_jql); + match client.approximate_count(count_jql).await { + Ok(total) if total > 0 => { + eprintln!( + "Showing {} of ~{} results. Use --limit or --all to see more.", + issues.len(), + total + ); + } + Ok(_) | Err(_) => { + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issues.len() + ); + } + } + } Ok(()) } @@ -158,35 +529,59 @@ fn resolve_show_points(show_points: bool, sp_field_id: Option<&str>) -> Option<& } } -fn build_fallback_jql( - project_key: Option<&str>, - status: Option<&str>, - resolved_team: Option<&(String, String)>, -) -> Result { - if project_key.is_none() && status.is_none() && resolved_team.is_none() { - return Err(JrError::UserError( - "No project or filters specified. Use --project KEY, --status STATUS, or --team NAME. \ - You can also set a default project in .jr.toml or run \"jr init\"." - .into(), - ) - .into()); +/// Options bag for `build_filter_clauses` — groups all resolved JQL filter +/// fragments so the function stays within clippy's argument-count limit. 
+struct FilterOptions<'a> { + assignee_jql: Option<&'a str>, + reporter_jql: Option<&'a str>, + status: Option<&'a str>, + team_clause: Option<&'a str>, + recent: Option<&'a str>, + open: bool, + asset_clause: Option<&'a str>, + created_after_clause: Option<&'a str>, + created_before_clause: Option<&'a str>, + updated_after_clause: Option<&'a str>, + updated_before_clause: Option<&'a str>, +} + +/// Build JQL filter clauses from resolved flag values. +fn build_filter_clauses(opts: FilterOptions<'_>) -> Vec { + let mut parts = Vec::new(); + if let Some(a) = opts.assignee_jql { + parts.push(format!("assignee = {a}")); } - let mut parts: Vec = Vec::new(); - if let Some(pk) = project_key { - parts.push(format!("project = \"{}\"", crate::jql::escape_value(pk))); + if let Some(r) = opts.reporter_jql { + parts.push(format!("reporter = {r}")); } - if let Some(s) = status { + if let Some(s) = opts.status { parts.push(format!("status = \"{}\"", crate::jql::escape_value(s))); } - if let Some((field_id, team_uuid)) = resolved_team { - parts.push(format!( - "{} = \"{}\"", - field_id, - crate::jql::escape_value(team_uuid) - )); + if opts.open { + parts.push("statusCategory != Done".to_string()); + } + if let Some(t) = opts.team_clause { + parts.push(t.to_string()); + } + if let Some(d) = opts.recent { + parts.push(format!("created >= -{d}")); + } + if let Some(a) = opts.asset_clause { + parts.push(a.to_string()); + } + if let Some(c) = opts.created_after_clause { + parts.push(c.to_string()); } - let where_clause = parts.join(" AND "); - Ok(format!("{} ORDER BY updated DESC", where_clause)) + if let Some(c) = opts.created_before_clause { + parts.push(c.to_string()); + } + if let Some(c) = opts.updated_after_clause { + parts.push(c.to_string()); + } + if let Some(c) = opts.updated_before_clause { + parts.push(c.to_string()); + } + parts } // ── Comments ───────────────────────────────────────────────────────── @@ -255,11 +650,58 @@ pub(super) async fn handle_view( }; let 
sp_field_id = config.global.fields.story_points_field_id.as_deref(); - let extra: Vec<&str> = sp_field_id.iter().copied().collect(); - let issue = client.get_issue(&key, &extra).await?; + let cmdb_fields = get_or_fetch_cmdb_fields(client).await.unwrap_or_default(); + let cmdb_field_id_list = cmdb_field_ids(&cmdb_fields); + let mut extra: Vec<&str> = sp_field_id.iter().copied().collect(); + for f in &cmdb_field_id_list { + extra.push(f.as_str()); + } + let mut issue = client.get_issue(&key, &extra).await?; + + // Extract and enrich assets per-field (shared by both JSON and table paths). + // Iterate cmdb_fields directly so we always have (field_id, field_name) together — + // avoids any name-based reverse lookups that could break with duplicate field names. + let per_field_enriched: Vec<(String, String, Vec)> = if !cmdb_fields.is_empty() { + // Extract per-field, keeping both ID and name + let mut per_field: Vec<(String, String, Vec)> = Vec::new(); + for (field_id, field_name) in &cmdb_fields { + let assets = extract_linked_assets(&issue.fields.extra, std::slice::from_ref(field_id)); + if !assets.is_empty() { + per_field.push((field_id.clone(), field_name.clone(), assets)); + } + } + + // Collect all assets for batch enrichment + let mut all_assets: Vec = per_field + .iter() + .flat_map(|(_, _, assets)| assets.clone()) + .collect(); + enrich_assets(client, &mut all_assets).await; + + // Redistribute enriched assets back + let mut enriched = Vec::new(); + let mut offset = 0; + for (field_id, field_name, original_assets) in &per_field { + let count = original_assets.len(); + let assets = all_assets[offset..offset + count].to_vec(); + offset += count; + enriched.push((field_id.clone(), field_name.clone(), assets)); + } + enriched + } else { + Vec::new() + }; match output_format { OutputFormat::Json => { + // Inject enriched data back into JSON before printing + if !per_field_enriched.is_empty() { + let per_field_by_id: Vec<(String, Vec)> = per_field_enriched + .iter() 
+ .map(|(id, _, assets)| (id.clone(), assets.clone())) + .collect(); + enrich_json_assets(&mut issue.fields.extra, &per_field_by_id); + } println!("{}", output::render_json(&issue)?); } OutputFormat::Table => { @@ -309,6 +751,33 @@ pub(super) async fn handle_view( .map(|a| a.display_name.clone()) .unwrap_or_else(|| "Unassigned".into()), ], + vec![ + "Reporter".into(), + issue + .fields + .reporter + .as_ref() + .map(|r| r.display_name.clone()) + .unwrap_or_else(|| "(none)".into()), + ], + vec![ + "Created".into(), + issue + .fields + .created + .as_deref() + .map(format_comment_date) + .unwrap_or_else(|| "-".into()), + ], + vec![ + "Updated".into(), + issue + .fields + .updated + .as_deref() + .map(format_comment_date) + .unwrap_or_else(|| "-".into()), + ], vec![ "Project".into(), issue @@ -390,6 +859,12 @@ pub(super) async fn handle_view( .unwrap_or_else(|| "(none)".into()); rows.push(vec!["Links".into(), links_display]); + // Per-field asset rows (replaces the old single "Assets" row) + for (_, field_name, assets) in &per_field_enriched { + let display = format_linked_assets(assets); + rows.push(vec![field_name.clone(), display]); + } + if let Some(field_id) = sp_field_id { let points_display = issue .fields @@ -412,67 +887,6 @@ pub(super) async fn handle_view( mod tests { use super::*; - #[test] - fn fallback_jql_order_by_not_joined_with_and() { - let jql = build_fallback_jql(Some("PROJ"), None, None).unwrap(); - assert!( - !jql.contains("AND ORDER BY"), - "ORDER BY must not be joined with AND: {jql}" - ); - assert!(jql.ends_with("ORDER BY updated DESC")); - } - - #[test] - fn fallback_jql_with_team_has_valid_order_by() { - let team = ("customfield_10001".to_string(), "uuid-123".to_string()); - let jql = build_fallback_jql(Some("PROJ"), None, Some(&team)).unwrap(); - assert!( - !jql.contains("AND ORDER BY"), - "ORDER BY must not be joined with AND: {jql}" - ); - assert!(jql.contains("customfield_10001 = \"uuid-123\"")); - assert!(jql.ends_with("ORDER BY updated 
DESC")); - } - - #[test] - fn fallback_jql_with_all_filters() { - let team = ("customfield_10001".to_string(), "uuid-456".to_string()); - let jql = build_fallback_jql(Some("PROJ"), Some("In Progress"), Some(&team)).unwrap(); - assert!( - !jql.contains("AND ORDER BY"), - "ORDER BY must not be joined with AND: {jql}" - ); - assert!(jql.contains("project = \"PROJ\"")); - assert!(jql.contains("status = \"In Progress\"")); - assert!(jql.contains("customfield_10001 = \"uuid-456\"")); - assert!(jql.ends_with("ORDER BY updated DESC")); - } - - #[test] - fn fallback_jql_errors_when_no_filters() { - let result = build_fallback_jql(None, None, None); - assert!(result.is_err(), "Expected error for unbounded query"); - let err_msg = result.unwrap_err().to_string(); - assert!( - err_msg.contains("--project"), - "Error should mention --project: {err_msg}" - ); - assert!( - err_msg.contains(".jr.toml"), - "Error should mention .jr.toml: {err_msg}" - ); - assert!( - err_msg.contains("jr init"), - "Error should mention jr init: {err_msg}" - ); - } - - #[test] - fn fallback_jql_with_status_only() { - let jql = build_fallback_jql(None, Some("Done"), None).unwrap(); - assert_eq!(jql, "status = \"Done\" ORDER BY updated DESC"); - } - #[test] fn resolve_show_points_flag_false() { assert_eq!(resolve_show_points(false, Some("customfield_10031")), None); @@ -493,15 +907,6 @@ mod tests { assert_eq!(resolve_show_points(true, None), None); } - #[test] - fn fallback_jql_escapes_special_chars_in_status() { - let jql = build_fallback_jql(None, Some(r#"In "Progress"#), None).unwrap(); - assert!( - jql.contains(r#"status = "In \"Progress""#), - "Status with quotes should be escaped: {jql}" - ); - } - #[test] fn format_comment_date_rfc3339() { assert_eq!( @@ -534,4 +939,413 @@ mod tests { let row = format_comment_row(Some("Jane Smith"), Some("2026-03-20T14:32:00+00:00"), None); assert_eq!(row[2], "(no content)"); } + + #[test] + fn build_jql_parts_assignee_me() { + let parts = 
build_filter_clauses(FilterOptions { + assignee_jql: Some("currentUser()"), + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec!["assignee = currentUser()"]); + } + + #[test] + fn build_jql_parts_reporter_account_id() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: Some("5b10ac8d82e05b22cc7d4ef5"), + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec!["reporter = 5b10ac8d82e05b22cc7d4ef5"]); + } + + #[test] + fn build_jql_parts_recent() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: Some("7d"), + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec!["created >= -7d"]); + } + + #[test] + fn build_jql_parts_all_filters() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: Some("currentUser()"), + reporter_jql: Some("currentUser()"), + status: Some("In Progress"), + team_clause: Some(r#"customfield_10001 = "uuid-123""#), + recent: Some("30d"), + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts.len(), 5); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"reporter = currentUser()".to_string())); + assert!(parts.contains(&"status = \"In Progress\"".to_string())); + assert!(parts.contains(&r#"customfield_10001 = 
"uuid-123""#.to_string())); + assert!(parts.contains(&"created >= -30d".to_string())); + } + + #[test] + fn build_jql_parts_empty() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert!(parts.is_empty()); + } + + #[test] + fn build_jql_parts_jql_plus_status_compose() { + let filter = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: Some("Done"), + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + let mut all_parts = vec!["type = Bug".to_string()]; + all_parts.extend(filter); + let jql = all_parts.join(" AND "); + assert_eq!(jql, r#"type = Bug AND status = "Done""#); + } + + #[test] + fn build_jql_parts_status_escaping() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: Some(r#"He said "hi" \o/"#), + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec![r#"status = "He said \"hi\" \\o/""#.to_string()]); + } + + #[test] + fn build_jql_parts_open() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: true, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec!["statusCategory != Done"]); + } + + #[test] + fn build_jql_parts_open_with_assignee() { + let parts = 
build_filter_clauses(FilterOptions { + assignee_jql: Some("currentUser()"), + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: true, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts.len(), 2); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"statusCategory != Done".to_string())); + } + + #[test] + fn build_jql_parts_all_filters_with_open() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: Some("currentUser()"), + reporter_jql: Some("currentUser()"), + status: None, // status conflicts with open, so None here + team_clause: Some(r#"customfield_10001 = "uuid-123""#), + recent: Some("30d"), + open: true, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts.len(), 5); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&"reporter = currentUser()".to_string())); + assert!(parts.contains(&"statusCategory != Done".to_string())); + assert!(parts.contains(&r#"customfield_10001 = "uuid-123""#.to_string())); + assert!(parts.contains(&"created >= -30d".to_string())); + } + + #[test] + fn build_jql_parts_asset_clause() { + let clause = r#""Client" IN aqlFunction("Key = \"CUST-5\"")"#; + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: Some(clause), + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec![clause.to_string()]); + } + + #[test] + fn build_jql_parts_asset_with_assignee() { + let clause = r#""Client" IN aqlFunction("Key = \"CUST-5\"")"#; + let parts = 
build_filter_clauses(FilterOptions { + assignee_jql: Some("currentUser()"), + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: Some(clause), + created_after_clause: None, + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts.len(), 2); + assert!(parts.contains(&"assignee = currentUser()".to_string())); + assert!(parts.contains(&clause.to_string())); + } + + #[test] + fn build_jql_parts_created_after_clause() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: Some("created >= \"2026-03-18\""), + created_before_clause: None, + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts, vec!["created >= \"2026-03-18\""]); + } + + #[test] + fn build_jql_parts_updated_after_and_before_clauses() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: None, + created_before_clause: None, + updated_after_clause: Some("updated >= \"2026-03-01\""), + updated_before_clause: Some("updated < \"2026-04-01\""), + }); + assert_eq!(parts.len(), 2); + assert!(parts.contains(&"updated >= \"2026-03-01\"".to_string())); + assert!(parts.contains(&"updated < \"2026-04-01\"".to_string())); + } + + #[test] + fn build_jql_parts_created_date_range() { + let parts = build_filter_clauses(FilterOptions { + assignee_jql: None, + reporter_jql: None, + status: None, + team_clause: None, + recent: None, + open: false, + asset_clause: None, + created_after_clause: Some("created >= \"2026-03-01\""), + created_before_clause: Some("created < \"2026-04-01\""), + updated_after_clause: None, + updated_before_clause: None, + }); + assert_eq!(parts.len(), 
2); + assert!(parts.contains(&"created >= \"2026-03-01\"".to_string())); + assert!(parts.contains(&"created < \"2026-04-01\"".to_string())); + } + + #[test] + fn build_jql_base_parts_jql_with_project() { + let (parts, order_by) = build_jql_base_parts("priority = Highest", Some("PROJ")); + assert_eq!( + parts, + vec![ + "project = \"PROJ\"".to_string(), + "(priority = Highest)".to_string(), + ] + ); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_without_project() { + let (parts, order_by) = build_jql_base_parts("priority = Highest", None); + assert_eq!(parts, vec!["(priority = Highest)".to_string()]); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_with_order_by_and_project() { + let (parts, order_by) = + build_jql_base_parts("priority = Highest ORDER BY created DESC", Some("PROJ")); + assert_eq!( + parts, + vec![ + "project = \"PROJ\"".to_string(), + "(priority = Highest)".to_string(), + ] + ); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_or_with_project_preserves_scope() { + let (parts, order_by) = + build_jql_base_parts("priority = Highest OR status = Done", Some("PROJ")); + assert_eq!( + parts, + vec![ + "project = \"PROJ\"".to_string(), + "(priority = Highest OR status = Done)".to_string(), + ] + ); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_order_by_only_with_project() { + let (parts, order_by) = build_jql_base_parts("ORDER BY created DESC", Some("PROJ")); + assert_eq!(parts, vec!["project = \"PROJ\"".to_string()]); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn build_jql_base_parts_jql_order_by_only_no_project() { + let (parts, order_by) = build_jql_base_parts("ORDER BY created DESC", None); + assert!(parts.is_empty()); + assert_eq!(order_by, "updated DESC"); + } + + #[test] + fn extract_unique_status_names_deduplicates_and_sorts() { + use 
crate::api::jira::projects::{IssueTypeWithStatuses, StatusMetadata}; + let issue_types = vec![ + IssueTypeWithStatuses { + id: "1".into(), + name: "Task".into(), + subtask: None, + statuses: vec![ + StatusMetadata { + id: "10".into(), + name: "To Do".into(), + description: None, + }, + StatusMetadata { + id: "20".into(), + name: "In Progress".into(), + description: None, + }, + StatusMetadata { + id: "30".into(), + name: "Done".into(), + description: None, + }, + ], + }, + IssueTypeWithStatuses { + id: "2".into(), + name: "Bug".into(), + subtask: None, + statuses: vec![ + StatusMetadata { + id: "10".into(), + name: "To Do".into(), + description: None, + }, + StatusMetadata { + id: "30".into(), + name: "Done".into(), + description: None, + }, + ], + }, + ]; + let names = extract_unique_status_names(&issue_types); + assert_eq!(names, vec!["Done", "In Progress", "To Do"]); + } + + #[test] + fn extract_unique_status_names_empty() { + let names = extract_unique_status_names(&[]); + assert!(names.is_empty()); + } } diff --git a/src/cli/issue/mod.rs b/src/cli/issue/mod.rs index f52e757..75b864c 100644 --- a/src/cli/issue/mod.rs +++ b/src/cli/issue/mod.rs @@ -1,6 +1,8 @@ +mod assets; mod create; mod format; mod helpers; +mod json_output; mod links; mod list; mod workflow; @@ -58,7 +60,7 @@ pub async fn handle( workflow::handle_transitions(command, output_format, client).await } IssueCommand::Assign { .. } => { - workflow::handle_assign(command, output_format, client).await + workflow::handle_assign(command, output_format, client, no_input).await } IssueCommand::Comment { .. 
} => { workflow::handle_comment(command, output_format, client).await @@ -74,5 +76,8 @@ pub async fn handle( links::handle_unlink(command, output_format, client, no_input).await } IssueCommand::LinkTypes => links::handle_link_types(output_format, client).await, + IssueCommand::Assets { key } => { + assets::handle_issue_assets(&key, output_format, client).await + } } } diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__assign_changed.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__assign_changed.snap new file mode 100644 index 0000000..8575e74 --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__assign_changed.snap @@ -0,0 +1,10 @@ +--- +source: src/cli/issue/json_output.rs +expression: "assign_changed_response(\"TEST-1\", \"Jane Doe\", \"abc123\")" +--- +{ + "assignee": "Jane Doe", + "assignee_account_id": "abc123", + "changed": true, + "key": "TEST-1" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__assign_unchanged.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__assign_unchanged.snap new file mode 100644 index 0000000..a3bda6d --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__assign_unchanged.snap @@ -0,0 +1,10 @@ +--- +source: src/cli/issue/json_output.rs +expression: "assign_unchanged_response(\"TEST-1\", \"Jane Doe\", \"abc123\")" +--- +{ + "assignee": "Jane Doe", + "assignee_account_id": "abc123", + "changed": false, + "key": "TEST-1" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__edit.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__edit.snap new file mode 100644 index 0000000..7b7ae5b --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__edit.snap @@ -0,0 +1,8 @@ +--- +source: src/cli/issue/json_output.rs +expression: "edit_response(\"TEST-1\")" +--- +{ + "key": "TEST-1", + "updated": true +} diff --git 
a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__link.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__link.snap new file mode 100644 index 0000000..ba08bcd --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__link.snap @@ -0,0 +1,10 @@ +--- +source: src/cli/issue/json_output.rs +expression: "link_response(\"TEST-1\", \"TEST-2\", \"Blocks\")" +--- +{ + "key1": "TEST-1", + "key2": "TEST-2", + "linked": true, + "type": "Blocks" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__move_response_changed.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__move_response_changed.snap new file mode 100644 index 0000000..26082a3 --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__move_response_changed.snap @@ -0,0 +1,9 @@ +--- +source: src/cli/issue/json_output.rs +expression: "move_response(\"TEST-1\", \"In Progress\", true)" +--- +{ + "changed": true, + "key": "TEST-1", + "status": "In Progress" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__move_response_unchanged.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__move_response_unchanged.snap new file mode 100644 index 0000000..e5f4b9c --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__move_response_unchanged.snap @@ -0,0 +1,9 @@ +--- +source: src/cli/issue/json_output.rs +expression: "move_response(\"TEST-1\", \"Done\", false)" +--- +{ + "changed": false, + "key": "TEST-1", + "status": "Done" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unassign.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unassign.snap new file mode 100644 index 0000000..5aa0b1d --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unassign.snap @@ -0,0 +1,9 @@ +--- +source: src/cli/issue/json_output.rs +expression: "unassign_response(\"TEST-1\", true)" +--- +{ + "assignee": null, 
+ "changed": true, + "key": "TEST-1" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unassign_unchanged.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unassign_unchanged.snap new file mode 100644 index 0000000..c96925d --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unassign_unchanged.snap @@ -0,0 +1,9 @@ +--- +source: src/cli/issue/json_output.rs +expression: "unassign_response(\"TEST-1\", false)" +--- +{ + "assignee": null, + "changed": false, + "key": "TEST-1" +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unlink_no_match.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unlink_no_match.snap new file mode 100644 index 0000000..815d8df --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unlink_no_match.snap @@ -0,0 +1,8 @@ +--- +source: src/cli/issue/json_output.rs +expression: "unlink_response(false, 0)" +--- +{ + "count": 0, + "unlinked": false +} diff --git a/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unlink_success.snap b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unlink_success.snap new file mode 100644 index 0000000..39fe2fa --- /dev/null +++ b/src/cli/issue/snapshots/jr__cli__issue__json_output__tests__unlink_success.snap @@ -0,0 +1,8 @@ +--- +source: src/cli/issue/json_output.rs +expression: "unlink_response(true, 2)" +--- +{ + "count": 2, + "unlinked": true +} diff --git a/src/cli/issue/workflow.rs b/src/cli/issue/workflow.rs index 023a31e..b5e387c 100644 --- a/src/cli/issue/workflow.rs +++ b/src/cli/issue/workflow.rs @@ -1,9 +1,10 @@ +use super::json_output; use anyhow::{Result, bail}; -use serde_json::json; use crate::adf; use crate::api::client::JiraClient; use crate::cli::{IssueCommand, OutputFormat}; +use crate::error::JrError; use crate::output; use crate::partial_match::{self, MatchResult}; @@ -58,17 +59,28 @@ pub(super) async fn handle_move( } }; - // 
Idempotent: if already in target status, exit 0 - if current_status.to_lowercase() == target_status.to_lowercase() { + // Idempotent: if already in target status, exit 0. + // Check both direct match and whether the input is a transition name whose + // target status matches the current status. + let current_lower = current_status.to_lowercase(); + let target_lower = target_status.to_lowercase(); + let already_in_target = current_lower == target_lower + || transitions.iter().any(|t| { + t.name.to_lowercase() == target_lower + && t.to + .as_ref() + .is_some_and(|s| s.name.to_lowercase() == current_lower) + }); + if already_in_target { match output_format { OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "key": key, - "status": current_status, - "changed": false - }))? + serde_json::to_string_pretty(&json_output::move_response( + &key, + ¤t_status, + false, + ))? ); } OutputFormat::Table => { @@ -95,10 +107,53 @@ pub(super) async fn handle_move( let selected_transition = if let Some(t) = selected_transition { t } else { - // Use partial matching on transition names - let transition_names: Vec = transitions.iter().map(|t| t.name.clone()).collect(); - match partial_match::partial_match(&target_status, &transition_names) { - MatchResult::Exact(name) => transitions.iter().find(|t| t.name == name).unwrap(), + // Build unified candidate pool: transition names + target status names. + // Each candidate maps to its transition index. 
+ let mut candidates: Vec<(String, usize)> = Vec::new(); + let mut seen: std::collections::HashSet = std::collections::HashSet::new(); + for (i, t) in transitions.iter().enumerate() { + let t_lower = t.name.to_lowercase(); + if seen.insert(t_lower) { + candidates.push((t.name.clone(), i)); + } + if let Some(ref status) = t.to { + let s_lower = status.name.to_lowercase(); + if seen.insert(s_lower) { + candidates.push((status.name.clone(), i)); + } + } + } + + let candidate_names: Vec = + candidates.iter().map(|(name, _)| name.clone()).collect(); + match partial_match::partial_match(&target_status, &candidate_names) { + MatchResult::Exact(name) => { + let idx = candidates + .iter() + .find(|(n, _)| n == &name) + .map(|(_, i)| *i) + .ok_or_else(|| { + anyhow::anyhow!( + "Internal error: matched candidate \"{}\" not found. Please report this as a bug.", + name + ) + })?; + &transitions[idx] + } + // Case-insensitive dedup upstream; treat like Exact if case-variant duplicates slip through + MatchResult::ExactMultiple(name) => { + let idx = candidates + .iter() + .find(|(n, _)| n == &name) + .map(|(_, i)| *i) + .ok_or_else(|| { + anyhow::anyhow!( + "Internal error: matched candidate \"{}\" not found. 
Please report this as a bug.", + name + ) + })?; + &transitions[idx] + } MatchResult::Ambiguous(matches) => { if no_input { bail!( @@ -118,20 +173,35 @@ pub(super) async fn handle_move( let choice = helpers::prompt_input("Select (number)")?; let idx: usize = choice .parse() - .map_err(|_| anyhow::anyhow!("Invalid selection"))?; + .map_err(|_| JrError::UserError("Invalid selection".into()))?; if idx < 1 || idx > matches.len() { - bail!("Selection out of range"); + return Err(JrError::UserError("Selection out of range".into()).into()); } - transitions + let selected_name = &matches[idx - 1]; + let tidx = candidates .iter() - .find(|t| t.name == matches[idx - 1]) - .unwrap() + .find(|(n, _)| n == selected_name) + .map(|(_, i)| *i) + .ok_or_else(|| { + anyhow::anyhow!( + "Internal error: selected candidate \"{}\" not found. Please report this as a bug.", + selected_name + ) + })?; + &transitions[tidx] } - MatchResult::None(all) => { + MatchResult::None(_) => { + let labels: Vec = transitions + .iter() + .map(|t| match t.to.as_ref() { + Some(status) => format!("{} (→ {})", t.name, status.name), + None => t.name.clone(), + }) + .collect(); bail!( "No transition matching \"{}\". Available: {}", target_status, - all.join(", ") + labels.join(", ") ); } } @@ -151,11 +221,7 @@ pub(super) async fn handle_move( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "key": key, - "status": new_status, - "changed": true - }))? + serde_json::to_string_pretty(&json_output::move_response(&key, new_status, true,))? 
); } OutputFormat::Table => { @@ -207,22 +273,42 @@ pub(super) async fn handle_assign( command: IssueCommand, output_format: &OutputFormat, client: &JiraClient, + no_input: bool, ) -> Result<()> { - let IssueCommand::Assign { key, to, unassign } = command else { + let IssueCommand::Assign { + key, + to, + account_id, + unassign, + } = command + else { unreachable!() }; if unassign { + // Idempotent: check if already unassigned + let issue = client.get_issue(&key, &[]).await?; + if issue.fields.assignee.is_none() { + match output_format { + OutputFormat::Json => { + println!( + "{}", + serde_json::to_string_pretty(&json_output::unassign_response(&key, false))? + ); + } + OutputFormat::Table => { + output::print_success(&format!("{} is already unassigned", key)); + } + } + return Ok(()); + } + client.assign_issue(&key, None).await?; match output_format { OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "key": key, - "assignee": null, - "changed": true - }))? + serde_json::to_string_pretty(&json_output::unassign_response(&key, true))? ); } OutputFormat::Table => { @@ -232,38 +318,43 @@ pub(super) async fn handle_assign( return Ok(()); } - let account_id = if let Some(ref user_query) = to { - // Assign to another user — use the provided value as account ID - user_query.clone() + // Resolve account ID and display name. + // When --account-id is provided, no search is performed so the raw + // account ID is used as the display name (no name available). + let (account_id, display_name) = if let Some(ref id) = account_id { + (id.clone(), id.clone()) + } else if let Some(ref user_query) = to { + helpers::resolve_assignee(client, user_query, &key, no_input).await? 
} else { - // Assign to self let me = client.get_myself().await?; + (me.account_id, me.display_name) + }; - // Idempotent: check if already assigned to self - let issue = client.get_issue(&key, &[]).await?; - if let Some(ref assignee) = issue.fields.assignee { - if assignee.account_id == me.account_id { - match output_format { - OutputFormat::Json => { - println!( - "{}", - serde_json::to_string_pretty(&json!({ - "key": key, - "assignee": me.display_name, - "changed": false - }))? - ); - } - OutputFormat::Table => { - output::print_success(&format!("{} is already assigned to you", key)); - } + // Idempotent: check if already assigned to target user + let issue = client.get_issue(&key, &[]).await?; + if let Some(ref assignee) = issue.fields.assignee { + if assignee.account_id == account_id { + match output_format { + OutputFormat::Json => { + println!( + "{}", + serde_json::to_string_pretty(&json_output::assign_unchanged_response( + &key, + &display_name, + &account_id, + ),)? + ); + } + OutputFormat::Table => { + output::print_success(&format!( + "{} is already assigned to {}", + key, display_name + )); } - return Ok(()); } + return Ok(()); } - - me.account_id - }; + } client.assign_issue(&key, Some(&account_id)).await?; @@ -271,15 +362,15 @@ pub(super) async fn handle_assign( OutputFormat::Json => { println!( "{}", - serde_json::to_string_pretty(&json!({ - "key": key, - "assignee": account_id, - "changed": true - }))? + serde_json::to_string_pretty(&json_output::assign_changed_response( + &key, + &display_name, + &account_id, + ))? 
); } OutputFormat::Table => { - output::print_success(&format!("Assigned {} to {}", key, account_id)); + output::print_success(&format!("Assigned {} to {}", key, display_name)); } } diff --git a/src/cli/mod.rs b/src/cli/mod.rs index d89a1cd..e14b2c2 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -1,8 +1,10 @@ +pub mod assets; pub mod auth; pub mod board; pub mod init; pub mod issue; pub mod project; +pub mod queue; pub mod sprint; pub mod team; pub mod worklog; @@ -46,6 +48,11 @@ pub enum OutputFormat { pub enum Command { /// Initialize jr configuration Init, + /// Manage Assets/CMDB objects + Assets { + #[command(subcommand)] + command: AssetsCommand, + }, /// Manage authentication Auth { #[command(subcommand)] @@ -62,7 +69,7 @@ pub enum Command { /// Manage issues Issue { #[command(subcommand)] - command: IssueCommand, + command: Box, }, /// Manage boards Board { @@ -84,6 +91,11 @@ pub enum Command { #[command(subcommand)] command: TeamCommand, }, + /// Manage JSM queues + Queue { + #[command(subcommand)] + command: QueueCommand, + }, /// Generate shell completions Completion { /// Shell to generate completions for @@ -92,6 +104,59 @@ pub enum Command { }, } +#[derive(Subcommand)] +pub enum AssetsCommand { + /// Search assets with AQL query + Search { + /// AQL query (e.g. "objectType = Client") + query: String, + /// Maximum number of results + #[arg(long)] + limit: Option, + /// Include object attributes in output + #[arg(long)] + attributes: bool, + }, + /// View asset details + View { + /// Object key (e.g. OBJ-1) or numeric ID + key: String, + /// Omit object attributes from output + #[arg(long)] + no_attributes: bool, + }, + /// Show Jira issues connected to an asset + Tickets { + /// Object key (e.g. 
OBJ-1) or numeric ID + key: String, + /// Maximum number of tickets to show + #[arg(long)] + limit: Option, + /// Show only open tickets (excludes Done status category) + #[arg(long, conflicts_with = "status")] + open: bool, + /// Filter by status (partial match supported) + #[arg(long, conflicts_with = "open")] + status: Option, + }, + /// List object schemas in the workspace + Schemas, + /// List object types (all schemas or filtered) + Types { + /// Filter by schema (partial name match or exact ID) + #[arg(long)] + schema: Option, + }, + /// Show attributes for an object type + Schema { + /// Object type name (partial match supported) + name: String, + /// Filter by schema (partial name match or exact ID) + #[arg(long)] + schema: Option, + }, +} + #[derive(Subcommand)] pub enum AuthCommand { /// Authenticate with Jira @@ -120,9 +185,42 @@ pub enum IssueCommand { /// Maximum number of results #[arg(long)] limit: Option, + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, + /// Filter by assignee ("me" for current user, or a name to search) + #[arg(long)] + assignee: Option, + /// Filter by reporter ("me" for current user, or a name to search) + #[arg(long)] + reporter: Option, + /// Show issues created within duration (e.g., 7d, 4w, 2M) + #[arg(long)] + recent: Option, + /// Show only open issues (excludes Done status category) + #[arg(long, conflicts_with = "status")] + open: bool, /// Show story points column #[arg(long)] points: bool, + /// Show linked assets column + #[arg(long)] + assets: bool, + /// Filter by linked asset object key (e.g., CUST-5) + #[arg(long)] + asset: Option, + /// Show issues created on or after this date (YYYY-MM-DD) + #[arg(long, conflicts_with = "recent")] + created_after: Option, + /// Show issues created on or before this date (YYYY-MM-DD) + #[arg(long)] + created_before: Option, + /// Show issues updated on or after this date (YYYY-MM-DD) + #[arg(long)] + updated_after: Option, + /// 
Show issues updated on or before this date (YYYY-MM-DD) + #[arg(long)] + updated_before: Option, }, /// Create a new issue Create { @@ -136,10 +234,10 @@ pub enum IssueCommand { #[arg(short, long)] summary: Option, /// Description - #[arg(short, long)] + #[arg(short, long, conflicts_with = "description_stdin")] description: Option, /// Read description from stdin (for piping) - #[arg(long)] + #[arg(long, conflicts_with = "description")] description_stdin: bool, /// Priority #[arg(long)] @@ -159,6 +257,12 @@ pub enum IssueCommand { /// Parent issue key (e.g., for subtasks or stories under epics) #[arg(long)] parent: Option, + /// Assign to user (name/email, or "me" for self) + #[arg(long, conflicts_with = "account_id")] + to: Option, + /// Assign to this Jira accountId directly (bypasses name search) + #[arg(long, conflicts_with = "to")] + account_id: Option, }, /// View issue details View { @@ -193,6 +297,15 @@ pub enum IssueCommand { /// Parent issue key #[arg(long)] parent: Option, + /// Description + #[arg(short, long, conflicts_with = "description_stdin")] + description: Option, + /// Read description from stdin (for piping) + #[arg(long, conflicts_with = "description")] + description_stdin: bool, + /// Interpret description as Markdown + #[arg(long)] + markdown: bool, }, /// Transition issue to a new status Move { @@ -210,11 +323,14 @@ pub enum IssueCommand { Assign { /// Issue key key: String, - /// Assign to this user (omit to assign to self) - #[arg(long)] + /// Assign to this user (name/email, or "me" for self; omit to assign to self) + #[arg(long, conflicts_with_all = ["account_id", "unassign"])] to: Option, + /// Assign to this Jira accountId directly (bypasses name search) + #[arg(long, conflicts_with_all = ["to", "unassign"])] + account_id: Option, /// Remove assignee - #[arg(long)] + #[arg(long, conflicts_with_all = ["to", "account_id"])] unassign: bool, }, /// Add a comment @@ -271,31 +387,94 @@ pub enum IssueCommand { }, /// List available link 
types LinkTypes, + /// Show assets linked to an issue + Assets { + /// Issue key (e.g., FOO-123) + key: String, + }, } #[derive(Subcommand)] pub enum ProjectCommand { - /// Show valid issue types, priorities, and statuses - Fields { - /// Project key (uses configured project if omitted) - project: Option, + /// List accessible projects + List { + /// Filter by project type (software, service_desk, business) + #[arg(long = "type")] + project_type: Option, + /// Maximum number of results (default: 50) + #[arg(long)] + limit: Option, + /// Fetch all projects (paginate through all pages) + #[arg(long, conflicts_with = "limit")] + all: bool, }, + /// Show valid issue types, priorities, and statuses + Fields, } #[derive(Subcommand)] pub enum BoardCommand { /// List boards - List, + List { + /// Filter by board type + #[arg(long = "type", value_parser = clap::builder::PossibleValuesParser::new(["scrum", "kanban"]))] + board_type: Option, + }, /// View current board issues - View, + View { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option, + /// Maximum number of issues to return + #[arg(long)] + limit: Option, + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, + }, } #[derive(Subcommand)] pub enum SprintCommand { /// List sprints - List, + List { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option, + }, /// Show current sprint issues - Current, + Current { + /// Board ID (overrides board_id in .jr.toml) + #[arg(long)] + board: Option, + /// Maximum number of issues to return + #[arg(long)] + limit: Option, + /// Fetch all results (no default limit) + #[arg(long, conflicts_with = "limit")] + all: bool, + }, + /// Add issues to a sprint + Add { + /// Sprint ID (from `jr sprint list`) + #[arg(long, required_unless_present = "current")] + sprint: Option, + /// Use the active sprint instead of specifying an ID + #[arg(long, conflicts_with = "sprint")] + current: bool, + /// 
Issue keys to add (e.g. FOO-1 FOO-2) + #[arg(required = true, num_args = 1..)] + issues: Vec, + /// Board ID (used with --current to resolve the active sprint) + #[arg(long)] + board: Option, + }, + /// Remove issues from sprint (moves to backlog) + Remove { + /// Issue keys to remove (e.g. FOO-1 FOO-2) + #[arg(required = true, num_args = 1..)] + issues: Vec, + }, } #[derive(Subcommand)] @@ -326,3 +505,54 @@ pub enum WorklogCommand { key: String, }, } + +#[derive(Subcommand)] +pub enum QueueCommand { + /// List queues for the service desk + List, + /// View issues in a queue + View { + /// Queue name (partial match supported) + name: Option, + /// Queue ID (use if name is ambiguous) + #[arg(long)] + id: Option, + /// Maximum number of issues to return + #[arg(long)] + limit: Option, + }, +} + +pub(crate) const DEFAULT_LIMIT: u32 = 30; + +/// Resolve the effective limit from CLI flags. +/// +/// Returns `None` when `--all` is set (no limit), otherwise returns the +/// explicit `--limit` value or the default. 
+pub(crate) fn resolve_effective_limit(limit: Option, all: bool) -> Option { + if all { + None + } else { + Some(limit.unwrap_or(DEFAULT_LIMIT)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn effective_limit_defaults_to_30() { + assert_eq!(resolve_effective_limit(None, false), Some(30)); + } + + #[test] + fn effective_limit_respects_explicit_limit() { + assert_eq!(resolve_effective_limit(Some(50), false), Some(50)); + } + + #[test] + fn effective_limit_all_returns_none() { + assert_eq!(resolve_effective_limit(None, true), None); + } +} diff --git a/src/cli/project.rs b/src/cli/project.rs index 71d671a..c6c3313 100644 --- a/src/cli/project.rs +++ b/src/cli/project.rs @@ -1,8 +1,11 @@ use anyhow::Result; +use crate::api::assets::linked::get_or_fetch_cmdb_fields; use crate::api::client::JiraClient; use crate::cli::{OutputFormat, ProjectCommand}; use crate::config::Config; +use crate::error::JrError; +use crate::output; pub async fn handle( command: ProjectCommand, @@ -12,43 +15,119 @@ pub async fn handle( project_override: Option<&str>, ) -> Result<()> { match command { - ProjectCommand::Fields { project } => { - let project_key = project - .or_else(|| config.project_key(project_override)) - .ok_or_else(|| anyhow::anyhow!("No project specified"))?; - - let issue_types = client.get_project_issue_types(&project_key).await?; - let priorities = client.get_priorities().await?; - - match output_format { - OutputFormat::Json => { - println!( - "{}", - serde_json::json!({ - "project": project_key, - "issue_types": issue_types, - "priorities": priorities, - }) - ); - } - OutputFormat::Table => { - println!("Project: {project_key}\n"); - println!("Issue Types:"); - for t in &issue_types { - let suffix = if t.subtask == Some(true) { - " (subtask)" - } else { - "" - }; - println!(" - {}{}", t.name, suffix); + ProjectCommand::List { + project_type, + limit, + all, + } => handle_list(client, output_format, project_type.as_deref(), limit, all).await, + 
ProjectCommand::Fields => { + handle_fields(config, client, output_format, project_override).await + } + } +} + +async fn handle_list( + client: &JiraClient, + output_format: &OutputFormat, + project_type: Option<&str>, + limit: Option, + all: bool, +) -> Result<()> { + let max_results = if all { None } else { Some(limit.unwrap_or(50)) }; + let projects = client.list_projects(project_type, max_results).await?; + + let rows: Vec> = projects + .iter() + .map(|p| { + vec![ + p.key.clone(), + p.name.clone(), + p.lead + .as_ref() + .map(|l| l.display_name.clone()) + .unwrap_or_else(|| "\u{2014}".into()), + p.project_type_key.clone(), + ] + }) + .collect(); + + output::print_output( + output_format, + &["Key", "Name", "Lead", "Type"], + &rows, + &projects, + ) +} + +async fn handle_fields( + config: &Config, + client: &JiraClient, + output_format: &OutputFormat, + project_override: Option<&str>, +) -> Result<()> { + let project_key = config.project_key(project_override).ok_or_else(|| { + JrError::UserError( + "No project specified. Use --project or configure a default project in .jr.toml. \ + Run \"jr project list\" to see available projects." 
+ .into(), + ) + })?; + + let issue_types = client.get_project_issue_types(&project_key).await?; + let priorities = client.get_priorities().await?; + let statuses = client.get_project_statuses(&project_key).await?; + let cmdb_fields = get_or_fetch_cmdb_fields(client).await.unwrap_or_default(); + + match output_format { + OutputFormat::Json => { + println!( + "{}", + serde_json::json!({ + "project": project_key, + "issue_types": issue_types, + "priorities": priorities, + "statuses_by_issue_type": statuses, + "asset_fields": cmdb_fields.iter().map(|(id, name)| { + serde_json::json!({"id": id, "name": name}) + }).collect::>(), + }) + ); + } + OutputFormat::Table => { + println!("Project: {project_key}\n"); + println!("Issue Types:"); + for t in &issue_types { + let suffix = if t.subtask == Some(true) { + " (subtask)" + } else { + "" + }; + println!(" - {}{}", t.name, suffix); + } + println!("\nPriorities:"); + for p in &priorities { + println!(" - {}", p.name); + } + let has_statuses = statuses.iter().any(|it| !it.statuses.is_empty()); + if has_statuses { + println!("\nStatuses by Issue Type:"); + for it in &statuses { + if it.statuses.is_empty() { + continue; } - println!("\nPriorities:"); - for p in &priorities { - println!(" - {}", p.name); + println!(" {}:", it.name); + for s in &it.statuses { + println!(" - {}", s.name); } } } - Ok(()) + if !cmdb_fields.is_empty() { + println!("\nCustom Fields (Assets) \u{2014} instance-wide:"); + for (id, name) in &cmdb_fields { + println!(" - {} ({})", name, id); + } + } } } + Ok(()) } diff --git a/src/cli/queue.rs b/src/cli/queue.rs new file mode 100644 index 0000000..51de59a --- /dev/null +++ b/src/cli/queue.rs @@ -0,0 +1,321 @@ +use std::collections::HashMap; + +use anyhow::Result; + +use crate::api::client::JiraClient; +use crate::api::jsm::servicedesks; +use crate::cli::issue::{format_issue_rows_public, issue_table_headers}; +use crate::cli::{OutputFormat, QueueCommand}; +use crate::config::Config; +use 
crate::error::JrError; +use crate::output; +use crate::partial_match::{self, MatchResult}; + +pub async fn handle( + command: QueueCommand, + output_format: &OutputFormat, + config: &Config, + client: &JiraClient, + project_override: Option<&str>, +) -> Result<()> { + let project_key = config.project_key(project_override).ok_or_else(|| { + JrError::UserError( + "No project configured. Run \"jr init\" or pass --project. \ + Run \"jr project list\" to see available projects." + .into(), + ) + })?; + + let service_desk_id = servicedesks::require_service_desk(client, &project_key).await?; + + match command { + QueueCommand::List => handle_list(&service_desk_id, output_format, client).await, + QueueCommand::View { name, id, limit } => { + handle_view(&service_desk_id, name, id, limit, output_format, client).await + } + } +} + +async fn handle_list( + service_desk_id: &str, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let queues = client.list_queues(service_desk_id).await?; + + let rows: Vec> = queues + .iter() + .map(|q| { + vec![ + q.name.clone(), + q.issue_count + .map(|c| c.to_string()) + .unwrap_or_else(|| "\u{2014}".into()), + ] + }) + .collect(); + + output::print_output(output_format, &["Queue", "Issues"], &rows, &queues) +} + +async fn handle_view( + service_desk_id: &str, + name: Option, + id: Option, + limit: Option, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + let queue_id = match id { + Some(id) => id, + None => { + let name = name.ok_or_else(|| { + JrError::UserError( + "Specify a queue name or use --id. \ + Run \"jr queue list\" to see available queues." + .into(), + ) + })?; + resolve_queue_by_name(service_desk_id, &name, client).await? 
+ } + }; + + // Apply default limit consistent with other commands (issue list, board view, sprint current) + let effective_limit = limit.or(Some(crate::cli::DEFAULT_LIMIT)); + + // Step 1: Fetch issue keys from the queue (preserves queue membership and ordering) + let keys = client + .get_queue_issue_keys(service_desk_id, &queue_id, effective_limit) + .await?; + + if keys.is_empty() { + let headers = issue_table_headers(false, false); + let empty: Vec> = vec![]; + let empty_issues: Vec = vec![]; + return output::print_output(output_format, &headers, &empty, &empty_issues); + } + + // Step 2: Batch-fetch full issues via search API. + let jql = build_key_in_jql(&keys); + let search_result = client + .search_issues(&jql, Some(keys.len() as u32), &[]) + .await?; + + // Step 3: Re-order results to match original queue ordering + let issues = reorder_by_queue_position(search_result.issues, &keys); + + // Step 4: Output + let headers = issue_table_headers(false, false); + let rows = format_issue_rows_public(&issues); + output::print_output(output_format, &headers, &rows, &issues) +} + +/// Build a JQL `key IN (...)` clause from a list of issue keys. +/// Issue keys are identifiers in JQL and must NOT be quoted. +fn build_key_in_jql(keys: &[String]) -> String { + format!("key IN ({})", keys.join(", ")) +} + +/// Re-order issues to match the original queue key ordering. +/// Issues not found in the search results (e.g., permission-denied) are silently omitted. 
+fn reorder_by_queue_position( + mut issues: Vec, + queue_keys: &[String], +) -> Vec { + let position: HashMap<&str, usize> = queue_keys + .iter() + .enumerate() + .map(|(i, k)| (k.as_str(), i)) + .collect(); + issues.sort_by_key(|issue| { + position + .get(issue.key.as_str()) + .copied() + .unwrap_or(usize::MAX) + }); + issues +} + +pub async fn resolve_queue_by_name( + service_desk_id: &str, + name: &str, + client: &JiraClient, +) -> Result { + let queues = client.list_queues(service_desk_id).await?; + let names: Vec = queues.iter().map(|q| q.name.clone()).collect(); + + match partial_match::partial_match(name, &names) { + MatchResult::Exact(matched_name) => Ok(queues + .iter() + .find(|q| q.name == matched_name) + .expect("matched name must exist in queues") + .id + .clone()), + MatchResult::ExactMultiple(matched_name) => { + let name_lower = name.to_lowercase(); + let matching: Vec<&crate::types::jsm::Queue> = queues + .iter() + .filter(|q| q.name.to_lowercase() == name_lower) + .collect(); + let ids: Vec = matching.iter().map(|q| q.id.clone()).collect(); + Err(JrError::UserError(format!( + "Multiple queues named \"{}\" found (IDs: {}). Use --id {} to specify.", + matched_name, + ids.join(", "), + ids[0] + )) + .into()) + } + MatchResult::Ambiguous(matches) => Err(JrError::UserError(format!( + "\"{}\" matches multiple queues: {}. Be more specific or use --id.", + name, + matches + .iter() + .map(|m| format!("\"{}\"", m)) + .collect::>() + .join(", ") + )) + .into()), + MatchResult::None(_) => Err(JrError::UserError(format!( + "No queue matching \"{}\" found. 
\ + Run \"jr queue list\" to see available queues.", + name + )) + .into()), + } +} + +#[cfg(test)] +mod tests { + use super::{build_key_in_jql, reorder_by_queue_position}; + use crate::types::jira::Issue; + use crate::types::jsm::Queue; + + fn make_queue(id: &str, name: &str) -> Queue { + Queue { + id: id.into(), + name: name.into(), + jql: None, + fields: None, + issue_count: None, + } + } + + fn find_queue_id(name: &str, queues: &[Queue]) -> Result { + let names: Vec = queues.iter().map(|q| q.name.clone()).collect(); + match crate::partial_match::partial_match(name, &names) { + crate::partial_match::MatchResult::Exact(matched_name) => Ok(queues + .iter() + .find(|q| q.name == matched_name) + .expect("matched name must exist in queues") + .id + .clone()), + crate::partial_match::MatchResult::ExactMultiple(_) => Err("duplicate".into()), + crate::partial_match::MatchResult::Ambiguous(m) => { + Err(format!("ambiguous: {}", m.len())) + } + crate::partial_match::MatchResult::None(_) => Err("none".into()), + } + } + + #[test] + fn exact_match() { + let queues = vec![make_queue("10", "Triage"), make_queue("20", "In Progress")]; + assert_eq!(find_queue_id("Triage", &queues).unwrap(), "10"); + } + + #[test] + fn partial_match() { + let queues = vec![make_queue("10", "Triage"), make_queue("20", "In Progress")]; + assert_eq!(find_queue_id("tri", &queues).unwrap(), "10"); + } + + #[test] + fn ambiguous_match() { + let queues = vec![ + make_queue("10", "Escalated - Client"), + make_queue("20", "Escalated - External"), + ]; + let err = find_queue_id("esc", &queues).unwrap_err(); + assert!(err.starts_with("ambiguous")); + } + + #[test] + fn no_match() { + let queues = vec![make_queue("10", "Triage")]; + let err = find_queue_id("nonexistent", &queues).unwrap_err(); + assert_eq!(err, "none"); + } + + #[test] + fn duplicate_names() { + let queues = vec![make_queue("10", "Triage"), make_queue("20", "Triage")]; + let err = find_queue_id("Triage", &queues).unwrap_err(); + 
assert!(err.starts_with("duplicate")); + } + + fn make_issue(key: &str) -> Issue { + Issue { + key: key.to_string(), + ..Default::default() + } + } + + #[test] + fn build_jql_single_key() { + let jql = build_key_in_jql(&["FOO-1".to_string()]); + assert_eq!(jql, "key IN (FOO-1)"); + } + + #[test] + fn build_jql_multiple_keys() { + let keys = vec![ + "FOO-1".to_string(), + "FOO-2".to_string(), + "BAR-99".to_string(), + ]; + let jql = build_key_in_jql(&keys); + assert_eq!(jql, "key IN (FOO-1, FOO-2, BAR-99)"); + } + + #[test] + fn reorder_matches_queue_order() { + let issues = vec![ + make_issue("FOO-3"), + make_issue("FOO-1"), + make_issue("FOO-2"), + ]; + let queue_keys = vec!["FOO-1".into(), "FOO-2".into(), "FOO-3".into()]; + let result = reorder_by_queue_position(issues, &queue_keys); + let keys: Vec<&str> = result.iter().map(|i| i.key.as_str()).collect(); + assert_eq!(keys, vec!["FOO-1", "FOO-2", "FOO-3"]); + } + + #[test] + fn reorder_omits_nothing_on_full_match() { + let issues = vec![make_issue("A-1"), make_issue("A-2")]; + let queue_keys = vec!["A-2".into(), "A-1".into()]; + let result = reorder_by_queue_position(issues, &queue_keys); + assert_eq!(result.len(), 2); + assert_eq!(result[0].key, "A-2"); + assert_eq!(result[1].key, "A-1"); + } + + #[test] + fn reorder_with_missing_key_from_search() { + let issues = vec![make_issue("A-1"), make_issue("A-3")]; + let queue_keys = vec!["A-1".into(), "A-2".into(), "A-3".into()]; + let result = reorder_by_queue_position(issues, &queue_keys); + assert_eq!(result.len(), 2); + assert_eq!(result[0].key, "A-1"); + assert_eq!(result[1].key, "A-3"); + } + + #[test] + fn reorder_empty_issues() { + let issues: Vec = vec![]; + let queue_keys = vec!["A-1".into()]; + let result = reorder_by_queue_position(issues, &queue_keys); + assert!(result.is_empty()); + } +} diff --git a/src/cli/snapshots/jr__cli__sprint__tests__sprint_add_response.snap b/src/cli/snapshots/jr__cli__sprint__tests__sprint_add_response.snap new file mode 100644 
index 0000000..f5224e8 --- /dev/null +++ b/src/cli/snapshots/jr__cli__sprint__tests__sprint_add_response.snap @@ -0,0 +1,12 @@ +--- +source: src/cli/sprint.rs +expression: "sprint_add_response(100, &[\"TEST-1\".to_string(), \"TEST-2\".to_string()])" +--- +{ + "added": true, + "issues": [ + "TEST-1", + "TEST-2" + ], + "sprint_id": 100 +} diff --git a/src/cli/snapshots/jr__cli__sprint__tests__sprint_remove_response.snap b/src/cli/snapshots/jr__cli__sprint__tests__sprint_remove_response.snap new file mode 100644 index 0000000..543c546 --- /dev/null +++ b/src/cli/snapshots/jr__cli__sprint__tests__sprint_remove_response.snap @@ -0,0 +1,11 @@ +--- +source: src/cli/sprint.rs +expression: "sprint_remove_response(&[\"TEST-1\".to_string(), \"TEST-2\".to_string()])" +--- +{ + "issues": [ + "TEST-1", + "TEST-2" + ], + "removed": true +} diff --git a/src/cli/sprint.rs b/src/cli/sprint.rs index 2b0a85f..e96ae8f 100644 --- a/src/cli/sprint.rs +++ b/src/cli/sprint.rs @@ -1,4 +1,5 @@ use anyhow::{Result, bail}; +use serde_json::json; use crate::api::client::JiraClient; use crate::cli::{OutputFormat, SprintCommand}; @@ -12,12 +13,67 @@ pub async fn handle( config: &Config, client: &JiraClient, output_format: &OutputFormat, + project_override: Option<&str>, ) -> Result<()> { - let board_id = config.project.board_id.ok_or_else(|| { - anyhow::anyhow!("No board_id configured. Set board_id in .jr.toml or run \"jr init\".") - })?; + match command { + SprintCommand::List { board } => { + let board_id = resolve_scrum_board(config, client, board, project_override).await?; + handle_list(board_id, client, output_format).await + } + SprintCommand::Current { + board, limit, all, .. 
+ } => { + let board_id = resolve_scrum_board(config, client, board, project_override).await?; + handle_current(board_id, client, output_format, config, limit, all).await + } + SprintCommand::Add { + sprint, + current, + issues, + board, + } => { + if issues.len() > MAX_SPRINT_ISSUES { + bail!( + "Too many issues (got {}). Maximum is {} per operation.", + issues.len(), + MAX_SPRINT_ISSUES + ); + } + let sprint_id = if current { + let board_id = resolve_scrum_board(config, client, board, project_override).await?; + let sprints = client.list_sprints(board_id, Some("active")).await?; + if sprints.is_empty() { + bail!("No active sprint found for board {}.", board_id); + } + sprints[0].id + } else { + sprint.expect("clap enforces --sprint when --current is absent") + }; + handle_add(sprint_id, issues, output_format, client).await + } + SprintCommand::Remove { issues } => { + if issues.len() > MAX_SPRINT_ISSUES { + bail!( + "Too many issues (got {}). Maximum is {} per operation.", + issues.len(), + MAX_SPRINT_ISSUES + ); + } + handle_remove(issues, output_format, client).await + } + } +} + +/// Resolve board ID and verify it's a scrum board. +async fn resolve_scrum_board( + config: &Config, + client: &JiraClient, + board: Option, + project_override: Option<&str>, +) -> Result { + let board_id = + crate::cli::board::resolve_board_id(config, client, board, project_override, true).await?; - // Guard: sprints only make sense for scrum boards let board_config = client.get_board_config(board_id).await?; let board_type = board_config.board_type.to_lowercase(); if board_type != "scrum" { @@ -28,10 +84,74 @@ pub async fn handle( ); } - match command { - SprintCommand::List => handle_list(board_id, client, output_format).await, - SprintCommand::Current => handle_current(board_id, client, output_format, config).await, + Ok(board_id) +} + +/// JSON response for `sprint add`. 
+fn sprint_add_response(sprint_id: u64, issues: &[String]) -> serde_json::Value { + json!({ + "sprint_id": sprint_id, + "issues": issues, + "added": true + }) +} + +/// JSON response for `sprint remove`. +fn sprint_remove_response(issues: &[String]) -> serde_json::Value { + json!({ + "issues": issues, + "removed": true + }) +} + +const MAX_SPRINT_ISSUES: usize = 50; + +/// Add issues to a sprint. +async fn handle_add( + sprint_id: u64, + issues: Vec, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + client.add_issues_to_sprint(sprint_id, &issues).await?; + + match output_format { + OutputFormat::Json => { + println!( + "{}", + output::render_json(&sprint_add_response(sprint_id, &issues))? + ); + } + OutputFormat::Table => { + output::print_success(&format!( + "Added {} issue(s) to sprint {}", + issues.len(), + sprint_id + )); + } } + + Ok(()) +} + +/// Remove issues from all sprints, moving them to the backlog. +async fn handle_remove( + issues: Vec, + output_format: &OutputFormat, + client: &JiraClient, +) -> Result<()> { + client.move_issues_to_backlog(&issues).await?; + + match output_format { + OutputFormat::Json => { + println!("{}", output::render_json(&sprint_remove_response(&issues))?); + } + OutputFormat::Table => { + output::print_success(&format!("Moved {} issue(s) to backlog", issues.len())); + } + } + + Ok(()) } async fn handle_list( @@ -98,7 +218,10 @@ async fn handle_current( client: &JiraClient, output_format: &OutputFormat, config: &Config, + limit: Option, + all: bool, ) -> Result<()> { + let effective_limit = crate::cli::resolve_effective_limit(limit, all); let sprints = client.list_sprints(board_id, Some("active")).await?; if sprints.is_empty() { @@ -108,7 +231,12 @@ async fn handle_current( let sprint = &sprints[0]; let sp_field_id = config.global.fields.story_points_field_id.as_deref(); let extra: Vec<&str> = sp_field_id.iter().copied().collect(); - let issues = client.get_sprint_issues(sprint.id, None, 
&extra).await?; + let result = client + .get_sprint_issues(sprint.id, None, effective_limit, &extra) + .await?; + let issues = result.issues; + let has_more = result.has_more; + let issue_count = issues.len(); let sprint_summary = sp_field_id.map(|field_id| compute_sprint_summary(&issues, field_id)); @@ -150,17 +278,24 @@ async fn handle_current( let rows: Vec> = issues .iter() - .map(|issue| super::issue::format_issue_row(issue, sp_field_id)) + .map(|issue| super::issue::format_issue_row(issue, sp_field_id, None)) .collect(); output::print_output( output_format, - &super::issue::issue_table_headers(sp_field_id.is_some()), + &super::issue::issue_table_headers(sp_field_id.is_some(), false), &rows, &issues, )?; } } + if has_more && !all { + eprintln!( + "Showing {} results. Use --limit or --all to see more.", + issue_count + ); + } + Ok(()) } @@ -236,4 +371,20 @@ mod tests { assert_eq!(completed, 0.0); assert_eq!(unestimated, 0); } + + #[test] + fn test_sprint_add_response() { + insta::assert_json_snapshot!(sprint_add_response( + 100, + &["TEST-1".to_string(), "TEST-2".to_string()] + )); + } + + #[test] + fn test_sprint_remove_response() { + insta::assert_json_snapshot!(sprint_remove_response(&[ + "TEST-1".to_string(), + "TEST-2".to_string() + ])); + } } diff --git a/src/cli/team.rs b/src/cli/team.rs index 98e45ed..842d89e 100644 --- a/src/cli/team.rs +++ b/src/cli/team.rs @@ -4,6 +4,7 @@ use crate::api::client::JiraClient; use crate::cache::{self, CachedTeam}; use crate::cli::OutputFormat; use crate::config::Config; +use crate::error::JrError; use crate::output; use super::TeamCommand; @@ -81,10 +82,9 @@ pub async fn resolve_org_id(config: &Config, client: &JiraClient) -> Result, @@ -95,7 +97,7 @@ impl Config { } let url = self.global.instance.url.as_ref().ok_or_else(|| { - anyhow::anyhow!("No Jira instance configured. Run \"jr init\" first.") + JrError::ConfigError("No Jira instance configured. 
Run \"jr init\" first.".into()) })?; if let Some(cloud_id) = &self.global.instance.cloud_id { @@ -112,6 +114,10 @@ impl Config { .or_else(|| self.project.project.clone()) } + pub fn board_id(&self, cli_override: Option) -> Option { + cli_override.or(self.project.board_id) + } + pub fn save_global(&self) -> anyhow::Result<()> { let dir = global_config_dir(); std::fs::create_dir_all(&dir)?; @@ -235,6 +241,24 @@ mod tests { assert_eq!(config.project_key(None), Some("FOO".into())); } + #[test] + fn test_board_id_cli_override() { + let config = Config { + global: GlobalConfig::default(), + project: ProjectConfig { + project: None, + board_id: Some(42), + }, + }; + // CLI override wins + assert_eq!(config.board_id(Some(99)), Some(99)); + // Config fallback + assert_eq!(config.board_id(None), Some(42)); + // Neither set + let empty = Config::default(); + assert_eq!(empty.board_id(None), None); + } + #[test] fn test_base_url_env_override() { let _guard = ENV_MUTEX.lock().unwrap(); diff --git a/src/error.rs b/src/error.rs index b7031ef..e64019f 100644 --- a/src/error.rs +++ b/src/error.rs @@ -11,7 +11,7 @@ pub enum JrError { #[error("API error ({status}): {message}")] ApiError { status: u16, message: String }, - #[error("Configuration error: {0}")] + #[error("{0}")] ConfigError(String), #[error("{0}")] @@ -41,3 +41,34 @@ impl JrError { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn config_error_exit_code() { + assert_eq!(JrError::ConfigError("test".into()).exit_code(), 78); + } + + #[test] + fn user_error_exit_code() { + assert_eq!(JrError::UserError("test".into()).exit_code(), 64); + } + + #[test] + fn config_error_display_no_prefix() { + assert_eq!( + JrError::ConfigError("No board_id configured.".into()).to_string(), + "No board_id configured." 
+ ); + } + + #[test] + fn user_error_display_passthrough() { + assert_eq!( + JrError::UserError("Invalid selection".into()).to_string(), + "Invalid selection" + ); + } +} diff --git a/src/jql.rs b/src/jql.rs index 828471f..dfc1a53 100644 --- a/src/jql.rs +++ b/src/jql.rs @@ -7,6 +7,105 @@ pub fn escape_value(s: &str) -> String { s.replace('\\', "\\\\").replace('"', "\\\"") } +/// Validate a JQL relative date duration string. +/// +/// JQL relative dates use the format `` where unit is one of: +/// `y` (years), `M` (months), `w` (weeks), `d` (days), `h` (hours), `m` (minutes). +/// Units are case-sensitive — `M` is months, `m` is minutes. +/// Combined units like `4w2d` are not supported by Jira. +pub fn validate_duration(s: &str) -> Result<(), String> { + if s.len() < 2 { + return Err(format!( + "Invalid duration '{s}'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)." + )); + } + let (digits, unit) = s.split_at(s.len() - 1); + if digits.is_empty() || !digits.chars().all(|c| c.is_ascii_digit()) { + return Err(format!( + "Invalid duration '{s}'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)." + )); + } + if !matches!(unit, "y" | "M" | "w" | "d" | "h" | "m") { + return Err(format!( + "Invalid duration '{s}'. Use a number followed by y, M, w, d, h, or m (e.g., 7d, 4w, 2M)." + )); + } + Ok(()) +} + +/// Validate an asset object key matches the SCHEMA-NUMBER format. +/// +/// Asset keys follow the `-` format (e.g., CUST-5, SRV-42, ITSM-123). +pub fn validate_asset_key(key: &str) -> Result<(), String> { + let Some((prefix, number)) = key.split_once('-') else { + return Err(format!( + "Invalid asset key \"{key}\". Expected format: SCHEMA-NUMBER (e.g., CUST-5, SRV-42)." + )); + }; + if prefix.is_empty() + || !prefix.chars().all(|c| c.is_ascii_alphanumeric()) + || number.is_empty() + || !number.chars().all(|c| c.is_ascii_digit()) + { + return Err(format!( + "Invalid asset key \"{key}\". 
Expected format: SCHEMA-NUMBER (e.g., CUST-5, SRV-42)." + )); + } + Ok(()) +} + +/// Build a JQL clause that filters issues by a linked asset object key. +/// +/// Uses `aqlFunction()` with the human-readable field name (required by Jira Cloud). +/// When multiple CMDB fields exist, OR them together and wrap in parentheses. +pub fn build_asset_clause(asset_key: &str, cmdb_fields: &[(String, String)]) -> String { + debug_assert!( + !cmdb_fields.is_empty(), + "cmdb_fields must not be empty — callers should check before calling" + ); + let clauses: Vec = cmdb_fields + .iter() + .map(|(_, name)| { + format!( + "\"{}\" IN aqlFunction(\"Key = \\\"{}\\\"\")", + escape_value(name), + escape_value(asset_key), + ) + }) + .collect(); + + if clauses.len() == 1 { + clauses.into_iter().next().unwrap() + } else { + format!("({})", clauses.join(" OR ")) + } +} + +/// Validate and parse an absolute date string in ISO 8601 format (YYYY-MM-DD). +/// +/// Returns the parsed `NaiveDate` on success. The caller needs the parsed date +/// to compute +1 day for `--before` flag JQL generation. +pub fn validate_date(s: &str) -> Result { + chrono::NaiveDate::parse_from_str(s, "%Y-%m-%d").map_err(|_| { + format!("Invalid date \"{s}\". Expected format: YYYY-MM-DD (e.g., 2026-03-18).") + }) +} + +/// Strip `ORDER BY` clause from JQL for use with count-only endpoints. +/// +/// The approximate-count endpoint only needs the WHERE clause. ORDER BY is +/// meaningless for a count and may cause issues with bounded-JQL validation. 
+pub fn strip_order_by(jql: &str) -> &str { + let upper = jql.to_ascii_uppercase(); + if let Some(pos) = upper.find(" ORDER BY") { + jql[..pos].trim_end() + } else if upper.starts_with("ORDER BY") { + "" + } else { + jql + } +} + #[cfg(test)] mod tests { use super::*; @@ -35,6 +134,227 @@ mod tests { fn trailing_backslash() { assert_eq!(escape_value(r"foo\"), r"foo\\"); } + + #[test] + fn strip_order_by_removes_clause() { + assert_eq!( + strip_order_by("project = PROJ ORDER BY updated DESC"), + "project = PROJ" + ); + } + + #[test] + fn strip_order_by_no_clause() { + assert_eq!(strip_order_by("project = PROJ"), "project = PROJ"); + } + + #[test] + fn strip_order_by_case_insensitive() { + assert_eq!( + strip_order_by("project = PROJ order by rank ASC"), + "project = PROJ" + ); + } + + #[test] + fn strip_order_by_trims_whitespace() { + assert_eq!( + strip_order_by("project = PROJ ORDER BY rank ASC"), + "project = PROJ" + ); + } + + #[test] + fn strip_order_by_at_position_zero() { + assert_eq!(strip_order_by("ORDER BY created DESC"), ""); + } + + #[test] + fn strip_order_by_at_position_zero_lowercase() { + assert_eq!(strip_order_by("order by rank ASC"), ""); + } + + #[test] + fn validate_duration_valid_days() { + assert!(validate_duration("7d").is_ok()); + } + + #[test] + fn validate_duration_valid_weeks() { + assert!(validate_duration("4w").is_ok()); + } + + #[test] + fn validate_duration_valid_months_uppercase() { + assert!(validate_duration("2M").is_ok()); + } + + #[test] + fn validate_duration_valid_years() { + assert!(validate_duration("1y").is_ok()); + } + + #[test] + fn validate_duration_valid_hours() { + assert!(validate_duration("5h").is_ok()); + } + + #[test] + fn validate_duration_valid_minutes() { + assert!(validate_duration("10m").is_ok()); + } + + #[test] + fn validate_duration_valid_zero() { + assert!(validate_duration("0d").is_ok()); + } + + #[test] + fn validate_duration_invalid_unit() { + assert!(validate_duration("7x").is_err()); + } + + #[test] + 
fn validate_duration_reversed() { + assert!(validate_duration("d7").is_err()); + } + + #[test] + fn validate_duration_empty() { + assert!(validate_duration("").is_err()); + } + + #[test] + fn validate_duration_combined_units() { + assert!(validate_duration("4w2d").is_err()); + } + + #[test] + fn validate_duration_no_digits() { + assert!(validate_duration("d").is_err()); + } + + #[test] + fn validate_asset_key_valid_simple() { + assert!(validate_asset_key("CUST-5").is_ok()); + } + + #[test] + fn validate_asset_key_valid_long() { + assert!(validate_asset_key("SRV-42").is_ok()); + } + + #[test] + fn validate_asset_key_valid_itsm() { + assert!(validate_asset_key("ITSM-123").is_ok()); + } + + #[test] + fn validate_asset_key_invalid_no_number() { + assert!(validate_asset_key("CUST-").is_err()); + } + + #[test] + fn validate_asset_key_invalid_no_prefix() { + assert!(validate_asset_key("-5").is_err()); + } + + #[test] + fn validate_asset_key_invalid_no_hyphen() { + assert!(validate_asset_key("foo").is_err()); + } + + #[test] + fn validate_asset_key_invalid_empty() { + assert!(validate_asset_key("").is_err()); + } + + #[test] + fn validate_asset_key_invalid_spaces() { + assert!(validate_asset_key("CU ST-5").is_err()); + } + + #[test] + fn build_asset_clause_single_field() { + let fields = vec![("customfield_10191".to_string(), "Client".to_string())]; + let clause = build_asset_clause("CUST-5", &fields); + assert_eq!(clause, r#""Client" IN aqlFunction("Key = \"CUST-5\"")"#); + } + + #[test] + fn build_asset_clause_multiple_fields() { + let fields = vec![ + ("customfield_10191".to_string(), "Client".to_string()), + ("customfield_10245".to_string(), "Server".to_string()), + ]; + let clause = build_asset_clause("SRV-42", &fields); + assert_eq!( + clause, + r#"("Client" IN aqlFunction("Key = \"SRV-42\"") OR "Server" IN aqlFunction("Key = \"SRV-42\""))"# + ); + } + + #[test] + fn build_asset_clause_field_name_with_quotes() { + let fields = vec![( + 
"customfield_10191".to_string(), + r#"My "Assets""#.to_string(), + )]; + let clause = build_asset_clause("OBJ-1", &fields); + assert_eq!( + clause, + r#""My \"Assets\"" IN aqlFunction("Key = \"OBJ-1\"")"# + ); + } + + #[test] + fn validate_date_valid_simple() { + let d = validate_date("2026-03-18").unwrap(); + assert_eq!(d.to_string(), "2026-03-18"); + } + + #[test] + fn validate_date_valid_leap_day() { + let d = validate_date("2024-02-29").unwrap(); + assert_eq!(d.to_string(), "2024-02-29"); + } + + #[test] + fn validate_date_invalid_format_slash() { + let err = validate_date("2026/03/18").unwrap_err(); + assert!(err.contains("Invalid date")); + assert!(err.contains("YYYY-MM-DD")); + } + + #[test] + fn validate_date_invalid_format_us() { + let err = validate_date("03-18-2026").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_impossible_feb30() { + let err = validate_date("2026-02-30").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_impossible_month13() { + let err = validate_date("2026-13-01").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_empty() { + let err = validate_date("").unwrap_err(); + assert!(err.contains("Invalid date")); + } + + #[test] + fn validate_date_non_leap_feb29() { + let err = validate_date("2026-02-29").unwrap_err(); + assert!(err.contains("Invalid date")); + } } #[cfg(test)] diff --git a/src/main.rs b/src/main.rs index d27e74e..25cb3a7 100644 --- a/src/main.rs +++ b/src/main.rs @@ -26,7 +26,8 @@ async fn main() { let result = run(cli).await; if let Err(e) = result { let exit_code = e - .downcast_ref::() + .chain() + .find_map(|cause| cause.downcast_ref::()) .map(|je| je.exit_code()) .unwrap_or(1); @@ -63,6 +64,11 @@ async fn run(cli: Cli) -> anyhow::Result<()> { match cli.command { cli::Command::Completion { .. 
} => unreachable!(), cli::Command::Init => cli::init::handle().await, + cli::Command::Assets { command } => { + let config = config::Config::load()?; + let client = api::client::JiraClient::from_config(&config, cli.verbose)?; + cli::assets::handle(command, &cli.output, &client).await + } cli::Command::Auth { command } => match command { cli::AuthCommand::Login { oauth } => { if oauth { @@ -108,7 +114,7 @@ async fn run(cli: Cli) -> anyhow::Result<()> { let config = config::Config::load()?; let client = api::client::JiraClient::from_config(&config, cli.verbose)?; cli::issue::handle( - command, + *command, &cli.output, &config, &client, @@ -121,12 +127,26 @@ async fn run(cli: Cli) -> anyhow::Result<()> { cli::Command::Board { command } => { let config = config::Config::load()?; let client = api::client::JiraClient::from_config(&config, cli.verbose)?; - cli::board::handle(command, &config, &client, &cli.output).await + cli::board::handle( + command, + &config, + &client, + &cli.output, + cli.project.as_deref(), + ) + .await } cli::Command::Sprint { command } => { let config = config::Config::load()?; let client = api::client::JiraClient::from_config(&config, cli.verbose)?; - cli::sprint::handle(command, &config, &client, &cli.output).await + cli::sprint::handle( + command, + &config, + &client, + &cli.output, + cli.project.as_deref(), + ) + .await } cli::Command::Worklog { command } => { let config = config::Config::load()?; @@ -138,6 +158,18 @@ async fn run(cli: Cli) -> anyhow::Result<()> { let client = api::client::JiraClient::from_config(&config, cli.verbose)?; cli::team::handle(command, &cli.output, &config, &client).await } + cli::Command::Queue { command } => { + let config = config::Config::load()?; + let client = api::client::JiraClient::from_config(&config, cli.verbose)?; + cli::queue::handle( + command, + &cli.output, + &config, + &client, + cli.project.as_deref(), + ) + .await + } } }; diff --git a/src/partial_match.rs b/src/partial_match.rs index 
3b6e0fa..6cd21dc 100644 --- a/src/partial_match.rs +++ b/src/partial_match.rs @@ -1,7 +1,10 @@ /// Result of attempting a partial match against a list of candidates. +#[derive(Debug)] pub enum MatchResult { /// Exactly one match found Exact(String), + /// Multiple candidates share the same exact (case-insensitive) name — carries the first matching candidate + ExactMultiple(String), /// Multiple matches — caller should prompt for disambiguation Ambiguous(Vec), /// No matches @@ -12,11 +15,17 @@ pub enum MatchResult { pub fn partial_match(input: &str, candidates: &[String]) -> MatchResult { let lower_input = input.to_lowercase(); - // Try exact match first (case-insensitive) - for candidate in candidates { - if candidate.to_lowercase() == lower_input { - return MatchResult::Exact(candidate.clone()); - } + // Collect all exact matches (case-insensitive) + let exact_matches: Vec = candidates + .iter() + .filter(|c| c.to_lowercase() == lower_input) + .cloned() + .collect(); + + match exact_matches.len() { + 1 => return MatchResult::Exact(exact_matches.into_iter().next().unwrap()), + n if n > 1 => return MatchResult::ExactMultiple(exact_matches.into_iter().next().unwrap()), + _ => {} } // Try substring match @@ -89,6 +98,45 @@ mod tests { _ => panic!("Expected unique match"), } } + + #[test] + fn test_exact_match_duplicate_returns_exact_multiple() { + let candidates = vec!["John Smith".into(), "Jane Doe".into(), "John Smith".into()]; + match partial_match("John Smith", &candidates) { + MatchResult::ExactMultiple(name) => { + assert_eq!(name, "John Smith"); + } + other => panic!("Expected ExactMultiple, got {:?}", other), + } + } + + #[test] + fn test_exact_match_duplicate_case_insensitive() { + let candidates = vec!["John Smith".into(), "john smith".into()]; + match partial_match("john smith", &candidates) { + MatchResult::ExactMultiple(name) => { + // Preserves casing of the first match + assert_eq!(name, "John Smith"); + } + other => panic!("Expected ExactMultiple, got 
{:?}", other), + } + } + + #[test] + fn test_exact_match_three_duplicates() { + let candidates = vec![ + "John Smith".into(), + "Jane Doe".into(), + "John Smith".into(), + "John Smith".into(), + ]; + match partial_match("John Smith", &candidates) { + MatchResult::ExactMultiple(name) => { + assert_eq!(name, "John Smith"); + } + other => panic!("Expected ExactMultiple, got {:?}", other), + } + } } #[cfg(test)] @@ -124,5 +172,23 @@ mod proptests { _ => prop_assert!(false, "Expected None for empty candidates"), } } + + #[test] + fn duplicate_candidates_yield_exact_multiple(idx in 0usize..4) { + let base: Vec = vec![ + "In Progress".into(), "In Review".into(), + "Blocked".into(), "Done".into(), + ]; + // Duplicate one candidate + let mut candidates = base.clone(); + candidates.push(base[idx].clone()); + let input = base[idx].clone(); + match partial_match(&input, &candidates) { + MatchResult::ExactMultiple(name) => { + prop_assert_eq!(name.to_lowercase(), input.to_lowercase()); + } + _ => prop_assert!(false, "Expected ExactMultiple for duplicated '{}'", input), + } + } } } diff --git a/src/types/assets/linked.rs b/src/types/assets/linked.rs new file mode 100644 index 0000000..53e9412 --- /dev/null +++ b/src/types/assets/linked.rs @@ -0,0 +1,246 @@ +use serde::Serialize; + +/// An asset reference extracted from a CMDB custom field on a Jira issue. +#[derive(Debug, Clone, Default, Serialize)] +pub struct LinkedAsset { + #[serde(skip_serializing_if = "Option::is_none")] + pub key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "type")] + pub asset_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub workspace_id: Option, +} + +impl LinkedAsset { + /// Human-readable display: "OBJ-1 (Acme Corp)", "OBJ-1", or "#12345 (run "jr init" to resolve asset names)". 
+ pub fn display(&self) -> String { + match (&self.key, &self.name) { + (Some(key), Some(name)) => format!("{} ({})", key, name), + (Some(key), None) => key.clone(), + (None, Some(name)) => name.clone(), + (None, None) => match &self.id { + Some(id) => format!("#{} (run \"jr init\" to resolve asset names)", id), + None => "(unknown)".into(), + }, + } + } + + /// Name-only display for list tables: "Acme Corp", "OBJ-1", or "#12345 (run `jr init` to resolve asset names)". + pub fn display_name_only(&self) -> String { + self.name + .as_deref() + .or(self.key.as_deref()) + .map(|s| s.to_string()) + .unwrap_or_else(|| match &self.id { + Some(id) => format!("#{} (run \"jr init\" to resolve asset names)", id), + None => "(unknown)".into(), + }) + } +} + +/// Format a list of linked assets for display in a table cell. +pub fn format_linked_assets(assets: &[LinkedAsset]) -> String { + if assets.is_empty() { + return "(none)".into(); + } + assets + .iter() + .map(|a| a.display()) + .collect::>() + .join(", ") +} + +/// Format for list table: first asset + count if multiple. 
+pub fn format_linked_assets_short(assets: &[LinkedAsset]) -> String { + match assets.len() { + 0 => "-".into(), + 1 => assets[0].display_name_only(), + n => format!("{} (+{} more)", assets[0].display_name_only(), n - 1), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn display_key_and_name() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme Corp".into()), + ..Default::default() + }; + assert_eq!(a.display(), "OBJ-1 (Acme Corp)"); + } + + #[test] + fn display_key_only() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + ..Default::default() + }; + assert_eq!(a.display(), "OBJ-1"); + } + + #[test] + fn display_name_only() { + let a = LinkedAsset { + name: Some("Acme Corp".into()), + ..Default::default() + }; + assert_eq!(a.display(), "Acme Corp"); + } + + #[test] + fn display_id_fallback_with_hint() { + let a = LinkedAsset { + id: Some("12345".into()), + ..Default::default() + }; + assert_eq!( + a.display(), + "#12345 (run \"jr init\" to resolve asset names)" + ); + } + + #[test] + fn display_nothing() { + let a = LinkedAsset::default(); + assert_eq!(a.display(), "(unknown)"); + } + + #[test] + fn display_name_only_key_and_name() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme Corp".into()), + ..Default::default() + }; + assert_eq!(a.display_name_only(), "Acme Corp"); + } + + #[test] + fn display_name_only_name_only() { + let a = LinkedAsset { + name: Some("Acme Corp".into()), + ..Default::default() + }; + assert_eq!(a.display_name_only(), "Acme Corp"); + } + + #[test] + fn display_name_only_key_fallback() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + ..Default::default() + }; + assert_eq!(a.display_name_only(), "OBJ-1"); + } + + #[test] + fn display_name_only_id_fallback() { + let a = LinkedAsset { + id: Some("12345".into()), + ..Default::default() + }; + assert_eq!( + a.display_name_only(), + "#12345 (run \"jr init\" to resolve asset names)" + ); + } + + #[test] + fn 
display_name_only_nothing() { + let a = LinkedAsset::default(); + assert_eq!(a.display_name_only(), "(unknown)"); + } + + #[test] + fn format_empty_list() { + assert_eq!(format_linked_assets(&[]), "(none)"); + } + + #[test] + fn format_single_asset() { + let assets = vec![LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }]; + assert_eq!(format_linked_assets(&assets), "OBJ-1 (Acme)"); + } + + #[test] + fn format_multiple_assets() { + let assets = vec![ + LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }, + LinkedAsset { + key: Some("OBJ-2".into()), + name: Some("Globex".into()), + ..Default::default() + }, + ]; + assert_eq!( + format_linked_assets(&assets), + "OBJ-1 (Acme), OBJ-2 (Globex)" + ); + } + + #[test] + fn format_short_empty() { + assert_eq!(format_linked_assets_short(&[]), "-"); + } + + #[test] + fn format_short_single() { + let assets = vec![LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }]; + assert_eq!(format_linked_assets_short(&assets), "Acme"); + } + + #[test] + fn format_short_multiple() { + let assets = vec![ + LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }, + LinkedAsset { + key: Some("OBJ-2".into()), + ..Default::default() + }, + LinkedAsset { + key: Some("OBJ-3".into()), + ..Default::default() + }, + ]; + assert_eq!(format_linked_assets_short(&assets), "Acme (+2 more)"); + } + + #[test] + fn serialize_json_skips_none() { + let a = LinkedAsset { + key: Some("OBJ-1".into()), + name: Some("Acme".into()), + ..Default::default() + }; + let json = serde_json::to_value(&a).unwrap(); + assert_eq!(json.get("key").unwrap(), "OBJ-1"); + assert_eq!(json.get("name").unwrap(), "Acme"); + assert!(json.get("id").is_none()); + assert!(json.get("workspace_id").is_none()); + } +} diff --git a/src/types/assets/mod.rs b/src/types/assets/mod.rs new file mode 100644 index 
0000000..f9ff828 --- /dev/null +++ b/src/types/assets/mod.rs @@ -0,0 +1,9 @@ +pub mod linked; +pub mod object; +pub mod schema; +pub mod ticket; + +pub use linked::*; +pub use object::*; +pub use schema::*; +pub use ticket::*; diff --git a/src/types/assets/object.rs b/src/types/assets/object.rs new file mode 100644 index 0000000..83eb77c --- /dev/null +++ b/src/types/assets/object.rs @@ -0,0 +1,329 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct AssetObject { + pub id: String, + pub label: String, + #[serde(rename = "objectKey")] + pub object_key: String, + #[serde(rename = "objectType")] + pub object_type: ObjectType, + pub created: Option, + pub updated: Option, + #[serde(default)] + pub attributes: Vec, +} + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct ObjectType { + pub id: String, + pub name: String, + pub description: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct AssetAttribute { + pub id: String, + #[serde(rename = "objectTypeAttributeId")] + pub object_type_attribute_id: String, + #[serde(rename = "objectAttributeValues", default)] + pub values: Vec, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectAttributeValue { + pub value: Option, + #[serde(rename = "displayValue")] + pub display_value: Option, +} + +/// A single attribute entry from `GET /object/{id}/attributes`. +/// Includes the full attribute definition with name, unlike `AssetAttribute` +/// which only has the numeric `objectTypeAttributeId`. +#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectAttribute { + pub id: String, + #[serde(rename = "objectTypeAttributeId")] + pub object_type_attribute_id: String, + #[serde(rename = "objectTypeAttribute")] + pub object_type_attribute: ObjectTypeAttributeDef, + #[serde(rename = "objectAttributeValues", default)] + pub values: Vec, +} + +/// Attribute definition from the object type schema. 
+#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectTypeAttributeDef { + pub id: String, + pub name: String, + #[serde(default)] + pub system: bool, + #[serde(default)] + pub hidden: bool, + #[serde(default)] + pub label: bool, + #[serde(default)] + pub position: i32, + #[serde(rename = "defaultType")] + pub default_type: Option, + #[serde(rename = "referenceType")] + pub reference_type: Option, + #[serde(rename = "referenceObjectType")] + pub reference_object_type: Option, + #[serde(rename = "minimumCardinality", default)] + pub minimum_cardinality: i32, + #[serde(rename = "maximumCardinality", default)] + pub maximum_cardinality: i32, + #[serde(default)] + pub editable: bool, + pub description: Option, + pub options: Option, +} + +/// Attribute data type (e.g., Text, DateTime, Select). +#[derive(Debug, Deserialize, Serialize)] +pub struct DefaultType { + pub id: i32, + pub name: String, +} + +/// Reference link type (e.g., "Depends on", "References"). +#[derive(Debug, Deserialize, Serialize)] +pub struct ReferenceType { + pub id: String, + pub name: String, +} + +/// Target object type for a reference attribute (e.g., "Service", "Employee"). 
+#[derive(Debug, Deserialize, Serialize)] +pub struct ReferenceObjectType { + pub id: String, + pub name: String, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_asset_object_minimal() { + let json = r#"{ + "id": "88", + "label": "Acme Corp", + "objectKey": "OBJ-88", + "objectType": { "id": "23", "name": "Client" } + }"#; + let obj: AssetObject = serde_json::from_str(json).unwrap(); + assert_eq!(obj.id, "88"); + assert_eq!(obj.label, "Acme Corp"); + assert_eq!(obj.object_key, "OBJ-88"); + assert_eq!(obj.object_type.name, "Client"); + assert!(obj.attributes.is_empty()); + assert!(obj.created.is_none()); + } + + #[test] + fn deserialize_asset_object_with_attributes() { + let json = r#"{ + "id": "88", + "label": "Acme Corp", + "objectKey": "OBJ-88", + "objectType": { "id": "23", "name": "Client" }, + "created": "2025-12-17T14:58:00.000Z", + "updated": "2026-01-29T19:52:00.000Z", + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "contact@acme.com", "displayValue": "contact@acme.com" } + ] + } + ] + }"#; + let obj: AssetObject = serde_json::from_str(json).unwrap(); + assert_eq!(obj.attributes.len(), 1); + assert_eq!( + obj.attributes[0].values[0].display_value.as_deref(), + Some("contact@acme.com") + ); + } + + #[test] + fn deserialize_object_attribute_with_name() { + let json = r#"{ + "id": "637", + "objectTypeAttributeId": "134", + "objectTypeAttribute": { + "id": "134", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 4 + }, + "objectAttributeValues": [ + { "value": "New York, NY", "displayValue": "New York, NY" } + ] + }"#; + let attr: ObjectAttribute = serde_json::from_str(json).unwrap(); + assert_eq!(attr.id, "637"); + assert_eq!(attr.object_type_attribute_id, "134"); + assert_eq!(attr.object_type_attribute.name, "Location"); + assert!(!attr.object_type_attribute.system); + assert!(!attr.object_type_attribute.hidden); + 
assert!(!attr.object_type_attribute.label); + assert_eq!(attr.object_type_attribute.position, 4); + assert_eq!(attr.values.len(), 1); + assert_eq!( + attr.values[0].display_value.as_deref(), + Some("New York, NY") + ); + } + + #[test] + fn deserialize_object_attribute_defaults() { + let json = r#"{ + "id": "640", + "objectTypeAttributeId": "135", + "objectTypeAttribute": { + "id": "135", + "name": "Name" + }, + "objectAttributeValues": [] + }"#; + let attr: ObjectAttribute = serde_json::from_str(json).unwrap(); + assert_eq!(attr.object_type_attribute.name, "Name"); + assert!(!attr.object_type_attribute.system); + assert!(!attr.object_type_attribute.hidden); + assert!(!attr.object_type_attribute.label); + assert_eq!(attr.object_type_attribute.position, 0); + assert!(attr.values.is_empty()); + } + + #[test] + fn deserialize_object_attribute_system() { + let json = r#"{ + "id": "638", + "objectTypeAttributeId": "136", + "objectTypeAttribute": { + "id": "136", + "name": "Created", + "system": true, + "hidden": false, + "label": false, + "position": 2 + }, + "objectAttributeValues": [ + { "value": "2021-02-16T20:04:41.527Z", "displayValue": "16/Feb/21 8:04 PM" } + ] + }"#; + let attr: ObjectAttribute = serde_json::from_str(json).unwrap(); + assert!(attr.object_type_attribute.system); + assert_eq!( + attr.values[0].display_value.as_deref(), + Some("16/Feb/21 8:04 PM") + ); + } + + #[test] + fn deserialize_attribute_def_with_default_type() { + let json = r#"{ + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, + "maximumCardinality": 1, + "editable": true, + "description": "The name of the object" + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + assert_eq!(def.name, "Name"); + assert!(def.label); + let dt = def.default_type.unwrap(); + assert_eq!(dt.id, 0); + assert_eq!(dt.name, "Text"); + 
assert_eq!(def.minimum_cardinality, 1); + assert!(def.editable); + assert_eq!(def.description.as_deref(), Some("The name of the object")); + assert!(def.reference_type.is_none()); + assert!(def.reference_object_type.is_none()); + } + + #[test] + fn deserialize_attribute_def_with_reference() { + let json = r#"{ + "id": "869", + "name": "Service relationships", + "system": false, + "hidden": false, + "label": false, + "position": 6, + "referenceType": { "id": "36", "name": "Depends on" }, + "referenceObjectTypeId": "122", + "referenceObjectType": { "id": "122", "name": "Service" }, + "minimumCardinality": 0, + "maximumCardinality": -1, + "editable": true + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + assert_eq!(def.name, "Service relationships"); + assert!(def.default_type.is_none()); + let rt = def.reference_type.unwrap(); + assert_eq!(rt.name, "Depends on"); + let rot = def.reference_object_type.unwrap(); + assert_eq!(rot.name, "Service"); + assert_eq!(def.minimum_cardinality, 0); + assert_eq!(def.maximum_cardinality, -1); + } + + #[test] + fn deserialize_attribute_def_select_with_options() { + let json = r#"{ + "id": "868", + "name": "Tier", + "system": false, + "hidden": false, + "label": false, + "position": 5, + "defaultType": { "id": 10, "name": "Select" }, + "minimumCardinality": 1, + "maximumCardinality": 1, + "editable": true, + "options": "Tier 1,Tier 2,Tier 3" + }"#; + let def: ObjectTypeAttributeDef = serde_json::from_str(json).unwrap(); + let dt = def.default_type.unwrap(); + assert_eq!(dt.name, "Select"); + assert_eq!(def.options.as_deref(), Some("Tier 1,Tier 2,Tier 3")); + assert_eq!(def.minimum_cardinality, 1); + } + + #[test] + fn deserialize_attribute_def_backward_compat() { + // Existing JSON without the new fields — must still deserialize + let json = r#"{ + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }"#; + let def: ObjectTypeAttributeDef = 
serde_json::from_str(json).unwrap(); + assert_eq!(def.id, "134"); + assert!(def.system); + assert!(def.default_type.is_none()); + assert!(def.reference_type.is_none()); + assert!(def.reference_object_type.is_none()); + assert_eq!(def.minimum_cardinality, 0); + assert_eq!(def.maximum_cardinality, 0); + assert!(!def.editable); + assert!(def.description.is_none()); + assert!(def.options.is_none()); + } +} diff --git a/src/types/assets/schema.rs b/src/types/assets/schema.rs new file mode 100644 index 0000000..c3d4da0 --- /dev/null +++ b/src/types/assets/schema.rs @@ -0,0 +1,116 @@ +use serde::{Deserialize, Serialize}; + +/// Object schema from GET /objectschema/list. +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct ObjectSchema { + pub id: String, + pub name: String, + #[serde(rename = "objectSchemaKey")] + pub object_schema_key: String, + pub description: Option, + #[serde(rename = "objectCount", default)] + pub object_count: i64, + #[serde(rename = "objectTypeCount", default)] + pub object_type_count: i64, +} + +/// Object type entry from GET /objectschema/{id}/objecttypes/flat. 
+#[derive(Debug, Deserialize, Serialize)] +pub struct ObjectTypeEntry { + pub id: String, + pub name: String, + pub description: Option, + #[serde(default)] + pub position: i32, + #[serde(rename = "objectCount", default)] + pub object_count: i64, + #[serde(rename = "objectSchemaId")] + pub object_schema_id: String, + #[serde(default)] + pub inherited: bool, + #[serde(rename = "abstractObjectType", default)] + pub abstract_object_type: bool, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_object_schema_full() { + let json = r#"{ + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "description": "IT assets schema", + "objectCount": 95, + "objectTypeCount": 34 + }"#; + let schema: ObjectSchema = serde_json::from_str(json).unwrap(); + assert_eq!(schema.id, "6"); + assert_eq!(schema.name, "ITSM"); + assert_eq!(schema.object_schema_key, "ITSM"); + assert_eq!(schema.description.as_deref(), Some("IT assets schema")); + assert_eq!(schema.object_count, 95); + assert_eq!(schema.object_type_count, 34); + } + + #[test] + fn deserialize_object_schema_minimal() { + let json = r#"{ + "id": "1", + "name": "HR", + "objectSchemaKey": "HR" + }"#; + let schema: ObjectSchema = serde_json::from_str(json).unwrap(); + assert_eq!(schema.id, "1"); + assert_eq!(schema.name, "HR"); + assert!(schema.description.is_none()); + assert_eq!(schema.object_count, 0); + assert_eq!(schema.object_type_count, 0); + } + + #[test] + fn deserialize_object_type_entry() { + let json = r#"{ + "id": "19", + "name": "Employee", + "position": 0, + "objectCount": 42, + "objectSchemaId": "1", + "inherited": false, + "abstractObjectType": false, + "parentObjectTypeInherited": false + }"#; + let entry: ObjectTypeEntry = serde_json::from_str(json).unwrap(); + assert_eq!(entry.id, "19"); + assert_eq!(entry.name, "Employee"); + assert_eq!(entry.position, 0); + assert_eq!(entry.object_count, 42); + assert_eq!(entry.object_schema_id, "1"); + 
assert!(!entry.inherited); + assert!(!entry.abstract_object_type); + assert!(entry.description.is_none()); + } + + #[test] + fn deserialize_object_type_entry_with_description() { + let json = r#"{ + "id": "23", + "name": "Office", + "description": "Physical office or site.", + "position": 2, + "objectCount": 0, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + }"#; + let entry: ObjectTypeEntry = serde_json::from_str(json).unwrap(); + assert_eq!( + entry.description.as_deref(), + Some("Physical office or site.") + ); + assert_eq!(entry.position, 2); + } +} diff --git a/src/types/assets/ticket.rs b/src/types/assets/ticket.rs new file mode 100644 index 0000000..14e9610 --- /dev/null +++ b/src/types/assets/ticket.rs @@ -0,0 +1,79 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Deserialize, Serialize)] +pub struct ConnectedTicketsResponse { + #[serde(default)] + pub tickets: Vec, + #[serde(rename = "allTicketsQuery")] + pub all_tickets_query: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ConnectedTicket { + pub key: String, + pub id: String, + pub title: String, + pub reporter: Option, + pub created: Option, + pub updated: Option, + pub status: Option, + #[serde(rename = "type")] + pub issue_type: Option, + pub priority: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TicketStatus { + pub name: String, + #[serde(rename = "colorName")] + pub color_name: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TicketType { + pub name: String, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TicketPriority { + pub name: String, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_connected_tickets_response() { + let json = r#"{ + "tickets": [ + { + "key": "PROJ-42", + "id": "10968", + "title": "VPN access not working", + "reporter": "abc123", + "created": "2026-02-17T18:31:56.953Z", + "updated": "2026-03-22T18:59:23.333Z", + "status": { "name": "In 
Progress", "colorName": "yellow" }, + "type": { "name": "Service Request" }, + "priority": { "name": "High" } + } + ], + "allTicketsQuery": "issueFunction in assetsObject(\"objectId = 88\")" + }"#; + let resp: ConnectedTicketsResponse = serde_json::from_str(json).unwrap(); + assert_eq!(resp.tickets.len(), 1); + assert_eq!(resp.tickets[0].key, "PROJ-42"); + assert_eq!(resp.tickets[0].title, "VPN access not working"); + assert_eq!(resp.tickets[0].status.as_ref().unwrap().name, "In Progress"); + assert!(resp.all_tickets_query.is_some()); + } + + #[test] + fn deserialize_empty_tickets() { + let json = r#"{ "tickets": [] }"#; + let resp: ConnectedTicketsResponse = serde_json::from_str(json).unwrap(); + assert!(resp.tickets.is_empty()); + assert!(resp.all_tickets_query.is_none()); + } +} diff --git a/src/types/jira/board.rs b/src/types/jira/board.rs index c311777..685a9e7 100644 --- a/src/types/jira/board.rs +++ b/src/types/jira/board.rs @@ -6,6 +6,16 @@ pub struct Board { pub name: String, #[serde(rename = "type")] pub board_type: String, + #[serde(default)] + pub location: Option, +} + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct BoardLocation { + #[serde(default, rename = "projectKey")] + pub project_key: Option, + #[serde(default, rename = "projectName")] + pub project_name: Option, } #[derive(Debug, Deserialize, Serialize)] @@ -15,3 +25,39 @@ pub struct BoardConfig { #[serde(rename = "type", default)] pub board_type: String, } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn board_deserializes_with_location() { + let json = r#"{ + "id": 42, + "name": "My Board", + "type": "scrum", + "location": { + "projectKey": "PROJ", + "projectName": "My Project" + } + }"#; + let board: Board = serde_json::from_str(json).unwrap(); + assert_eq!(board.id, 42); + assert_eq!(board.board_type, "scrum"); + let loc = board.location.unwrap(); + assert_eq!(loc.project_key.as_deref(), Some("PROJ")); + assert_eq!(loc.project_name.as_deref(), Some("My 
Project")); + } + + #[test] + fn board_deserializes_without_location() { + let json = r#"{ + "id": 99, + "name": "No Location Board", + "type": "kanban" + }"#; + let board: Board = serde_json::from_str(json).unwrap(); + assert_eq!(board.id, 99); + assert!(board.location.is_none()); + } +} diff --git a/src/types/jira/issue.rs b/src/types/jira/issue.rs index 4c7ba48..355755f 100644 --- a/src/types/jira/issue.rs +++ b/src/types/jira/issue.rs @@ -62,7 +62,15 @@ pub struct IssueFields { pub issue_type: Option, pub priority: Option, pub assignee: Option, + pub reporter: Option, pub project: Option, + pub created: Option, + pub updated: Option, + pub resolution: Option, + #[serde(default)] + pub components: Option>, + #[serde(rename = "fixVersions", default)] + pub fix_versions: Option>, #[serde(default)] pub labels: Option>, pub parent: Option, @@ -106,6 +114,24 @@ pub struct IssueProject { pub name: Option, } +#[derive(Debug, Deserialize, PartialEq, Serialize)] +pub struct Resolution { + pub name: String, +} + +#[derive(Debug, Deserialize, PartialEq, Serialize)] +pub struct Component { + pub name: String, +} + +#[derive(Debug, Deserialize, PartialEq, Serialize)] +pub struct Version { + pub name: String, + pub released: Option, + #[serde(rename = "releaseDate")] + pub release_date: Option, +} + #[derive(Debug, Deserialize, Serialize)] pub struct Transition { pub id: String, @@ -260,4 +286,102 @@ mod tests { let fields: IssueFields = serde_json::from_value(json).unwrap(); assert_eq!(fields.issuelinks.unwrap().len(), 0); } + + #[test] + fn new_fields_present() { + let json = json!({ + "summary": "test", + "created": "2026-03-20T14:32:00.000+0000", + "updated": "2026-03-25T09:15:22.000+0000", + "reporter": {"accountId": "abc123", "displayName": "Jane Smith"}, + "resolution": {"name": "Fixed"}, + "components": [{"name": "Backend"}, {"name": "API"}], + "fixVersions": [{"name": "v2.0", "released": false, "releaseDate": "2026-04-01"}] + }); + let fields: IssueFields = 
serde_json::from_value(json).unwrap(); + assert_eq!( + fields.created.as_deref(), + Some("2026-03-20T14:32:00.000+0000") + ); + assert_eq!( + fields.updated.as_deref(), + Some("2026-03-25T09:15:22.000+0000") + ); + let reporter = fields.reporter.unwrap(); + assert_eq!(reporter.display_name, "Jane Smith"); + assert_eq!(reporter.account_id, "abc123"); + assert_eq!(fields.resolution.unwrap().name, "Fixed"); + let components = fields.components.unwrap(); + assert_eq!(components.len(), 2); + assert_eq!(components[0].name, "Backend"); + assert_eq!(components[1].name, "API"); + let versions = fields.fix_versions.unwrap(); + assert_eq!(versions.len(), 1); + assert_eq!(versions[0].name, "v2.0"); + assert_eq!(versions[0].released, Some(false)); + assert_eq!(versions[0].release_date.as_deref(), Some("2026-04-01")); + // New typed fields should NOT appear in extra + assert!(!fields.extra.contains_key("created")); + assert!(!fields.extra.contains_key("updated")); + assert!(!fields.extra.contains_key("reporter")); + assert!(!fields.extra.contains_key("resolution")); + assert!(!fields.extra.contains_key("components")); + assert!(!fields.extra.contains_key("fixVersions")); + } + + #[test] + fn new_fields_absent() { + let json = json!({"summary": "test"}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert!(fields.created.is_none()); + assert!(fields.updated.is_none()); + assert!(fields.reporter.is_none()); + assert!(fields.resolution.is_none()); + assert!(fields.components.is_none()); + assert!(fields.fix_versions.is_none()); + } + + #[test] + fn new_fields_null() { + let json = json!({ + "summary": "test", + "created": null, + "updated": null, + "reporter": null, + "resolution": null, + "components": null, + "fixVersions": null + }); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert!(fields.created.is_none()); + assert!(fields.updated.is_none()); + assert!(fields.reporter.is_none()); + assert!(fields.resolution.is_none()); + 
assert!(fields.components.is_none()); + assert!(fields.fix_versions.is_none()); + } + + #[test] + fn components_empty_array() { + let json = json!({"summary": "test", "components": []}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert_eq!(fields.components, Some(vec![])); + } + + #[test] + fn fix_versions_empty_array() { + let json = json!({"summary": "test", "fixVersions": []}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + assert_eq!(fields.fix_versions, Some(vec![])); + } + + #[test] + fn version_optional_fields_absent() { + let json = json!({"summary": "test", "fixVersions": [{"name": "v1.0"}]}); + let fields: IssueFields = serde_json::from_value(json).unwrap(); + let v = &fields.fix_versions.unwrap()[0]; + assert_eq!(v.name, "v1.0"); + assert!(v.released.is_none()); + assert!(v.release_date.is_none()); + } } diff --git a/src/types/jira/project.rs b/src/types/jira/project.rs index ba95899..950197a 100644 --- a/src/types/jira/project.rs +++ b/src/types/jira/project.rs @@ -5,3 +5,20 @@ pub struct Project { pub key: String, pub name: String, } + +#[derive(Debug, Clone, Default, Deserialize, Serialize)] +pub struct ProjectSummary { + pub key: String, + pub name: String, + #[serde(rename = "projectTypeKey")] + pub project_type_key: String, + pub lead: Option, +} + +#[derive(Debug, Clone, Default, Deserialize, Serialize)] +pub struct ProjectLead { + #[serde(rename = "displayName")] + pub display_name: String, + #[serde(rename = "accountId")] + pub account_id: String, +} diff --git a/src/types/jsm/mod.rs b/src/types/jsm/mod.rs new file mode 100644 index 0000000..c2726b8 --- /dev/null +++ b/src/types/jsm/mod.rs @@ -0,0 +1,5 @@ +pub mod queue; +pub mod servicedesk; + +pub use queue::*; +pub use servicedesk::*; diff --git a/src/types/jsm/queue.rs b/src/types/jsm/queue.rs new file mode 100644 index 0000000..bb37429 --- /dev/null +++ b/src/types/jsm/queue.rs @@ -0,0 +1,83 @@ +use serde::{Deserialize, Serialize}; + 
+#[derive(Debug, Default, Deserialize, Serialize)] +pub struct Queue { + pub id: String, + pub name: String, + pub jql: Option, + pub fields: Option>, + #[serde(rename = "issueCount")] + pub issue_count: Option, +} + +/// Lightweight struct for extracting only the issue key from JSM queue issue +/// representations. The JSM queue endpoint returns issues containing only the +/// fields configured as queue columns, and we only need the key for the +/// two-step fetch (keys → search_issues). +#[derive(Debug, Default, Deserialize)] +pub struct QueueIssueKey { + pub key: String, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_queue_with_all_fields() { + let json = r#"{ + "id": "10", + "name": "Triage", + "jql": "project = HELPDESK AND status = New", + "fields": ["issuetype", "issuekey", "summary", "status"], + "issueCount": 12 + }"#; + let queue: Queue = serde_json::from_str(json).unwrap(); + assert_eq!(queue.id, "10"); + assert_eq!(queue.name, "Triage"); + assert_eq!(queue.issue_count, Some(12)); + assert!(queue.jql.is_some()); + } + + #[test] + fn deserialize_queue_without_optional_fields() { + let json = r#"{ + "id": "20", + "name": "All open" + }"#; + let queue: Queue = serde_json::from_str(json).unwrap(); + assert_eq!(queue.id, "20"); + assert!(queue.issue_count.is_none()); + assert!(queue.jql.is_none()); + assert!(queue.fields.is_none()); + } + + #[test] + fn deserialize_queue_issue_key() { + let json = r#"{ + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working", + "status": { "name": "New" } + } + }"#; + let issue_key: QueueIssueKey = serde_json::from_str(json).unwrap(); + assert_eq!(issue_key.key, "HELPDESK-42"); + } + + #[test] + fn deserialize_queue_issue_key_ignores_extra_fields() { + let json = r#"{ + "key": "SD-10", + "id": "17227", + "self": "https://example.atlassian.net/rest/api/2/issue/17227", + "fields": { + "summary": "Printer broken", + "issuetype": null, + "priority": null + } + }"#; + let issue_key: 
QueueIssueKey = serde_json::from_str(json).unwrap(); + assert_eq!(issue_key.key, "SD-10"); + } +} diff --git a/src/types/jsm/servicedesk.rs b/src/types/jsm/servicedesk.rs new file mode 100644 index 0000000..198a7ef --- /dev/null +++ b/src/types/jsm/servicedesk.rs @@ -0,0 +1,10 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct ServiceDesk { + pub id: String, + #[serde(rename = "projectId")] + pub project_id: String, + #[serde(rename = "projectName")] + pub project_name: String, +} diff --git a/src/types/mod.rs b/src/types/mod.rs index 7736f9f..335812a 100644 --- a/src/types/mod.rs +++ b/src/types/mod.rs @@ -1 +1,3 @@ +pub mod assets; pub mod jira; +pub mod jsm; diff --git a/tests/assets.rs b/tests/assets.rs new file mode 100644 index 0000000..f6393bd --- /dev/null +++ b/tests/assets.rs @@ -0,0 +1,1384 @@ +#[allow(dead_code)] +mod common; + +use serde_json::json; +use tokio::sync::Mutex; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Serialize tests that touch XDG_CACHE_HOME — all tests in this file that +/// manipulate the env var must hold this mutex for the entire test duration. +static ENV_MUTEX: Mutex<()> = Mutex::const_new(()); + +/// RAII guard that restores XDG_CACHE_HOME to its previous value on drop. +struct CacheDirGuard { + prev: Option, + _lock: tokio::sync::MutexGuard<'static, ()>, +} + +impl Drop for CacheDirGuard { + fn drop(&mut self) { + // SAFETY: _lock (ENV_MUTEX) is still held while we restore the env var. + unsafe { + match &self.prev { + Some(prev) => std::env::set_var("XDG_CACHE_HOME", prev), + None => std::env::remove_var("XDG_CACHE_HOME"), + } + } + } +} + +async fn set_cache_dir(dir: &std::path::Path) -> CacheDirGuard { + let guard = ENV_MUTEX.lock().await; + // SAFETY: ENV_MUTEX guard is held for the entire test duration via CacheDirGuard. 
+ let prev = std::env::var_os("XDG_CACHE_HOME"); + unsafe { std::env::set_var("XDG_CACHE_HOME", dir) }; + CacheDirGuard { prev, _lock: guard } +} + +#[tokio::test] +async fn search_assets_returns_objects() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("startAt", "0")) + .and(query_param("maxResults", "25")) + .and(query_param("includeAttributes", "false")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + }, + { + "id": "71", + "label": "Globex Inc", + "objectKey": "OBJ-71", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client + .search_assets("ws-123", "objectType = Client", None, false) + .await + .unwrap(); + assert_eq!(results.len(), 2); + assert_eq!(results[0].label, "Acme Corp"); + assert_eq!(results[1].object_key, "OBJ-71"); +} + +#[tokio::test] +async fn search_assets_empty() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 0, + "isLast": true, + "values": [] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client + .search_assets("ws-123", "objectType = Nonexistent", None, false) + .await + .unwrap(); + assert!(results.is_empty()); +} + +#[tokio::test] +async fn search_assets_with_limit() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + 
.and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("maxResults", "1")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 1, + "total": 5, + "isLast": false, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client + .search_assets("ws-123", "objectType = Client", Some(1), false) + .await + .unwrap(); + assert_eq!(results.len(), 1); +} + +#[tokio::test] +async fn search_assets_is_last_as_string() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": "true", + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client + .search_assets("ws-123", "objectType = Client", None, false) + .await + .unwrap(); + assert_eq!(results.len(), 1); +} + +#[tokio::test] +async fn get_asset_returns_object() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/70")) + .and(query_param("includeAttributes", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" }, + "created": "2025-12-17T14:58:00.000Z", + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "contact@acme.com", 
"displayValue": "contact@acme.com" } + ] + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let obj = client.get_asset("ws-123", "70", true).await.unwrap(); + assert_eq!(obj.label, "Acme Corp"); + assert_eq!(obj.attributes.len(), 1); +} + +#[tokio::test] +async fn get_connected_tickets_returns_tickets() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectconnectedtickets/70/tickets", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "tickets": [ + { + "key": "PROJ-42", + "id": "10968", + "title": "VPN access not working", + "status": { "name": "In Progress", "colorName": "yellow" }, + "type": { "name": "Service Request" }, + "priority": { "name": "High" } + } + ], + "allTicketsQuery": "issueFunction in assetsObject(\"objectId = 70\")" + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let resp = client.get_connected_tickets("ws-123", "70").await.unwrap(); + assert_eq!(resp.tickets.len(), 1); + assert_eq!(resp.tickets[0].key, "PROJ-42"); + assert_eq!(resp.tickets[0].title, "VPN access not working"); + assert!(resp.all_tickets_query.is_some()); +} + +#[tokio::test] +async fn search_assets_paginated() { + let server = MockServer::start().await; + + // Page 1 + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("startAt", "0")) + .and(query_param("maxResults", "25")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": false, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + // Page 2 + Mock::given(method("POST")) + 
.and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("startAt", "25")) + .and(query_param("maxResults", "25")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 25, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "71", + "label": "Globex Inc", + "objectKey": "OBJ-71", + "objectType": { "id": "13", "name": "Client" } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let results = client + .search_assets("ws-123", "objectType = Client", None, false) + .await + .unwrap(); + assert_eq!(results.len(), 2); + assert_eq!(results[0].label, "Acme Corp"); + assert_eq!(results[1].label, "Globex Inc"); +} + +#[tokio::test] +async fn get_connected_tickets_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectconnectedtickets/99/tickets", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "tickets": [] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let resp = client.get_connected_tickets("ws-123", "99").await.unwrap(); + assert!(resp.tickets.is_empty()); +} + +#[tokio::test] +async fn get_object_attributes_returns_named_attributes() { + let server = MockServer::start().await; + + // Mock returns a mix of system, label, hidden, and user-defined attributes + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/88/attributes")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectTypeAttribute": { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + "objectAttributeValues": [ + { "value": "OBJ-88", "displayValue": "OBJ-88" } + ] + }, + { + "id": "640", + 
"objectTypeAttributeId": "135", + "objectTypeAttribute": { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1 + }, + "objectAttributeValues": [ + { "value": "Acme Corp", "displayValue": "Acme Corp" } + ] + }, + { + "id": "641", + "objectTypeAttributeId": "140", + "objectTypeAttribute": { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + }, + "objectAttributeValues": [ + { "value": "New York, NY", "displayValue": "New York, NY" } + ] + }, + { + "id": "642", + "objectTypeAttributeId": "141", + "objectTypeAttribute": { + "id": "141", + "name": "Internal Notes", + "system": false, + "hidden": true, + "label": false, + "position": 6 + }, + "objectAttributeValues": [ + { "value": "secret", "displayValue": "secret" } + ] + }, + { + "id": "643", + "objectTypeAttributeId": "142", + "objectTypeAttribute": { + "id": "142", + "name": "Seats", + "system": false, + "hidden": false, + "label": false, + "position": 4 + }, + "objectAttributeValues": [ + { "value": "10", "displayValue": "10" } + ] + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let attrs = client.get_object_attributes("ws-123", "88").await.unwrap(); + + // All 5 attributes returned from API + assert_eq!(attrs.len(), 5); + + // Verify attribute names are present + assert_eq!(attrs[0].object_type_attribute.name, "Key"); + assert!(attrs[0].object_type_attribute.system); + + // Verify label attribute + assert_eq!(attrs[1].object_type_attribute.name, "Name"); + assert!(attrs[1].object_type_attribute.label); + + // Verify hidden attribute + assert_eq!(attrs[3].object_type_attribute.name, "Internal Notes"); + assert!(attrs[3].object_type_attribute.hidden); + + // Simulate the CLI filter: exclude system, hidden, label + let mut visible: Vec<_> = attrs + .into_iter() + .filter(|a| { + !a.object_type_attribute.system 
+ && !a.object_type_attribute.hidden + && !a.object_type_attribute.label + }) + .collect(); + visible.sort_by_key(|a| a.object_type_attribute.position); + + // Only user-defined, non-hidden attributes remain + assert_eq!(visible.len(), 2); + // Sorted by position: Seats (4) before Location (5) + assert_eq!(visible[0].object_type_attribute.name, "Seats"); + assert_eq!(visible[0].object_type_attribute.position, 4); + assert_eq!(visible[1].object_type_attribute.name, "Location"); + assert_eq!(visible[1].object_type_attribute.position, 5); + + // Verify displayValue is available + assert_eq!( + visible[1].values[0].display_value.as_deref(), + Some("New York, NY") + ); +} + +#[tokio::test] +async fn get_object_type_attributes_returns_definitions() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/23/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0, + "editable": false, + "sortable": true + }, + { + "id": "135", + "name": "Name", + "system": false, + "hidden": false, + "label": true, + "position": 1, + "editable": true, + "sortable": true + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5, + "editable": true, + "sortable": true + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let attrs = client + .get_object_type_attributes("ws-123", "23") + .await + .unwrap(); + + assert_eq!(attrs.len(), 3); + assert_eq!(attrs[0].id, "134"); + assert_eq!(attrs[0].name, "Key"); + assert!(attrs[0].system); + assert!(!attrs[0].hidden); + assert_eq!(attrs[1].id, "135"); + assert_eq!(attrs[1].name, "Name"); + assert!(attrs[1].label); + assert_eq!(attrs[2].id, "140"); + assert_eq!(attrs[2].name, "Location"); + 
assert_eq!(attrs[2].position, 5); +} + +#[tokio::test(flavor = "current_thread")] +async fn enrich_search_attributes_injects_names() { + let cache_dir = tempfile::tempdir().unwrap(); + let _env_guard = set_cache_dir(cache_dir.path()).await; + + let server = MockServer::start().await; + + // Mock: object type 13 attribute definitions + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/13/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + }, + { + "id": "141", + "name": "Secret", + "system": false, + "hidden": true, + "label": false, + "position": 6 + } + ]))) + .expect(1) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + // Simulate search results with inline attributes (no names) + let objects = vec![jr::types::assets::AssetObject { + id: "70".into(), + label: "Acme Corp".into(), + object_key: "OBJ-70".into(), + object_type: jr::types::assets::ObjectType { + id: "13".into(), + name: "Client".into(), + description: None, + }, + created: None, + updated: None, + attributes: vec![ + jr::types::assets::AssetAttribute { + id: "637".into(), + object_type_attribute_id: "140".into(), + values: vec![jr::types::assets::ObjectAttributeValue { + value: Some("New York".into()), + display_value: Some("New York".into()), + }], + }, + jr::types::assets::AssetAttribute { + id: "638".into(), + object_type_attribute_id: "141".into(), + values: vec![jr::types::assets::ObjectAttributeValue { + value: Some("secret".into()), + display_value: Some("secret".into()), + }], + }, + ], + }]; + + let enriched = jr::api::assets::objects::enrich_search_attributes(&client, "ws-123", &objects) + .await + .unwrap(); + + // 
Returns the attribute definition map for use in output formatting + assert!(enriched.contains_key("140")); + assert_eq!(enriched["140"].name, "Location"); + assert!(enriched.contains_key("141")); + assert_eq!(enriched["141"].name, "Secret"); + assert!(enriched["141"].hidden); +} + +#[tokio::test(flavor = "current_thread")] +async fn search_attributes_json_includes_names() { + let cache_dir = tempfile::tempdir().unwrap(); + let _env_guard = set_cache_dir(cache_dir.path()).await; + + let server = MockServer::start().await; + + // Mock: AQL search with attributes + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("includeAttributes", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" }, + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "OBJ-70", "displayValue": "OBJ-70" } + ] + }, + { + "id": "638", + "objectTypeAttributeId": "140", + "objectAttributeValues": [ + { "value": "New York", "displayValue": "New York" } + ] + } + ] + } + ] + }))) + .mount(&server) + .await; + + // Mock: object type attribute definitions + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/13/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + } + ]))) + .mount(&server) + .await; + + // Mock: workspace discovery (needed for CLI command) + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + 
.respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .env("XDG_CACHE_HOME", cache_dir.path()) + .args([ + "--output", + "json", + "assets", + "search", + "--attributes", + "objectType = Client", + ]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout).expect("valid JSON"); + let objects = parsed.as_array().expect("array of objects"); + assert_eq!(objects.len(), 1); + + let attrs = objects[0]["attributes"] + .as_array() + .expect("attributes array"); + // System attribute (Key) should be filtered out + // Only Location should remain + assert_eq!(attrs.len(), 1); + assert_eq!(attrs[0]["objectTypeAttribute"]["name"], "Location"); + assert_eq!(attrs[0]["objectTypeAttribute"]["position"], 5); + assert_eq!( + attrs[0]["objectAttributeValues"][0]["displayValue"], + "New York" + ); +} + +#[tokio::test] +async fn cli_json_filter_excludes_system_and_hidden_attributes() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/88/attributes")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectTypeAttribute": { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + "objectAttributeValues": [ + { "value": "OBJ-88", "displayValue": "OBJ-88" } + ] + }, + { + "id": "640", + "objectTypeAttributeId": "135", + "objectTypeAttribute": { + "id": "135", + "name": "Name", + "system": 
false, + "hidden": false, + "label": true, + "position": 1 + }, + "objectAttributeValues": [ + { "value": "Acme Corp", "displayValue": "Acme Corp" } + ] + }, + { + "id": "641", + "objectTypeAttributeId": "140", + "objectTypeAttribute": { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + }, + "objectAttributeValues": [ + { "value": "New York, NY", "displayValue": "New York, NY" } + ] + }, + { + "id": "642", + "objectTypeAttributeId": "141", + "objectTypeAttribute": { + "id": "141", + "name": "Internal Notes", + "system": false, + "hidden": true, + "label": false, + "position": 6 + }, + "objectAttributeValues": [ + { "value": "secret", "displayValue": "secret" } + ] + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let mut attrs = client.get_object_attributes("ws-123", "88").await.unwrap(); + + // Apply the same filter used by handle_view for JSON output + attrs.retain(|a| !a.object_type_attribute.system && !a.object_type_attribute.hidden); + attrs.sort_by_key(|a| a.object_type_attribute.position); + + // System (Key) and hidden (Internal Notes) are excluded + assert_eq!(attrs.len(), 2); + assert_eq!(attrs[0].object_type_attribute.name, "Name"); + assert_eq!(attrs[1].object_type_attribute.name, "Location"); + assert_eq!( + attrs[1].values[0].display_value.as_deref(), + Some("New York, NY") + ); +} + +#[tokio::test(flavor = "current_thread")] +async fn search_attributes_table_shows_inline_values() { + let cache_dir = tempfile::tempdir().unwrap(); + let _env_guard = set_cache_dir(cache_dir.path()).await; + + let server = MockServer::start().await; + + // Mock: AQL search with attributes + Mock::given(method("POST")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/aql")) + .and(query_param("includeAttributes", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + 
"maxResults": 25, + "total": 1, + "isLast": true, + "values": [ + { + "id": "70", + "label": "Acme Corp", + "objectKey": "OBJ-70", + "objectType": { "id": "13", "name": "Client" }, + "attributes": [ + { + "id": "637", + "objectTypeAttributeId": "134", + "objectAttributeValues": [ + { "value": "OBJ-70", "displayValue": "OBJ-70" } + ] + }, + { + "id": "639", + "objectTypeAttributeId": "142", + "objectAttributeValues": [ + { "value": "10", "displayValue": "10" } + ] + }, + { + "id": "638", + "objectTypeAttributeId": "140", + "objectAttributeValues": [ + { "value": "New York", "displayValue": "New York" } + ] + } + ] + } + ] + }))) + .mount(&server) + .await; + + // Mock: object type attribute definitions + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/13/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", + "name": "Key", + "system": true, + "hidden": false, + "label": false, + "position": 0 + }, + { + "id": "142", + "name": "Seats", + "system": false, + "hidden": false, + "label": false, + "position": 4 + }, + { + "id": "140", + "name": "Location", + "system": false, + "hidden": false, + "label": false, + "position": 5 + } + ]))) + .mount(&server) + .await; + + // Mock: workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .env("XDG_CACHE_HOME", cache_dir.path()) + .args(["assets", "search", "--attributes", "objectType = Client"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + + 
let stdout = String::from_utf8_lossy(&output.stdout); + // Table should contain the Attributes column with inline values + // Seats (position 4) comes before Location (position 5) + assert!( + stdout.contains("Seats: 10"), + "Expected 'Seats: 10' in table, got: {stdout}" + ); + assert!( + stdout.contains("Location: New York"), + "Expected 'Location: New York' in table, got: {stdout}" + ); + // System attribute Key should NOT appear + assert!( + !stdout.contains("Key: OBJ-70"), + "System attribute Key should be filtered, got: {stdout}" + ); + // Should have Attributes header instead of Created/Updated + assert!( + stdout.contains("Attributes"), + "Expected 'Attributes' header in table, got: {stdout}" + ); + assert!( + !stdout.contains("Created"), + "Should not have Created column, got: {stdout}" + ); +} + +#[tokio::test] +async fn list_object_schemas_returns_schemas() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectschema/list")) + .and(query_param("startAt", "0")) + .and(query_param("maxResults", "25")) + .and(query_param("includeCounts", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 34 + }, + { + "id": "1", + "name": "Human Resources", + "objectSchemaKey": "HR", + "description": "HR schema", + "status": "Ok", + "objectCount": 1023, + "objectTypeCount": 14 + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let schemas = client.list_object_schemas("ws-123").await.unwrap(); + assert_eq!(schemas.len(), 2); + assert_eq!(schemas[0].name, "ITSM"); + assert_eq!(schemas[0].object_schema_key, "ITSM"); + assert_eq!(schemas[0].object_type_count, 34); + 
assert_eq!(schemas[1].name, "Human Resources"); + assert_eq!(schemas[1].description.as_deref(), Some("HR schema")); +} + +#[tokio::test] +async fn list_object_types_returns_flat_array() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .and(query_param("includeObjectCounts", "true")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "19", + "name": "Employee", + "position": 0, + "objectCount": 42, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + }, + { + "id": "23", + "name": "Office", + "description": "Physical office or site.", + "position": 2, + "objectCount": 5, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let types = client.list_object_types("ws-123", "6").await.unwrap(); + assert_eq!(types.len(), 2); + assert_eq!(types[0].name, "Employee"); + assert_eq!(types[0].object_count, 42); + assert_eq!(types[1].name, "Office"); + assert_eq!( + types[1].description.as_deref(), + Some("Physical office or site.") + ); +} + +#[tokio::test] +async fn schemas_json_lists_all_schemas() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectschema/list")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 2, + "isLast": true, + "values": [ + { + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 34 + }, + { + "id": "1", + "name": "Human Resources", + "objectSchemaKey": "HR", + "status": "Ok", + "objectCount": 1023, + "objectTypeCount": 14 + } + ] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + 
.and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + let cache_dir = tempfile::tempdir().unwrap(); + let _guard = set_cache_dir(cache_dir.path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "schemas", "--output", "json"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let arr = json.as_array().unwrap(); + assert_eq!(arr.len(), 2); + assert_eq!(arr[0]["name"], "ITSM"); + assert_eq!(arr[0]["objectSchemaKey"], "ITSM"); + assert_eq!(arr[1]["name"], "Human Resources"); +} + +#[tokio::test] +async fn types_json_lists_all_types() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectschema/list")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, + "maxResults": 25, + "total": 1, + "isLast": true, + "values": [{ + "id": "6", + "name": "ITSM", + "objectSchemaKey": "ITSM", + "status": "Ok", + "objectCount": 95, + "objectTypeCount": 2 + }] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "19", + "name": "Employee", + 
"position": 0, + "objectCount": 42, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + }, + { + "id": "23", + "name": "Office", + "description": "Physical office.", + "position": 2, + "objectCount": 5, + "objectSchemaId": "6", + "inherited": false, + "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + let cache_dir = tempfile::tempdir().unwrap(); + let _guard = set_cache_dir(cache_dir.path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "types", "--output", "json"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let arr = json.as_array().unwrap(); + assert_eq!(arr.len(), 2); + assert_eq!(arr[0]["name"], "Employee"); + assert_eq!(arr[0]["schemaName"], "ITSM"); + assert_eq!(arr[1]["name"], "Office"); +} + +#[tokio::test] +async fn schema_json_shows_attributes() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, "start": 0, "limit": 50, "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectschema/list")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, "maxResults": 25, "total": 1, "isLast": true, + "values": [{ + "id": "6", "name": "ITSM", "objectSchemaKey": "ITSM", + "status": "Ok", "objectCount": 95, "objectTypeCount": 2 + }] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + 
.respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "23", "name": "Office", "position": 2, + "objectCount": 5, "objectSchemaId": "6", + "inherited": false, "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/23/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", "name": "Key", "system": true, "hidden": false, + "label": false, "position": 0, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, "maximumCardinality": 1, "editable": false + }, + { + "id": "135", "name": "Name", "system": false, "hidden": false, + "label": true, "position": 1, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, "maximumCardinality": 1, "editable": true, + "description": "The name of the object" + }, + { + "id": "869", "name": "Service relationships", "system": false, + "hidden": false, "label": false, "position": 6, + "referenceType": { "id": "36", "name": "Depends on" }, + "referenceObjectType": { "id": "122", "name": "Service" }, + "minimumCardinality": 0, "maximumCardinality": -1, "editable": true + } + ]))) + .mount(&server) + .await; + + let cache_dir = tempfile::tempdir().unwrap(); + let _guard = set_cache_dir(cache_dir.path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "schema", "Office", "--output", "json"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let arr = json.as_array().unwrap(); + assert_eq!(arr.len(), 3); + assert_eq!(arr[0]["name"], "Key"); + assert_eq!(arr[0]["system"], true); + assert_eq!(arr[2]["name"], "Service relationships"); + 
assert!(arr[2].get("referenceObjectType").is_some()); +} + +#[tokio::test] +async fn schema_table_filters_system_attrs() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, "start": 0, "limit": 50, "isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/objectschema/list")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "startAt": 0, "maxResults": 25, "total": 1, "isLast": true, + "values": [{ + "id": "6", "name": "ITSM", "objectSchemaKey": "ITSM", + "status": "Ok", "objectCount": 95, "objectTypeCount": 1 + }] + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objectschema/6/objecttypes/flat", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "23", "name": "Office", "position": 2, + "objectCount": 5, "objectSchemaId": "6", + "inherited": false, "abstractObjectType": false + } + ]))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path( + "/jsm/assets/workspace/ws-123/v1/objecttype/23/attributes", + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!([ + { + "id": "134", "name": "Key", "system": true, "hidden": false, + "label": false, "position": 0, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, "editable": false + }, + { + "id": "135", "name": "Name", "system": false, "hidden": false, + "label": true, "position": 1, + "defaultType": { "id": 0, "name": "Text" }, + "minimumCardinality": 1, "editable": true + }, + { + "id": "136", "name": "Created", "system": true, "hidden": false, + "label": false, "position": 2, + "defaultType": { "id": 6, "name": "DateTime" }, + "minimumCardinality": 1, "editable": false + } + ]))) + 
.mount(&server) + .await; + + let cache_dir = tempfile::tempdir().unwrap(); + let _guard = set_cache_dir(cache_dir.path()).await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["assets", "schema", "Office"]) + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + assert!(stdout.contains("Object Type: Office")); + assert!(stdout.contains("Name")); + // System attrs "Key" and "Created" should be filtered out + assert!(!stdout.contains("Created")); +} diff --git a/tests/board_commands.rs b/tests/board_commands.rs new file mode 100644 index 0000000..4791994 --- /dev/null +++ b/tests/board_commands.rs @@ -0,0 +1,292 @@ +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Helper: build N issues for testing. 
+fn make_issues(count: usize) -> Vec<serde_json::Value> {
+    (1..=count)
+        .map(|i| {
+            common::fixtures::issue_response(
+                &format!("TEST-{}", i),
+                &format!("Issue {}", i),
+                "In Progress",
+            )
+        })
+        .collect()
+}
+
+// --- Board view --limit tests (from PR #73) ---
+
+#[tokio::test]
+async fn get_sprint_issues_with_limit() {
+    let server = MockServer::start().await;
+
+    Mock::given(method("GET"))
+        .and(path("/rest/agile/1.0/sprint/100/issue"))
+        .respond_with(
+            ResponseTemplate::new(200)
+                .set_body_json(common::fixtures::sprint_issues_response(make_issues(5), 5)),
+        )
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into());
+    let result = client
+        .get_sprint_issues(100, None, Some(3), &[])
+        .await
+        .unwrap();
+
+    assert_eq!(result.issues.len(), 3);
+    assert!(result.has_more);
+    assert_eq!(result.issues[0].key, "TEST-1");
+    assert_eq!(result.issues[2].key, "TEST-3");
+}
+
+#[tokio::test]
+async fn get_sprint_issues_no_limit() {
+    let server = MockServer::start().await;
+
+    Mock::given(method("GET"))
+        .and(path("/rest/agile/1.0/sprint/100/issue"))
+        .respond_with(
+            ResponseTemplate::new(200)
+                .set_body_json(common::fixtures::sprint_issues_response(make_issues(5), 5)),
+        )
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into());
+    let result = client
+        .get_sprint_issues(100, None, None, &[])
+        .await
+        .unwrap();
+
+    assert_eq!(result.issues.len(), 5);
+    assert!(!result.has_more);
+}
+
+#[tokio::test]
+async fn search_issues_with_limit() {
+    let server = MockServer::start().await;
+
+    Mock::given(method("POST"))
+        .and(path("/rest/api/3/search/jql"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(
+            common::fixtures::issue_search_response_with_next_page(make_issues(5)),
+        ))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into());
+    let 
result = client + .search_issues("statusCategory != Done ORDER BY rank ASC", Some(3), &[]) + .await + .unwrap(); + + assert_eq!(result.issues.len(), 3); + assert!(result.has_more); +} + +#[test] +fn board_view_limit_and_all_conflict() { + let mut cmd = Command::cargo_bin("jr").unwrap(); + cmd.arg("board") + .arg("view") + .arg("--limit") + .arg("3") + .arg("--all"); + + cmd.assert().failure().code(2); +} + +// --- Board auto-resolve tests (from #70) --- + +#[tokio::test] +async fn list_boards_with_project_and_type_filter() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::board_list_response(vec![common::fixtures::board_response( + 42, "My Board", "scrum", "PROJ", + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let boards = client + .list_boards(Some("PROJ"), Some("scrum")) + .await + .unwrap(); + assert_eq!(boards.len(), 1); + assert_eq!(boards[0].id, 42); + assert_eq!(boards[0].name, "My Board"); +} + +#[tokio::test] +async fn list_boards_without_filters() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::board_list_response(vec![ + common::fixtures::board_response(1, "Board A", "scrum", "FOO"), + common::fixtures::board_response(2, "Board B", "kanban", "BAR"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let boards = client.list_boards(None, None).await.unwrap(); + assert_eq!(boards.len(), 2); +} + +#[tokio::test] +async fn list_boards_empty_result() { + let server = MockServer::start().await; + 
Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "NOPE")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::board_list_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let boards = client.list_boards(Some("NOPE"), None).await.unwrap(); + assert!(boards.is_empty()); +} + +#[tokio::test] +async fn resolve_board_auto_discovers_single_scrum_board() { + let server = MockServer::start().await; + + // list_boards filtered by project+scrum returns 1 board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::board_list_response(vec![common::fixtures::board_response( + 42, + "PROJ Scrum Board", + "scrum", + "PROJ", + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let board_id = jr::cli::board::resolve_board_id(&config, &client, None, Some("PROJ"), true) + .await + .unwrap(); + assert_eq!(board_id, 42); +} + +#[tokio::test] +async fn resolve_board_errors_on_multiple_boards() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::board_list_response(vec![ + common::fixtures::board_response(42, "Board A", "scrum", "PROJ"), + common::fixtures::board_response(99, "Board B", "scrum", "PROJ"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = 
jr::config::Config::default(); + + let err = jr::cli::board::resolve_board_id(&config, &client, None, Some("PROJ"), true) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("Multiple scrum boards"), "got: {msg}"); + assert!(msg.contains("42"), "should list board ID 42, got: {msg}"); + assert!(msg.contains("99"), "should list board ID 99, got: {msg}"); +} + +#[tokio::test] +async fn resolve_board_errors_on_no_boards() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "NOPE")) + .and(query_param("type", "scrum")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::board_list_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let err = jr::cli::board::resolve_board_id(&config, &client, None, Some("NOPE"), true) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("No scrum boards found"), "got: {msg}"); + assert!( + msg.contains("NOPE"), + "should mention project key, got: {msg}" + ); +} + +#[tokio::test] +async fn resolve_board_uses_explicit_board_override() { + let server = MockServer::start().await; + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let board_id = jr::cli::board::resolve_board_id(&config, &client, Some(42), None, true) + .await + .unwrap(); + assert_eq!(board_id, 42); +} + +#[tokio::test] +async fn resolve_board_errors_without_project_or_board() { + let server = MockServer::start().await; + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let config = jr::config::Config::default(); + + let err = jr::cli::board::resolve_board_id(&config, &client, None, 
None, true) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("No board configured"), "got: {msg}"); + assert!( + msg.contains("--project"), + "should suggest --project, got: {msg}" + ); +} diff --git a/tests/cli_handler.rs b/tests/cli_handler.rs new file mode 100644 index 0000000..766a391 --- /dev/null +++ b/tests/cli_handler.rs @@ -0,0 +1,604 @@ +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use predicates::prelude::*; +use wiremock::matchers::{body_partial_json, method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Build a `jr` command pre-configured for handler-level testing. +/// +/// Sets `JR_BASE_URL` and `JR_AUTH_HEADER` env vars so the binary +/// routes API calls to the mock server and bypasses keychain auth. +fn jr_cmd(server_uri: &str) -> Command { + let mut cmd = Command::cargo_bin("jr").unwrap(); + cmd.env("JR_BASE_URL", server_uri) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("--output") + .arg("json"); + cmd +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_with_account_id() { + let server = MockServer::start().await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("HDL-1", "Handler test", None), + )) + .mount(&server) + .await; + + // Mock PUT assignee + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-1/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "direct-id-001" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-1", "--account-id", "direct-id-001"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": true")) + .stdout(predicate::str::contains("\"key\": \"HDL-1\"")) + 
.stdout(predicate::str::contains("\"assignee\": \"direct-id-001\"")) + .stdout(predicate::str::contains( + "\"assignee_account_id\": \"direct-id-001\"", + )); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_with_to_name_search() { + let server = MockServer::start().await; + + // Mock assignable user search for issue HDL-2 + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .and(query_param("query", "Jane")) + .and(query_param("issueKey", "HDL-2")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![("acc-jane-456", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-2")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("HDL-2", "Name search test", None), + )) + .mount(&server) + .await; + + // Mock PUT assignee + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-2/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "acc-jane-456" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-2", "--to", "Jane"]) + .assert() + .success() + .stdout(predicate::str::contains("\"assignee\": \"Jane Doe\"")) + .stdout(predicate::str::contains("\"changed\": true")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_self() { + let server = MockServer::start().await; + + // Mock GET myself + Mock::given(method("GET")) + .and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + .mount(&server) + .await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-3")) + 
.respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("HDL-3", "Self-assign test", None), + )) + .mount(&server) + .await; + + // Mock PUT assignee + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-3/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "abc123" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-3"]) + .assert() + .success() + .stdout(predicate::str::contains("\"assignee\": \"Test User\"")) + .stdout(predicate::str::contains("\"changed\": true")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_unassign() { + let server = MockServer::start().await; + + // Mock GET issue — currently assigned (so unassign proceeds) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-4")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee( + "HDL-4", + "Unassign test", + Some(("someone-123", "Someone")), + ), + )) + .mount(&server) + .await; + + // Mock PUT assignee with null (unassign) + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-4/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": null + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-4", "--unassign"]) + .assert() + .success() + .stdout(predicate::str::contains("\"assignee\": null")) + .stdout(predicate::str::contains("\"changed\": true")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_idempotent() { + let server = MockServer::start().await; + + // Mock GET issue — already assigned to the target account + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-5")) + .respond_with(ResponseTemplate::new(200).set_body_json( + 
common::fixtures::issue_response_with_assignee( + "HDL-5", + "Already assigned", + Some(("direct-id-001", "direct-id-001")), + ), + )) + .mount(&server) + .await; + + // PUT assignee should NOT be called — explicitly expect 0 requests + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-5/assignee")) + .respond_with(ResponseTemplate::new(204)) + .expect(0) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-5", "--account-id", "direct-id-001"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": false")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_create_with_account_id() { + let server = MockServer::start().await; + + // Mock POST create issue + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "HDL"}, + "issuetype": {"name": "Task"}, + "summary": "Created via handler", + "assignee": {"accountId": "direct-create-789"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("HDL-100")), + ) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args([ + "issue", + "create", + "-p", + "HDL", + "-t", + "Task", + "-s", + "Created via handler", + "--account-id", + "direct-create-789", + ]) + .assert() + .success() + .stdout(predicate::str::contains("\"key\": \"HDL-100\"")) + .stdout(predicate::str::contains("/browse/HDL-100")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_create_with_to_name_search() { + let server = MockServer::start().await; + + // Mock multi-project assignable user search + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/multiProjectSearch")) + .and(query_param("query", "Bob")) + .and(query_param("projectKeys", "HDL")) + .respond_with(ResponseTemplate::new(200).set_body_json( + 
common::fixtures::multi_project_user_search_response(vec![( + "acc-bob-555", + "Bob Smith", + )]), + )) + .mount(&server) + .await; + + // Mock POST create issue — verify assignee uses accountId + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "HDL"}, + "issuetype": {"name": "Bug"}, + "summary": "Created with --to", + "assignee": {"accountId": "acc-bob-555"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("HDL-101")), + ) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args([ + "issue", + "create", + "-p", + "HDL", + "-t", + "Bug", + "-s", + "Created with --to", + "--to", + "Bob", + ]) + .assert() + .success() + .stdout(predicate::str::contains("\"key\": \"HDL-101\"")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_create_basic() { + let server = MockServer::start().await; + + // Mock POST create issue — no assignee field + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "HDL"}, + "issuetype": {"name": "Task"}, + "summary": "Basic create" + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("HDL-102")), + ) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args([ + "issue", + "create", + "-p", + "HDL", + "-t", + "Task", + "-s", + "Basic create", + ]) + .assert() + .success() + .stdout(predicate::str::contains("\"key\": \"HDL-102\"")) + .stdout(predicate::str::contains("\"url\":")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_to_me() { + let server = MockServer::start().await; + + // This test covers the explicit `--to me` keyword path (resolve_assignee → is_me_keyword). 
+ // test_handler_assign_self covers the no-flag default path (handler calls get_myself directly). + + // Mock GET myself — resolve_assignee() detects "me" keyword via is_me_keyword() and calls get_myself() + Mock::given(method("GET")) + .and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + .mount(&server) + .await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-6")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("HDL-6", "Assign to me test", None), + )) + .mount(&server) + .await; + + // Mock PUT assignee + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-6/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "abc123" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-6", "--to", "me"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": true")) + .stdout(predicate::str::contains("\"key\": \"HDL-6\"")) + .stdout(predicate::str::contains("\"assignee\": \"Test User\"")) + .stdout(predicate::str::contains( + "\"assignee_account_id\": \"abc123\"", + )); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_create_to_me() { + let server = MockServer::start().await; + + // Mock GET myself — resolve_assignee_by_project() detects "me" keyword via is_me_keyword() and calls get_myself() + Mock::given(method("GET")) + .and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + .mount(&server) + .await; + + // Mock POST create issue — verify "me" keyword resolves to accountId via get_myself() + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "HDL"}, + 
"issuetype": {"name": "Task"}, + "summary": "Created with --to me", + "assignee": {"accountId": "abc123"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("HDL-200")), + ) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args([ + "issue", + "create", + "-p", + "HDL", + "-t", + "Task", + "-s", + "Created with --to me", + "--to", + "me", + ]) + .assert() + .success() + .stdout(predicate::str::contains("\"key\": \"HDL-200\"")) + .stdout(predicate::str::contains("\"url\":")); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_assign_idempotent_with_name_search() { + let server = MockServer::start().await; + + // Mock assignable user search — returns Jane Doe + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .and(query_param("query", "Jane")) + .and(query_param("issueKey", "HDL-7")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![("acc-jane-456", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + // Mock GET issue — already assigned to Jane Doe (same account ID) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-7")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee( + "HDL-7", + "Already assigned to Jane", + Some(("acc-jane-456", "Jane Doe")), + ), + )) + .mount(&server) + .await; + + // PUT assignee should NOT be called — already assigned to target + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-7/assignee")) + .respond_with(ResponseTemplate::new(204)) + .expect(0) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-7", "--to", "Jane"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": false")) + .stdout(predicate::str::contains("\"key\": \"HDL-7\"")) + .stdout(predicate::str::contains( + "\"assignee_account_id\": 
\"acc-jane-456\"", + )); +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_handler_unassign_idempotent() { + let server = MockServer::start().await; + + // Mock GET issue — already unassigned (assignee is null) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/HDL-8")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("HDL-8", "Already unassigned", None), + )) + .mount(&server) + .await; + + // PUT assignee should NOT be called — already unassigned + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/HDL-8/assignee")) + .respond_with(ResponseTemplate::new(204)) + .expect(0) + .mount(&server) + .await; + + jr_cmd(&server.uri()) + .args(["issue", "assign", "HDL-8", "--unassign"]) + .assert() + .success() + .stdout(predicate::str::contains("\"changed\": false")) + .stdout(predicate::str::contains("\"key\": \"HDL-8\"")) + .stdout(predicate::str::contains("\"assignee\": null")); +} + +#[tokio::test] +async fn test_handler_list_created_after() { + let server = MockServer::start().await; + + // Project existence check + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "key": "PROJ", + "id": "10000", + "name": "Test Project" + }))) + .mount(&server) + .await; + + // The search endpoint should receive JQL with the date clause + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "jql": "project = \"PROJ\" AND created >= \"2026-03-18\" ORDER BY updated DESC" + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "Test issue", + "To Do", + )]), + )) + .expect(1) + .mount(&server) + .await; + + Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + 
.args([ + "issue", + "list", + "--project", + "PROJ", + "--created-after", + "2026-03-18", + "--no-input", + ]) + .assert() + .success(); +} + +#[tokio::test] +async fn test_handler_list_created_before() { + let server = MockServer::start().await; + + // Project existence check + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "key": "PROJ", + "id": "10000", + "name": "Test Project" + }))) + .mount(&server) + .await; + + // --created-before 2026-03-18 should produce created < "2026-03-19" (next day) + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "jql": "project = \"PROJ\" AND created < \"2026-03-19\" ORDER BY updated DESC" + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "Test issue", + "To Do", + )]), + )) + .expect(1) + .mount(&server) + .await; + + Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "issue", + "list", + "--project", + "PROJ", + "--created-before", + "2026-03-18", + "--no-input", + ]) + .assert() + .success(); +} diff --git a/tests/cli_smoke.rs b/tests/cli_smoke.rs index 350ea41..8d94b18 100644 --- a/tests/cli_smoke.rs +++ b/tests/cli_smoke.rs @@ -29,3 +29,306 @@ fn test_no_args_shows_help() { .failure() .stderr(predicate::str::contains("Usage")); } + +#[test] +fn test_edit_description_and_description_stdin_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "edit", + "FOO-1", + "--description", + "text", + "--description-stdin", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_assets_tickets_open_and_status_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "tickets", "OBJ-1", "--open", 
"--status", "Done"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_queue_view_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["queue", "view", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("View issues in a queue")) + .stdout(predicate::str::contains("--limit")); +} + +#[test] +fn test_queue_list_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["queue", "list", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("List queues")); +} + +#[test] +fn test_assets_view_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "view", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("--no-attributes")); +} + +#[test] +fn test_sprint_add_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "add", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("Add issues to a sprint")) + .stdout(predicate::str::contains("--sprint")) + .stdout(predicate::str::contains("--current")) + .stdout(predicate::str::contains("--board")); +} + +#[test] +fn test_sprint_remove_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "remove", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("Remove issues from sprint")) + .stdout(predicate::str::contains("ISSUES")); +} + +#[test] +fn test_sprint_add_sprint_and_current_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "add", "--sprint", "100", "--current", "FOO-1"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_sprint_add_requires_sprint_or_current() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "add", "FOO-1"]) + .assert() + .failure() + .stderr(predicate::str::contains("--sprint")); +} + +#[test] +fn test_assets_schemas_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "schemas", "--help"]) + .assert() + .success() + 
.stdout(predicate::str::contains("List object schemas")); +} + +#[test] +fn test_assets_types_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "types", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("List object types")) + .stdout(predicate::str::contains("--schema")); +} + +#[test] +fn test_assets_schema_help() { + Command::cargo_bin("jr") + .unwrap() + .args(["assets", "schema", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("Show attributes")) + .stdout(predicate::str::contains("--schema")); +} + +// --- conflicts_with smoke tests --- + +#[test] +fn test_assign_to_and_account_id_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "assign", + "FOO-1", + "--to", + "Jane", + "--account-id", + "abc123", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_assign_to_and_unassign_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["issue", "assign", "FOO-1", "--to", "Jane", "--unassign"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_assign_account_id_and_unassign_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "assign", + "FOO-1", + "--account-id", + "abc123", + "--unassign", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_create_to_and_account_id_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "create", + "-p", + "FOO", + "-t", + "Task", + "-s", + "Test", + "--to", + "Jane", + "--account-id", + "abc123", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_create_description_and_description_stdin_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "create", + "-p", + "FOO", + "-t", + "Task", + "-s", + "Test", + "--description", + "text", + 
"--description-stdin", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_issue_list_all_and_limit_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["issue", "list", "--all", "--limit", "10"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_issue_list_open_and_status_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["issue", "list", "--open", "--status", "Done"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_edit_points_and_no_points_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["issue", "edit", "FOO-1", "--points", "5", "--no-points"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_project_list_all_and_limit_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["project", "list", "--all", "--limit", "10"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_board_view_all_and_limit_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["board", "view", "--all", "--limit", "10"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_sprint_current_all_and_limit_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args(["sprint", "current", "--all", "--limit", "10"]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} + +#[test] +fn test_issue_list_created_after_and_recent_conflict() { + Command::cargo_bin("jr") + .unwrap() + .args([ + "issue", + "list", + "--created-after", + "2026-03-18", + "--recent", + "7d", + ]) + .assert() + .failure() + .stderr(predicate::str::contains("cannot be used with")); +} diff --git a/tests/cmdb_fields.rs b/tests/cmdb_fields.rs new file mode 100644 index 0000000..f95dedc --- /dev/null +++ 
b/tests/cmdb_fields.rs @@ -0,0 +1,189 @@ +#[allow(dead_code)] +mod common; + +use serde_json::json; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +fn fields_response_with_cmdb() -> serde_json::Value { + json!([ + { + "id": "summary", + "name": "Summary", + "custom": false, + "schema": { "type": "string" } + }, + { + "id": "customfield_10191", + "name": "Client", + "custom": true, + "schema": { + "type": "any", + "custom": "com.atlassian.jira.plugins.cmdb:cmdb-object-cftype", + "customId": 10191 + } + }, + { + "id": "customfield_10031", + "name": "Story Points", + "custom": true, + "schema": { + "type": "number", + "custom": "com.atlassian.jira.plugin.system.customfieldtypes:float", + "customId": 10031 + } + } + ]) +} + +fn fields_response_no_cmdb() -> serde_json::Value { + json!([ + { + "id": "summary", + "name": "Summary", + "custom": false, + "schema": { "type": "string" } + } + ]) +} + +#[tokio::test] +async fn discover_cmdb_field_ids() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/field")) + .respond_with(ResponseTemplate::new(200).set_body_json(fields_response_with_cmdb())) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let fields = client.find_cmdb_fields().await.unwrap(); + assert_eq!( + fields, + vec![("customfield_10191".to_string(), "Client".to_string())] + ); +} + +#[tokio::test] +async fn discover_cmdb_field_ids_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/field")) + .respond_with(ResponseTemplate::new(200).set_body_json(fields_response_no_cmdb())) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let fields: Vec<(String, String)> = client.find_cmdb_fields().await.unwrap(); + assert!(fields.is_empty()); +} 
+ +#[tokio::test] +async fn issue_with_modern_cmdb_fields() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/PROJ-1")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "key": "PROJ-1", + "fields": { + "summary": "Test issue", + "customfield_10191": [ + { + "label": "Acme Corp", + "objectKey": "OBJ-1" + } + ] + } + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issue = client + .get_issue("PROJ-1", &["customfield_10191"]) + .await + .unwrap(); + + let cmdb_ids = vec!["customfield_10191".to_string()]; + let assets = jr::api::assets::linked::extract_linked_assets(&issue.fields.extra, &cmdb_ids); + assert_eq!(assets.len(), 1); + assert_eq!(assets[0].key.as_deref(), Some("OBJ-1")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); +} + +#[tokio::test] +async fn issue_with_null_cmdb_field() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/PROJ-2")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "key": "PROJ-2", + "fields": { + "summary": "No assets", + "customfield_10191": null + } + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let issue = client + .get_issue("PROJ-2", &["customfield_10191"]) + .await + .unwrap(); + + let cmdb_ids = vec!["customfield_10191".to_string()]; + let assets = jr::api::assets::linked::extract_linked_assets(&issue.fields.extra, &cmdb_ids); + assert!(assets.is_empty()); +} + +#[tokio::test] +async fn enrichment_resolves_ids_to_names() { + let server = MockServer::start().await; + + // Mock workspace discovery + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/assets/workspace")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, "start": 0, "limit": 25, 
"isLastPage": true, + "values": [{ "workspaceId": "ws-123" }] + }))) + .mount(&server) + .await; + + // Mock asset fetch + Mock::given(method("GET")) + .and(path("/jsm/assets/workspace/ws-123/v1/object/88")) + .and(query_param("includeAttributes", "false")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "88", + "label": "Acme Corp", + "objectKey": "OBJ-88", + "objectType": { "id": "13", "name": "Client" } + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + let mut assets = vec![jr::types::assets::LinkedAsset { + id: Some("88".into()), + workspace_id: Some("ws-123".into()), + ..Default::default() + }]; + + jr::api::assets::linked::enrich_assets(&client, &mut assets).await; + + assert_eq!(assets[0].key.as_deref(), Some("OBJ-88")); + assert_eq!(assets[0].name.as_deref(), Some("Acme Corp")); + assert_eq!(assets[0].asset_type.as_deref(), Some("Client")); +} diff --git a/tests/common/fixtures.rs b/tests/common/fixtures.rs index 2f22651..3684931 100644 --- a/tests/common/fixtures.rs +++ b/tests/common/fixtures.rs @@ -26,12 +26,34 @@ pub fn issue_search_response(issues: Vec) -> Value { json!({ "issues": issues, "nextPageToken": Value::Null }) } +/// Search response with `nextPageToken` set (indicating more results exist). +pub fn issue_search_response_with_next_page(issues: Vec) -> Value { + json!({ "issues": issues, "nextPageToken": "next-page-token-abc" }) +} + +/// Response for the approximate-count endpoint. +pub fn approximate_count_response(count: u64) -> Value { + json!({ "count": count }) +} + pub fn transitions_response(transitions: Vec<(&str, &str)>) -> Value { json!({ "transitions": transitions.iter().map(|(id, name)| json!({"id": id, "name": name})).collect::>() }) } +/// Transitions response with target status names. +/// Each tuple is (transition_id, transition_name, target_status_name). 
+pub fn transitions_response_with_status(transitions: Vec<(&str, &str, &str)>) -> Value { + json!({ + "transitions": transitions.iter().map(|(id, name, status_name)| json!({ + "id": id, + "name": name, + "to": {"name": status_name} + })).collect::>() + }) +} + pub fn error_response(messages: &[&str]) -> Value { json!({ "errorMessages": messages }) } @@ -140,6 +162,22 @@ pub fn issue_with_links_response(key: &str, summary: &str) -> Value { }) } +/// User search response — flat array of User objects. +pub fn user_search_response(users: Vec<(&str, &str, bool)>) -> Value { + let user_objects: Vec = users + .into_iter() + .map(|(account_id, display_name, active)| { + json!({ + "accountId": account_id, + "displayName": display_name, + "emailAddress": format!("{}@test.com", display_name.to_lowercase().replace(' ', ".")), + "active": active, + }) + }) + .collect(); + json!(user_objects) +} + pub fn teams_list_json() -> Value { json!({ "entities": [ @@ -150,3 +188,247 @@ pub fn teams_list_json() -> Value { "cursor": null }) } + +/// Project search response — paginated envelope with `values` array. +pub fn project_search_response(projects: Vec) -> Value { + let total = projects.len() as u32; + json!({ + "values": projects, + "startAt": 0, + "maxResults": 50, + "total": total, + }) +} + +pub fn project_response(key: &str, name: &str, type_key: &str, lead_name: Option<&str>) -> Value { + let lead = lead_name.map(|name| { + json!({ + "accountId": format!("acc-{}", key.to_lowercase()), + "displayName": name, + }) + }); + json!({ + "key": key, + "name": name, + "projectTypeKey": type_key, + "lead": lead, + }) +} + +/// Project statuses response — top-level array of issue types with nested statuses. 
+pub fn project_statuses_response() -> Value { + json!([ + { + "id": "3", + "name": "Task", + "self": "https://test.atlassian.net/rest/api/3/issueType/3", + "subtask": false, + "statuses": [ + { + "id": "10000", + "name": "To Do", + "description": "Work that has not been started.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/open.png", + "self": "https://test.atlassian.net/rest/api/3/status/10000" + }, + { + "id": "10001", + "name": "In Progress", + "description": "The issue is currently being worked on.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/inprogress.png", + "self": "https://test.atlassian.net/rest/api/3/status/10001" + }, + { + "id": "10002", + "name": "Done", + "description": "Work has been completed.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/closed.png", + "self": "https://test.atlassian.net/rest/api/3/status/10002" + } + ] + }, + { + "id": "1", + "name": "Bug", + "self": "https://test.atlassian.net/rest/api/3/issueType/1", + "subtask": false, + "statuses": [ + { + "id": "10000", + "name": "To Do", + "description": "Work that has not been started.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/open.png", + "self": "https://test.atlassian.net/rest/api/3/status/10000" + }, + { + "id": "10002", + "name": "Done", + "description": "Work has been completed.", + "iconUrl": "https://test.atlassian.net/images/icons/statuses/closed.png", + "self": "https://test.atlassian.net/rest/api/3/status/10002" + } + ] + } + ]) +} + +/// Board configuration response. +pub fn board_config_response(board_type: &str) -> Value { + json!({ + "id": 382, + "name": "Test Board", + "type": board_type + }) +} + +/// Sprint list response (offset-paginated). +pub fn sprint_list_response(sprints: Vec) -> Value { + let total = sprints.len() as u32; + json!({ + "startAt": 0, + "maxResults": 50, + "total": total, + "values": sprints + }) +} + +/// Single sprint object. 
+pub fn sprint(id: u64, name: &str, state: &str) -> Value { + json!({ + "id": id, + "name": name, + "state": state, + "startDate": "2026-03-20T00:00:00.000Z", + "endDate": "2026-04-03T00:00:00.000Z" + }) +} + +/// Sprint issues response (offset-paginated). +pub fn sprint_issues_response(issues: Vec, total: u32) -> Value { + json!({ + "startAt": 0, + "maxResults": 50, + "total": total, + "issues": issues + }) +} + +pub fn board_response(id: u64, name: &str, board_type: &str, project_key: &str) -> Value { + json!({ + "id": id, + "name": name, + "type": board_type, + "location": { + "projectKey": project_key, + "projectName": format!("{} Project", project_key) + } + }) +} + +pub fn board_list_response(boards: Vec) -> Value { + let total = boards.len() as u32; + json!({ + "values": boards, + "startAt": 0, + "maxResults": 50, + "total": total + }) +} + +/// Issue response with a specific assignee (or null if None). +pub fn issue_response_with_assignee( + key: &str, + summary: &str, + assignee: Option<(&str, &str)>, +) -> Value { + let assignee_value = match assignee { + Some((account_id, display_name)) => json!({ + "accountId": account_id, + "displayName": display_name, + }), + None => Value::Null, + }; + json!({ + "key": key, + "fields": { + "summary": summary, + "status": {"name": "To Do"}, + "issuetype": {"name": "Task"}, + "priority": {"name": "Medium"}, + "assignee": assignee_value, + "project": {"key": key.split('-').next().unwrap_or("TEST")} + } + }) +} + +pub fn issue_response_with_standard_fields(key: &str, summary: &str) -> Value { + json!({ + "key": key, + "fields": { + "summary": summary, + "status": {"name": "In Progress", "statusCategory": {"name": "In Progress", "key": "indeterminate"}}, + "issuetype": {"name": "Bug"}, + "priority": {"name": "High"}, + "assignee": {"accountId": "abc123", "displayName": "John Doe"}, + "reporter": {"accountId": "def456", "displayName": "Jane Smith"}, + "project": {"key": key.split('-').next().unwrap_or("TEST"), "name": 
"Test Project"}, + "created": "2026-03-20T14:32:00.000+0000", + "updated": "2026-03-25T09:15:22.000+0000", + "resolution": {"name": "Fixed"}, + "components": [{"name": "Backend"}, {"name": "API"}], + "fixVersions": [{"name": "v2.0", "released": false, "releaseDate": "2026-04-01"}], + "labels": ["bug"], + "parent": null, + "issuelinks": [] + } + }) +} + +pub fn issue_response_with_labels_parent_links(key: &str, summary: &str) -> Value { + json!({ + "key": key, + "fields": { + "summary": summary, + "status": {"name": "To Do"}, + "issuetype": {"name": "Story"}, + "priority": {"name": "Medium"}, + "assignee": {"accountId": "abc123", "displayName": "Test User"}, + "project": {"key": key.split('-').next().unwrap_or("TEST")}, + "labels": ["bug", "frontend"], + "parent": {"key": "FOO-1", "fields": {"summary": "Parent Epic"}}, + "issuelinks": [ + { + "id": "30001", + "type": {"name": "Blocks", "inward": "is blocked by", "outward": "blocks"}, + "outwardIssue": {"key": "FOO-3", "fields": {"summary": "Blocked issue"}} + } + ] + } + }) +} + +/// Multi-project assignable user search response — flat array of User objects. +/// Simpler than `user_search_response`: takes (account_id, display_name) pairs +/// and always sets active=true. No email field generated. +pub fn multi_project_user_search_response(users: Vec<(&str, &str)>) -> Value { + let user_objects: Vec = users + .into_iter() + .map(|(account_id, display_name)| { + json!({ + "accountId": account_id, + "displayName": display_name, + "active": true, + }) + }) + .collect(); + json!(user_objects) +} + +/// Create issue response. 
+pub fn create_issue_response(key: &str) -> Value { + json!({ + "id": "10001", + "key": key, + "self": format!("https://test.atlassian.net/rest/api/3/issue/{}", key) + }) +} diff --git a/tests/duplicate_user_disambiguation.rs b/tests/duplicate_user_disambiguation.rs new file mode 100644 index 0000000..f54fc6d --- /dev/null +++ b/tests/duplicate_user_disambiguation.rs @@ -0,0 +1,275 @@ +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Helper: build a user JSON object for wiremock responses. +fn user_json(account_id: &str, display_name: &str, email: Option<&str>) -> serde_json::Value { + let mut obj = serde_json::json!({ + "accountId": account_id, + "displayName": display_name, + "active": true, + }); + if let Some(e) = email { + obj["emailAddress"] = serde_json::json!(e); + } + obj +} + +#[tokio::test] +async fn issue_list_assignee_duplicate_names_no_input_errors() { + let server = MockServer::start().await; + + // User search returns two users with same display name + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write(project_dir.path().join(".jr.toml"), "project = \"PROJ\"\n").unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list", "--assignee", "John Smith", "--no-input"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names, got stdout: {}", + 
String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("john1@acme.com"), + "Should list first user's email, got: {stderr}" + ); + assert!( + stderr.contains("john2@other.org"), + "Should list second user's email, got: {stderr}" + ); + assert!( + stderr.contains("acc-john-1") && stderr.contains("acc-john-2"), + "Should list accountIds for both users, got: {stderr}" + ); + assert!( + stderr.contains("John Smith"), + "Should mention the duplicate name, got: {stderr}" + ); +} + +#[tokio::test] +async fn issue_assign_duplicate_names_no_input_errors() { + let server = MockServer::start().await; + + // Assignable user search returns two users with same display name + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + // Mock get issue (needed for assign flow idempotency check, though error + // happens before this is reached) + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("FOO-1", "Test issue", None), + )) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write(project_dir.path().join(".jr.toml"), "project = \"PROJ\"\n").unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args([ + "issue", + "assign", + "FOO-1", + "--to", + "John Smith", + "--no-input", + ]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + 
assert!( + stderr.contains("john1@acme.com"), + "Should list first user's email, got: {stderr}" + ); + assert!( + stderr.contains("john2@other.org"), + "Should list second user's email, got: {stderr}" + ); + assert!( + stderr.contains("acc-john-1") && stderr.contains("acc-john-2"), + "Should list accountIds for both users, got: {stderr}" + ); +} + +#[tokio::test] +async fn issue_list_assignee_duplicate_names_no_email_shows_account_id() { + let server = MockServer::start().await; + + // User search returns two users with same display name — one has no email + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-john-2", "John Smith", None), + ]))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write(project_dir.path().join(".jr.toml"), "project = \"PROJ\"\n").unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list", "--assignee", "John Smith", "--no-input"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + // First user should show email + assert!( + stderr.contains("john1@acme.com"), + "Should show email for user with email, got: {stderr}" + ); + // Second user has no email — should fall back to accountId + assert!( + stderr.contains("acc-john-2"), + "Should show accountId when email is missing, got: {stderr}" + ); +} + +#[tokio::test] +async fn issue_create_assignee_duplicate_names_no_input_errors() { + let server = MockServer::start().await; + + // Multi-project assignable user search returns two users with same display name + 
Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/multiProjectSearch")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write(project_dir.path().join(".jr.toml"), "project = \"PROJ\"\n").unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args([ + "issue", + "create", + "--type", + "Task", + "--summary", + "Test", + "--to", + "John Smith", + "--no-input", + ]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names in create, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("john1@acme.com"), + "Should list first user's email, got: {stderr}" + ); + assert!( + stderr.contains("john2@other.org"), + "Should list second user's email, got: {stderr}" + ); + assert!( + stderr.contains("John Smith"), + "Should mention the duplicate name, got: {stderr}" + ); +} + +#[tokio::test] +async fn issue_list_assignee_exact_match_among_multiple_results_no_input_errors() { + let server = MockServer::start().await; + + // Three users: two share "John Smith", one is "John Smithson" + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + user_json("acc-john-1", "John Smith", Some("john1@acme.com")), + user_json("acc-smithson", "John Smithson", None), + user_json("acc-john-2", "John Smith", Some("john2@other.org")), + ]))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + 
std::fs::write(project_dir.path().join(".jr.toml"), "project = \"PROJ\"\n").unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list", "--assignee", "John Smith", "--no-input"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on duplicate user names even with extra results, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("john1@acme.com") && stderr.contains("john2@other.org"), + "Should list both duplicate users' emails, got: {stderr}" + ); + assert!( + !stderr.contains("acc-smithson") && !stderr.contains("Smithson"), + "Should not mention non-duplicate user, got: {stderr}" + ); +} diff --git a/tests/input_validation.rs b/tests/input_validation.rs new file mode 100644 index 0000000..53d51be --- /dev/null +++ b/tests/input_validation.rs @@ -0,0 +1,248 @@ +#[allow(dead_code)] +mod common; + +use std::collections::HashSet; + +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn project_exists_returns_true_for_valid_project() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": "10000", + "key": "PROJ", + "name": "My Project" + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + assert!(client.project_exists("PROJ").await.unwrap()); +} + +#[tokio::test] +async fn project_exists_returns_false_for_invalid_project() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/NOPE")) + 
.respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["No project could be found with key 'NOPE'."], + "errors": {} + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + assert!(!client.project_exists("NOPE").await.unwrap()); +} + +#[tokio::test] +async fn get_all_statuses_returns_status_names() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/status")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + {"id": "1", "name": "To Do", "statusCategory": {"key": "new"}}, + {"id": "2", "name": "In Progress", "statusCategory": {"key": "indeterminate"}}, + {"id": "3", "name": "Done", "statusCategory": {"key": "done"}} + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses = client.get_all_statuses().await.unwrap(); + assert_eq!(statuses.len(), 3); + assert!(statuses.contains(&"To Do".to_string())); + assert!(statuses.contains(&"In Progress".to_string())); + assert!(statuses.contains(&"Done".to_string())); +} + +fn project_statuses_response(statuses: Vec<&str>) -> serde_json::Value { + let status_objects: Vec<serde_json::Value> = statuses + .iter() + .enumerate() + .map(|(i, name)| { + serde_json::json!({ + "id": format!("{}", i + 1), + "name": name, + "description": null + }) + }) + .collect(); + serde_json::json!([{ + "id": "1", + "name": "Task", + "subtask": false, + "statuses": status_objects + }]) +} + +#[tokio::test] +async fn invalid_status_with_project_returns_no_match() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ/statuses")) + .respond_with( + ResponseTemplate::new(200).set_body_json(project_statuses_response(vec![ + "To Do", + "In Progress", + "Done", + ])), + ) + .mount(&server) + .await; + + let client =
+ jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses_response = client.get_project_statuses("PROJ").await.unwrap(); + + let names: Vec<String> = { + let mut seen = HashSet::new(); + let mut n = Vec::new(); + for it in &statuses_response { + for s in &it.statuses { + if seen.insert(s.name.clone()) { + n.push(s.name.clone()); + } + } + } + n.sort(); + n + }; + + let result = jr::partial_match::partial_match("Nonexistant", &names); + assert!(matches!(result, jr::partial_match::MatchResult::None(_))); +} + +#[tokio::test] +async fn valid_status_partial_match_resolves() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ/statuses")) + .respond_with( + ResponseTemplate::new(200).set_body_json(project_statuses_response(vec![ + "To Do", + "In Progress", + "Done", + ])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses_response = client.get_project_statuses("PROJ").await.unwrap(); + let names: Vec<String> = { + let mut seen = HashSet::new(); + let mut n = Vec::new(); + for it in &statuses_response { + for s in &it.statuses { + if seen.insert(s.name.clone()) { + n.push(s.name.clone()); + } + } + } + n.sort(); + n + }; + + let result = jr::partial_match::partial_match("in prog", &names); + match result { + jr::partial_match::MatchResult::Exact(name) => assert_eq!(name, "In Progress"), + other => panic!("Expected Exact, got {:?}", std::mem::discriminant(&other)), + } +} + +#[tokio::test] +async fn ambiguous_status_returns_multiple_matches() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ/statuses")) + .respond_with( + ResponseTemplate::new(200).set_body_json(project_statuses_response(vec![ + "In Progress", + "In Review", + "Done", + ])), + ) + .mount(&server) + .await; + + let client = +
jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses_response = client.get_project_statuses("PROJ").await.unwrap(); + let names: Vec<String> = { + let mut seen = HashSet::new(); + let mut n = Vec::new(); + for it in &statuses_response { + for s in &it.statuses { + if seen.insert(s.name.clone()) { + n.push(s.name.clone()); + } + } + } + n.sort(); + n + }; + + let result = jr::partial_match::partial_match("in", &names); + match result { + jr::partial_match::MatchResult::Ambiguous(matches) => { + assert!(matches.contains(&"In Progress".to_string())); + assert!(matches.contains(&"In Review".to_string())); + } + other => panic!( + "Expected Ambiguous, got {:?}", + std::mem::discriminant(&other) + ), + } +} + +#[tokio::test] +async fn status_validation_with_global_statuses() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/status")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + {"id": "1", "name": "Open", "statusCategory": {"key": "new"}}, + {"id": "2", "name": "Closed", "statusCategory": {"key": "done"}} + ]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let statuses = client.get_all_statuses().await.unwrap(); + + let result = jr::partial_match::partial_match("Nonexistant", &statuses); + assert!(matches!(result, jr::partial_match::MatchResult::None(_))); +} + +#[tokio::test] +async fn project_statuses_404_means_project_not_found() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/NOPE/statuses")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["No project could be found with key 'NOPE'."], + "errors": {} + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); +
let result = client.get_project_statuses("NOPE").await; + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!( + err.downcast_ref::<jr::error::JrError>() + .is_some_and(|e| matches!(e, jr::error::JrError::ApiError { status: 404, .. })) + ); +} diff --git a/tests/issue_commands.rs index 084d57d..cee4b43 100644 --- a/tests/issue_commands.rs +++ b/tests/issue_commands.rs @@ -1,7 +1,7 @@ #[allow(dead_code)] mod common; -use wiremock::matchers::{method, path}; +use wiremock::matchers::{body_partial_json, method, path, query_param}; use wiremock::{Mock, MockServer, ResponseTemplate}; #[tokio::test] @@ -21,12 +21,13 @@ async fn test_search_issues() { let client = jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); - let issues = client + let result = client .search_issues("assignee = currentUser()", None, &[]) .await .unwrap(); - assert_eq!(issues.len(), 1); - assert_eq!(issues[0].key, "FOO-1"); + assert_eq!(result.issues.len(), 1); + assert_eq!(result.issues[0].key, "FOO-1"); + assert!(!result.has_more); } #[tokio::test] @@ -96,17 +97,21 @@ async fn test_search_issues_with_story_points() { let client = jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); - let issues = client + let result = client .search_issues("project = FOO", None, &["customfield_10031"]) .await .unwrap(); - assert_eq!(issues.len(), 2); + assert_eq!(result.issues.len(), 2); assert_eq!( - issues[0].fields.story_points("customfield_10031"), + result.issues[0].fields.story_points("customfield_10031"), Some(5.0) ); - assert_eq!(issues[1].fields.story_points("customfield_10031"), None); + assert_eq!( + result.issues[1].fields.story_points("customfield_10031"), + None + ); + assert!(!result.has_more); } #[tokio::test] @@ -204,3 +209,1484 @@ async fn test_delete_issue_link() { jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string());
client.delete_issue_link("10001").await.unwrap(); } + +#[tokio::test] +async fn test_search_issues_has_more_flag() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response_with_next_page(vec![ + common::fixtures::issue_response("FOO-1", "Test issue", "To Do"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", Some(1), &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 1); + assert!(result.has_more); +} + +#[tokio::test] +async fn test_search_issues_no_more_results() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "FOO-1", + "Test issue", + "To Do", + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", Some(10), &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 1); + assert!(!result.has_more); +} + +#[tokio::test] +async fn test_search_issues_no_limit_fetches_all() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![ + common::fixtures::issue_response("FOO-1", "Issue 1", "To Do"), + common::fixtures::issue_response("FOO-2", "Issue 2", "To Do"), + common::fixtures::issue_response("FOO-3", "Issue 3", "To Do"), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic 
dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", None, &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 3); + assert!(!result.has_more); +} + +#[tokio::test] +async fn test_approximate_count() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/approximate-count")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::approximate_count_response(42)), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let count = client.approximate_count("project = FOO").await.unwrap(); + assert_eq!(count, 42); +} + +#[tokio::test] +async fn test_approximate_count_zero() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/approximate-count")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::approximate_count_response(0)), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let count = client.approximate_count("project = FOO").await.unwrap(); + assert_eq!(count, 0); +} + +#[tokio::test] +async fn test_approximate_count_server_error_returns_err() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/approximate-count")) + .respond_with(ResponseTemplate::new(500)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.approximate_count("project = FOO").await; + assert!(result.is_err()); +} + +#[tokio::test] +async fn test_search_users_single_result() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + 
common::fixtures::user_search_response(vec![("acc-123", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client.search_users("Jane").await.unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0].account_id, "acc-123"); + assert_eq!(users[0].display_name, "Jane Doe"); + assert_eq!(users[0].active, Some(true)); +} + +#[tokio::test] +async fn test_search_users_empty() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::user_search_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client.search_users("Nobody").await.unwrap(); + assert!(users.is_empty()); +} + +#[tokio::test] +async fn test_search_users_multiple() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![ + ("acc-1", "Jane Doe", true), + ("acc-2", "Jane Smith", true), + ("acc-3", "Jane Inactive", false), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client.search_users("Jane").await.unwrap(); + assert_eq!(users.len(), 3); +} + +#[tokio::test] +async fn test_search_users_paginated_response() { + let server = MockServer::start().await; + // Test the paginated { "values": [...] 
} response shape + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "total": 1, + "values": [ + { + "accountId": "acc-paged", + "displayName": "Paged User", + "emailAddress": "paged@test.com", + "active": true + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client.search_users("Paged").await.unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0].account_id, "acc-paged"); + assert_eq!(users[0].display_name, "Paged User"); +} + +#[tokio::test] +async fn test_search_users_unrecognized_response_errors() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/search")) + .respond_with( + ResponseTemplate::new(200).set_body_json(serde_json::json!({"error": "unexpected"})), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.search_users("Test").await; + assert!(result.is_err()); +} + +#[tokio::test] +async fn test_search_issues_jql_with_project_scope() { + let server = MockServer::start().await; + + // The mock only matches if the POST body contains the expected composed JQL + let expected_jql = r#"project = "PROJ" AND (priority = Highest) ORDER BY updated DESC"#; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "jql": expected_jql + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "High priority issue", + "To Do", + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // This is the JQL that handle_list would compose when 
given + // --project PROJ --jql "priority = Highest" + let result = client + .search_issues(expected_jql, Some(30), &[]) + .await + .unwrap(); + assert_eq!(result.issues.len(), 1); + assert_eq!(result.issues[0].key, "PROJ-1"); +} + +#[tokio::test] +async fn get_issue_includes_standard_fields() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-42")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_standard_fields("FOO-42", "Test with all fields"), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let issue = client.get_issue("FOO-42", &[]).await.unwrap(); + + // Verify new fields are deserialized + assert_eq!( + issue.fields.created.as_deref(), + Some("2026-03-20T14:32:00.000+0000") + ); + assert_eq!( + issue.fields.updated.as_deref(), + Some("2026-03-25T09:15:22.000+0000") + ); + + let reporter = issue.fields.reporter.as_ref().unwrap(); + assert_eq!(reporter.display_name, "Jane Smith"); + assert_eq!(reporter.account_id, "def456"); + + assert_eq!(issue.fields.resolution.as_ref().unwrap().name, "Fixed"); + + let components = issue.fields.components.as_ref().unwrap(); + assert_eq!(components.len(), 2); + assert_eq!(components[0].name, "Backend"); + assert_eq!(components[1].name, "API"); + + let versions = issue.fields.fix_versions.as_ref().unwrap(); + assert_eq!(versions.len(), 1); + assert_eq!(versions[0].name, "v2.0"); + assert_eq!(versions[0].released, Some(false)); + assert_eq!(versions[0].release_date.as_deref(), Some("2026-04-01")); + + // Verify JSON serialization includes the new fields at the expected paths + let json_str = serde_json::to_string(&issue).unwrap(); + let value: serde_json::Value = serde_json::from_str(&json_str).unwrap(); + + assert!(value["fields"]["created"].is_string()); + assert!(value["fields"]["reporter"].is_object()); + 
assert!(value["fields"]["resolution"].is_object()); + assert!(value["fields"]["components"].is_array()); + assert!(value["fields"]["fixVersions"].is_array()); +} + +#[tokio::test] +async fn get_issue_null_standard_fields() { + let server = MockServer::start().await; + + // Issue with all new fields null/absent + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-43")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-43", + "Minimal issue", + "To Do", + )), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let issue = client.get_issue("FOO-43", &[]).await.unwrap(); + + // All new fields should be None (the fixture doesn't include them) + assert!(issue.fields.created.is_none()); + assert!(issue.fields.updated.is_none()); + assert!(issue.fields.reporter.is_none()); + assert!(issue.fields.resolution.is_none()); + assert!(issue.fields.components.is_none()); + assert!(issue.fields.fix_versions.is_none()); +} + +#[tokio::test] +async fn test_edit_issue_with_description() { + let server = MockServer::start().await; + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-10")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "description": { + "version": 1, + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { "type": "text", "text": "Updated description" } + ] + } + ] + } + } + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + client + .edit_issue( + "FOO-10", + serde_json::json!({ + "description": jr::adf::text_to_adf("Updated description") + }), + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn test_edit_issue_with_markdown_description() { + let server = MockServer::start().await; + Mock::given(method("PUT")) + 
.and(path("/rest/api/3/issue/FOO-11")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "description": { + "version": 1, + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { + "type": "text", + "text": "bold text", + "marks": [{"type": "strong"}] + } + ] + } + ] + } + } + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + client + .edit_issue( + "FOO-11", + serde_json::json!({ + "description": jr::adf::markdown_to_adf("**bold text**") + }), + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn test_edit_issue_description_with_other_fields() { + let server = MockServer::start().await; + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-12")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "summary": "New summary", + "description": { + "version": 1, + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { "type": "text", "text": "New description" } + ] + } + ] + } + } + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + client + .edit_issue( + "FOO-12", + serde_json::json!({ + "summary": "New summary", + "description": jr::adf::text_to_adf("New description") + }), + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn test_search_assignable_users_single() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .and(query_param("query", "Jane")) + .and(query_param("issueKey", "FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![("acc-assign-1", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic 
dGVzdDp0ZXN0".to_string()); + let users = client + .search_assignable_users("Jane", "FOO-1") + .await + .unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0].account_id, "acc-assign-1"); + assert_eq!(users[0].display_name, "Jane Doe"); +} + +#[tokio::test] +async fn test_search_assignable_users_empty() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .and(query_param("query", "Nobody")) + .and(query_param("issueKey", "FOO-1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::user_search_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client + .search_assignable_users("Nobody", "FOO-1") + .await + .unwrap(); + assert!(users.is_empty()); +} + +#[tokio::test] +async fn test_search_assignable_users_paginated_response() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .and(query_param("query", "Paged")) + .and(query_param("issueKey", "FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "total": 1, + "values": [ + { + "accountId": "acc-paged-assign", + "displayName": "Paged Assignee", + "emailAddress": "paged@test.com", + "active": true + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client + .search_assignable_users("Paged", "FOO-1") + .await + .unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0].account_id, "acc-paged-assign"); +} + +#[tokio::test] +async fn assign_to_user_resolves_display_name() { + let server = MockServer::start().await; + + // Mock assignable user search → single result + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + 
.respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![("acc-jane-123", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + // Mock get issue → currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("FOO-1", "Test issue", None), + )) + .mount(&server) + .await; + + // Mock assign → 204 + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-1/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "acc-jane-123" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // Resolve and assign + let users = client + .search_assignable_users("Jane", "FOO-1") + .await + .unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0].account_id, "acc-jane-123"); + + client + .assign_issue("FOO-1", Some(&users[0].account_id)) + .await + .unwrap(); +} + +#[tokio::test] +async fn assign_to_user_not_found() { + let server = MockServer::start().await; + + // Mock assignable user search → empty results + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::user_search_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let users = client + .search_assignable_users("Nonexistent", "FOO-1") + .await + .unwrap(); + assert!(users.is_empty()); +} + +#[tokio::test] +async fn assign_to_me_keyword() { + let server = MockServer::start().await; + + // Mock get myself + Mock::given(method("GET")) + .and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + 
.mount(&server) + .await; + + // Mock get issue → currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee("FOO-1", "Test issue", None), + )) + .mount(&server) + .await; + + // Mock assign → 204 + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/FOO-1/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "abc123" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // "me" should resolve to get_myself(), not search API + let me = client.get_myself().await.unwrap(); + assert_eq!(me.account_id, "abc123"); + + client + .assign_issue("FOO-1", Some(&me.account_id)) + .await + .unwrap(); +} + +#[tokio::test] +async fn assign_idempotent_already_assigned() { + let server = MockServer::start().await; + + // Mock assignable user search → single result + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::user_search_response(vec![("acc-jane-123", "Jane Doe", true)]), + )) + .mount(&server) + .await; + + // Mock get issue → already assigned to Jane + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee( + "FOO-1", + "Test issue", + Some(("acc-jane-123", "Jane Doe")), + ), + )) + .mount(&server) + .await; + + // NO mock for PUT /assignee — if the code tries to call it, the test fails + // because wiremock returns 404 for unmocked paths. 
+ + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // Resolve user + let users = client + .search_assignable_users("Jane", "FOO-1") + .await + .unwrap(); + assert_eq!(users[0].account_id, "acc-jane-123"); + + // Get issue and verify already assigned + let issue = client.get_issue("FOO-1", &[]).await.unwrap(); + let assignee = issue.fields.assignee.unwrap(); + assert_eq!(assignee.account_id, "acc-jane-123"); +} + +#[tokio::test] +async fn test_search_issues_includes_labels_parent_issuelinks() { + let server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .and(body_partial_json(serde_json::json!({ + "fields": [ + "summary", "status", "issuetype", "priority", "assignee", + "reporter", "project", "description", "created", "updated", + "resolution", "components", "fixVersions", + "labels", "parent", "issuelinks" + ] + }))) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![ + common::fixtures::issue_response_with_labels_parent_links( + "FOO-10", + "Labeled issue", + ), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client + .search_issues("project = FOO", Some(10), &[]) + .await + .unwrap(); + + assert_eq!(result.issues.len(), 1); + let issue = &result.issues[0]; + + // Labels + let labels = issue.fields.labels.as_ref().expect("labels should be Some"); + assert_eq!(labels, &vec!["bug".to_string(), "frontend".to_string()]); + + // Parent + let parent = issue.fields.parent.as_ref().expect("parent should be Some"); + assert_eq!(parent.key, "FOO-1"); + assert_eq!( + parent.fields.as_ref().unwrap().summary.as_deref(), + Some("Parent Epic") + ); + + // Issue links + let links = issue + .fields + .issuelinks + .as_ref() + .expect("issuelinks should be Some"); + assert_eq!(links.len(), 1); + 
assert_eq!(links[0].link_type.name, "Blocks"); + assert_eq!(links[0].outward_issue.as_ref().unwrap().key, "FOO-3"); +} + +#[tokio::test] +async fn test_create_issue_with_assignee() { + let server = MockServer::start().await; + + // Mock multiProjectSearch → single result + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/multiProjectSearch")) + .and(query_param("projectKeys", "FOO")) + .and(query_param("query", "Jane")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::multi_project_user_search_response(vec![( + "acc-jane-123", + "Jane Doe", + )]), + )) + .mount(&server) + .await; + + // Mock create issue → verify assignee in request body + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Test with assignee", + "assignee": {"accountId": "acc-jane-123"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("FOO-99")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // Resolve user + let users = client + .search_assignable_users_by_project("Jane", "FOO") + .await + .unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0].account_id, "acc-jane-123"); + assert_eq!(users[0].display_name, "Jane Doe"); + + // Create issue with assignee field + let mut fields = serde_json::json!({ + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Test with assignee", + }); + fields["assignee"] = serde_json::json!({"accountId": users[0].account_id}); + + let response = client.create_issue(fields).await.unwrap(); + assert_eq!(response.key, "FOO-99"); +} + +#[tokio::test] +async fn test_create_issue_with_assignee_me() { + let server = MockServer::start().await; + + // Mock get_myself + Mock::given(method("GET")) + 
.and(path("/rest/api/3/myself")) + .respond_with(ResponseTemplate::new(200).set_body_json(common::fixtures::user_response())) + .mount(&server) + .await; + + // Mock create issue → verify assignee uses "me" account ID + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "assignee": {"accountId": "abc123"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("FOO-100")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // "me" resolves via get_myself, not search API + let me = client.get_myself().await.unwrap(); + assert_eq!(me.account_id, "abc123"); + + // Create issue with self-assignment + let mut fields = serde_json::json!({ + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Assigned to me", + }); + fields["assignee"] = serde_json::json!({"accountId": me.account_id}); + + let response = client.create_issue(fields).await.unwrap(); + assert_eq!(response.key, "FOO-100"); +} + +#[tokio::test] +async fn test_create_issue_without_assignee() { + let server = MockServer::start().await; + + // No multiProjectSearch mock registered — this test only exercises create. 
+ Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("FOO-101")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + let fields = serde_json::json!({ + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "No assignee", + }); + + let response = client.create_issue(fields).await.unwrap(); + assert_eq!(response.key, "FOO-101"); +} + +#[tokio::test] +async fn test_create_issue_assignee_not_found() { + let server = MockServer::start().await; + + // Mock multiProjectSearch → empty results + Mock::given(method("GET")) + .and(path("/rest/api/3/user/assignable/multiProjectSearch")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::multi_project_user_search_response(vec![])), + ) + .mount(&server) + .await; + + // Only testing that the API returns an empty list — no create call is made. 
+ + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + let users = client + .search_assignable_users_by_project("Nonexistent", "FOO") + .await + .unwrap(); + assert!(users.is_empty()); +} + +#[tokio::test] +async fn test_create_issue_with_account_id() { + let server = MockServer::start().await; + + // Mock create issue — verify assignee uses accountId format + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .and(body_partial_json(serde_json::json!({ + "fields": { + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Assigned by accountId", + "assignee": {"accountId": "direct-acct-789"} + } + }))) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("FOO-200")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + // Build fields with accountId directly — no user search mock needed + let mut fields = serde_json::json!({ + "project": {"key": "FOO"}, + "issuetype": {"name": "Task"}, + "summary": "Assigned by accountId", + }); + fields["assignee"] = serde_json::json!({"accountId": "direct-acct-789"}); + + let response = client.create_issue(fields).await.unwrap(); + assert_eq!(response.key, "FOO-200"); +} + +#[tokio::test] +async fn test_move_by_transition_name() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "To Do", + )), + ) + .mount(&server) 
+ .await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .and(body_partial_json( + serde_json::json!({"transition": {"id": "21"}}), + )) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Complete") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Expected success, stderr: {stderr}" + ); + assert!( + stdout.contains("Moved FOO-1"), + "Expected move confirmation in stdout: {stdout}" + ); +} + +#[tokio::test] +async fn test_move_by_status_name() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "To Do", + )), + ) + .mount(&server) + .await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .and(body_partial_json( + serde_json::json!({"transition": {"id": "21"}}), + )) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Completed") + .output() + .unwrap(); + + let stderr = 
String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Expected success, stderr: {stderr}" + ); + assert!( + stdout.contains("Moved FOO-1"), + "Expected move confirmation in stdout: {stdout}" + ); +} + +#[tokio::test] +async fn test_move_dedup_same_transition_and_status_name() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "In Progress", "In Progress"), + ("31", "Done", "Done"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "To Do", + )), + ) + .mount(&server) + .await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .and(body_partial_json( + serde_json::json!({"transition": {"id": "31"}}), + )) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Done") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Expected success, stderr: {stderr}" + ); + assert!( + stdout.contains("Moved FOO-1"), + "Expected move confirmation in stdout: {stdout}" + ); +} + +#[tokio::test] +async fn test_move_ambiguous_across_transition_and_status_names() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + 
.respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Reopen", "Open"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "Closed", + )), + ) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Re") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Expected failure, stderr: {stderr}" + ); + assert!( + stderr.contains("Ambiguous"), + "Expected ambiguity error in stderr: {stderr}" + ); +} + +#[tokio::test] +async fn test_move_no_match_shows_status_names() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![ + ("21", "Complete", "Completed"), + ("31", "Review", "In Review"), + ]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "To Do", + )), + ) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Nonexistent") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Expected failure, 
stderr: {stderr}" + ); + assert!( + stderr.contains("Complete (→ Completed)"), + "Expected enriched error format in stderr: {stderr}" + ); + assert!( + stderr.contains("Review (→ In Review)"), + "Expected enriched error format in stderr: {stderr}" + ); +} + +#[tokio::test] +async fn test_move_idempotent_with_status_name() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![( + "21", + "Complete", + "Completed", + )]), + )) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "Completed", + )), + ) + .mount(&server) + .await; + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Completed") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Expected success (idempotent), stderr: {stderr}" + ); + assert!( + stdout.contains("already in status"), + "Expected idempotent message in stdout: {stdout}" + ); +} + +#[tokio::test] +async fn test_move_idempotent_with_transition_name() { + let server = MockServer::start().await; + + // Transition "Complete" leads to status "Completed" + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1/transitions")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::transitions_response_with_status(vec![( + "21", + "Complete", + "Completed", + )]), + )) + .mount(&server) + .await; + + // Issue is already in "Completed" — user types transition name 
"Complete" + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/FOO-1")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::issue_response( + "FOO-1", + "Test issue", + "Completed", + )), + ) + .mount(&server) + .await; + + // No POST mock — should not transition + + let output = assert_cmd::Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--no-input") + .arg("issue") + .arg("move") + .arg("FOO-1") + .arg("Complete") + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Expected success (idempotent via transition name), stderr: {stderr}" + ); + assert!( + stdout.contains("already in status"), + "Expected idempotent message in stdout: {stdout}" + ); +} + +#[tokio::test] +async fn test_create_issue_response_includes_browse_url() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/api/3/issue")) + .respond_with( + ResponseTemplate::new(201) + .set_body_json(common::fixtures::create_issue_response("URL-1")), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + + let response = client + .create_issue(serde_json::json!({ + "project": {"key": "URL"}, + "issuetype": {"name": "Task"}, + "summary": "Test browse URL", + })) + .await + .unwrap(); + + // Verify the key is returned + assert_eq!(response.key, "URL-1"); + + // Verify browse URL can be constructed from instance_url + let browse_url = format!( + "{}/browse/{}", + client.instance_url().trim_end_matches('/'), + response.key + ); + assert!( + browse_url.contains("/browse/URL-1"), + "Expected browse URL to contain /browse/URL-1, got: {browse_url}" + ); +} + +#[tokio::test] +async fn test_assign_issue_with_account_id() { + let server = 
MockServer::start().await; + + // Mock GET issue — currently unassigned + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/ACC-1")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee( + "ACC-1", + "Test assign by accountId", + None, + ), + )) + .mount(&server) + .await; + + // Mock PUT assignee — verify accountId in request body + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/ACC-1/assignee")) + .and(body_partial_json(serde_json::json!({ + "accountId": "direct-account-id-456" + }))) + .respond_with(ResponseTemplate::new(204)) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + // Assign directly by accountId — no user search mock needed + client + .assign_issue("ACC-1", Some("direct-account-id-456")) + .await + .unwrap(); + + // Verify fixture correctly represents an already-assigned issue + let server2 = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/issue/ACC-2")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_response_with_assignee( + "ACC-2", + "Already assigned", + Some(("direct-account-id-456", "direct-account-id-456")), + ), + )) + .mount(&server2) + .await; + + let client2 = + jr::api::client::JiraClient::new_for_test(server2.uri(), "Basic dGVzdDp0ZXN0".into()); + + let issue = client2.get_issue("ACC-2", &[]).await.unwrap(); + let assignee = issue.fields.assignee.unwrap(); + assert_eq!(assignee.account_id, "direct-account-id-456"); +} + +#[tokio::test] +async fn test_assign_issue_invalid_account_id_returns_error() { + let server = MockServer::start().await; + + // Mock PUT assignee returning 404 with Jira error body for invalid accountId + Mock::given(method("PUT")) + .and(path("/rest/api/3/issue/ERR-1/assignee")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["User 
'bogus-account-id' does not exist."] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + + let result = client.assign_issue("ERR-1", Some("bogus-account-id")).await; + + let err = result.unwrap_err(); + + // Verify correct error variant and status code structurally + assert!( + err.downcast_ref::<jr::error::JrError>() + .is_some_and(|e| matches!(e, jr::error::JrError::ApiError { status: 404, .. })), + "Expected JrError::ApiError with status 404, got: {err}" + ); + + // Verify Jira error message was extracted from the JSON body + let msg = err.to_string(); + assert!( + msg.contains("does not exist"), + "Expected Jira error message in error, got: {msg}" + ); +} diff --git a/tests/issue_list_errors.rs b/tests/issue_list_errors.rs new file mode 100644 index 0000000..8d9658b --- /dev/null +++ b/tests/issue_list_errors.rs @@ -0,0 +1,263 @@ +#[allow(dead_code)] +mod common; + +use assert_cmd::Command; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +async fn mock_project_exists(server: &MockServer) { + Mock::given(method("GET")) + .and(path("/rest/api/3/project/PROJ")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "key": "PROJ", + "id": "10000", + "name": "Test Project" + }))) + .mount(server) + .await; +} + +#[tokio::test] +async fn issue_list_board_config_404_reports_error() { + let server = MockServer::start().await; + + // Project exists check passes + mock_project_exists(&server).await; + + // Board config returns 404 (board deleted or no access) + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "errorMessages": ["Board does not exist or you do not have permission to see it."] + }))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + 
project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on board config 404, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("Board 42 not found or not accessible"), + "Should mention board ID and accessibility, got: {stderr}" + ); + assert!( + stderr.contains("board_id"), + "Should suggest removing board_id from config, got: {stderr}" + ); + assert!( + stderr.contains("--jql"), + "Should suggest --jql as alternative, got: {stderr}" + ); + assert_eq!( + output.status.code(), + Some(64), + "Board-not-found should exit with UserError code 64, got: {:?}", + output.status.code() + ); +} + +#[tokio::test] +async fn issue_list_board_config_server_error_propagates() { + let server = MockServer::start().await; + + // Project exists check passes + mock_project_exists(&server).await; + + // Board config returns 500 + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with(ResponseTemplate::new(500).set_body_json(serde_json::json!({ + "errorMessages": ["Internal server error"] + }))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on board config 
500, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("Failed to fetch config for board 42"), + "Should include board ID and context, got: {stderr}" + ); + assert!( + stderr.contains("--jql"), + "Should suggest --jql as alternative, got: {stderr}" + ); + assert_eq!( + output.status.code(), + Some(1), + "Server error should exit with code 1, got: {:?}", + output.status.code() + ); +} + +#[tokio::test] +async fn issue_list_sprint_error_propagates() { + let server = MockServer::start().await; + + // Project exists check passes + mock_project_exists(&server).await; + + // Board config succeeds → scrum board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::board_config_response("scrum")), + ) + .mount(&server) + .await; + + // Sprint list returns 500 + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/sprint")) + .respond_with(ResponseTemplate::new(500).set_body_json(serde_json::json!({ + "errorMessages": ["Internal server error"] + }))) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !output.status.success(), + "Should fail on sprint list error, got stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); + assert!( + stderr.contains("Failed to list sprints for board 42"), + "Should mention board ID and sprints, got: {stderr}" + ); + assert!( + stderr.contains("--jql"), + "Should suggest --jql as alternative, got: {stderr}" + ); + assert_eq!( + 
output.status.code(), + Some(1), + "Sprint list error should exit with code 1, got: {:?}", + output.status.code() + ); +} + +#[tokio::test] +async fn issue_list_no_active_sprint_falls_back_to_project_jql() { + let server = MockServer::start().await; + + // Project exists check passes + mock_project_exists(&server).await; + + // Board config succeeds → scrum board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::board_config_response("scrum")), + ) + .mount(&server) + .await; + + // Sprint list returns empty (no active sprint) + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/sprint")) + .and(query_param("state", "active")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_list_response(vec![])), + ) + .mount(&server) + .await; + + // Search endpoint returns issues (fallback JQL works) + Mock::given(method("POST")) + .and(path("/rest/api/3/search/jql")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::issue_search_response(vec![common::fixtures::issue_response( + "PROJ-1", + "Test Issue", + "To Do", + )]), + )) + .mount(&server) + .await; + + let project_dir = tempfile::tempdir().unwrap(); + std::fs::write( + project_dir.path().join(".jr.toml"), + "project = \"PROJ\"\nboard_id = 42\n", + ) + .unwrap(); + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .current_dir(project_dir.path()) + .args(["issue", "list"]) + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + output.status.success(), + "Should succeed with fallback JQL, stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + assert!( + stdout.contains("PROJ-1"), + "Should show fallback results, got: {stdout}" + ); +} diff --git a/tests/project_commands.rs 
b/tests/project_commands.rs new file mode 100644 index 0000000..48ee49d --- /dev/null +++ b/tests/project_commands.rs @@ -0,0 +1,202 @@ +#[allow(dead_code)] +mod common; + +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn test_list_projects() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::project_search_response(vec![ + common::fixtures::project_response( + "FOO", + "Project Foo", + "software", + Some("Jane Doe"), + ), + common::fixtures::project_response( + "BAR", + "Project Bar", + "service_desk", + Some("John Smith"), + ), + ]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let projects = client.list_projects(None, Some(50)).await.unwrap(); + assert_eq!(projects.len(), 2); + assert_eq!(projects[0].key, "FOO"); + assert_eq!(projects[0].name, "Project Foo"); + assert_eq!(projects[0].project_type_key, "software"); + assert_eq!(projects[0].lead.as_ref().unwrap().display_name, "Jane Doe"); + assert_eq!(projects[1].key, "BAR"); + assert_eq!(projects[1].project_type_key, "service_desk"); +} + +#[tokio::test] +async fn test_list_projects_empty() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/search")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::project_search_response(vec![])), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let projects = client.list_projects(None, Some(50)).await.unwrap(); + assert!(projects.is_empty()); +} + +#[tokio::test] +async fn test_list_projects_lead_missing() { + let server = MockServer::start().await; + 
Mock::given(method("GET")) + .and(path("/rest/api/3/project/search")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::project_search_response(vec![common::fixtures::project_response( + "FOO", + "Project Foo", + "software", + None, + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let projects = client.list_projects(None, Some(50)).await.unwrap(); + assert_eq!(projects.len(), 1); + assert!(projects[0].lead.is_none()); +} + +#[tokio::test] +async fn test_list_projects_with_type_filter() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/search")) + .and(query_param("typeKey", "software")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::project_search_response(vec![common::fixtures::project_response( + "FOO", + "Project Foo", + "software", + Some("Jane Doe"), + )]), + )) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let projects = client + .list_projects(Some("software"), Some(50)) + .await + .unwrap(); + assert_eq!(projects.len(), 1); + assert_eq!(projects[0].project_type_key, "software"); +} + +#[tokio::test] +async fn test_list_projects_all_paginates() { + let server = MockServer::start().await; + + // Page 1: startAt=0, maxResults=50, total=3 (but only 2 returned → has_more=true) + Mock::given(method("GET")) + .and(path("/rest/api/3/project/search")) + .and(query_param("startAt", "0")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "values": [ + common::fixtures::project_response("FOO", "Project Foo", "software", Some("Jane")), + common::fixtures::project_response("BAR", "Project Bar", "software", Some("John")), + ], + "startAt": 0, + "maxResults": 2, + "total": 3, + }))) + .expect(1) + .mount(&server) + .await; + + // Page 2: 
startAt=2, maxResults=2, total=3 (1 returned → last page) + Mock::given(method("GET")) + .and(path("/rest/api/3/project/search")) + .and(query_param("startAt", "2")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "values": [ + common::fixtures::project_response("BAZ", "Project Baz", "business", None), + ], + "startAt": 2, + "maxResults": 2, + "total": 3, + }))) + .expect(1) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + // max_results=None triggers the --all pagination path + let projects = client.list_projects(None, None).await.unwrap(); + assert_eq!(projects.len(), 3); + assert_eq!(projects[0].key, "FOO"); + assert_eq!(projects[1].key, "BAR"); + assert_eq!(projects[2].key, "BAZ"); +} + +#[tokio::test] +async fn test_get_project_statuses() { + let server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/FOO/statuses")) + .respond_with( + ResponseTemplate::new(200).set_body_json(common::fixtures::project_statuses_response()), + ) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.get_project_statuses("FOO").await.unwrap(); + assert_eq!(result.len(), 2); + assert_eq!(result[0].name, "Task"); + assert_eq!(result[0].id, "3"); + assert_eq!(result[0].subtask, Some(false)); + assert_eq!(result[0].statuses.len(), 3); + assert_eq!(result[0].statuses[0].name, "To Do"); + assert_eq!(result[0].statuses[0].id, "10000"); + assert_eq!( + result[0].statuses[0].description.as_deref(), + Some("Work that has not been started.") + ); + assert_eq!(result[0].statuses[1].name, "In Progress"); + assert_eq!(result[0].statuses[2].name, "Done"); + assert_eq!(result[1].name, "Bug"); + assert_eq!(result[1].statuses.len(), 2); +} + +#[tokio::test] +async fn test_get_project_statuses_empty() { + let server = 
MockServer::start().await; + Mock::given(method("GET")) + .and(path("/rest/api/3/project/FOO/statuses")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([]))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".to_string()); + let result = client.get_project_statuses("FOO").await.unwrap(); + assert!(result.is_empty()); +} diff --git a/tests/project_meta.rs b/tests/project_meta.rs new file mode 100644 index 0000000..8655c31 --- /dev/null +++ b/tests/project_meta.rs @@ -0,0 +1,126 @@ +#[allow(dead_code)] +mod common; + +use serde_json::json; +use tokio::sync::Mutex; +use wiremock::matchers::{method, path}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +/// Serialize project_meta tests — they share the XDG_CACHE_HOME env var. +/// The guard MUST be held for the entire test body, not just the set_var call, +/// to prevent another test from changing XDG_CACHE_HOME while async work is in progress. +static ENV_MUTEX: Mutex<()> = Mutex::const_new(()); + +/// Acquire the env mutex and set XDG_CACHE_HOME. Caller MUST hold the returned +/// guard for the duration of the test to prevent env var races. +async fn set_cache_dir(dir: &std::path::Path) -> tokio::sync::MutexGuard<'static, ()> { + let guard = ENV_MUTEX.lock().await; + // SAFETY: ENV_MUTEX guard is held by caller for the entire test duration, + // and all tests use current_thread flavor so no concurrent env mutation. 
+ unsafe { std::env::set_var("XDG_CACHE_HOME", dir) }; + guard +} + +#[tokio::test(flavor = "current_thread")] +async fn project_meta_cache_miss_fetches_from_api() { + let cache_dir = tempfile::tempdir().unwrap(); + let _env_guard = set_cache_dir(cache_dir.path()).await; + + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/project/HELPDESK")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "10042", + "key": "HELPDESK", + "name": "Help Desk", + "projectTypeKey": "service_desk", + "simplified": false + }))) + .mount(&server) + .await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { "id": "15", "projectId": "10042", "projectName": "Help Desk" } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let meta = jr::api::jsm::servicedesks::get_or_fetch_project_meta(&client, "HELPDESK") + .await + .unwrap(); + + assert_eq!(meta.project_type, "service_desk"); + assert_eq!(meta.project_id, "10042"); + assert_eq!(meta.service_desk_id.as_deref(), Some("15")); + assert!(!meta.simplified); +} + +#[tokio::test(flavor = "current_thread")] +async fn project_meta_software_project_has_no_service_desk_id() { + let cache_dir = tempfile::tempdir().unwrap(); + let _env_guard = set_cache_dir(cache_dir.path()).await; + + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/project/DEV")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "10001", + "key": "DEV", + "name": "Development", + "projectTypeKey": "software", + "simplified": true + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let meta = 
jr::api::jsm::servicedesks::get_or_fetch_project_meta(&client, "DEV") + .await + .unwrap(); + + assert_eq!(meta.project_type, "software"); + assert!(meta.service_desk_id.is_none()); + assert!(meta.simplified); +} + +#[tokio::test(flavor = "current_thread")] +async fn require_service_desk_errors_for_software_project() { + let cache_dir = tempfile::tempdir().unwrap(); + let _env_guard = set_cache_dir(cache_dir.path()).await; + + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/api/3/project/DEV")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "id": "10001", + "key": "DEV", + "name": "Development", + "projectTypeKey": "software", + "simplified": true + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let result = jr::api::jsm::servicedesks::require_service_desk(&client, "DEV").await; + + assert!(result.is_err()); + let err = result.unwrap_err().to_string(); + assert!(err.contains("Jira Software project")); + assert!(err.contains("Queue commands require")); +} diff --git a/tests/queue.rs b/tests/queue.rs new file mode 100644 index 0000000..fd36108 --- /dev/null +++ b/tests/queue.rs @@ -0,0 +1,275 @@ +#[allow(dead_code)] +mod common; + +use serde_json::json; +use wiremock::matchers::{method, path, query_param}; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +#[tokio::test] +async fn list_queues_returns_all_queues() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + .and(query_param("includeCount", "true")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { "id": "10", "name": "Triage", "jql": "project = HELPDESK AND status = New", "issueCount": 12 }, + { "id": "20", 
"name": "In Progress", "jql": "project = HELPDESK AND status = \"In Progress\"", "issueCount": 7 } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let queues = client.list_queues("15").await.unwrap(); + assert_eq!(queues.len(), 2); + assert_eq!(queues[0].name, "Triage"); + assert_eq!(queues[0].issue_count, Some(12)); + assert_eq!(queues[1].name, "In Progress"); +} + +#[tokio::test] +async fn list_queues_empty() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 0, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let queues = client.list_queues("15").await.unwrap(); + assert!(queues.is_empty()); +} + +#[tokio::test] +async fn get_queue_issue_keys_returns_keys() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working", + "status": { "name": "New", "statusCategory": { "name": "To Do", "key": "new" } }, + "issuetype": { "name": "Service Request" }, + "priority": { "name": "High" }, + "assignee": null + } + }, + { + "key": "HELPDESK-41", + "fields": { + "summary": "Need license renewal", + "status": { "name": "New", "statusCategory": { "name": "To Do", "key": "new" } }, + "issuetype": { "name": "Service Request" }, + "assignee": { "accountId": "abc", "displayName": "Jane D." 
} + } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let keys = client.get_queue_issue_keys("15", "10", None).await.unwrap(); + assert_eq!(keys.len(), 2); + assert_eq!(keys[0], "HELPDESK-42"); + assert_eq!(keys[1], "HELPDESK-41"); +} + +#[tokio::test] +async fn get_queue_issue_keys_with_limit() { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "0")) + .and(query_param("limit", "1")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 1, + "isLastPage": false, + "values": [ + { + "key": "HELPDESK-42", + "fields": { + "summary": "VPN not working" + } + } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let keys = client + .get_queue_issue_keys("15", "10", Some(1)) + .await + .unwrap(); + assert_eq!(keys.len(), 1); + assert_eq!(keys[0], "HELPDESK-42"); +} + +#[tokio::test] +async fn get_queue_issue_keys_paginated() { + let server = MockServer::start().await; + + // Page 1 + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 0, + "limit": 1, + "isLastPage": false, + "values": [ + { "key": "HELPDESK-2", "fields": { "summary": "Issue A" } } + ] + }))) + .mount(&server) + .await; + + // Page 2 + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue/10/issue")) + .and(query_param("start", "1")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 1, + "start": 1, + "limit": 1, + "isLastPage": true, + "values": [ + { "key": 
"HELPDESK-1", "fields": { "summary": "Issue B" } } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let keys = client.get_queue_issue_keys("15", "10", None).await.unwrap(); + assert_eq!(keys.len(), 2); + assert_eq!(keys[0], "HELPDESK-2"); + assert_eq!(keys[1], "HELPDESK-1"); +} + +#[tokio::test] +async fn resolve_queue_duplicate_names_error_message() { + let server = MockServer::start().await; + + // Two queues with the same name but different IDs + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + .and(query_param("includeCount", "true")) + .and(query_param("start", "0")) + .and(query_param("limit", "50")) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "size": 2, + "start": 0, + "limit": 50, + "isLastPage": true, + "values": [ + { "id": "10", "name": "Triage", "issueCount": 5 }, + { "id": "20", "name": "Triage", "issueCount": 3 } + ] + }))) + .mount(&server) + .await; + + let client = + jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into()); + let result = jr::cli::queue::resolve_queue_by_name("15", "Triage", &client).await; + + let err = result.unwrap_err(); + let msg = err.to_string(); + assert!( + msg.contains("Multiple queues named \"Triage\""), + "Expected queue name in error, got: {msg}" + ); + assert!( + msg.contains("10, 20"), + "Expected both queue IDs in error, got: {msg}" + ); + assert!( + msg.contains("Use --id 10 to specify"), + "Expected --id suggestion in error, got: {msg}" + ); +} + +#[tokio::test] +async fn resolve_queue_mixed_case_duplicate_names_error_message() { + let server = MockServer::start().await; + + // Two queues whose names differ only in casing — unlike the exact-duplicate + // test above, this exercises the to_lowercase() normalization path + Mock::given(method("GET")) + .and(path("/rest/servicedeskapi/servicedesk/15/queue")) + 
.and(query_param("includeCount", "true"))
+        .and(query_param("start", "0"))
+        .and(query_param("limit", "50"))
+        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
+            "size": 2,
+            "start": 0,
+            "limit": 50,
+            "isLastPage": true,
+            "values": [
+                { "id": "30", "name": "Triage", "issueCount": 5 },
+                { "id": "40", "name": "TRIAGE", "issueCount": 3 }
+            ]
+        })))
+        .mount(&server)
+        .await;
+
+    let client =
+        jr::api::client::JiraClient::new_for_test(server.uri(), "Basic dGVzdDp0ZXN0".into());
+    // Lowercase input — differs in casing from both stored names,
+    // so to_lowercase() must normalize both input and candidates
+    let result = jr::cli::queue::resolve_queue_by_name("15", "triage", &client).await;
+
+    let err = result.unwrap_err();
+    let msg = err.to_string();
+    assert!(
+        msg.contains("Multiple queues named \"Triage\""),
+        "Expected queue name in error, got: {msg}"
+    );
+    assert!(
+        msg.contains("30, 40"),
+        "Expected both queue IDs in error, got: {msg}"
+    );
+    assert!(
+        msg.contains("Use --id 30 to specify"),
+        "Expected --id suggestion in error, got: {msg}"
+    );
+}
diff --git a/tests/sprint_commands.rs b/tests/sprint_commands.rs
new file mode 100644
index 0000000..201de1d
--- /dev/null
+++ b/tests/sprint_commands.rs
@@ -0,0 +1,394 @@
+#[allow(dead_code)]
+mod common;
+
+use assert_cmd::Command;
+use serde_json::json;
+use wiremock::matchers::{body_partial_json, method, path, query_param};
+use wiremock::{Mock, MockServer, ResponseTemplate};
+
+/// Helper: build N issues for testing.
+fn make_issues(count: usize) -> Vec<serde_json::Value> {
+    (1..=count)
+        .map(|i| {
+            common::fixtures::issue_response(
+                &format!("TEST-{}", i),
+                &format!("Issue {}", i),
+                "In Progress",
+            )
+        })
+        .collect()
+}
+
+/// Mount prereq mocks (board list, board config, active sprint) on the server.
+async fn mount_prereqs(server: &MockServer) { + // Board auto-resolve: list boards for project PROJ, type=scrum → 1 board + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board")) + .and(query_param("projectKeyOrId", "PROJ")) + .and(query_param("type", "scrum")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::board_list_response(vec![common::fixtures::board_response( + 42, + "PROJ Scrum Board", + "scrum", + "PROJ", + )]), + )) + .mount(server) + .await; + + // Board config → scrum + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/configuration")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::board_config_response("scrum")), + ) + .mount(server) + .await; + + // Active sprint list → one sprint + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/board/42/sprint")) + .and(query_param("state", "active")) + .respond_with(ResponseTemplate::new(200).set_body_json( + common::fixtures::sprint_list_response(vec![common::fixtures::sprint( + 100, "Sprint 1", "active", + )]), + )) + .mount(server) + .await; +} + +#[tokio::test] +async fn sprint_current_default_limit_caps_at_30() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(35); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 35)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + // Should show exactly 30 issues (default limit) + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + 
assert_eq!(issue_count, 30, "Expected 30 issues, got {issue_count}"); + + // Should show "more results" hint + assert!( + stderr.contains("Showing 30 results"), + "Expected 'Showing 30 results' in stderr, got: {stderr}" + ); +} + +#[tokio::test] +async fn sprint_current_limit_flag() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(20); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 20)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .arg("--limit") + .arg("5") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 5, "Expected 5 issues, got {issue_count}"); + + assert!( + stderr.contains("Showing 5 results"), + "Expected 'Showing 5 results' in stderr, got: {stderr}" + ); +} + +#[tokio::test] +async fn sprint_current_all_flag_returns_everything() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(35); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 35)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .arg("--all") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = 
String::from_utf8_lossy(&output.stderr); + + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 35, "Expected 35 issues, got {issue_count}"); + + assert!( + !stderr.contains("Showing"), + "Should NOT show 'Showing' hint with --all, got: {stderr}" + ); +} + +#[tokio::test] +async fn sprint_current_under_limit_no_hint() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + let issues = make_issues(10); + Mock::given(method("GET")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(common::fixtures::sprint_issues_response(issues, 10)), + ) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .arg("sprint") + .arg("current") + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + let issue_count = stdout.lines().filter(|l| l.contains("TEST-")).count(); + assert_eq!(issue_count, 10, "Expected 10 issues, got {issue_count}"); + + assert!( + !stderr.contains("Showing"), + "Should NOT show hint when under limit, got: {stderr}" + ); +} + +#[test] +fn sprint_current_limit_and_all_conflict() { + let mut cmd = Command::cargo_bin("jr").unwrap(); + cmd.arg("sprint") + .arg("current") + .arg("--limit") + .arg("3") + .arg("--all"); + + cmd.assert().failure().code(2); +} + +#[tokio::test] +async fn sprint_add_with_sprint_id() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .and(body_partial_json(json!({"issues": ["FOO-1", "FOO-2"]}))) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic 
dGVzdDp0ZXN0") + .args(["sprint", "add", "--sprint", "100", "FOO-1", "FOO-2"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, got: {:?}", + output + ); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + stdout.contains("Added 2 issue(s) to sprint 100"), + "Expected success message, got: {stdout}" + ); +} + +#[tokio::test] +async fn sprint_add_json_output() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/sprint/200/issue")) + .and(body_partial_json(json!({"issues": ["BAR-1"]}))) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args([ + "--output", "json", "sprint", "add", "--sprint", "200", "BAR-1", + ]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, got: {:?}", + output + ); + let stdout = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout).expect("valid JSON"); + assert_eq!(parsed["sprint_id"], 200); + assert_eq!(parsed["issues"], serde_json::json!(["BAR-1"])); + assert_eq!(parsed["added"], true); +} + +#[tokio::test] +async fn sprint_remove_moves_to_backlog() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/backlog/issue")) + .and(body_partial_json(json!({"issues": ["FOO-1", "FOO-3"]}))) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["sprint", "remove", "FOO-1", "FOO-3"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, got: {:?}", + output + ); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + 
stdout.contains("Moved 2 issue(s) to backlog"), + "Expected success message, got: {stdout}" + ); +} + +#[tokio::test] +async fn sprint_remove_json_output() { + let server = MockServer::start().await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/backlog/issue")) + .and(body_partial_json(json!({"issues": ["QUX-5"]}))) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .args(["--output", "json", "sprint", "remove", "QUX-5"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, got: {:?}", + output + ); + let stdout = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout).expect("valid JSON"); + assert_eq!(parsed["issues"], serde_json::json!(["QUX-5"])); + assert_eq!(parsed["removed"], true); +} + +#[tokio::test] +async fn sprint_add_with_current_flag() { + let server = MockServer::start().await; + mount_prereqs(&server).await; + + Mock::given(method("POST")) + .and(path("/rest/agile/1.0/sprint/100/issue")) + .and(body_partial_json(json!({"issues": ["TEST-1", "TEST-2"]}))) + .respond_with(ResponseTemplate::new(204)) + .expect(1) + .mount(&server) + .await; + + let output = Command::cargo_bin("jr") + .unwrap() + .env("JR_BASE_URL", server.uri()) + .env("JR_AUTH_HEADER", "Basic dGVzdDp0ZXN0") + .arg("--project") + .arg("PROJ") + .args(["sprint", "add", "--current", "TEST-1", "TEST-2"]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "Expected success, got: {:?}", + output + ); + let stdout = String::from_utf8_lossy(&output.stdout); + assert!( + stdout.contains("Added 2 issue(s) to sprint 100"), + "Expected success message, got: {stdout}" + ); +}